Commit aeb4a45
Adding tests

1 parent 2eac1e8 commit aeb4a45
6 files changed: +244 -63

.gitignore (+3)
@@ -139,3 +139,6 @@ cython_debug/
 
 # JetBrains
 .idea/**
+
+# VSCode
+.vscode/**

pyproject.toml (+1 -1)
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "pytr"
-version = "0.2.5"
+version = "0.3.0"
 description = "Use TradeRepublic in terminal"
 readme = "README.md"
 requires-python = ">=3.8"

pytr/dl.py (+82 -62)
@@ -10,24 +10,25 @@
 from pytr.api import TradeRepublicError
 from pytr.timeline import Timeline
 
+
 class DL:
     def __init__(
         self,
         tr,
         output_path,
         filename_fmt,
         since_timestamp=0,
-        history_file='pytr_history',
+        history_file="pytr_history",
         max_workers=8,
         universal_filepath=False,
-        sort_export=False
+        sort_export=False,
     ):
-        '''
+        """
         tr: api object
         output_path: name of the directory where the downloaded files are saved
         filename_fmt: format string to customize the file names
         since_timestamp: downloaded files since this date (unix timestamp)
-        '''
+        """
         self.tr = tr
         self.output_path = Path(output_path)
         self.history_file = self.output_path / history_file
@@ -36,7 +37,9 @@ def __init__(
         self.universal_filepath = universal_filepath
         self.sort_export = sort_export
 
-        self.session = FuturesSession(max_workers=max_workers, session=self.tr._websession)
+        self.session = FuturesSession(
+            max_workers=max_workers, session=self.tr._websession
+        )
         self.futures = []
 
         self.docs_request = 0
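
Note: the constructor keeps the existing requests-futures setup: a FuturesSession wrapping the logged-in websession issues document GETs on a thread pool, the futures collect in self.futures, and they are drained later with concurrent.futures.as_completed. A minimal sketch of that pattern, with placeholder URLs and a plain requests.Session standing in for tr._websession:

# Sketch of the requests-futures pattern used here; the URLs and the bare
# requests.Session() are placeholders (DL reuses tr._websession instead).
from concurrent.futures import as_completed

import requests
from requests_futures.sessions import FuturesSession

session = FuturesSession(max_workers=8, session=requests.Session())

futures = []
for url in ["https://example.com/a.pdf", "https://example.com/b.pdf"]:
    future = session.get(url)                 # returns a Future immediately
    future.filepath = url.rsplit("/", 1)[-1]  # attach metadata, as DL attaches filepath
    futures.append(future)

for future in as_completed(futures):          # yields futures as downloads finish
    response = future.result()
    print(future.filepath, response.status_code)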
@@ -49,115 +52,130 @@ def __init__(
         self.load_history()
 
     def load_history(self):
-        '''
+        """
         Read history file with URLs if it exists, otherwise create empty file
-        '''
+        """
         if self.history_file.exists():
             with self.history_file.open() as f:
                 self.doc_urls_history = f.read().splitlines()
-            self.log.info(f'Found {len(self.doc_urls_history)} lines in history file')
+            self.log.info(f"Found {len(self.doc_urls_history)} lines in history file")
         else:
             self.history_file.parent.mkdir(exist_ok=True, parents=True)
             self.history_file.touch()
-            self.log.info('Created history file')
+            self.log.info("Created history file")
 
     async def dl_loop(self):
         await self.tl.get_next_timeline_transactions()
 
         while True:
             try:
-                _subscription_id, subscription, response = await self.tr.recv()
+                _, subscription, response = await self.tr.recv()
             except TradeRepublicError as e:
                 self.log.fatal(str(e))
 
-            if subscription['type'] == 'timelineTransactions':
+            if subscription.get("type", "") == "timelineTransactions":
                 await self.tl.get_next_timeline_transactions(response)
-            elif subscription['type'] == 'timelineActivityLog':
+            elif subscription.get("type", "") == "timelineActivityLog":
                 await self.tl.get_next_timeline_activity_log(response)
-            elif subscription['type'] == 'timelineDetailV2':
+            elif subscription.get("type", "") == "timelineDetailV2":
                 await self.tl.process_timelineDetail(response, self)
             else:
-                self.log.warning(f"unmatched subscription of type '{subscription['type']}':\n{preview(response)}")
+                self.log.warning(
+                    f"unmatched subscription of type '{subscription['type']}':\n{preview(response)}"
+                )
 
     def dl_doc(self, doc, titleText, subtitleText, subfolder=None):
-        '''
+        """
         send asynchronous request, append future with filepath to self.futures
-        '''
-        doc_url = doc['action']['payload']
+        """
+        doc_url = doc["action"]["payload"]
         if subtitleText is None:
-            subtitleText = ''
+            subtitleText = ""
 
         try:
-            date = doc['detail']
-            iso_date = '-'.join(date.split('.')[::-1])
+            date = doc["detail"]
+            iso_date = "-".join(date.split(".")[::-1])
         except KeyError:
-            date = ''
-            iso_date = ''
-        doc_id = doc['id']
+            date = ""
+            iso_date = ""
+        doc_id = doc["id"]
 
         # extract time from subtitleText
         try:
-            time = re.findall('um (\\d+:\\d+) Uhr', subtitleText)
+            time = re.findall("um (\\d+:\\d+) Uhr", subtitleText)
             if time == []:
-                time = ''
+                time = ""
             else:
-                time = f' {time[0]}'
+                time = f" {time[0]}"
         except TypeError:
-            time = ''
+            time = ""
 
         if subfolder is not None:
             directory = self.output_path / subfolder
         else:
             directory = self.output_path
 
         # If doc_type is something like 'Kosteninformation 2', then strip the 2 and save it in doc_type_num
-        doc_type = doc['title'].rsplit(' ')
+        doc_type = doc["title"].rsplit(" ")
         if doc_type[-1].isnumeric() is True:
-            doc_type_num = f' {doc_type.pop()}'
+            doc_type_num = f" {doc_type.pop()}"
         else:
-            doc_type_num = ''
+            doc_type_num = ""
 
-        doc_type = ' '.join(doc_type)
-        titleText = titleText.replace('\n', '').replace('/', '-')
-        subtitleText = subtitleText.replace('\n', '').replace('/', '-')
+        doc_type = " ".join(doc_type)
+        titleText = titleText.replace("\n", "").replace("/", "-")
+        subtitleText = subtitleText.replace("\n", "").replace("/", "-")
 
         filename = self.filename_fmt.format(
-            iso_date=iso_date, time=time, title=titleText, subtitle=subtitleText, doc_num=doc_type_num, id=doc_id
+            iso_date=iso_date,
+            time=time,
+            title=titleText,
+            subtitle=subtitleText,
+            doc_num=doc_type_num,
+            id=doc_id,
         )
 
-        filename_with_doc_id = filename + f' ({doc_id})'
+        filename_with_doc_id = filename + f" ({doc_id})"
 
-        if doc_type in ['Kontoauszug', 'Depotauszug']:
-            filepath = directory / 'Abschlüsse' / f'{filename}' / f'{doc_type}.pdf'
-            filepath_with_doc_id = directory / 'Abschlüsse' / f'{filename_with_doc_id}' / f'{doc_type}.pdf'
+        if doc_type in ["Kontoauszug", "Depotauszug"]:
+            filepath = directory / "Abschlüsse" / f"{filename}" / f"{doc_type}.pdf"
+            filepath_with_doc_id = (
+                directory / "Abschlüsse" / f"{filename_with_doc_id}" / f"{doc_type}.pdf"
+            )
         else:
-            filepath = directory / doc_type / f'{filename}.pdf'
-            filepath_with_doc_id = directory / doc_type / f'{filename_with_doc_id}.pdf'
+            filepath = directory / doc_type / f"{filename}.pdf"
+            filepath_with_doc_id = directory / doc_type / f"{filename_with_doc_id}.pdf"
 
         if self.universal_filepath:
-            filepath = sanitize_filepath(filepath, '_', 'universal')
-            filepath_with_doc_id = sanitize_filepath(filepath_with_doc_id, '_', 'universal')
+            filepath = sanitize_filepath(filepath, "_", "universal")
+            filepath_with_doc_id = sanitize_filepath(
+                filepath_with_doc_id, "_", "universal"
+            )
         else:
-            filepath = sanitize_filepath(filepath, '_', 'auto')
-            filepath_with_doc_id = sanitize_filepath(filepath_with_doc_id, '_', 'auto')
+            filepath = sanitize_filepath(filepath, "_", "auto")
+            filepath_with_doc_id = sanitize_filepath(filepath_with_doc_id, "_", "auto")
 
         if filepath in self.filepaths:
-            self.log.debug(f'File {filepath} already in queue. Append document id {doc_id}...')
+            self.log.debug(
+                f"File {filepath} already in queue. Append document id {doc_id}..."
+            )
             if filepath_with_doc_id in self.filepaths:
-                self.log.debug(f'File {filepath_with_doc_id} already in queue. Skipping...')
+                self.log.debug(
+                    f"File {filepath_with_doc_id} already in queue. Skipping..."
+                )
                 return
             else:
                 filepath = filepath_with_doc_id
-        doc['local filepath'] = str(filepath)
+        doc["local filepath"] = str(filepath)
         self.filepaths.append(filepath)
 
         if filepath.is_file() is False:
-            doc_url_base = doc_url.split('?')[0]
+            doc_url_base = doc_url.split("?")[0]
             if doc_url_base in self.doc_urls:
-                self.log.debug(f'URL {doc_url_base} already in queue. Skipping...')
+                self.log.debug(f"URL {doc_url_base} already in queue. Skipping...")
                 return
             elif doc_url_base in self.doc_urls_history:
-                self.log.debug(f'URL {doc_url_base} already in history. Skipping...')
+                self.log.debug(f"URL {doc_url_base} already in history. Skipping...")
                 return
             else:
                 self.doc_urls.append(doc_url_base)
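
Note: dl_doc builds the target filename from a few small transformations that the reformat above preserves: the German "DD.MM.YYYY" date from doc["detail"] is flipped into ISO order, a time is pulled out of subtitles like "... um 12:34 Uhr", and the pieces are fed to the user-supplied filename_fmt format string. A small sketch with made-up sample values (the format string and values below are illustrative, not pytr defaults):

import re

# Made-up sample values standing in for the fields dl_doc receives.
detail = "01.02.2023"                                   # doc["detail"], DD.MM.YYYY
subtitleText = "Ausgeführt am 01.02.2023 um 12:34 Uhr"  # subtitle carrying a time

iso_date = "-".join(detail.split(".")[::-1])            # -> "2023-02-01"
found = re.findall("um (\\d+:\\d+) Uhr", subtitleText)
time = f" {found[0]}" if found else ""                  # -> " 12:34"

# Example format string; filename_fmt is user-configurable in pytr.
filename_fmt = "{iso_date}{time} {title}"
filename = filename_fmt.format(
    iso_date=iso_date,
    time=time,
    title="Wertpapierabrechnung",
    subtitle=subtitleText,
    doc_num="",
    id="doc-123",
)
print(filename)  # 2023-02-01 12:34 Wertpapierabrechnung

str.format ignores keyword arguments a given filename_fmt does not reference, which is why dl_doc can always pass the full placeholder set.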
@@ -166,37 +184,39 @@ def dl_doc(self, doc, titleText, subtitleText, subfolder=None):
             future.filepath = filepath
             future.doc_url_base = doc_url_base
             self.futures.append(future)
-            self.log.debug(f'Added {filepath} to queue')
+            self.log.debug(f"Added {filepath} to queue")
         else:
-            self.log.debug(f'file {filepath} already exists. Skipping...')
+            self.log.debug(f"file {filepath} already exists. Skipping...")
 
     def work_responses(self):
-        '''
+        """
         process responses of async requests
-        '''
+        """
         if len(self.doc_urls) == 0:
-            self.log.info('Nothing to download')
+            self.log.info("Nothing to download")
             exit(0)
 
-        with self.history_file.open('a') as history_file:
-            self.log.info('Waiting for downloads to complete..')
+        with self.history_file.open("a") as history_file:
+            self.log.info("Waiting for downloads to complete..")
             for future in as_completed(self.futures):
                 if future.filepath.is_file() is True:
-                    self.log.debug(f'file {future.filepath} was already downloaded.')
+                    self.log.debug(f"file {future.filepath} was already downloaded.")
 
                 try:
                     r = future.result()
                 except Exception as e:
                     self.log.fatal(str(e))
 
                 future.filepath.parent.mkdir(parents=True, exist_ok=True)
-                with open(future.filepath, 'wb') as f:
+                with open(future.filepath, "wb") as f:
                     f.write(r.content)
                 self.done += 1
-                history_file.write(f'{future.doc_url_base}\n')
+                history_file.write(f"{future.doc_url_base}\n")
 
-                self.log.debug(f'{self.done:>3}/{len(self.doc_urls)} {future.filepath.name}')
+                self.log.debug(
+                    f"{self.done:>3}/{len(self.doc_urls)} {future.filepath.name}"
+                )
 
                 if self.done == len(self.doc_urls):
-                    self.log.info('Done.')
+                    self.log.info("Done.")
                     exit(0)
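
Note: work_responses appends each completed document's base URL (query string stripped) to the history file, and load_history reads it back on the next run so documents downloaded earlier are skipped. A rough sketch of that history-file dedup, using a hypothetical output directory and URL:

from pathlib import Path

# Hypothetical location; the real history file lives inside output_path.
history_file = Path("Downloads/pytr_history")
history_file.parent.mkdir(parents=True, exist_ok=True)
history_file.touch()

doc_urls_history = history_file.read_text().splitlines()

doc_url_base = "https://example.com/documents/42"  # query string already stripped
if doc_url_base in doc_urls_history:
    print("already in history, skipping")
else:
    # ... download the document here ...
    with history_file.open("a") as f:
        f.write(f"{doc_url_base}\n")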
