Skip to content

Commit 02d8488

Browse files
committed
Accounting for startup time since the beginning, this adds 4 seconds to the app timer, and leaves 1 second of unaccounted startup time. Previously we had 5 seconds of unaccounted startup time. Fixed tests for the optimized store writing code.
1 parent 4be58b2 commit 02d8488

11 files changed

+154
-95
lines changed

src/__main__.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,9 @@
1717
# You can download the latest version of this tool from:
1818
# https://github.com/MiSTer-devel/Downloader_MiSTer
1919

20+
import time
21+
start_time = time.time()
22+
2023
from sys import exit
2124

2225
try:
@@ -36,5 +39,5 @@
3639
default_commit = None
3740

3841
if __name__ == '__main__':
39-
exit_code = main(read_env(default_commit))
42+
exit_code = main(read_env(default_commit), start_time)
4043
exit(exit_code)

src/build.sh

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,9 @@ cp __main__.py "${TEMPDIR}/__main__.py"
2020
if [[ "${SKIP_REMOVALS:-false}" != "true" ]] ; then
2121
find "${TEMPDIR}" -type f -name '*.py' -exec perl -i -0pe 's/"""(.*?)"""/""/sg; s/^\s*#.*\n//mg; s/^\s*\n//mg' {} +
2222
fi
23+
#if which strip-hints 2>&1 > /dev/null ; then
24+
# find "${TEMPDIR}" -type f -name '*.py' -exec strip-hints --inplace {} + 2> /dev/null
25+
#fi
2326
find "${TEMPDIR}" -type f ! -name '*.py' -exec rm -f {} +
2427
find "${TEMPDIR}" -type f -iname "*.py" -print0 | while IFS= read -r -d '' file ; do pin_metadata "${file}" ; done
2528
pushd "${TEMPDIR}" >/dev/null 2>&1

src/downloader/config_reader.py

Lines changed: 22 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,9 @@
1919

2020
import configparser
2121
import re
22-
import time
2322
from pathlib import Path
24-
from typing import Optional, List, TypeVar, Union, Dict, Any, SupportsInt
23+
from typing import Optional, List, TypeVar, Union, SupportsInt
24+
2525

2626
from downloader.config import Environment, Config, default_config, InvalidConfigParameter, AllowReboot, \
2727
ConfigDatabaseSection, ConfigMisterSection, AllowDelete
@@ -30,13 +30,14 @@
3030
K_STORAGE_PRIORITY, K_ALLOW_DELETE, K_ALLOW_REBOOT, K_VERBOSE, K_UPDATE_LINUX, K_MINIMUM_SYSTEM_FREE_SPACE_MB, \
3131
K_MINIMUM_EXTERNAL_FREE_SPACE_MB, STORAGE_PRIORITY_OFF, STORAGE_PRIORITY_PREFER_SD, STORAGE_PRIORITY_PREFER_EXTERNAL
3232
from downloader.db_options import DbOptions, DbOptionsProps, DbOptionsValidationException
33-
from downloader.logger import Logger
33+
from downloader.logger import Logger, time_str
3434

3535

3636
class ConfigReader:
37-
def __init__(self, logger: Logger, env: Environment):
37+
def __init__(self, logger: Logger, env: Environment, start_time: float):
3838
self._logger = logger
3939
self._env = env
40+
self._start_time = start_time
4041

4142
def calculate_config_path(self, current_working_dir: str) -> str:
4243
if self._env['PC_LAUNCHER'] is not None:
@@ -75,12 +76,24 @@ def read_config(self, config_path: str) -> Config:
7576
result['debug'] = self._env['DEBUG'] == 'true'
7677
if result['debug']:
7778
result['verbose'] = True
79+
if self._env['LOGLEVEL'] != '':
80+
if 'info' in self._env['LOGLEVEL']:
81+
result['verbose'] = False
82+
if 'debug' in self._env['LOGLEVEL']:
83+
result['verbose'] = True
84+
if 'http' in self._env['LOGLEVEL']:
85+
result['http_logging'] = True
86+
87+
if result['verbose']: self._logger.print(f'BENCH {time_str(self._start_time)}| Read config start.')
7888

7989
if self._env['DEFAULT_BASE_PATH'] is not None:
8090
result['base_path'] = self._env['DEFAULT_BASE_PATH']
8191
result['base_system_path'] = self._env['DEFAULT_BASE_PATH']
8292

8393
ini_config = self._load_ini_config(config_path)
94+
95+
if result['verbose']: self._logger.print(f'BENCH {time_str(self._start_time)}| Load ini done.')
96+
8497
default_db = self._default_db_config()
8598

8699
for section in ini_config.sections():
@@ -97,6 +110,8 @@ def read_config(self, config_path: str) -> Config:
97110
self._logger.print("Reading '%s' db section" % section)
98111
result['databases'][section_id] = self._parse_database_section(default_db, parser, section_id)
99112

113+
if result['verbose']: self._logger.print(f'BENCH {time_str(self._start_time)}| Read sections done.')
114+
100115
if len(result['databases']) == 0:
101116
self._logger.print('Reading default db')
102117
self._add_default_database(ini_config, result)
@@ -115,7 +130,7 @@ def read_config(self, config_path: str) -> Config:
115130
result['fail_on_file_error'] = self._env['FAIL_ON_FILE_ERROR'] == 'true'
116131
result['commit'] = self._valid_max_length('COMMIT', self._env['COMMIT'], 50)
117132
result['default_db_id'] = self._valid_db_id(K_DEFAULT_DB_ID, self._env['DEFAULT_DB_ID'])
118-
result['start_time'] = time.time()
133+
result['start_time'] = self._start_time
119134
result['logfile'] = self._env['LOGFILE']
120135
result['config_path'] = Path(config_path)
121136

@@ -149,15 +164,9 @@ def read_config(self, config_path: str) -> Config:
149164
result['logfile'] = str(launcher_path.with_suffix('.log'))
150165
result['curl_ssl'] = ''
151166

152-
if self._env['LOGLEVEL'] != '':
153-
if 'info' in self._env['LOGLEVEL']:
154-
result['verbose'] = False
155-
if 'debug' in self._env['LOGLEVEL']:
156-
result['verbose'] = True
157-
if 'http' in self._env['LOGLEVEL']:
158-
result['http_logging'] = True
159-
160167
result['environment'] = self._env
168+
169+
if result['verbose']: self._logger.print(f'BENCH {time_str(self._start_time)}| Read config done.')
161170
return result
162171

163172
@staticmethod

src/downloader/jobs/process_db_index_job.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ def retry_job(self): return None
5151
duplicated_files: List[str] = field(default_factory=list)
5252

5353
installed_folders: List[PathPackage] = field(default_factory=list)
54-
removed_folders: List[RemovedCopy] = field(default_factory=list) # @TODO: Why there is removed_folders AND directories_to_remove?
54+
removed_folders: List[PathPackage] = field(default_factory=list) # @TODO: Why there is removed_folders AND directories_to_remove?
5555

5656
directories_to_remove: List[PathPackage] = field(default_factory=list)
5757
files_to_remove: List[PathPackage] = field(default_factory=list)

src/downloader/jobs/process_db_index_worker.py

Lines changed: 37 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -127,8 +127,8 @@ def create_packages_from_index(ctx: DownloaderWorkerContext, config: Config, sum
127127
List[_DeleteFolderPackage]
128128
]:
129129
calculator = ctx.target_paths_calculator_factory.target_paths_calculator(config)
130-
check_file_pkgs, remove_files_pkgs = _translate_items(ctx, calculator, summary.files, PathType.FILE, store.files)
131-
create_folder_pkgs, delete_folder_pkgs = _translate_items(ctx, calculator, summary.folders, PathType.FOLDER, store.folders)
130+
check_file_pkgs, remove_files_pkgs = _translate_items(ctx, calculator, summary.files, PathType.FILE, store.all_files())
131+
create_folder_pkgs, delete_folder_pkgs = _translate_items(ctx, calculator, summary.folders, PathType.FOLDER, store.all_folders())
132132
return check_file_pkgs, remove_files_pkgs, create_folder_pkgs, delete_folder_pkgs
133133

134134
def _translate_items(ctx: DownloaderWorkerContext, calculator: TargetPathsCalculator, items: Dict[str, Dict[str, Any]], path_type: PathType, stored: Dict[str, Dict[str, Any]]) -> Tuple[List[PathPackage], List[PathPackage]]:
@@ -216,7 +216,7 @@ def try_reserve_space(ctx: DownloaderWorkerContext, file_pkgs: Iterable[PathPack
216216
return full_partitions
217217

218218

219-
def process_create_folder_packages(ctx: DownloaderWorkerContext, create_folder_pkgs: List[PathPackage], db_id: str, db_folder_index: Dict[str, Any], store: ReadOnlyStoreAdapter) -> Tuple[List[RemovedCopy], List[PathPackage], List[str]]:
219+
def process_create_folder_packages(ctx: DownloaderWorkerContext, create_folder_pkgs: List[PathPackage], db_id: str, db_folder_index: Dict[str, Any], store: ReadOnlyStoreAdapter) -> Tuple[List[PathPackage], List[PathPackage], List[str]]:
220220
if len(create_folder_pkgs) == 0:
221221
return [], [], []
222222

@@ -225,37 +225,52 @@ def process_create_folder_packages(ctx: DownloaderWorkerContext, create_folder_p
225225
except Exception as e:
226226
ctx.swallow_error(e)
227227

228-
folder_copies_to_be_removed: List[Tuple[bool, str, str, PathType]] = []
228+
folder_copies_to_be_removed: List[PathPackage] = []
229229
processing_folders: List[PathPackage] = []
230230

231231
parent_drives: Dict[str, set[str]] = defaultdict(set)
232232
parent_pkgs: Dict[str, PathPackage] = dict()
233-
parents_to_add: List[PathPackage] = []
234233

235234
for pkg in sorted(create_folder_pkgs, key=lambda x: len(x.rel_path)):
236-
if pkg.kind != PATH_PACKAGE_KIND_PEXT:
237-
processing_folders.append(pkg)
238-
elif pkg.is_pext_parent():
235+
if pkg.is_pext_parent():
239236
parent_pkgs[pkg.rel_path] = pkg
240237
continue
241-
else:
242-
processing_folders.append(pkg)
243238

244-
pkg_parent = pkg.pext_props.parent
245-
if pkg_parent in parent_pkgs and pkg.drive not in parent_drives[pkg_parent]:
246-
parent_drives[pkg_parent].add(pkg.drive)
247-
parent_pkg = parent_pkgs[pkg_parent].clone()
248-
parent_pkg.drive = pkg.drive
249-
parent_pkg.pext_props.kind = pkg.pext_props.kind
250-
parent_pkg.pext_props.drive = pkg.pext_props.drive
251-
parent_pkg.pext_props.parent = ''
252-
parents_to_add.append(parent_pkg)
239+
processing_folders.append(pkg)
240+
if pkg.kind != PATH_PACKAGE_KIND_PEXT:
241+
continue
253242

254-
_maybe_add_copies_to_remove(ctx, folder_copies_to_be_removed, store, pkg.rel_path, pkg.pext_drive())
243+
pkg_parent = pkg.pext_props.parent
244+
if pkg_parent not in parent_pkgs or pkg.drive in parent_drives:
245+
continue
255246

256-
for parent_pkg in parents_to_add:
247+
parent_drives[pkg_parent].add(pkg.drive)
248+
parent_pkg = parent_pkgs[pkg_parent].clone()
249+
parent_pkg.drive = pkg.drive
250+
parent_pkg.pext_props.kind = pkg.pext_props.kind
251+
parent_pkg.pext_props.drive = pkg.pext_props.drive
252+
parent_pkg.pext_props.parent = ''
257253
processing_folders.append(parent_pkg)
258-
_maybe_add_copies_to_remove(ctx, folder_copies_to_be_removed, store, parent_pkg.rel_path, parent_pkg.drive)
254+
255+
for pkg in processing_folders:
256+
folder_path, drive = pkg.rel_path, pkg.drive
257+
for is_external, other_drive in store.list_other_drives_for_folder(pkg):
258+
if ctx.file_system.is_folder(os.path.join(other_drive, folder_path)):
259+
continue
260+
261+
if is_external:
262+
removed_pkg = pkg.clone_as_pext()
263+
removed_pkg.drive = other_drive
264+
removed_pkg.pext_props.drive = other_drive
265+
removed_pkg.pext_props.kind = PEXT_KIND_EXTERNAL
266+
else:
267+
removed_pkg = pkg.clone()
268+
removed_pkg.drive = other_drive
269+
if removed_pkg.pext_props is not None:
270+
removed_pkg.pext_props.kind = PEXT_KIND_STANDARD
271+
removed_pkg.pext_props.drive = other_drive
272+
273+
folder_copies_to_be_removed.append(removed_pkg)
259274

260275
ctx.logger.bench('add_processed_folders start: ', db_id, len(processing_folders))
261276
non_existing_folders = ctx.installation_report.add_processed_folders(processing_folders, db_id)
@@ -273,15 +288,6 @@ def process_create_folder_packages(ctx: DownloaderWorkerContext, create_folder_p
273288
installed_folders = [f for f in processing_folders if f.db_path() in db_folder_index]
274289
return folder_copies_to_be_removed, installed_folders, errors
275290

276-
def _maybe_add_copies_to_remove(ctx: DownloaderWorkerContext, copies: List[Tuple[bool, str, str, PathType]], store: ReadOnlyStoreAdapter, folder_path: str, drive: Optional[str]):
277-
if store.folder_drive(folder_path) == drive: return
278-
copies.extend([
279-
(is_external, folder_path, other_drive, PathType.FOLDER)
280-
for is_external, other_drive in store.list_other_drives_for_folder(folder_path, drive)
281-
if not ctx.file_system.is_folder(os.path.join(other_drive, folder_path))
282-
])
283-
284-
285291
def create_fetch_jobs(ctx: DownloaderWorkerContext, db_id: str, non_existing_pkgs: List[_FetchFilePackage], need_update_pkgs: List[_FetchFilePackage], base_files_url: str) -> List[Job]:
286292
if len(non_existing_pkgs) == 0 and len(need_update_pkgs) == 0:
287293
return []

src/downloader/jobs/process_zip_index_job.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ def retry_job(self): return None
5757
duplicated_files: List[str] = field(default_factory=list)
5858

5959
installed_folders: List[PathPackage] = field(default_factory=list)
60-
removed_folders: List[RemovedCopy] = field(default_factory=list) # @TODO: Why there is removed_folders AND directories_to_remove?
60+
removed_folders: List[PathPackage] = field(default_factory=list) # @TODO: Why there is removed_folders AND directories_to_remove?
6161

6262
directories_to_remove: List[PathPackage] = field(default_factory=list)
6363
files_to_remove: List[PathPackage] = field(default_factory=list)

src/downloader/local_store_wrapper.py

Lines changed: 42 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@
2020
from downloader.jobs.index import Index
2121
from downloader.other import empty_store_without_base_path
2222
from typing import Any, Dict, Optional, Set, Tuple, List, TypedDict
23-
from collections import defaultdict
23+
from collections import defaultdict, ChainMap
2424

25-
from downloader.path_package import PathPackage
25+
from downloader.path_package import PathPackage, PATH_PACKAGE_KIND_PEXT, PextPathProps, PEXT_KIND_EXTERNAL, PathType
2626

2727
NO_HASH_IN_STORE_CODE = 'file_does_not_exist_so_cant_get_hash'
2828

@@ -183,25 +183,25 @@ def add_file(self, file_path, description):
183183
if file_path in self._external_additions['files']:
184184
for drive in self._external_additions['files'][file_path]:
185185
self.remove_external_file(drive, file_path)
186-
self._add_entry('files', file_path, description)
186+
self._add_entry('files', PathType.FILE, file_path, description)
187187

188188
def add_folder(self, folder, description):
189-
self._add_entry('folders', folder, description)
189+
self._add_entry('folders', PathType.FOLDER, folder, description)
190190

191-
def _add_entry(self, kind, path, description):
191+
def _add_entry(self, kind: str, ty: PathType, path: str, description: dict[str, Any]):
192192
self._clean_external_additions(kind, path)
193193

194194
if 'zip_id' not in description and 'tags' in description:
195195
description.pop('tags')
196196

197-
if path in self._store[kind] and equal_dicts(self._store[kind][path], description):
197+
if path in self._store[kind] and equal_descriptions(self._store[kind][path], description, ty):
198198
return
199199

200200
self._store[kind][path] = description
201201
self._top_wrapper.mark_force_save()
202202

203203
def add_external_folder(self, drive, folder_path, description):
204-
self._add_external_entry('folders', drive, folder_path, description)
204+
self._add_external_entry('folders', PathType.FOLDER, drive, folder_path, description)
205205

206206
def add_external_file(self, drive, file_path, description):
207207
if file_path in self._store['files']:
@@ -211,20 +211,18 @@ def add_external_file(self, drive, file_path, description):
211211
if d == drive:
212212
continue
213213
self.remove_external_file(d, file_path)
214-
self._add_external_entry('files', drive, file_path, description)
214+
self._add_external_entry('files', PathType.FILE, drive, file_path, description)
215215

216-
def _add_external_entry(self, kind, drive, path, description):
216+
def _add_external_entry(self, kind: str, ty: PathType, drive: str, path: str, description: dict[str, Any]):
217217
external = self._external_by_drive(drive)
218218

219219
if 'zip_id' not in description and 'tags' in description:
220220
description.pop('tags')
221221

222222
entries = external[kind]
223-
if path in entries and equal_dicts(entries[path], description):
223+
if path in entries and equal_descriptions(entries[path], description, ty):
224224
return
225225

226-
#if path in self._store[kind]: del self._store[kind][path]
227-
228226
entries[path] = description
229227
self._top_wrapper.mark_force_save()
230228

@@ -605,10 +603,22 @@ def zips(self) -> Dict[str, Dict[str, Any]]:
605603
def files(self) -> Dict[str, Dict[str, Any]]:
606604
return self._store['files']
607605

606+
def all_files(self):
607+
if not 'external' in self._store:
608+
return self._store['files']
609+
610+
return ChainMap(self._store['files'], *[external['files'] for external in self._store['external'].values() if 'files' in external])
611+
608612
@property
609613
def folders(self) -> Dict[str, Dict[str, Any]]:
610614
return self._store['folders']
611615

616+
def all_folders(self):
617+
if not 'external' in self._store:
618+
return self._store['folders']
619+
620+
return ChainMap(self._store['folders'], *[external['folders'] for external in self._store['external'].values() if 'folders' in external])
621+
612622
@property
613623
def has_externals(self) -> bool:
614624
return 'external' in self._store
@@ -668,8 +678,8 @@ def list_other_drives_for_file(self, file_path: str, drive: Optional[str]) -> Li
668678

669679
return result
670680

671-
def list_other_drives_for_folder(self, folder_path: str, drive: Optional[str]) -> List[Tuple[bool, str]]:
672-
if drive is None: drive = self.base_path
681+
def list_other_drives_for_folder(self, folder_pkg: PathPackage) -> List[Tuple[bool, str]]:
682+
folder_path, drive = folder_pkg.rel_path, folder_pkg.drive
673683
if 'external' in self._store:
674684
result = [
675685
(True, external_drive)
@@ -689,6 +699,24 @@ def has_no_files(self):
689699
return len(self._store['files']) == 0
690700

691701

702+
def equal_descriptions(lhs: dict[str, Any], b: dict[str, Any], ty: PathType) -> bool:
703+
if ty == PathType.FOLDER: return equal_dicts_or_lhs_bigger(lhs, b)
704+
else: return equal_dicts(lhs, b)
705+
706+
def equal_dicts_or_lhs_bigger(lhs: dict[str, Any], b: dict[str, Any]) -> bool:
707+
if len(b) > len(lhs):
708+
return False
709+
710+
for key, value in lhs.items():
711+
if key not in b:
712+
continue
713+
714+
if not equal_values(value, b[key]):
715+
return False
716+
717+
return True
718+
719+
692720
def equal_dicts(a, b):
693721
if len(a) != len(b):
694722
return False

src/downloader/logger.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -136,8 +136,7 @@ def bench(self, *args):
136136
if self._start_time is None:
137137
return
138138

139-
time_str = str(datetime.timedelta(seconds=time.time() - self._start_time))[0:-3]
140-
bench_header = f'BENCH {time_str}| '
139+
bench_header = f'BENCH {time_str(self._start_time)}| '
141140
self.print_logger.bench(bench_header, *args)
142141
self.file_logger.bench(bench_header, *args)
143142

@@ -193,3 +192,6 @@ def debug(self, *args, sep='', end='\n', flush=True):
193192

194193
def bench(self, *args):
195194
self._decorated_logger.bench(*args)
195+
196+
def time_str(start_time: float) -> str:
197+
return str(datetime.timedelta(seconds=time.time() - start_time))[0:-3]

0 commit comments

Comments
 (0)