Skip to content

Commit b795eda

Browse files
committed
First version of renaming group paths and relabeling the Logger on group relabel.
1 parent 59f6fb2 commit b795eda

File tree

5 files changed

+192
-45
lines changed

5 files changed

+192
-45
lines changed

src/aiida/cmdline/commands/cmd_group.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -319,12 +319,19 @@ def _dry_run_callback(pks):
319319
@with_dbenv()
320320
def group_relabel(group, label):
321321
"""Change the label of a group."""
322+
# TODO: Add a message here that if one has the profile mirrored, they should also run the command `verdi profile
323+
# TODO: mirror relabel-group ` to update the mirrored profile.
322324
try:
323325
group.label = label
324326
except UniquenessError as exception:
325327
echo.echo_critical(str(exception))
326328
else:
327329
echo.echo_success(f"Label changed to '{label}'")
330+
msg = (
331+
'Note that if you are mirroring your profile data to disk, to reflect the relabeling of the group, '
332+
'run the command: `verdi profile mirror --update-groups`.'
333+
)
334+
echo.echo_report(msg)
328335

329336

330337
@verdi_group.command('description')

src/aiida/cmdline/commands/cmd_profile.py

Lines changed: 38 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -303,6 +303,14 @@ def profile_delete(force, delete_data, profiles):
303303
show_default=True,
304304
help='If a top-level process calls sub-processes, create a designated directory only for the top-level process.',
305305
)
306+
# TODO: Implement this...
307+
# TODO: Possibly
308+
@click.option(
309+
'--update-groups/--no-update-groups',
310+
default=False,
311+
show_default=True,
312+
help='Update directories if nodes have been added to other groups, or organized differently in terms of groups.',
313+
)
306314
@options.INCLUDE_INPUTS()
307315
@options.INCLUDE_OUTPUTS()
308316
@options.INCLUDE_ATTRIBUTES()
@@ -319,6 +327,7 @@ def profile_mirror(
319327
organize_by_groups,
320328
symlink_duplicates,
321329
delete_missing,
330+
update_groups,
322331
only_top_level_calcs,
323332
only_top_level_workflows,
324333
include_inputs,
@@ -343,6 +352,11 @@ def profile_mirror(
343352
# ? Does it even make sense to provide both options, as they are mutually exclusive?
344353
incremental = not overwrite
345354

355+
if not organize_by_groups and update_groups:
356+
# Add check outside in cmd_profile?
357+
msg = '`--update-groups` selected, even though `--organize-by-groups` is set to False.'
358+
echo.echo_critical(msg)
359+
346360
if path is None:
347361
path = Path.cwd() / f'{profile.name}-mirror'
348362

@@ -421,6 +435,9 @@ def profile_mirror(
421435
num_processes_to_dump = len(profile_dumper.processes_to_dump)
422436
num_processes_to_delete = len(profile_dumper.processes_to_delete)
423437

438+
# num_groups_to_dump = len(profile_dumper.groups_to_dump)
439+
num_groups_to_delete = len(profile_dumper.groups_to_delete)
440+
424441
if dry_run:
425442
dry_run_message = (
426443
f'Dry run for mirroring of profile `{profile.name}`. '
@@ -438,23 +455,35 @@ def profile_mirror(
438455
# TODO: Maybe add y/n confirmation here?
439456
echo.echo_report(msg)
440457

441-
if num_processes_to_dump == 0:
442-
msg = 'No processes to dump.'
443-
echo.echo_success(msg)
444-
else:
445-
profile_dumper.dump_processes()
446-
msg = f'Dumped {num_processes_to_dump} new nodes.'
447-
echo.echo_success(msg)
458+
if dump_processes:
459+
if num_processes_to_dump == 0:
460+
msg = 'No processes to dump.'
461+
echo.echo_success(msg)
462+
else:
463+
profile_dumper.dump_processes()
464+
msg = f'Dumped {num_processes_to_dump} new nodes.'
465+
echo.echo_success(msg)
448466

449467
if delete_missing:
450-
# breakpoint()
451468
if num_processes_to_delete == 0:
452469
echo.echo_success('No processes to delete.')
453470
else:
454471
profile_dumper.delete_processes()
455-
456472
echo.echo_success(f'Deleted {num_processes_to_delete} node directories.')
457473

474+
if num_groups_to_delete == 0:
475+
echo.echo_success('No groups to delete.')
476+
else:
477+
profile_dumper.delete_groups()
478+
echo.echo_success(f'Deleted {num_groups_to_delete} group directories.')
479+
480+
if update_groups:
481+
relabeled_paths = profile_dumper.update_groups()
482+
483+
msg = 'Mirrored directories and '
484+
echo.echo_success(msg)
485+
print(relabeled_paths)
486+
458487
# Append the current dump time to dumping safeguard file
459488
with safeguard_file_path.open('a') as fhandle:
460489
fhandle.write(f'Last profile mirror time: {last_dump_time.isoformat()}\n')

src/aiida/tools/dumping/logger.py

Lines changed: 35 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@
1515

1616
from aiida.common.exceptions import NotExistent
1717

18+
# TODO: Possibly mirror hierarchy of mirrored directory inside json file
19+
# TODO: Currently, json file has only top-level "groups", "workflows", and "calculations"
20+
1821

1922
@dataclass
2023
class DumpLog:
@@ -94,10 +97,12 @@ class DumpLogStoreCollection:
9497

9598
calculations: DumpLogStore
9699
workflows: DumpLogStore
100+
groups: DumpLogStore
101+
# data: DumpLogStore
97102

98103

99104
class DumpLogger:
100-
"""Main logger class using dataclasses for better structure."""
105+
"""Main dumping logger singleton."""
101106

102107
DUMP_LOG_FILE: str = '.dump_log.json'
103108
_instance: 'DumpLogger | None' = None # Class-level singleton instance
@@ -116,7 +121,8 @@ def __init__(
116121
dump_parent_path: Path | None = None,
117122
calculations: DumpLogStore | None = None,
118123
workflows: DumpLogStore | None = None,
119-
# counter: int = 0,
124+
groups: DumpLogStore | None = None,
125+
# data: DumpLogStore | None = None,
120126
) -> None:
121127
# Ensure __init__ is only called once
122128
if hasattr(self, '_initialized') and self._initialized:
@@ -125,7 +131,8 @@ def __init__(
125131
self.dump_parent_path = dump_parent_path or Path.cwd()
126132
self.calculations = calculations or DumpLogStore()
127133
self.workflows = workflows or DumpLogStore()
128-
# self.counter = counter
134+
self.groups = groups or DumpLogStore()
135+
# self.data = data or DumpLogStore()
129136

130137
# Mark the object as initialized
131138
self._initialized = True
@@ -144,20 +151,26 @@ def del_entry(self, store: DumpLogStore, uuid: str) -> bool:
144151
@property
145152
def log(self) -> DumpLogStoreCollection:
146153
"""Retrieve the current state of the log as a dataclass."""
147-
return DumpLogStoreCollection(calculations=self.calculations, workflows=self.workflows)
154+
return DumpLogStoreCollection(calculations=self.calculations, workflows=self.workflows, groups=self.groups)
148155

149156
def save_log(self) -> None:
150157
"""Save the log to a JSON file."""
151158

152159
def serialize_logs(container: DumpLogStore) -> dict:
153160
serialized = {}
154161
for uuid, entry in container.entries.items():
155-
serialized[uuid] = {'path': str(entry.path), 'time': entry.time.isoformat()}
162+
serialized[uuid] = {
163+
'path': str(entry.path),
164+
'time': entry.time.isoformat(),
165+
'links': [str(link) for link in entry.links],
166+
}
156167
return serialized
157168

158169
log_dict = {
159170
'calculations': serialize_logs(self.calculations),
160171
'workflows': serialize_logs(self.workflows),
172+
'groups': serialize_logs(self.groups),
173+
# 'data': serialize_logs(self.data),
161174
}
162175

163176
with self.log_file_path.open('w', encoding='utf-8') as f:
@@ -185,12 +198,20 @@ def deserialize_logs(category_data: dict) -> DumpLogStore:
185198
container = DumpLogStore()
186199
for uuid, entry in category_data.items():
187200
container.add_entry(
188-
uuid, DumpLog(path=Path(entry['path']), time=datetime.fromisoformat(entry['time']))
201+
uuid,
202+
DumpLog(
203+
path=Path(entry['path']),
204+
time=datetime.fromisoformat(entry['time']),
205+
links=[Path(p) for p in entry['links']],
206+
),
189207
)
208+
190209
return container
191210

192211
instance.calculations = deserialize_logs(data['calculations'])
193212
instance.workflows = deserialize_logs(data['workflows'])
213+
instance.groups = deserialize_logs(data['groups'])
214+
# instance.data = deserialize_logs(data['data'])
194215

195216
except (json.JSONDecodeError, OSError):
196217
raise
@@ -206,7 +227,7 @@ def get_store_by_uuid(self, uuid: str) -> DumpLogStore:
206227
if uuid in store.entries:
207228
return store
208229

209-
msg = f"No corresponding `DumpLogStore` found for UUID: `{uuid}`."
230+
msg = f'No corresponding `DumpLogStore` found for UUID: `{uuid}`.'
210231
raise NotExistent(msg)
211232

212233
def get_path_by_uuid(self, uuid: str) -> Path | None:
@@ -215,13 +236,17 @@ def get_path_by_uuid(self, uuid: str) -> Path | None:
215236

216237
try:
217238
current_store = self.get_store_by_uuid(uuid=uuid)
218-
path = current_store.entries[uuid].path
219-
return path
220239
except NotExistent as exc:
221240
raise NotExistent(exc.args[0]) from exc
241+
try:
242+
path = current_store.entries[uuid].path
243+
return path
222244
except KeyError as exc:
223-
msg = f"UUID: `{uuid}` not contained in store `{current_store}`."
245+
msg = f'UUID: `{uuid}` not contained in store `{current_store}`.'
224246
raise KeyError(msg) from exc
225247
except:
226248
# For debugging
249+
import ipdb
250+
251+
ipdb.set_trace()
227252
raise

0 commit comments

Comments
 (0)