Skip to content

Commit 89f14bc

Browse files
committed
Do regenerate top level task-* stub file, retain only our custom fields
Otherwise it would be inconsistent and populated only with the first run analysis output, so multiple sessions etc would not really get all common values, diminishing its value. See #277
1 parent 81d0aca commit 89f14bc

File tree

2 files changed

+35
-10
lines changed

2 files changed

+35
-10
lines changed

heudiconv/bids.py

Lines changed: 27 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -76,6 +76,9 @@ def populate_bids_templates(path, defaults={}):
7676
# 'Manufacturer', 'SliceTiming', ''}
7777
for fpath in find_files('.*_task-.*\_bold\.json', topdir=path,
7878
exclude_vcs=True, exclude="/\.(datalad|heudiconv)/"):
79+
#
80+
# According to BIDS spec I think both _task AND _acq (may be more? _rec, _dir, ...?) should be retained?
81+
# TODO: if we are to fix it, then old ones (without _acq) should be removed first
7982
task = re.sub('.*_(task-[^_\.]*(_acq-[^_\.]*)?)_.*', r'\1', fpath)
8083
json_ = load_json(fpath)
8184
if task not in tasks:
@@ -95,17 +98,33 @@ def populate_bids_templates(path, defaults={}):
9598
lgr.debug("Generating %s", events_file)
9699
with open(events_file, 'w') as f:
97100
f.write("onset\tduration\ttrial_type\tresponse_time\tstim_file\tTODO -- fill in rows and add more tab-separated columns if desired")
101+
98102
# extract tasks files stubs
99103
for task_acq, fields in tasks.items():
100104
task_file = op.join(path, task_acq + '_bold.json')
101-
# do not touch any existing thing, it may be precious
102-
if not op.lexists(task_file):
103-
lgr.debug("Generating %s", task_file)
104-
fields["TaskName"] = ("TODO: full task name for %s" %
105-
task_acq.split('_')[0].split('-')[1])
106-
fields["CogAtlasID"] = "TODO"
107-
with open(task_file, 'w') as f:
108-
f.write(json_dumps_pretty(fields, indent=2, sort_keys=True))
105+
# Since we are pulling all unique fields we have to possibly
106+
# rewrite this file to guarantee consistency.
107+
# See https://github.com/nipy/heudiconv/issues/277 for a usecase/bug
108+
# when we didn't touch existing one.
109+
# But the fields we enter (TaskName and CogAtlasID) might need
110+
# to be populated from the file if it already exists
111+
placeholders = {
112+
"TaskName": ("TODO: full task name for %s" %
113+
task_acq.split('_')[0].split('-')[1]),
114+
"CogAtlasID": "TODO",
115+
}
116+
if op.lexists(task_file):
117+
j = load_json(task_file)
118+
# Retain possibly modified placeholder fields
119+
for f in placeholders:
120+
if f in j:
121+
placeholders[f] = j[f]
122+
act = "Regenerating"
123+
else:
124+
act = "Generating"
125+
lgr.debug("%s %s", act, task_file)
126+
fields.update(placeholders)
127+
save_json(fields, indent=2, sort_keys=True, pretty=True)
109128

110129

111130
def tuneup_bids_json_files(json_files):

heudiconv/utils.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,7 @@ def assure_no_file_exists(path):
171171
os.unlink(path)
172172

173173

174-
def save_json(filename, data, indent=4):
174+
def save_json(filename, data, indent=4, sort_keys=True, pretty=False):
175175
"""Save data to a json file
176176
177177
Parameters
@@ -180,11 +180,17 @@ def save_json(filename, data, indent=4):
180180
Filename to save data in.
181181
data : dict
182182
Dictionary to save in json file.
183+
indent : int, optional
184+
sort_keys : bool, optional
185+
pretty : bool, optional
183186
184187
"""
185188
assure_no_file_exists(filename)
186189
with open(filename, 'w') as fp:
187-
fp.write(_canonical_dumps(data, sort_keys=True, indent=indent))
190+
fp.write(
191+
(json_dumps_pretty if pretty else _canonical_dumps)(
192+
data, sort_keys=sort_keys, indent=indent)
193+
)
188194

189195

190196
def json_dumps_pretty(j, indent=2, sort_keys=True):

0 commit comments

Comments
 (0)