Skip to content

Commit

Permalink
Do regenerate top level task-* stub file, retaining only our custom fields
Browse files Browse the repository at this point in the history
Otherwise it would be inconsistent and populated only with the output of the first analysis run, so multiple sessions etc. would not really get all common values,
diminishing its value.
See #277
  • Loading branch information
yarikoptic committed Nov 27, 2018
1 parent 81d0aca commit 89f14bc
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 10 deletions.
35 changes: 27 additions & 8 deletions heudiconv/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,9 @@ def populate_bids_templates(path, defaults={}):
# 'Manufacturer', 'SliceTiming', ''}
for fpath in find_files('.*_task-.*\_bold\.json', topdir=path,
exclude_vcs=True, exclude="/\.(datalad|heudiconv)/"):
#
# According to the BIDS spec, I think both _task AND _acq (maybe more? _rec, _dir, ...?) should be retained
# TODO: if we are to fix it, then old ones (without _acq) should be removed first
task = re.sub('.*_(task-[^_\.]*(_acq-[^_\.]*)?)_.*', r'\1', fpath)
json_ = load_json(fpath)
if task not in tasks:
Expand All @@ -95,17 +98,33 @@ def populate_bids_templates(path, defaults={}):
lgr.debug("Generating %s", events_file)
with open(events_file, 'w') as f:
f.write("onset\tduration\ttrial_type\tresponse_time\tstim_file\tTODO -- fill in rows and add more tab-separated columns if desired")

# Extract per-task stub files (<task_acq>_bold.json) at the top of the tree,
# merging fields collected across all runs for that task.
for task_acq, fields in tasks.items():
    task_file = op.join(path, task_acq + '_bold.json')
    # Since we are pulling all unique fields we have to possibly
    # rewrite this file to guarantee consistency.
    # See https://github.com/nipy/heudiconv/issues/277 for a use case/bug
    # when we didn't touch an existing one.
    # But the fields we enter (TaskName and CogAtlasID) might need
    # to be populated from the file if it already exists, so a user's
    # manual edits survive regeneration.
    placeholders = {
        "TaskName": ("TODO: full task name for %s" %
                     task_acq.split('_')[0].split('-')[1]),
        "CogAtlasID": "TODO",
    }
    if op.lexists(task_file):
        j = load_json(task_file)
        # Retain possibly modified placeholder fields
        for f in placeholders:
            if f in j:
                placeholders[f] = j[f]
        act = "Regenerating"
    else:
        act = "Generating"
    lgr.debug("%s %s", act, task_file)
    fields.update(placeholders)
    # BUG FIX: save_json's signature is (filename, data, ...) — the target
    # filename must be passed first; the original call passed only the dict,
    # so it would have been used as the path (TypeError/garbage filename).
    save_json(task_file, fields, indent=2, sort_keys=True, pretty=True)


def tuneup_bids_json_files(json_files):
Expand Down
10 changes: 8 additions & 2 deletions heudiconv/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ def assure_no_file_exists(path):
os.unlink(path)


def save_json(filename, data, indent=4, sort_keys=True, pretty=False):
    """Save data to a json file

    Parameters
    ----------
    filename : str
        Filename to save data in.
    data : dict
        Dictionary to save in json file.
    indent : int, optional
        Number of spaces used to indent nested structures.
    sort_keys : bool, optional
        Whether to serialize dictionary keys in sorted order.
    pretty : bool, optional
        If True, serialize with json_dumps_pretty (keeps short
        containers on one line) instead of the canonical dumper.
    """
    # Refuse to silently clobber: removes a pre-existing file/symlink first.
    assure_no_file_exists(filename)
    with open(filename, 'w') as fp:
        # Pick the serializer once, then call it with identical arguments,
        # so both code paths stay consistent.
        fp.write(
            (json_dumps_pretty if pretty else _canonical_dumps)(
                data, sort_keys=sort_keys, indent=indent)
        )


def json_dumps_pretty(j, indent=2, sort_keys=True):
Expand Down

0 comments on commit 89f14bc

Please sign in to comment.