Commit 303a0e8

Merge branch 'master' into replace_imghdr

2 parents: 9d401e4 + ccec546

57 files changed: +252 -261 lines (only a subset of the changed files is shown below)

nipype/algorithms/misc.py

+4-4
@@ -684,7 +684,7 @@ def _run_interface(self, runtime):

         output_array = merge_csvs(self.inputs.in_files)
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".csv":
+        if ext != ".csv":
             ext = ".csv"

         out_file = op.abspath(name + ext)
@@ -725,7 +725,7 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         outputs = self.output_spec().get()
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".csv":
+        if ext != ".csv":
             ext = ".csv"
         out_file = op.abspath(name + ext)
         outputs["csv_file"] = out_file
@@ -771,7 +771,7 @@ class AddCSVColumn(BaseInterface):
     def _run_interface(self, runtime):
         in_file = open(self.inputs.in_file)
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".csv":
+        if ext != ".csv":
             ext = ".csv"
         out_file = op.abspath(name + ext)

@@ -791,7 +791,7 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         outputs = self.output_spec().get()
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".csv":
+        if ext != ".csv":
             ext = ".csv"
         out_file = op.abspath(name + ext)
         outputs["csv_file"] = out_file

nipype/algorithms/modelgen.py

+1-1
@@ -161,7 +161,7 @@ def bids_gen_info(
     for bids_event_file in bids_event_files:
         with open(bids_event_file) as f:
             f_events = csv.DictReader(f, skipinitialspace=True, delimiter="\t")
-            events = [{k: v for k, v in row.items()} for row in f_events]
+            events = list(f_events)
         if not condition_column:
             condition_column = "_trial_type"
         for i in events:
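
Note: each row yielded by csv.DictReader is already a dict, so list(f_events) produces the same list of dicts as the removed comprehension. A minimal sketch of the equivalence, using made-up in-memory TSV data rather than a real BIDS events file:

    import csv
    import io

    tsv = "onset\tduration\ttrial_type\n0.0\t2.0\tgo\n"
    rows_a = [{k: v for k, v in row.items()} for row in csv.DictReader(io.StringIO(tsv), delimiter="\t")]
    rows_b = list(csv.DictReader(io.StringIO(tsv), delimiter="\t"))
    assert rows_a == rows_b == [{"onset": "0.0", "duration": "2.0", "trial_type": "go"}]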

nipype/algorithms/tests/test_CompCor.py

+3-3
@@ -287,9 +287,9 @@ def run_cc(
             components_metadata = [
                 line.rstrip().split("\t") for line in metadata_file
             ]
-            components_metadata = {
-                i: j for i, j in zip(components_metadata[0], components_metadata[1])
-            }
+            components_metadata = dict(
+                zip(components_metadata[0], components_metadata[1])
+            )
             assert components_metadata == expected_metadata

     return ccresult
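
Note: dict(zip(keys, values)) builds the same mapping as the removed dict comprehension. A tiny sketch of the equivalence, with made-up header and value rows:

    header = ["component", "retained"]
    values = ["comp_00", "True"]
    assert dict(zip(header, values)) == {k: v for k, v in zip(header, values)}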

nipype/algorithms/tests/test_modelgen.py

+2-2
@@ -122,7 +122,7 @@ def test_modelgen_spm_concat(tmpdir):
     s = SpecifySPMModel()
     s.inputs.input_units = "secs"
     s.inputs.concatenate_runs = True
-    setattr(s.inputs, "output_units", "secs")
+    s.inputs.output_units = "secs"
     assert s.inputs.output_units == "secs"
     s.inputs.functional_runs = [filename1, filename2]
     s.inputs.time_repetition = 6
@@ -147,7 +147,7 @@ def test_modelgen_spm_concat(tmpdir):
     )

     # Test case of scans as output units instead of seconds
-    setattr(s.inputs, "output_units", "scans")
+    s.inputs.output_units = "scans"
     assert s.inputs.output_units == "scans"
     s.inputs.subject_info = deepcopy(info)
     res = s.run()

nipype/interfaces/afni/base.py

+1-3
@@ -326,6 +326,4 @@ def _cmd_prefix(self):

 def no_afni():
     """Check whether AFNI is not available."""
-    if Info.version() is None:
-        return True
-    return False
+    return Info.version() is None

nipype/interfaces/afni/model.py

+2-2
@@ -636,7 +636,7 @@ def _parse_inputs(self, skip=None):
     def _list_outputs(self):
         outputs = self.output_spec().get()

-        for key in outputs.keys():
+        for key in outputs:
             if isdefined(self.inputs.get()[key]):
                 outputs[key] = os.path.abspath(self.inputs.get()[key])

@@ -722,7 +722,7 @@ class Synthesize(AFNICommand):
     def _list_outputs(self):
         outputs = self.output_spec().get()

-        for key in outputs.keys():
+        for key in outputs:
             if isdefined(self.inputs.get()[key]):
                 outputs[key] = os.path.abspath(self.inputs.get()[key])

nipype/interfaces/afni/utils.py

+3-3
@@ -234,7 +234,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None):
             m = re.search(pattern, line)
             if m:
                 d = m.groupdict()
-                outputs.trait_set(**{k: int(d[k]) for k in d.keys()})
+                outputs.trait_set(**{k: int(v) for k, v in d.items()})
         return outputs


@@ -3244,11 +3244,11 @@ def _run_interface(self, runtime):
             for line in runtime.stdout.split("\n")
             if line.strip().startswith("GCOR = ")
         ][-1]
-        setattr(self, "_gcor", float(gcor_line[len("GCOR = ") :]))
+        self._gcor = float(gcor_line[len("GCOR = ") :])
         return runtime

     def _list_outputs(self):
-        return {"out": getattr(self, "_gcor")}
+        return {"out": self._gcor}


 class AxializeInputSpec(AFNICommandInputSpec):

nipype/interfaces/ants/segmentation.py

+1-1
@@ -194,7 +194,7 @@ def _format_arg(self, opt, spec, val):
                     priors_paths[0] % i for i in range(1, n_classes + 1)
                 ]

-            if not all([os.path.exists(p) for p in priors_paths]):
+            if not all(os.path.exists(p) for p in priors_paths):
                 raise FileNotFoundError(
                     "One or more prior images do not exist: "
                     "%s." % ", ".join(priors_paths)

nipype/interfaces/ants/utils.py

+1-1
@@ -536,7 +536,7 @@ def _format_arg(self, opt, spec, val):
         return super()._format_arg(opt, spec, val)

     def _list_outputs(self):
-        return getattr(self, "_output")
+        return self._output


 class AverageAffineTransformInputSpec(ANTSCommandInputSpec):

nipype/interfaces/base/core.py

+1-1
@@ -486,7 +486,7 @@ def load_inputs_from_json(self, json_file, overwrite=True):
         if not overwrite:
             def_inputs = list(self.inputs.get_traitsfree().keys())

-        new_inputs = list(set(list(inputs_dict.keys())) - set(def_inputs))
+        new_inputs = set(inputs_dict) - set(def_inputs)
         for key in new_inputs:
             if hasattr(self.inputs, key):
                 setattr(self.inputs, key, inputs_dict[key])

nipype/interfaces/base/specs.py

+3-7
@@ -177,19 +177,15 @@ def get_traitsfree(self, **kwargs):

     def _clean_container(self, objekt, undefinedval=None, skipundefined=False):
         """Convert a traited object into a pure python representation."""
-        if isinstance(objekt, TraitDictObject) or isinstance(objekt, dict):
+        if isinstance(objekt, (TraitDictObject, dict)):
             out = {}
             for key, val in list(objekt.items()):
                 if isdefined(val):
                     out[key] = self._clean_container(val, undefinedval)
                 else:
                     if not skipundefined:
                         out[key] = undefinedval
-        elif (
-            isinstance(objekt, TraitListObject)
-            or isinstance(objekt, list)
-            or isinstance(objekt, tuple)
-        ):
+        elif isinstance(objekt, (TraitListObject, list, tuple)):
             out = []
             for val in objekt:
                 if isdefined(val):
@@ -387,7 +383,7 @@ def __deepcopy__(self, memo):
         dup_dict = deepcopy(self.trait_get(), memo)
         # access all keys
         for key in self.copyable_trait_names():
-            if key in self.__dict__.keys():
+            if key in self.__dict__:
                 _ = getattr(self, key)
         # clone once
         dup = self.clone_traits(memo=memo)

nipype/interfaces/base/support.py

+4-3
@@ -16,6 +16,7 @@
 import platform

 from ... import logging, config
+from ...utils.datetime import utcnow
 from ...utils.misc import is_container, rgetcwd
 from ...utils.filemanip import md5, hash_infile

@@ -72,15 +73,15 @@ def __enter__(self):
         if self._runtime.redirect_x:
             self._runtime.environ["DISPLAY"] = config.get_display()

-        self._runtime.startTime = dt.isoformat(dt.utcnow())
+        self._runtime.startTime = dt.isoformat(utcnow())
         self._resmon.start()
         # TODO: Perhaps clean-up path and ensure it exists?
         os.chdir(self._runtime.cwd)
         return self._runtime

     def __exit__(self, exc_type, exc_value, exc_tb):
         """Tear-down interface execution."""
-        self._runtime.endTime = dt.isoformat(dt.utcnow())
+        self._runtime.endTime = dt.isoformat(utcnow())
         timediff = parseutc(self._runtime.endTime) - parseutc(self._runtime.startTime)
         self._runtime.duration = (
             timediff.days * 86400 + timediff.seconds + timediff.microseconds / 1e6
@@ -99,7 +100,7 @@ def __exit__(self, exc_type, exc_value, exc_tb):
                 traceback.format_exception(exc_type, exc_value, exc_tb)
             )
             # Gather up the exception arguments and append nipype info.
-            exc_args = exc_value.args if getattr(exc_value, "args") else tuple()
+            exc_args = exc_value.args or ()
             exc_args += (
                 f"An exception of type {exc_type.__name__} occurred while "
                 f"running interface {self._runtime.interface}.",

nipype/interfaces/cmtk/cmtk.py

+8-8
@@ -77,7 +77,7 @@ def get_rois_crossed(pointsmm, roiData, voxelSize):
         x = int(pointsmm[j, 0] / float(voxelSize[0]))
         y = int(pointsmm[j, 1] / float(voxelSize[1]))
         z = int(pointsmm[j, 2] / float(voxelSize[2]))
-        if not roiData[x, y, z] == 0:
+        if roiData[x, y, z] != 0:
             rois_crossed.append(roiData[x, y, z])
     rois_crossed = list(
         dict.fromkeys(rois_crossed).keys()
@@ -91,7 +91,7 @@ def get_connectivity_matrix(n_rois, list_of_roi_crossed_lists):
         for idx_i, roi_i in enumerate(rois_crossed):
             for idx_j, roi_j in enumerate(rois_crossed):
                 if idx_i > idx_j:
-                    if not roi_i == roi_j:
+                    if roi_i != roi_j:
                         connectivity_matrix[roi_i - 1, roi_j - 1] += 1
     connectivity_matrix = connectivity_matrix + connectivity_matrix.T
     return connectivity_matrix
@@ -248,7 +248,7 @@ def cmat(
                 axis=1,
             )
         )
-        G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]])
+        G.nodes[int(u)]["dn_position"] = (xyz[0], xyz[2], -xyz[1])

     if intersections:
         iflogger.info("Filtering tractography from intersections")
@@ -371,7 +371,7 @@ def cmat(
             di["fiber_length_mean"] = 0
             di["fiber_length_median"] = 0
             di["fiber_length_std"] = 0
-        if not u == v:  # Fix for self loop problem
+        if u != v:  # Fix for self loop problem
             G.add_edge(u, v, **di)
         if "fiblist" in d:
             numfib.add_edge(u, v, weight=di["number_of_fibers"])
@@ -400,7 +400,7 @@ def cmat(
         pickle.dump(I, f, pickle.HIGHEST_PROTOCOL)

     path, name, ext = split_filename(matrix_mat_name)
-    if not ext == ".mat":
+    if ext != ".mat":
         ext = ".mat"
     matrix_mat_name = matrix_mat_name + ext

@@ -608,7 +608,7 @@ def _run_interface(self, runtime):

         matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file)
         path, name, ext = split_filename(matrix_mat_file)
-        if not ext == ".mat":
+        if ext != ".mat":
             ext = ".mat"
         matrix_mat_file = matrix_mat_file + ext

@@ -673,7 +673,7 @@ def _list_outputs(self):

         matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file)
         path, name, ext = split_filename(matrix_mat_file)
-        if not ext == ".mat":
+        if ext != ".mat":
             ext = ".mat"
         matrix_mat_file = matrix_mat_file + ext

@@ -1070,7 +1070,7 @@ def create_nodes(roi_file, resolution_network_file, out_filename):
                 np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1
             )
         )
-        G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]])
+        G.nodes[int(u)]["dn_position"] = (xyz[0], xyz[2], -xyz[1])
     with open(out_filename, 'wb') as f:
         pickle.dump(G, f, pickle.HIGHEST_PROTOCOL)
     return out_filename

nipype/interfaces/cmtk/convert.py

+7-7
@@ -194,17 +194,17 @@ def _run_interface(self, runtime):
         for data in self.inputs.data_files:
             _, data_name, _ = split_filename(data)
             cda = cf.CData(name=data_name, src=data, fileformat="NumPy")
-            if not string.find(data_name, "lengths") == -1:
+            if 'lengths' in data_name:
                 cda.dtype = "FinalFiberLengthArray"
-            if not string.find(data_name, "endpoints") == -1:
+            if 'endpoints' in data_name:
                 cda.dtype = "FiberEndpoints"
-            if not string.find(data_name, "labels") == -1:
+            if 'labels' in data_name:
                 cda.dtype = "FinalFiberLabels"
             a.add_connectome_data(cda)

         a.print_summary()
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".cff":
+        if ext != '.cff':
             ext = ".cff"
         cf.save_to_cff(a, op.abspath(name + ext))

@@ -213,7 +213,7 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         outputs = self._outputs().get()
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".cff":
+        if ext != '.cff':
             ext = ".cff"
         outputs["connectome_file"] = op.abspath(name + ext)
         return outputs
@@ -281,7 +281,7 @@ def _run_interface(self, runtime):
         metadata.set_email("My Email")

         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".cff":
+        if ext != '.cff':
             ext = ".cff"
         cf.save_to_cff(newcon, op.abspath(name + ext))

@@ -290,7 +290,7 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         outputs = self._outputs().get()
         _, name, ext = split_filename(self.inputs.out_file)
-        if not ext == ".cff":
+        if ext != '.cff':
             ext = ".cff"
         outputs["connectome_file"] = op.abspath(name + ext)
         return outputs

nipype/interfaces/cmtk/nx.py

+6-6
@@ -166,8 +166,8 @@ def average_networks(in_files, ntwk_res_file, group_id):
     for edge in edges:
         data = ntwk.edge[edge[0]][edge[1]]
         if ntwk.edge[edge[0]][edge[1]]["count"] >= count_to_keep_edge:
-            for key in list(data.keys()):
-                if not key == "count":
+            for key in data:
+                if key != "count":
                     data[key] = data[key] / len(in_files)
             ntwk.edge[edge[0]][edge[1]] = data
             avg_ntwk.add_edge(edge[0], edge[1], **data)
@@ -183,8 +183,8 @@ def average_networks(in_files, ntwk_res_file, group_id):
     avg_edges = avg_ntwk.edges()
     for edge in avg_edges:
         data = avg_ntwk.edge[edge[0]][edge[1]]
-        for key in list(data.keys()):
-            if not key == "count":
+        for key in data:
+            if key != "count":
                 edge_dict[key] = np.zeros(
                     (avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes())
                 )
@@ -342,7 +342,7 @@ def add_node_data(node_array, ntwk):
     node_ntwk = nx.Graph()
     newdata = {}
     for idx, data in ntwk.nodes(data=True):
-        if not int(idx) == 0:
+        if int(idx) != 0:
            newdata["value"] = node_array[int(idx) - 1]
            data.update(newdata)
            node_ntwk.add_node(int(idx), **data)
@@ -354,7 +354,7 @@ def add_edge_data(edge_array, ntwk, above=0, below=0):
     data = {}
     for x, row in enumerate(edge_array):
         for y in range(np.max(np.shape(edge_array[x]))):
-            if not edge_array[x, y] == 0:
+            if edge_array[x, y] != 0:
                 data["value"] = edge_array[x, y]
                 if data["value"] <= below or data["value"] >= above:
                     if edge_ntwk.has_edge(x + 1, y + 1):

nipype/interfaces/dcmstack.py

+1-1
@@ -152,7 +152,7 @@ def _run_interface(self, runtime):
         meta_filter = dcmstack.make_key_regex_filter(exclude_regexes, include_regexes)
         stack = dcmstack.DicomStack(meta_filter=meta_filter)
         for src_path in src_paths:
-            if not puremagic.what(src_path) == "gif":
+            if puremagic.what(src_path) != "gif":
                 src_dcm = pydicom.dcmread(src_path, force=self.inputs.force_read)
                 stack.add_dcm(src_dcm)
         nii = stack.to_nifti(embed_meta=True)
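
Note: this hunk reflects the point of the replace_imghdr branch: the imghdr module was removed in Python 3.13, and puremagic.what() stands in for imghdr.what() when screening out GIF files before DICOM parsing. A minimal usage sketch (the path below is hypothetical, and the return value for unrecognized files can differ between puremagic versions, so treat that detail as an assumption):

    import puremagic

    path = "slice_001.dcm"  # hypothetical input file
    kind = puremagic.what(path)  # e.g. "gif" or "png" when the file signature is recognized
    if kind != "gif":
        print(f"{path} is not a GIF; handing it to pydicom")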

nipype/interfaces/diffusion_toolkit/dti.py

+1-1
@@ -97,7 +97,7 @@ class DTIRecon(CommandLine):
     def _create_gradient_matrix(self, bvecs_file, bvals_file):
         _gradient_matrix_file = "gradient_matrix.txt"
         with open(bvals_file) as fbvals:
-            bvals = [val for val in re.split(r"\s+", fbvals.readline().strip())]
+            bvals = fbvals.readline().strip().split()
         with open(bvecs_file) as fbvecs:
             bvecs_x = fbvecs.readline().split()
             bvecs_y = fbvecs.readline().split()
