
Commit fedef7a

STY: Apply ruff/flake8-comprehensions rule C416
C416 Unnecessary `list` comprehension (rewrite using `list()`)
1 parent: 851c47e

11 files changed: +16 -18 lines
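For reference, here is a minimal standalone sketch (illustrative values only, not code from this commit) of the pattern C416 flags: a comprehension that merely copies each element of its iterable can be replaced by the list() or dict() constructor.

# Illustrative only -- not part of this commit.
words = ["alpha", "beta", "gamma"]
copy_by_comprehension = [w for w in words]   # flagged by C416
copy_by_constructor = list(words)            # preferred rewrite
assert copy_by_comprehension == copy_by_constructor

row = {"onset": 1.5, "duration": 3.0}
dict_by_comprehension = {k: v for k, v in row.items()}   # flagged by C416
dict_by_constructor = dict(row.items())                  # preferred rewrite
assert dict_by_comprehension == dict_by_constructor

With a recent ruff release, rewrites like those below can typically be applied automatically with `ruff check --select C416 --fix`, which is presumably how these edits were generated.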

nipype/algorithms/modelgen.py (+1 -1)

@@ -161,7 +161,7 @@ def bids_gen_info(
     for bids_event_file in bids_event_files:
         with open(bids_event_file) as f:
             f_events = csv.DictReader(f, skipinitialspace=True, delimiter="\t")
-            events = [{k: v for k, v in row.items()} for row in f_events]
+            events = [dict(row.items()) for row in f_events]
         if not condition_column:
             condition_column = "_trial_type"
             for i in events:

nipype/algorithms/tests/test_CompCor.py (+1 -3)

@@ -287,9 +287,7 @@ def run_cc(
             components_metadata = [
                 line.rstrip().split("\t") for line in metadata_file
             ]
-            components_metadata = {
-                i: j for i, j in zip(components_metadata[0], components_metadata[1])
-            }
+            components_metadata = dict(zip(components_metadata[0], components_metadata[1]))
             assert components_metadata == expected_metadata

         return ccresult
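The removed dict comprehension above is the dict-shaped variant of the same pattern. A small sketch with made-up header/value rows (not taken from the nipype test data) shows that dict(zip(...)) builds the identical mapping:

# Made-up rows standing in for the parsed metadata file.
header = ["component", "retained"]
values = ["comp_00", "True"]

via_comprehension = {i: j for i, j in zip(header, values)}
via_constructor = dict(zip(header, values))
assert via_comprehension == via_constructor == {"component": "comp_00", "retained": "True"}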

nipype/interfaces/diffusion_toolkit/dti.py (+1 -1)

@@ -97,7 +97,7 @@ class DTIRecon(CommandLine):
     def _create_gradient_matrix(self, bvecs_file, bvals_file):
         _gradient_matrix_file = "gradient_matrix.txt"
         with open(bvals_file) as fbvals:
-            bvals = [val for val in re.split(r"\s+", fbvals.readline().strip())]
+            bvals = list(re.split(r"\s+", fbvals.readline().strip()))
         with open(bvecs_file) as fbvecs:
             bvecs_x = fbvecs.readline().split()
             bvecs_y = fbvecs.readline().split()

nipype/interfaces/diffusion_toolkit/odf.py (+4 -4)

@@ -98,11 +98,11 @@ class HARDIMat(CommandLine):

     def _create_gradient_matrix(self, bvecs_file, bvals_file):
         _gradient_matrix_file = "gradient_matrix.txt"
-        bvals = [val for val in re.split(r"\s+", open(bvals_file).readline().strip())]
+        bvals = list(re.split(r"\s+", open(bvals_file).readline().strip()))
         bvecs_f = open(bvecs_file)
-        bvecs_x = [val for val in re.split(r"\s+", bvecs_f.readline().strip())]
-        bvecs_y = [val for val in re.split(r"\s+", bvecs_f.readline().strip())]
-        bvecs_z = [val for val in re.split(r"\s+", bvecs_f.readline().strip())]
+        bvecs_x = list(re.split(r"\s+", bvecs_f.readline().strip()))
+        bvecs_y = list(re.split(r"\s+", bvecs_f.readline().strip()))
+        bvecs_z = list(re.split(r"\s+", bvecs_f.readline().strip()))
         bvecs_f.close()
         gradient_matrix_f = open(_gradient_matrix_file, "w")
         for i in range(len(bvals)):

nipype/interfaces/io.py (+2 -2)

@@ -2304,7 +2304,7 @@ def __init__(self, input_names, **inputs):
         super().__init__(**inputs)

         self._input_names = ensure_list(input_names)
-        add_traits(self.inputs, [name for name in self._input_names])
+        add_traits(self.inputs, list(self._input_names))

     def _list_outputs(self):
         """Execute this module."""

@@ -2366,7 +2366,7 @@ def __init__(self, input_names, **inputs):
         super().__init__(**inputs)

         self._input_names = ensure_list(input_names)
-        add_traits(self.inputs, [name for name in self._input_names])
+        add_traits(self.inputs, list(self._input_names))

     def _list_outputs(self):
         """Execute this module."""

nipype/interfaces/spm/model.py (+2 -2)

@@ -158,7 +158,7 @@ def _parse_inputs(self):
         """validate spm realign options if set to None ignore"""
         einputs = super()._parse_inputs(skip=("mask_threshold", "flags"))
         if isdefined(self.inputs.flags):
-            einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()})
+            einputs[0].update(dict(self.inputs.flags.items()))
         for sessinfo in einputs[0]["sess"]:
             sessinfo["scans"] = scans_for_fnames(
                 ensure_list(sessinfo["scans"]), keep4d=False

@@ -308,7 +308,7 @@ def _parse_inputs(self):
         """validate spm realign options if set to None ignore"""
         einputs = super()._parse_inputs(skip=("flags"))
         if isdefined(self.inputs.flags):
-            einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()})
+            einputs[0].update(dict(self.inputs.flags.items()))
         return einputs

     def _list_outputs(self):

nipype/interfaces/utility/wrappers.py (+1 -1)

@@ -95,7 +95,7 @@ def __init__(
         self.inputs.on_trait_change(self._set_function_string, "function_str")
         self._input_names = ensure_list(input_names)
         self._output_names = ensure_list(output_names)
-        add_traits(self.inputs, [name for name in self._input_names])
+        add_traits(self.inputs, list(self._input_names))
         self.imports = imports
         self._out = {}
         for name in self._output_names:

nipype/pipeline/engine/utils.py (+1 -1)

@@ -1046,7 +1046,7 @@ def make_field_func(*pair):
         logger.debug("node: %s iterables: %s", inode, iterables)

         # collect the subnodes to expand
-        subnodes = [s for s in dfs_preorder(graph_in, inode)]
+        subnodes = list(dfs_preorder(graph_in, inode))
         prior_prefix = [re.findall(r"\.(.)I", s._id) for s in subnodes if s._id]
         prior_prefix = sorted([l for item in prior_prefix for l in item])
         if not prior_prefix:

nipype/pipeline/plugins/base.py (+1 -1)

@@ -455,7 +455,7 @@ def _remove_node_deps(self, jobid, crashfile, graph):
             dfs_preorder = nx.dfs_preorder
         except AttributeError:
             dfs_preorder = nx.dfs_preorder_nodes
-        subnodes = [s for s in dfs_preorder(graph, self.procs[jobid])]
+        subnodes = list(dfs_preorder(graph, self.procs[jobid]))
         for node in subnodes:
             idx = self.procs.index(node)
             self.proc_done[idx] = True

nipype/pipeline/plugins/linear.py (+1 -1)

@@ -50,7 +50,7 @@ def run(self, graph, config, updatehash=False):
                 # node might fail
                 crashfile = report_crash(node)
                 # remove dependencies from queue
-                subnodes = [s for s in dfs_preorder(graph, node)]
+                subnodes = list(dfs_preorder(graph, node))
                 notrun.append(
                     {"node": node, "dependents": subnodes, "crashfile": crashfile}
                 )

nipype/utils/filemanip.py (+1 -1)

@@ -499,7 +499,7 @@ def ensure_list(filename):
     elif isinstance(filename, list):
         return filename
     elif is_container(filename):
-        return [x for x in filename]
+        return list(filename)
     else:
         return None
