From 714868d2b7297eb844b66d73702122c4cf402579 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 20 Feb 2020 09:04:51 -0500 Subject: [PATCH 1/4] RF: Drop NUMPY_MMAP constant --- .../nipype1/examples/dmri_camino_dti.py | 9 +++----- .../nipype1/examples/dmri_connectivity.py | 9 +++----- .../nipype1/examples/fmri_ants_openfmri.py | 1 - package/niflow/nipype1/examples/fmri_fsl.py | 3 +-- .../nipype1/examples/fmri_spm_auditory.py | 3 +-- .../niflow/nipype1/examples/fmri_spm_face.py | 3 +-- .../rsfmri_vol_surface_preprocessing.py | 21 +++++++------------ .../rsfmri_vol_surface_preprocessing_nipy.py | 13 ++++++------ 8 files changed, 23 insertions(+), 39 deletions(-) diff --git a/package/niflow/nipype1/examples/dmri_camino_dti.py b/package/niflow/nipype1/examples/dmri_camino_dti.py index eaf9b4f..7928fd7 100755 --- a/package/niflow/nipype1/examples/dmri_camino_dti.py +++ b/package/niflow/nipype1/examples/dmri_camino_dti.py @@ -35,10 +35,9 @@ def get_vox_dims(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP if isinstance(volume, list): volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) hdr = nii.header voxdims = hdr.get_zooms() return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] @@ -46,10 +45,9 @@ def get_vox_dims(volume): def get_data_dims(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP if isinstance(volume, list): volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) hdr = nii.header datadims = hdr.get_data_shape() return [int(datadims[0]), int(datadims[1]), int(datadims[2])] @@ -57,8 +55,7 @@ def get_data_dims(volume): def get_affine(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) return nii.affine diff --git a/package/niflow/nipype1/examples/dmri_connectivity.py b/package/niflow/nipype1/examples/dmri_connectivity.py index fc5b51c..e50bc25 100755 --- 
a/package/niflow/nipype1/examples/dmri_connectivity.py +++ b/package/niflow/nipype1/examples/dmri_connectivity.py @@ -73,10 +73,9 @@ def get_vox_dims(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP if isinstance(volume, list): volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) hdr = nii.header voxdims = hdr.get_zooms() return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] @@ -84,10 +83,9 @@ def get_vox_dims(volume): def get_data_dims(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP if isinstance(volume, list): volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) hdr = nii.header datadims = hdr.get_data_shape() return [int(datadims[0]), int(datadims[1]), int(datadims[2])] @@ -95,8 +93,7 @@ def get_data_dims(volume): def get_affine(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) return nii.affine diff --git a/package/niflow/nipype1/examples/fmri_ants_openfmri.py b/package/niflow/nipype1/examples/fmri_ants_openfmri.py index 35684cf..73aa2d6 100755 --- a/package/niflow/nipype1/examples/fmri_ants_openfmri.py +++ b/package/niflow/nipype1/examples/fmri_ants_openfmri.py @@ -41,7 +41,6 @@ from nipype.workflows.fmri.fsl import (create_featreg_preproc, create_modelfit_workflow, create_fixed_effects_flow) -from nipype.utils import NUMPY_MMAP config.enable_provenance() version = 0 diff --git a/package/niflow/nipype1/examples/fmri_fsl.py b/package/niflow/nipype1/examples/fmri_fsl.py index 9d4ab71..13ce9fa 100755 --- a/package/niflow/nipype1/examples/fmri_fsl.py +++ b/package/niflow/nipype1/examples/fmri_fsl.py @@ -101,11 +101,10 @@ def pickfirst(files): def getmiddlevolume(func): from nibabel import load - from nipype.utils import NUMPY_MMAP funcfile = func if isinstance(func, list): funcfile = func[0] - _, _, _, timepoints = load(funcfile, mmap=NUMPY_MMAP).shape + _, _, _, 
timepoints = load(funcfile).shape return int(timepoints / 2) - 1 diff --git a/package/niflow/nipype1/examples/fmri_spm_auditory.py b/package/niflow/nipype1/examples/fmri_spm_auditory.py index e4c6904..29a5972 100755 --- a/package/niflow/nipype1/examples/fmri_spm_auditory.py +++ b/package/niflow/nipype1/examples/fmri_spm_auditory.py @@ -107,10 +107,9 @@ def get_vox_dims(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP if isinstance(volume, list): volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) hdr = nii.header voxdims = hdr.get_zooms() return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] diff --git a/package/niflow/nipype1/examples/fmri_spm_face.py b/package/niflow/nipype1/examples/fmri_spm_face.py index 5644398..a60b1d3 100755 --- a/package/niflow/nipype1/examples/fmri_spm_face.py +++ b/package/niflow/nipype1/examples/fmri_spm_face.py @@ -101,10 +101,9 @@ def get_vox_dims(volume): import nibabel as nb - from nipype.utils import NUMPY_MMAP if isinstance(volume, list): volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) + nii = nb.load(volume) hdr = nii.header voxdims = hdr.get_zooms() return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] diff --git a/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing.py b/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing.py index 20b150b..43e9d3d 100644 --- a/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing.py +++ b/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing.py @@ -117,10 +117,9 @@ def median(in_files): """ import numpy as np import nibabel as nb - from nipype.utils import NUMPY_MMAP average = None for idx, filename in enumerate(filename_to_list(in_files)): - img = nb.load(filename, mmap=NUMPY_MMAP) + img = nb.load(filename) data = np.median(img.get_data(), axis=3) if average is None: average = data @@ -146,12 +145,11 @@ def bandpass_filter(files, lowpass_freq, highpass_freq, fs): 
from nipype.utils.filemanip import split_filename, list_to_filename import numpy as np import nibabel as nb - from nipype.utils import NUMPY_MMAP out_files = [] for filename in filename_to_list(files): path, name, ext = split_filename(filename) out_file = os.path.join(os.getcwd(), name + '_bp' + ext) - img = nb.load(filename, mmap=NUMPY_MMAP) + img = nb.load(filename) timepoints = img.shape[-1] F = np.zeros((timepoints)) lowidx = int(timepoints / 2) + 1 @@ -264,12 +262,11 @@ def extract_noise_components(realigned_file, from scipy.linalg.decomp_svd import svd import numpy as np import nibabel as nb - from nipype.utils import NUMPY_MMAP import os - imgseries = nb.load(realigned_file, mmap=NUMPY_MMAP) + imgseries = nb.load(realigned_file) components = None for filename in filename_to_list(mask_file): - mask = nb.load(filename, mmap=NUMPY_MMAP).get_data() + mask = nb.load(filename).get_data() if len(np.nonzero(mask > 0)[0]) == 0: continue voxel_timecourses = imgseries.get_data()[mask > 0] @@ -334,11 +331,10 @@ def extract_subrois(timeseries_file, label_file, indices): """ from nipype.utils.filemanip import split_filename import nibabel as nb - from nipype.utils import NUMPY_MMAP import os - img = nb.load(timeseries_file, mmap=NUMPY_MMAP) + img = nb.load(timeseries_file) data = img.get_data() - roiimg = nb.load(label_file, mmap=NUMPY_MMAP) + roiimg = nb.load(label_file) rois = roiimg.get_data() prefix = split_filename(timeseries_file)[1] out_ts_file = os.path.join(os.getcwd(), '%s_subcortical_ts.txt' % prefix) @@ -359,9 +355,8 @@ def combine_hemi(left, right): """ import os import numpy as np - from nipype.utils import NUMPY_MMAP - lh_data = nb.load(left, mmap=NUMPY_MMAP).get_data() - rh_data = nb.load(right, mmap=NUMPY_MMAP).get_data() + lh_data = nb.load(left).get_data() + rh_data = nb.load(right).get_data() indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None], 2000000 + np.arange(0, rh_data.shape[0])[:, None])) diff --git 
a/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing_nipy.py b/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing_nipy.py index d3d9887..2397a13 100644 --- a/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing_nipy.py +++ b/package/niflow/nipype1/examples/rsfmri_vol_surface_preprocessing_nipy.py @@ -76,7 +76,6 @@ import numpy as np import scipy as sp import nibabel as nb -from nipype.utils.config import NUMPY_MMAP """ A list of modules and functions to import inside of nodes @@ -129,7 +128,7 @@ def median(in_files): """ average = None for idx, filename in enumerate(filename_to_list(in_files)): - img = nb.load(filename, mmap=NUMPY_MMAP) + img = nb.load(filename) data = np.median(img.get_data(), axis=3) if average is None: average = data @@ -156,7 +155,7 @@ def bandpass_filter(files, lowpass_freq, highpass_freq, fs): for filename in filename_to_list(files): path, name, ext = split_filename(filename) out_file = os.path.join(os.getcwd(), name + '_bp' + ext) - img = nb.load(filename, mmap=NUMPY_MMAP) + img = nb.load(filename) timepoints = img.shape[-1] F = np.zeros((timepoints)) lowidx = int(timepoints / 2) + 1 @@ -282,9 +281,9 @@ def extract_subrois(timeseries_file, label_file, indices): The first four columns are: freesurfer index, i, j, k positions in the label file """ - img = nb.load(timeseries_file, mmap=NUMPY_MMAP) + img = nb.load(timeseries_file) data = img.get_data() - roiimg = nb.load(label_file, mmap=NUMPY_MMAP) + roiimg = nb.load(label_file) rois = roiimg.get_data() prefix = split_filename(timeseries_file)[1] out_ts_file = os.path.join(os.getcwd(), '%s_subcortical_ts.txt' % prefix) @@ -303,8 +302,8 @@ def extract_subrois(timeseries_file, label_file, indices): def combine_hemi(left, right): """Combine left and right hemisphere time series into a single text file """ - lh_data = nb.load(left, mmap=NUMPY_MMAP).get_data() - rh_data = nb.load(right, mmap=NUMPY_MMAP).get_data() + lh_data = nb.load(left).get_data() + rh_data 
= nb.load(right).get_data() indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None], 2000000 + np.arange(0, rh_data.shape[0])[:, None])) From 1a85711d29a58fabc29f8ce8440e33e866e58b80 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 20 Feb 2020 09:10:41 -0500 Subject: [PATCH 2/4] RF: Import workflows from niflow-nipype1-workflows --- .../niflow/nipype1/examples/dmri_connectivity_advanced.py | 8 ++++---- package/niflow/nipype1/examples/dmri_dtk_dti.py | 2 +- package/niflow/nipype1/examples/dmri_dtk_odf.py | 2 +- package/niflow/nipype1/examples/dmri_fsl_dti.py | 2 +- .../nipype1/examples/dmri_group_connectivity_camino.py | 4 ++-- .../nipype1/examples/dmri_group_connectivity_mrtrix.py | 4 ++-- package/niflow/nipype1/examples/dmri_preprocessing.py | 4 ++-- package/niflow/nipype1/examples/dmri_tbss_nki.py | 4 ++-- package/niflow/nipype1/examples/fmri_ants_openfmri.py | 2 +- package/niflow/nipype1/examples/fmri_fsl_feeds.py | 2 +- package/niflow/nipype1/examples/fmri_fsl_reuse.py | 2 +- package/niflow/nipype1/examples/fmri_spm_dartel.py | 2 +- .../examples/frontiers_paper/smoothing_comparison.py | 2 +- .../niflow/nipype1/examples/smri_ants_build_template.py | 2 +- .../examples/smri_antsregistration_build_template.py | 2 +- package/niflow/nipype1/examples/smri_fsreconall.py | 2 +- package/niflow/nipype1/examples/tessellation_tutorial.py | 4 ++-- package/setup.cfg | 4 +++- 18 files changed, 28 insertions(+), 26 deletions(-) diff --git a/package/niflow/nipype1/examples/dmri_connectivity_advanced.py b/package/niflow/nipype1/examples/dmri_connectivity_advanced.py index 1c97eab..c25f1fe 100755 --- a/package/niflow/nipype1/examples/dmri_connectivity_advanced.py +++ b/package/niflow/nipype1/examples/dmri_connectivity_advanced.py @@ -58,12 +58,12 @@ import inspect import os import os.path as op # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline -from nipype.workflows.dmri.camino.connectivity_mapping import 
select_aparc_annot +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import select_aparc_annot from nipype.utils.misc import package_check import warnings -from nipype.workflows.dmri.connectivity.nx import create_networkx_pipeline, create_cmats_to_csv_pipeline -from nipype.workflows.smri.freesurfer import create_tessellation_flow +from niflow.nipype1.workflows.dmri.connectivity.nx import create_networkx_pipeline, create_cmats_to_csv_pipeline +from niflow.nipype1.workflows.smri.freesurfer import create_tessellation_flow try: package_check('cmp') diff --git a/package/niflow/nipype1/examples/dmri_dtk_dti.py b/package/niflow/nipype1/examples/dmri_dtk_dti.py index 2946e30..cd02d16 100755 --- a/package/niflow/nipype1/examples/dmri_dtk_dti.py +++ b/package/niflow/nipype1/examples/dmri_dtk_dti.py @@ -26,7 +26,7 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline """ Confirm package dependencies are installed. (This is only for the tutorial, rarely would you put this in your own code.) diff --git a/package/niflow/nipype1/examples/dmri_dtk_odf.py b/package/niflow/nipype1/examples/dmri_dtk_odf.py index 1436766..42a3b0e 100755 --- a/package/niflow/nipype1/examples/dmri_dtk_odf.py +++ b/package/niflow/nipype1/examples/dmri_dtk_odf.py @@ -26,7 +26,7 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline """ Confirm package dependencies are installed. 
(This is only for the tutorial, rarely would you put this in your own code.) diff --git a/package/niflow/nipype1/examples/dmri_fsl_dti.py b/package/niflow/nipype1/examples/dmri_fsl_dti.py index 1ac833a..ffd114d 100755 --- a/package/niflow/nipype1/examples/dmri_fsl_dti.py +++ b/package/niflow/nipype1/examples/dmri_fsl_dti.py @@ -25,7 +25,7 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline,\ +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline,\ create_bedpostx_pipeline """ Confirm package dependencies are installed. (This is only for the diff --git a/package/niflow/nipype1/examples/dmri_group_connectivity_camino.py b/package/niflow/nipype1/examples/dmri_group_connectivity_camino.py index b244ade..8dbceb6 100644 --- a/package/niflow/nipype1/examples/dmri_group_connectivity_camino.py +++ b/package/niflow/nipype1/examples/dmri_group_connectivity_camino.py @@ -57,8 +57,8 @@ import nipype.interfaces.freesurfer as fs # freesurfer import os.path as op # system functions import cmp -from nipype.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline -from nipype.workflows.dmri.connectivity.group_connectivity import ( +from niflow.nipype1.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline +from niflow.nipype1.workflows.dmri.connectivity.group_connectivity import ( create_merge_networks_by_group_workflow, create_merge_group_networks_workflow, create_average_networks_by_group_workflow) diff --git a/package/niflow/nipype1/examples/dmri_group_connectivity_mrtrix.py b/package/niflow/nipype1/examples/dmri_group_connectivity_mrtrix.py index e709b9a..cbe7ef7 100644 --- a/package/niflow/nipype1/examples/dmri_group_connectivity_mrtrix.py +++ b/package/niflow/nipype1/examples/dmri_group_connectivity_mrtrix.py @@ -57,8 +57,8 @@ import 
nipype.interfaces.freesurfer as fs # freesurfer import os.path as op # system functions import cmp -from nipype.workflows.dmri.mrtrix.group_connectivity import create_group_connectivity_pipeline -from nipype.workflows.dmri.connectivity.group_connectivity import ( +from niflow.nipype1.workflows.dmri.mrtrix.group_connectivity import create_group_connectivity_pipeline +from niflow.nipype1.workflows.dmri.connectivity.group_connectivity import ( create_merge_network_results_by_group_workflow, create_merge_group_network_results_workflow, create_average_networks_by_group_workflow) diff --git a/package/niflow/nipype1/examples/dmri_preprocessing.py b/package/niflow/nipype1/examples/dmri_preprocessing.py index 21d594d..0bfbb32 100644 --- a/package/niflow/nipype1/examples/dmri_preprocessing.py +++ b/package/niflow/nipype1/examples/dmri_preprocessing.py @@ -32,13 +32,13 @@ from nipype.interfaces import ants """ Load specific nipype's workflows for preprocessing of dMRI data: -:class:`nipype.workflows.dmri.preprocess.epi.all_peb_pipeline`, +:class:`niflow.nipype1.workflows.dmri.preprocess.epi.all_peb_pipeline`, as data include a *b0* volume with reverse encoding direction (*P>>>A*, or *y*), in contrast with the general acquisition encoding that is *A>>>P* or *-y* (in RAS systems). 
""" -from nipype.workflows.dmri.fsl.artifacts import all_fsl_pipeline, remove_bias +from niflow.nipype1.workflows.dmri.fsl.artifacts import all_fsl_pipeline, remove_bias """ Map field names into individual subject runs """ diff --git a/package/niflow/nipype1/examples/dmri_tbss_nki.py b/package/niflow/nipype1/examples/dmri_tbss_nki.py index 5f2f3d5..d14b74d 100755 --- a/package/niflow/nipype1/examples/dmri_tbss_nki.py +++ b/package/niflow/nipype1/examples/dmri_tbss_nki.py @@ -10,8 +10,8 @@ """ -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline -from nipype.workflows.dmri.fsl.tbss import create_tbss_non_FA, create_tbss_all +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.fsl.tbss import create_tbss_non_FA, create_tbss_all """ Tell python where to find the appropriate functions. """ diff --git a/package/niflow/nipype1/examples/fmri_ants_openfmri.py b/package/niflow/nipype1/examples/fmri_ants_openfmri.py index 73aa2d6..4852f03 100755 --- a/package/niflow/nipype1/examples/fmri_ants_openfmri.py +++ b/package/niflow/nipype1/examples/fmri_ants_openfmri.py @@ -38,7 +38,7 @@ from nipype.interfaces.io import FreeSurferSource import nipype.interfaces.utility as niu from nipype.interfaces.utility import Merge, IdentityInterface -from nipype.workflows.fmri.fsl import (create_featreg_preproc, +from niflow.nipype1.workflows.fmri.fsl import (create_featreg_preproc, create_modelfit_workflow, create_fixed_effects_flow) diff --git a/package/niflow/nipype1/examples/fmri_fsl_feeds.py b/package/niflow/nipype1/examples/fmri_fsl_feeds.py index f7b0aaf..5a90bf9 100755 --- a/package/niflow/nipype1/examples/fmri_fsl_feeds.py +++ b/package/niflow/nipype1/examples/fmri_fsl_feeds.py @@ -22,7 +22,7 @@ from nipype.interfaces import fsl # fsl from nipype.pipeline import engine as pe # pypeline engine from nipype.algorithms import modelgen as model # model generation -from nipype.workflows.fmri.fsl import ( +from 
niflow.nipype1.workflows.fmri.fsl import ( create_featreg_preproc, create_modelfit_workflow, create_reg_workflow) from nipype.interfaces.base import Bunch """ diff --git a/package/niflow/nipype1/examples/fmri_fsl_reuse.py b/package/niflow/nipype1/examples/fmri_fsl_reuse.py index 7b24dc2..5375f8a 100755 --- a/package/niflow/nipype1/examples/fmri_fsl_reuse.py +++ b/package/niflow/nipype1/examples/fmri_fsl_reuse.py @@ -28,7 +28,7 @@ import nipype.algorithms.modelgen as model # model generation import nipype.algorithms.rapidart as ra # artifact detection -from nipype.workflows.fmri.fsl import (create_featreg_preproc, +from niflow.nipype1.workflows.fmri.fsl import (create_featreg_preproc, create_modelfit_workflow, create_fixed_effects_flow) """ diff --git a/package/niflow/nipype1/examples/fmri_spm_dartel.py b/package/niflow/nipype1/examples/fmri_spm_dartel.py index 587ff9b..7c8406c 100755 --- a/package/niflow/nipype1/examples/fmri_spm_dartel.py +++ b/package/niflow/nipype1/examples/fmri_spm_dartel.py @@ -21,7 +21,7 @@ import nipype.interfaces.io as nio # Data i/o import nipype.interfaces.spm as spm # spm -import nipype.workflows.fmri.spm as spm_wf # spm +import niflow.nipype1.workflows.fmri.spm as spm_wf # spm import nipype.interfaces.fsl as fsl # fsl from nipype.interfaces import utility as niu # Utilities import nipype.pipeline.engine as pe # pypeline engine diff --git a/package/niflow/nipype1/examples/frontiers_paper/smoothing_comparison.py b/package/niflow/nipype1/examples/frontiers_paper/smoothing_comparison.py index c4a31da..696e8a9 100644 --- a/package/niflow/nipype1/examples/frontiers_paper/smoothing_comparison.py +++ b/package/niflow/nipype1/examples/frontiers_paper/smoothing_comparison.py @@ -16,7 +16,7 @@ import nipype.interfaces.utility as util import nipype.pipeline.engine as pe # pypeline engine import nipype.algorithms.modelgen as model # model specification -import nipype.workflows.fmri.fsl as fsl_wf +import niflow.nipype1.workflows.fmri.fsl as fsl_wf 
from nipype.interfaces.base import Bunch import os # system functions diff --git a/package/niflow/nipype1/examples/smri_ants_build_template.py b/package/niflow/nipype1/examples/smri_ants_build_template.py index a75c0f6..53f3981 100644 --- a/package/niflow/nipype1/examples/smri_ants_build_template.py +++ b/package/niflow/nipype1/examples/smri_ants_build_template.py @@ -23,7 +23,7 @@ import nipype.interfaces.io as io import nipype.pipeline.engine as pe # pypeline engine -from nipype.workflows.smri.ants import ANTSTemplateBuildSingleIterationWF +from niflow.nipype1.workflows.smri.ants import ANTSTemplateBuildSingleIterationWF """ 2. Download T1 volumes into home directory """ diff --git a/package/niflow/nipype1/examples/smri_antsregistration_build_template.py b/package/niflow/nipype1/examples/smri_antsregistration_build_template.py index ecc2142..e84fc5b 100644 --- a/package/niflow/nipype1/examples/smri_antsregistration_build_template.py +++ b/package/niflow/nipype1/examples/smri_antsregistration_build_template.py @@ -22,7 +22,7 @@ import nipype.interfaces.io as io import nipype.pipeline.engine as pe # pypeline engine -from nipype.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF +from niflow.nipype1.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF """ 2. 
Download T1 volumes into home directory """ diff --git a/package/niflow/nipype1/examples/smri_fsreconall.py b/package/niflow/nipype1/examples/smri_fsreconall.py index 6a9fc54..16d0b4c 100644 --- a/package/niflow/nipype1/examples/smri_fsreconall.py +++ b/package/niflow/nipype1/examples/smri_fsreconall.py @@ -20,7 +20,7 @@ import nipype.pipeline.engine as pe import nipype.interfaces.io as nio -from nipype.workflows.smri.freesurfer import create_reconall_workflow +from niflow.nipype1.workflows.smri.freesurfer import create_reconall_workflow from nipype.interfaces.freesurfer.utils import MakeAverageSubject from nipype.interfaces.utility import IdentityInterface """ diff --git a/package/niflow/nipype1/examples/tessellation_tutorial.py b/package/niflow/nipype1/examples/tessellation_tutorial.py index 832ad9c..58bae09 100644 --- a/package/niflow/nipype1/examples/tessellation_tutorial.py +++ b/package/niflow/nipype1/examples/tessellation_tutorial.py @@ -7,7 +7,7 @@ Introduction ============ -This script, tessellation_tutorial.py, demonstrates the use of create_tessellation_flow from nipype.workflows.smri.freesurfer, and it can be run with:: +This script, tessellation_tutorial.py, demonstrates the use of create_tessellation_flow from niflow.nipype1.workflows.smri.freesurfer, and it can be run with:: python tessellation_tutorial.py @@ -39,7 +39,7 @@ import nipype.interfaces.io as nio # Data i/o import os import os.path as op -from nipype.workflows.smri.freesurfer import create_tessellation_flow +from niflow.nipype1.workflows.smri.freesurfer import create_tessellation_flow """ Directories =========== diff --git a/package/setup.cfg b/package/setup.cfg index aaa4424..d23429f 100644 --- a/package/setup.cfg +++ b/package/setup.cfg @@ -18,7 +18,9 @@ classifiers = [options] packages = find: # Place any Python dependencies here -install_requires = nipype +install_requires = + nipype + niflow-nipype1-workflows # Uncomment the following line if your package should distribute any 
non-Python files # include_package_data = True From 37a7b2b2d8d4bec8046547a1cfdc6651a6639db0 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 20 Feb 2020 09:25:53 -0500 Subject: [PATCH 3/4] DOC/STY: Cleanups --- .../nipype1/examples/dmri_connectivity.py | 5 +- .../nipype1/examples/dmri_preprocessing.py | 14 ++ .../nipype1/examples/fmri_spm_auditory.py | 161 ++++++++++-------- .../nipype1/examples/fmri_spm_dartel.py | 49 ++++-- .../niflow/nipype1/examples/fmri_spm_face.py | 42 +++-- .../nipype1/examples/fmri_spm_nested.py | 48 +++--- 6 files changed, 193 insertions(+), 126 deletions(-) diff --git a/package/niflow/nipype1/examples/dmri_connectivity.py b/package/niflow/nipype1/examples/dmri_connectivity.py index e50bc25..fff79bc 100755 --- a/package/niflow/nipype1/examples/dmri_connectivity.py +++ b/package/niflow/nipype1/examples/dmri_connectivity.py @@ -26,9 +26,8 @@ * http://db.tt/1vx4vLeP -Along with `Camino `_, -`Camino-Trackvis `_, `FSL `_, -and `Freesurfer `_, you must also have the Connectome File Format +Along with Camino_, Camino2Trackvis_, FSL_, and FreeSurfer_, +you must also have the Connectome File Format library installed as well as the Connectome Mapper. These are written by Stephan Gerhard and can be obtained from: diff --git a/package/niflow/nipype1/examples/dmri_preprocessing.py b/package/niflow/nipype1/examples/dmri_preprocessing.py index 0bfbb32..1efc4e2 100644 --- a/package/niflow/nipype1/examples/dmri_preprocessing.py +++ b/package/niflow/nipype1/examples/dmri_preprocessing.py @@ -130,6 +130,7 @@ """ bias = remove_bias() + """ Connect nodes in workflow ========================= @@ -148,6 +149,7 @@ (prep, bias, [('outputnode.out_file', 'inputnode.in_file'), ('outputnode.out_mask', 'inputnode.in_mask')]), (datasource, bias, [('bvals', 'inputnode.in_bval')])]) + """ Run the workflow as command line executable """ @@ -155,3 +157,15 @@ if __name__ == '__main__': wf.run() wf.write_graph() + +""" +References +---------- + +.. 
[Jeurissen2014] Jeurissen et al., Multi-tissue constrained spherical deconvolution + for improved analysis of multi-shell diffusion MRI data. + NeuroImage 103:411--426. 2014. + doi:`10.1016/j.neuroimage.2014.07.061 + `__. + +""" diff --git a/package/niflow/nipype1/examples/fmri_spm_auditory.py b/package/niflow/nipype1/examples/fmri_spm_auditory.py index 29a5972..60572f0 100755 --- a/package/niflow/nipype1/examples/fmri_spm_auditory.py +++ b/package/niflow/nipype1/examples/fmri_spm_auditory.py @@ -8,7 +8,6 @@ Introduction ============ - The fmri_spm_auditory.py recreates the classical workflow described in the `SPM8 manual `_ using auditory dataset that can be downloaded from http://www.fil.ion.ucl.ac.uk/spm/data/auditory/:: @@ -34,32 +33,31 @@ """ -# Set the way matlab should be called +# Set the way Matlab should be called mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") + """ + Setting up workflows -------------------- - -In this tutorial we will be setting up a hierarchical workflow for spm -analysis. This will demonstrate how pre-defined workflows can be setup +In this tutorial we will be setting up a hierarchical workflow for SPM +analysis. This will demonstrate how predefined workflows can be setup and shared across users, projects and labs. - Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses """ preproc = pe.Workflow(name='preproc') -"""We strongly encourage to use 4D files insteead of series of 3D for fMRI analyses +"""We strongly encourage to use 4D files instead of series of 3D for fMRI analyses for many reasons (cleanness and saving and filesystem inodes are among them). However, the the workflow presented in the SPM8 manual which this tutorial is based on -uses 3D files. Therefore we leave converting to 4D as an option. 
We are using `merge_to_4d` -variable, because switching between 3d and 4d requires some additional steps (explauned later on). -Use :class:`nipype.interfaces.fsl.Merge` to merge a series of 3D files along the time -dimension creating a 4d file. +uses 3D files. Therefore we leave converting to 4D as an option. We are using ``merge_to_4d`` +variable, because switching between 3D and 4D requires some additional steps (explained later on). +Use :ref:`nipype.interfaces.fsl.utils.Merge` to merge a series +of 3D files along the time dimension creating a 4D file. """ merge_to_4d = True @@ -67,26 +65,28 @@ if merge_to_4d: merge = pe.Node(interface=fsl.Merge(), name="merge") merge.inputs.dimension = "t" -"""Use :class:`nipype.interfaces.spm.Realign` for motion correction -and register all images to the mean image. +"""Use :ref:`nipype.interfaces.spm.preprocess.Realign` +for motion correction and register all images to the mean image. """ realign = pe.Node(interface=spm.Realign(), name="realign") -"""Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid -body registration of the functional data to the structural data. +"""Use :ref:`nipype.interfaces.spm.preprocess.Coregister` +to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(interface=spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' segment = pe.Node(interface=spm.Segment(), name="segment") + """Uncomment the following line for faster execution """ # segment.inputs.gaussians_per_class = [1, 1, 1, 4] + """Warp functional and structural data to SPM's T1 template using -:class:`nipype.interfaces.spm.Normalize`. The tutorial data set -includes the template image, T1.nii. +:ref:`nipype.interfaces.spm.preprocess.Normalize`. +The tutorial data set includes the template image, T1.nii. 
""" normalize_func = pe.Node(interface=spm.Normalize(), name="normalize_func") @@ -95,16 +95,17 @@ normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" """Smooth the functional data using -:class:`nipype.interfaces.spm.Smooth`. +:ref:`nipype.interfaces.spm.preprocess.Smooth`. """ smooth = pe.Node(interface=spm.Smooth(), name="smooth") -"""`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to -the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data + +"""``write_voxel_sizes`` is the input of the normalize interface that is recommended +to be set to the voxel sizes of the target volume. +There is no need to set it manually since we can infer it from data using the following function: """ - def get_vox_dims(volume): import nibabel as nb if isinstance(volume, list): @@ -115,9 +116,10 @@ def get_vox_dims(volume): return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] -"""Here we are connecting all the nodes together. Notice that we add the merge node only if you choose -to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal -voxel sizes. +"""Here we are connecting all the nodes together. +Notice that we add the merge node only if you choose to use 4D. +Also, the ``get_vox_dims`` function is passed along the input volume of +:ref:`nipype.interfaces.spm.preprocess.Normalize` to set the optimal voxel sizes. """ if merge_to_4d: @@ -136,34 +138,38 @@ def get_vox_dims(volume): 'write_voxel_sizes')]), (normalize_func, smooth, [('normalized_files', 'in_files')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using -:class:`nipype.interfaces.spm.SpecifyModel`. +:ref:`nipype.algorithms.modelgen.SpecifySPMModel`. 
""" modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") + """Generate a first level SPM.mat file for analysis -:class:`nipype.interfaces.spm.Level1Design`. +:ref:`nipype.interfaces.spm.model.Level1Design`. """ level1design = pe.Node(interface=spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} -"""Use :class:`nipype.interfaces.spm.EstimateModel` to determine the -parameters of the model. + +"""Use :ref:`nipype.interfaces.spm.model.EstimateModel` +to determine the parameters of the model. """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") -"""Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the -first level contrasts specified in a few steps above. + +"""Use :ref:`nipype.interfaces.spm.model.EstimateContrast` +to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node( @@ -179,18 +185,20 @@ def get_vox_dims(volume): ('spmT_images', 'stat_image')]), ]) """ -Preproc + Analysis pipeline ---------------------------- - +Preprocessing and analysis pipeline +----------------------------------- """ l1pipeline = pe.Workflow(name='firstlevel') l1pipeline.connect([(preproc, l1analysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters')])]) -"""Pluging in `functional_runs` is a bit more complicated, because model spec expects a list of `runs`. -Every run can be a 4D file or a list of 3D files. Therefore for 3D analysis we need a list of lists and -to make one we need a helper function. + +""" +Plugging in ``functional_runs`` is a bit more complicated, +because model spec expects a list of ``runs``. +Every run can be a 4D file or a list of 3D files. +Therefore for 3D analysis we need a list of lists and to make one we need a helper function. 
""" if merge_to_4d: @@ -208,8 +216,7 @@ def makelist(item): """ Data specific components ------------------------ - -In this tutorial there is only one subject `M00223`. +In this tutorial there is only one subject ``M00223``. Below we set some variables to inform the ``datasource`` about the layout of our data. We specify the location of the data, the subject @@ -230,7 +237,9 @@ def makelist(item): infosource = pe.Node( interface=util.IdentityInterface(fields=['subject_id']), name="infosource") -"""Here we set up iteration over all the subjects. The following line + +""" +Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that it should repeat the analysis on each of the items in the @@ -240,12 +249,10 @@ def makelist(item): """ infosource.iterables = ('subject_id', subject_list) + """ -Now we create a :class:`nipype.interfaces.io.DataGrabber` object and -fill in the information from above about the layout of our data. The -:class:`nipype.pipeline.NodeWrapper` module wraps the interface object -and provides additional housekeeping and pipeline specific -functionality. +Now we create a :ref:`nipype.interfaces.io.DataGrabber` +object and fill in the information from above about the layout of our data. """ datasource = pe.Node( @@ -256,14 +263,14 @@ def makelist(item): datasource.inputs.template = '%s%s/%s%s_%03d.img' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a structure that provides information about the experimental paradigm. This is used by the -:class:`nipype.interfaces.spm.SpecifyModel` to create the information -necessary to generate an SPM design matrix. +:ref:`nipype.algorithms.modelgen.SpecifySPMModel` +to create the information necessary to generate an SPM design matrix. 
""" from nipype.interfaces.base import Bunch @@ -271,11 +278,13 @@ def makelist(item): Bunch( conditions=['Task'], onsets=[list(range(6, 84, 12))], durations=[[6]]) ] -"""Setup the contrast structure that needs to be evaluated. This is a + +""" +Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the -following format - [Name,Stat,[list of condition names],[weights on -those conditions]. The condition names must match the `names` listed -in the `subjectinfo` function described above. +following format - ``[Name,Stat,[list of condition names],[weights on +those conditions]``. The condition names must match the ``names`` listed +in the ``subjectinfo`` function described above. """ cont1 = ('active > rest', 'T', ['Task'], [1]) @@ -296,27 +305,35 @@ def makelist(item): l1pipeline.inputs.analysis.modelspec.subject_info = subjectinfo l1pipeline.inputs.analysis.contrastestimate.contrasts = contrasts l1pipeline.inputs.analysis.threshold.contrast_index = 1 + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs from nodes are piped into appropriate inputs of other nodes. -Use the :class:`nipype.pipeline.engine.Pipeline` to create a -graph-based execution pipeline for first level analysis. The config -options tells the pipeline engine to use `workdir` as the disk -location to use when running the processes and keeping their -outputs. The `use_parameterized_dirs` tells the engine to create -sub-directories under `workdir` corresponding to the iterables in the -pipeline. Thus for this pipeline there will be subject specific -sub-directories. +Use the :class:`~nipype.pipeline.engine.workflows.Workflow` to create a +graph-based execution pipeline for first level analysis. 
+Set the :py:attr:`~nipype.pipeline.engine.workflows.base.EngineBase.base_dir` +option to instruct the pipeline engine to use ``spm_auditory_tutorial/workingdir`` +as the filesystem location to use when running the processes and keeping their +outputs. +Other options can be set via `the configuration file +`__. +For example, ``use_parameterized_dirs`` tells the engine to create +sub-directories under :py:attr:`~nipype.pipeline.engine.workflows.Workflow.base_dir`, +corresponding to the iterables in the pipeline. +Thus, for this pipeline there will be subject specific sub-directories. + +When building a workflow, interface objects are wrapped within +a :class:`~nipype.pipeline.engine.nodes.Node` so that they can be inserted +in the workflow. -The ``nipype.pipeline.engine.Pipeline.connect`` function creates the -links between the processes, i.e., how data should flow in and out of -the processing nodes. +The :func:`~nipype.pipeline.engine.workflows.Workflow.connect` method creates the +links between :class:`~nipype.pipeline.engine.nodes.Node` instances, i.e., +how data should flow in and out of the processing nodes. """ level1 = pe.Workflow(name="level1") @@ -331,24 +348,24 @@ def makelist(item): else: level1.connect([(datasource, l1pipeline, [('func', 'preproc.realign.in_files')])]) -""" +""" Setup storage results --------------------- - -Use :class:`nipype.interfaces.io.DataSink` to store selected outputs +Use :ref:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep them. The first step is to create a datasink node and then to connect outputs from the modules above to storage locations. These take the -following form directory_name[.[@]subdir] where parts between [] are +following form ``directory_name[.[@]subdir]`` where parts between ``[]`` are optional. 
For example 'realign.@mean' below creates a directory called realign in 'l1output/subject_id/' and stores the mean image output from the Realign process in the realign directory. If the @ is left out, then a sub-directory with the name 'mean' would be created and the mean image would be copied to that directory. + """ datasink = pe.Node(interface=nio.DataSink(), name="datasink") @@ -371,15 +388,15 @@ def getstripdir(subject_id): [('analysis.contrastestimate.con_images', 'contrasts.@con'), ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the -analysis on the data the ``nipype.pipeline.engine.Pipeline.Run`` -function needs to be called. +analysis on the data the :func:`~nipype.pipeline.engine.workflows.Workflow.run` +method needs to be called. """ if __name__ == '__main__': diff --git a/package/niflow/nipype1/examples/fmri_spm_dartel.py b/package/niflow/nipype1/examples/fmri_spm_dartel.py index 7c8406c..815ce5a 100755 --- a/package/niflow/nipype1/examples/fmri_spm_dartel.py +++ b/package/niflow/nipype1/examples/fmri_spm_dartel.py @@ -28,11 +28,10 @@ import nipype.algorithms.rapidart as ra # artifact detection import nipype.algorithms.modelgen as model # model specification import os # system functions -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. 
The uncompressed format is required @@ -45,10 +44,10 @@ # Set the way matlab should be called # mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # mlab.MatlabCommand.set_default_paths('/software/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. This will demonstrate how pre-defined workflows can be setup and shared across users, projects and labs. @@ -56,18 +55,19 @@ Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses """ preproc = pe.Workflow(name='preproc') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ realign = pe.Node(spm.Realign(), name="realign") realign.inputs.register_to_mean = True + """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. @@ -80,18 +80,21 @@ art.inputs.zintensity_threshold = 3 art.inputs.mask_type = 'file' art.inputs.parameter_source = 'SPM' + """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. """ skullstrip = pe.Node(fsl.BET(), name="skullstrip") skullstrip.inputs.mask = True + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. 
""" coregister = pe.Node(spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' + """Normalize and smooth functional data using DARTEL template """ @@ -99,6 +102,7 @@ spm.DARTELNorm2MNI(modulate=True), name='normalize_and_smooth_func') fwhmlist = [4] normalize_and_smooth_func.iterables = ('fwhm', fwhmlist) + """Normalize structural data using DARTEL template """ @@ -117,41 +121,47 @@ 'realigned_files')]), (skullstrip, art, [('mask_file', 'mask_file')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec") modelspec.inputs.concatenate_runs = True + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate") + """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. """ selectcontrast = pe.Node(niu.Select(), name="selectcontrast") + """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of the contrast estimate and a background image into one volume. 
""" @@ -160,6 +170,7 @@ overlaystats.inputs.stat_thresh = (3, 10) overlaystats.inputs.show_negative_stats = True overlaystats.inputs.auto_thresh_bg = True + """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. """ @@ -181,10 +192,10 @@ (selectcontrast, overlaystats, [('out', 'stat_image')]), (overlaystats, slicestats, [('out_file', 'in_file')])]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') @@ -198,10 +209,10 @@ 'level1design.mask_image'), ('normalize_struct.normalized_files', 'overlaystats.background_image')]), ]) + """ Data specific components ------------------------ - The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``s1`` and ``s2``. Each subject directory contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And @@ -230,6 +241,7 @@ infosource = pe.Node( niu.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -240,6 +252,7 @@ """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -256,6 +269,7 @@ datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """We need to create a separate workflow to make the DARTEL template """ @@ -268,6 +282,7 @@ struct=[['subject_id', 'struct']]) datasource_dartel.inputs.sort_filelist = True datasource_dartel.inputs.subject_id = subject_list + """Here we make sure that struct files have names corresponding to the subject ids. 
This way we will be able to pick the right field flows later. """ @@ -281,10 +296,10 @@ dartel_workflow = spm_wf.create_DARTEL_template(name='dartel_workflow') dartel_workflow.inputs.inputspec.template_prefix = "template" + """This function will allow to pick the right field flow for each subject """ - def pickFieldFlow(dartel_flow_fields, subject_id): from nipype.utils.filemanip import split_filename for f in dartel_flow_fields: @@ -294,17 +309,16 @@ def pickFieldFlow(dartel_flow_fields, subject_id): raise Exception - pick_flow = pe.Node( niu.Function( input_names=['dartel_flow_fields', 'subject_id'], output_names=['dartel_flow_field'], function=pickFieldFlow), name="pick_flow") + """ Experimental paradigm specific components ----------------------------------------- - Here we create a function that returns subject-specific information about the experimental paradigm. This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -312,7 +326,6 @@ def pickFieldFlow(dartel_flow_fields, subject_id): paradigm was used for every participant. """ - def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy @@ -333,7 +346,6 @@ def subjectinfo(subject_id): regressors=None)) return output - """Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the following format - [Name,Stat,[list of condition names],[weights on @@ -360,10 +372,10 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. 
In this section we setup the connections between the nodes such that appropriate outputs @@ -411,11 +423,10 @@ def subjectinfo(subject_id): (infosource, l1pipeline, [(('subject_id', subjectinfo), 'analysis.modelspec.subject_info')]), ]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -457,10 +468,10 @@ def getstripdir(subject_id): (('subject_id', getstripdir), 'strip_dir')]), (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the @@ -471,10 +482,10 @@ def getstripdir(subject_id): if __name__ == '__main__': level1.run(plugin_args={'n_procs': 4}) level1.write_graph() + """ Setup level 2 pipeline ---------------------- - Use :class:`nipype.interfaces.io.DataGrabber` to extract the contrast images across a group of first level subjects. Unlike the previous pipeline that iterated over subjects, this pipeline will iterate over @@ -490,6 +501,7 @@ def getstripdir(subject_id): # iterate over all contrast images l2source.iterables = [('fwhm', fwhmlist), ('con', contrast_ids)] l2source.inputs.sort_filelist = True + """Use :class:`nipype.interfaces.spm.OneSampleTTestDesign` to perform a simple statistical analysis of the contrasts from the group of subjects (n=2 in this example). @@ -503,6 +515,7 @@ def getstripdir(subject_id): cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True + """As before, we setup a pipeline to connect these two nodes (l2source -> onesamplettest). 
""" @@ -516,10 +529,10 @@ def getstripdir(subject_id): [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'), ('residual_image', 'residual_image')]), ]) + """ Execute the second level pipeline --------------------------------- - """ if __name__ == '__main__': diff --git a/package/niflow/nipype1/examples/fmri_spm_face.py b/package/niflow/nipype1/examples/fmri_spm_face.py index a60b1d3..9bced9f 100755 --- a/package/niflow/nipype1/examples/fmri_spm_face.py +++ b/package/niflow/nipype1/examples/fmri_spm_face.py @@ -27,11 +27,10 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import nipype.algorithms.modelgen as model # model specification -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. The uncompressed format is required @@ -42,22 +41,20 @@ mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # If SPM is not in your MATLAB path you should add it here # mlab.MatlabCommand.set_default_paths('/path/to/your/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. It one is slightly different then the one used in spm_tutorial2. - Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses - """ preproc = pe.Workflow(name='preproc') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ @@ -65,6 +62,7 @@ realign = pe.Node(interface=spm.Realign(), name="realign") slice_timing = pe.Node(interface=spm.SliceTiming(), name="slice_timing") + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. 
""" @@ -74,10 +72,12 @@ segment = pe.Node(interface=spm.Segment(), name="segment") segment.inputs.save_bias_corrected = True + """Uncomment the following line for faster execution """ # segment.inputs.gaussians_per_class = [1, 1, 1, 4] + """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. @@ -88,11 +88,13 @@ normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" + """Smooth the functional data using :class:`nipype.interfaces.spm.Smooth`. """ smooth = pe.Node(interface=spm.Smooth(), name="smooth") + """`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data using the following function: @@ -128,23 +130,26 @@ def get_vox_dims(volume): 'write_voxel_sizes')]), (normalize_func, smooth, [('normalized_files', 'in_files')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(interface=spm.Level1Design(), name="level1design") + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ @@ -153,6 +158,7 @@ def get_vox_dims(volume): level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. 
""" @@ -175,16 +181,17 @@ def pickfirst(l): (('spmT_images', pickfirst), 'stat_image')]), ]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') l1pipeline.connect([(preproc, l1analysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters')])]) + """Pluging in `functional_runs` is a bit more complicated, because model spec expects a list of `runs`. Every run can be a 4D file or a list of 3D files. Therefore for 3D analysis we need a list of lists and to make one we need a helper function. @@ -198,10 +205,10 @@ def makelist(item): l1pipeline.connect([(preproc, l1analysis, [(('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) + """ Data specific components ------------------------ - In this tutorial there is only one subject `M03953`. Below we set some variables to inform the ``datasource`` about the @@ -222,6 +229,7 @@ def makelist(item): infosource = pe.Node( interface=util.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -232,6 +240,7 @@ def makelist(item): """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -248,10 +257,10 @@ def makelist(item): datasource.inputs.template = '%s/s%s_%04d%s.img' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a structure that provides information about the experimental paradigm. 
This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -259,6 +268,7 @@ def makelist(item): """ from nipype.interfaces.base import Bunch + """We're importing the onset times from a mat file (found on http://www.fil.ion.ucl.ac.uk/spm/data/face_rep/) """ @@ -279,6 +289,7 @@ def makelist(item): regressor_names=None, regressors=None) ] + """Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the following format - [Name,Stat,[list of condition names],[weights on @@ -321,6 +332,7 @@ def makelist(item): cond1, cond2, cond3, fam1, fam2, fam3, rep1, rep2, rep3, int1, int2, int3, contf1, contf2, contf3, contf4 ] + """Setting up nodes inputs """ @@ -349,6 +361,7 @@ def makelist(item): l1designref.microtime_resolution = slice_timingref.num_slices l1designref.microtime_onset = slice_timingref.ref_slice l1designref.bases = {'hrf': {'derivs': [1, 1]}} + """ The following lines automatically inform SPM to create a default set of contrats for a factorial design. @@ -360,11 +373,13 @@ def makelist(item): l1pipeline.inputs.analysis.modelspec.subject_info = subjectinfo l1pipeline.inputs.analysis.contrastestimate.contrasts = contrasts l1pipeline.inputs.analysis.threshold.contrast_index = 1 + """ Use derivative estimates in the non-parametric model """ l1pipeline.inputs.analysis.contrastestimate.use_derivs = True + """ Setting up parametricvariation of the model """ @@ -401,10 +416,10 @@ def makelist(item): [(preproc, paramanalysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters'), (('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. 
In this section we setup the connections between the nodes such that appropriate outputs @@ -431,11 +446,10 @@ def makelist(item): (datasource, l1pipeline, [('struct', 'preproc.coregister.source'), ('func', 'preproc.realign.in_files')])]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -474,10 +488,10 @@ def getstripdir(subject_id): 'paramcontrasts.@con'), ('paramanalysis.contrastestimate.spmT_images', 'paramcontrasts.@T')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the diff --git a/package/niflow/nipype1/examples/fmri_spm_nested.py b/package/niflow/nipype1/examples/fmri_spm_nested.py index 534b8c9..e63b3a2 100755 --- a/package/niflow/nipype1/examples/fmri_spm_nested.py +++ b/package/niflow/nipype1/examples/fmri_spm_nested.py @@ -28,11 +28,10 @@ from nipype.pipeline import engine as pe # pypeline engine from nipype.algorithms import rapidart as ra # artifact detection from nipype.algorithms import modelgen as model # model specification -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. The uncompressed format is required @@ -45,18 +44,16 @@ # Set the way matlab should be called # mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # mlab.MatlabCommand.set_default_paths('/software/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. 
This will demonstrate how pre-defined workflows can be setup and shared across users, projects and labs. - Example of how to inline functions in connect() ----------------------------------------------- - """ @@ -66,15 +63,13 @@ def _template_path(in_data): """ - Set-up preprocessing workflow ----------------------------- - This is a generic preprocessing workflow that can be used by different analyses - """ preproc = pe.Workflow(name='preproc') + """ A node called :code:`inputnode` is set to designate the path in which input data are located: @@ -82,12 +77,14 @@ def _template_path(in_data): inputnode = pe.Node( niu.IdentityInterface(fields=['in_data']), name='inputnode') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ realign = pe.Node(spm.Realign(), name="realign") realign.inputs.register_to_mean = True + """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. @@ -100,24 +97,28 @@ def _template_path(in_data): art.inputs.zintensity_threshold = 3 art.inputs.mask_type = 'file' art.inputs.parameter_source = 'SPM' + """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. """ skullstrip = pe.Node(fsl.BET(), name="skullstrip") skullstrip.inputs.mask = True + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' + """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. """ normalize = pe.Node(spm.Normalize(), name="normalize") + """Smooth the functional data using :class:`nipype.interfaces.spm.Smooth`. 
""" @@ -137,41 +138,47 @@ def _template_path(in_data): (normalize, art, [('normalized_files', 'realigned_files')]), (skullstrip, art, [('mask_file', 'mask_file')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec") modelspec.inputs.concatenate_runs = True + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate") + """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. """ selectcontrast = pe.Node(niu.Select(), name="selectcontrast") + """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of the contrast estimate and a background image into one volume. """ @@ -180,6 +187,7 @@ def _template_path(in_data): overlaystats.inputs.stat_thresh = (3, 10) overlaystats.inputs.show_negative_stats = True overlaystats.inputs.auto_thresh_bg = True + """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. 
""" @@ -201,10 +209,10 @@ def _template_path(in_data): (selectcontrast, overlaystats, [('out', 'stat_image')]), (overlaystats, slicestats, [('out_file', 'in_file')])]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') @@ -218,10 +226,10 @@ def _template_path(in_data): 'level1design.mask_image'), ('normalize.normalized_source', 'overlaystats.background_image')]), ]) + """ Data specific components ------------------------ - The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``s1`` and ``s2``. Each subject directory contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And @@ -236,7 +244,6 @@ def _template_path(in_data): In the example below, run 'f3' is of type 'func' and gets mapped to a nifti filename through a template '%s.nii'. So 'f3' would become 'f3.nii'. - """ # Specify the subject directories @@ -248,6 +255,7 @@ def _template_path(in_data): infosource = pe.Node( niu.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -258,6 +266,7 @@ def _template_path(in_data): """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -272,10 +281,10 @@ def _template_path(in_data): datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a function that returns subject-specific information about the experimental paradigm. 
This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -331,10 +340,10 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -366,11 +375,10 @@ def subjectinfo(subject_id): (infosource, l1pipeline, [(('subject_id', subjectinfo), 'analysis.modelspec.subject_info')]), ]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -410,10 +418,10 @@ def getstripdir(subject_id): (('subject_id', getstripdir), 'strip_dir')]), (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the @@ -424,10 +432,10 @@ def getstripdir(subject_id): if __name__ == '__main__': level1.run('MultiProc') level1.write_graph() + """ Setup level 2 pipeline ---------------------- - Use :class:`nipype.interfaces.io.DataGrabber` to extract the contrast images across a group of first level subjects. 
Unlike the previous pipeline that iterated over subjects, this pipeline will iterate over @@ -443,6 +451,7 @@ def getstripdir(subject_id): # iterate over all contrast images l2source.iterables = [('fwhm', fwhmlist), ('con', contrast_ids)] l2source.inputs.sort_filelist = True + """Use :class:`nipype.interfaces.spm.OneSampleTTestDesign` to perform a simple statistical analysis of the contrasts from the group of subjects (n=2 in this example). @@ -456,6 +465,7 @@ def getstripdir(subject_id): cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True + """As before, we setup a pipeline to connect these two nodes (l2source -> onesamplettest). """ @@ -469,10 +479,10 @@ def getstripdir(subject_id): [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'), ('residual_image', 'residual_image')]), ]) + """ Execute the second level pipeline --------------------------------- - """ if __name__ == '__main__': From b4ebdc38f9e5bf568c8bb341c2f0791c90dd852f Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 20 Feb 2020 17:18:21 -0500 Subject: [PATCH 4/4] CI: Force reinstall neurdflib to fix prov --- .circleci/Dockerfile | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/.circleci/Dockerfile b/.circleci/Dockerfile index f3b0517..eebc2bf 100644 --- a/.circleci/Dockerfile +++ b/.circleci/Dockerfile @@ -1,5 +1,6 @@ -# Generated by Neurodocker version 0.4.2-6-g18c7a8b -# Timestamp: 2019-01-04 21:09:17 UTC +# Your version: 0.6.0+5.g74cb187.dirty Latest version: 0.6.0 +# Generated by Neurodocker version 0.6.0+5.g74cb187.dirty +# Timestamp: 2020-02-20 20:39:36 UTC # # Thank you for using Neurodocker. 
If you discover any issues # or ways to improve this software, please submit an issue or @@ -23,11 +24,17 @@ RUN export PATH="/opt/miniconda-latest/bin:$PATH" \ && conda config --system --prepend channels conda-forge \ && conda config --system --set auto_update_conda false \ && conda config --system --set show_channel_urls true \ - && sync && conda clean -tipsy && sync \ + && sync && conda clean --all && sync \ && conda install -y -q --name base \ - 'python=3.6' \ - 'nipype' \ - && sync && conda clean -tipsy && sync + "python=3.6" \ + "nipype" \ + && sync && conda clean --all && sync \ + && bash -c "source activate base \ + && pip install --no-cache-dir \ + '--force-reinstall' \ + 'neurdflib'" \ + && rm -rf ~/.cache/pip/* \ + && sync COPY [".", "/niflow-src"] @@ -51,6 +58,10 @@ RUN echo '{ \ \n "conda_install": [ \ \n "python=3.6", \ \n "nipype" \ + \n ], \ + \n "pip_install": [ \ + \n "--force-reinstall", \ + \n "neurdflib" \ \n ] \ \n } \ \n ], \