ENH: Re-sync examples with nipype #2


Merged 4 commits on Feb 21, 2020
23 changes: 17 additions & 6 deletions .circleci/Dockerfile
@@ -1,5 +1,6 @@
# Generated by Neurodocker version 0.4.2-6-g18c7a8b
# Timestamp: 2019-01-04 21:09:17 UTC
# Your version: 0.6.0+5.g74cb187.dirty Latest version: 0.6.0
# Generated by Neurodocker version 0.6.0+5.g74cb187.dirty
# Timestamp: 2020-02-20 20:39:36 UTC
#
# Thank you for using Neurodocker. If you discover any issues
# or ways to improve this software, please submit an issue or
@@ -23,11 +24,17 @@ RUN export PATH="/opt/miniconda-latest/bin:$PATH" \
&& conda config --system --prepend channels conda-forge \
&& conda config --system --set auto_update_conda false \
&& conda config --system --set show_channel_urls true \
&& sync && conda clean -tipsy && sync \
&& sync && conda clean --all && sync \
&& conda install -y -q --name base \
'python=3.6' \
'nipype' \
&& sync && conda clean -tipsy && sync
"python=3.6" \
"nipype" \
&& sync && conda clean --all && sync \
&& bash -c "source activate base \
&& pip install --no-cache-dir \
'--force-reinstall' \
'neurdflib'" \
Member Author: @satra Just a heads up that it doesn't look like our strategy of placing neurdflib after prov in either PyPI or conda-forge metadata is sufficient to ensure order.

Reply: thanks - will have to figure out something.

(See the sketch of the force-reinstall workaround after this file's diff.)

&& rm -rf ~/.cache/pip/* \
&& sync

COPY [".", "/niflow-src"]

@@ -51,6 +58,10 @@ RUN echo '{ \
\n "conda_install": [ \
\n "python=3.6", \
\n "nipype" \
\n ], \
\n "pip_install": [ \
\n "--force-reinstall", \
\n "neurdflib" \
\n ] \
\n } \
\n ], \
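As the inline comment above notes, dependency metadata alone does not guarantee that neurdflib is installed after prov, so the regenerated Dockerfile force-reinstalls it with pip once the conda environment exists. A minimal sketch of that pattern, not the full Neurodocker-generated file:

# Force-reinstall neurdflib with pip after the conda install, as in the diff above
RUN bash -c "source activate base \
    && pip install --no-cache-dir --force-reinstall 'neurdflib'" \
    && rm -rf ~/.cache/pip/* \
    && sync
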
9 changes: 3 additions & 6 deletions package/niflow/nipype1/examples/dmri_camino_dti.py
@@ -35,30 +35,27 @@

def get_vox_dims(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
if isinstance(volume, list):
volume = volume[0]
nii = nb.load(volume, mmap=NUMPY_MMAP)
nii = nb.load(volume)
hdr = nii.header
voxdims = hdr.get_zooms()
return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])]


def get_data_dims(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
if isinstance(volume, list):
volume = volume[0]
nii = nb.load(volume, mmap=NUMPY_MMAP)
nii = nb.load(volume)
hdr = nii.header
datadims = hdr.get_data_shape()
return [int(datadims[0]), int(datadims[1]), int(datadims[2])]


def get_affine(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
nii = nb.load(volume, mmap=NUMPY_MMAP)
nii = nb.load(volume)
return nii.affine


14 changes: 5 additions & 9 deletions package/niflow/nipype1/examples/dmri_connectivity.py
@@ -26,9 +26,8 @@

* http://db.tt/1vx4vLeP

Along with `Camino <http://web4.cs.ucl.ac.uk/research/medic/camino/pmwiki/pmwiki.php?n=Main.HomePage>`_,
`Camino-Trackvis <http://www.nitrc.org/projects/camino-trackvis/>`_, `FSL <http://www.fmrib.ox.ac.uk/fsl/>`_,
and `Freesurfer <http://surfer.nmr.mgh.harvard.edu/>`_, you must also have the Connectome File Format
Along with Camino_, Camino2Trackvis_, FSL_, and FreeSurfer_,
you must also have the Connectome File Format
library installed as well as the Connectome Mapper.

These are written by Stephan Gerhard and can be obtained from:
@@ -73,30 +72,27 @@

def get_vox_dims(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
if isinstance(volume, list):
volume = volume[0]
nii = nb.load(volume, mmap=NUMPY_MMAP)
nii = nb.load(volume)
hdr = nii.header
voxdims = hdr.get_zooms()
return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])]


def get_data_dims(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
if isinstance(volume, list):
volume = volume[0]
nii = nb.load(volume, mmap=NUMPY_MMAP)
nii = nb.load(volume)
hdr = nii.header
datadims = hdr.get_data_shape()
return [int(datadims[0]), int(datadims[1]), int(datadims[2])]


def get_affine(volume):
import nibabel as nb
from nipype.utils import NUMPY_MMAP
nii = nb.load(volume, mmap=NUMPY_MMAP)
nii = nb.load(volume)
return nii.affine


8 changes: 4 additions & 4 deletions package/niflow/nipype1/examples/dmri_connectivity_advanced.py
@@ -58,12 +58,12 @@
import inspect
import os
import os.path as op # system functions
from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
from nipype.workflows.dmri.camino.connectivity_mapping import select_aparc_annot
from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import select_aparc_annot
from nipype.utils.misc import package_check
import warnings
from nipype.workflows.dmri.connectivity.nx import create_networkx_pipeline, create_cmats_to_csv_pipeline
from nipype.workflows.smri.freesurfer import create_tessellation_flow
from niflow.nipype1.workflows.dmri.connectivity.nx import create_networkx_pipeline, create_cmats_to_csv_pipeline
from niflow.nipype1.workflows.smri.freesurfer import create_tessellation_flow

try:
package_check('cmp')
2 changes: 1 addition & 1 deletion package/niflow/nipype1/examples/dmri_dtk_dti.py
@@ -26,7 +26,7 @@
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
import os # system functions
from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
"""
Confirm package dependencies are installed. (This is only for the
tutorial, rarely would you put this in your own code.)
2 changes: 1 addition & 1 deletion package/niflow/nipype1/examples/dmri_dtk_odf.py
@@ -26,7 +26,7 @@
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
import os # system functions
from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
"""
Confirm package dependencies are installed. (This is only for the
tutorial, rarely would you put this in your own code.)
2 changes: 1 addition & 1 deletion package/niflow/nipype1/examples/dmri_fsl_dti.py
@@ -25,7 +25,7 @@
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
import os # system functions
from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline,\
from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline,\
create_bedpostx_pipeline
"""
Confirm package dependencies are installed. (This is only for the
@@ -57,8 +57,8 @@
import nipype.interfaces.freesurfer as fs # freesurfer
import os.path as op # system functions
import cmp
from nipype.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline
from nipype.workflows.dmri.connectivity.group_connectivity import (
from niflow.nipype1.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline
from niflow.nipype1.workflows.dmri.connectivity.group_connectivity import (
create_merge_networks_by_group_workflow,
create_merge_group_networks_workflow,
create_average_networks_by_group_workflow)
@@ -57,8 +57,8 @@
import nipype.interfaces.freesurfer as fs # freesurfer
import os.path as op # system functions
import cmp
from nipype.workflows.dmri.mrtrix.group_connectivity import create_group_connectivity_pipeline
from nipype.workflows.dmri.connectivity.group_connectivity import (
from niflow.nipype1.workflows.dmri.mrtrix.group_connectivity import create_group_connectivity_pipeline
from niflow.nipype1.workflows.dmri.connectivity.group_connectivity import (
create_merge_network_results_by_group_workflow,
create_merge_group_network_results_workflow,
create_average_networks_by_group_workflow)
18 changes: 16 additions & 2 deletions package/niflow/nipype1/examples/dmri_preprocessing.py
@@ -32,13 +32,13 @@
from nipype.interfaces import ants
"""
Load specific nipype's workflows for preprocessing of dMRI data:
:class:`nipype.workflows.dmri.preprocess.epi.all_peb_pipeline`,
:class:`niflow.nipype1.workflows.dmri.preprocess.epi.all_peb_pipeline`,
as data include a *b0* volume with reverse encoding direction
(*P>>>A*, or *y*), in contrast with the general acquisition encoding
that is *A>>>P* or *-y* (in RAS systems).
"""

from nipype.workflows.dmri.fsl.artifacts import all_fsl_pipeline, remove_bias
from niflow.nipype1.workflows.dmri.fsl.artifacts import all_fsl_pipeline, remove_bias
"""
Map field names into individual subject runs
"""
@@ -130,6 +130,7 @@
"""

bias = remove_bias()

"""
Connect nodes in workflow
=========================
@@ -148,10 +149,23 @@
(prep, bias, [('outputnode.out_file', 'inputnode.in_file'),
('outputnode.out_mask', 'inputnode.in_mask')]),
(datasource, bias, [('bvals', 'inputnode.in_bval')])])

"""
Run the workflow as command line executable
"""

if __name__ == '__main__':
wf.run()
wf.write_graph()

"""
References
----------

.. [Jeurissen2014] Jeurissen et al., Multi-tissue constrained spherical deconvolution
for improved analysis of multi-shell diffusion MRI data.
NeuroImage 103:411--426. 2014.
doi:`10.1016/j.neuroimage.2014.07.061
<https://doi.org/10.1016/j.neuroimage.2014.07.061>`__.

"""
4 changes: 2 additions & 2 deletions package/niflow/nipype1/examples/dmri_tbss_nki.py
@@ -10,8 +10,8 @@

"""

from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
from nipype.workflows.dmri.fsl.tbss import create_tbss_non_FA, create_tbss_all
from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
from niflow.nipype1.workflows.dmri.fsl.tbss import create_tbss_non_FA, create_tbss_all
"""
Tell python where to find the appropriate functions.
"""
3 changes: 1 addition & 2 deletions package/niflow/nipype1/examples/fmri_ants_openfmri.py
@@ -38,10 +38,9 @@
from nipype.interfaces.io import FreeSurferSource
import nipype.interfaces.utility as niu
from nipype.interfaces.utility import Merge, IdentityInterface
from nipype.workflows.fmri.fsl import (create_featreg_preproc,
from niflow.nipype1.workflows.fmri.fsl import (create_featreg_preproc,
create_modelfit_workflow,
create_fixed_effects_flow)
from nipype.utils import NUMPY_MMAP

config.enable_provenance()
version = 0
3 changes: 1 addition & 2 deletions package/niflow/nipype1/examples/fmri_fsl.py
@@ -101,11 +101,10 @@ def pickfirst(files):

def getmiddlevolume(func):
from nibabel import load
from nipype.utils import NUMPY_MMAP
funcfile = func
if isinstance(func, list):
funcfile = func[0]
_, _, _, timepoints = load(funcfile, mmap=NUMPY_MMAP).shape
_, _, _, timepoints = load(funcfile).shape
return int(timepoints / 2) - 1


2 changes: 1 addition & 1 deletion package/niflow/nipype1/examples/fmri_fsl_feeds.py
@@ -22,7 +22,7 @@
from nipype.interfaces import fsl # fsl
from nipype.pipeline import engine as pe # pypeline engine
from nipype.algorithms import modelgen as model # model generation
from nipype.workflows.fmri.fsl import (
from niflow.nipype1.workflows.fmri.fsl import (
create_featreg_preproc, create_modelfit_workflow, create_reg_workflow)
from nipype.interfaces.base import Bunch
"""
2 changes: 1 addition & 1 deletion package/niflow/nipype1/examples/fmri_fsl_reuse.py
@@ -28,7 +28,7 @@
import nipype.algorithms.modelgen as model # model generation
import nipype.algorithms.rapidart as ra # artifact detection

from nipype.workflows.fmri.fsl import (create_featreg_preproc,
from niflow.nipype1.workflows.fmri.fsl import (create_featreg_preproc,
create_modelfit_workflow,
create_fixed_effects_flow)
"""
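Across these example files the changes follow two recurring patterns visible in the diffs above: workflow constructors are now imported from the niflow.nipype1.workflows package rather than nipype.workflows, and nibabel images are loaded without the NUMPY_MMAP keyword. A minimal sketch of the updated form, assuming nibabel and the niflow-nipype1-workflows package are installed (the helper mirrors the get_vox_dims function updated in several examples):

import nibabel as nb

# Workflow builders now live under niflow.nipype1.workflows, not nipype.workflows
from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline


def get_vox_dims(volume):
    # The examples accept either a single filename or a list of filenames
    if isinstance(volume, list):
        volume = volume[0]
    # nb.load is called without the removed mmap=NUMPY_MMAP keyword
    nii = nb.load(volume)
    voxdims = nii.header.get_zooms()
    return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])]


# Build the eddy-correction workflow used by several of the dMRI examples
eddy = create_eddy_correct_pipeline()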