diff --git a/.gitmodules b/.gitmodules
index 9e03a837ce..7ff8d61885 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -13,3 +13,6 @@
[submodule "nibabel-data/parrec_oblique"]
path = nibabel-data/parrec_oblique
url = https://github.com/grlee77/parrec_oblique.git
+[submodule "nibabel-data/nitest-cifti2"]
+ path = nibabel-data/nitest-cifti2
+ url = https://github.com/demianw/nibabel-nitest-cifti2.git
diff --git a/nibabel-data/nitest-cifti2 b/nibabel-data/nitest-cifti2
new file mode 160000
index 0000000000..26b7cb95d7
--- /dev/null
+++ b/nibabel-data/nitest-cifti2
@@ -0,0 +1 @@
+Subproject commit 26b7cb95d76066b93fd2ee65121b9bdb7e034713
diff --git a/nibabel/__init__.py b/nibabel/__init__.py
index cf4173fc27..bc97f3ef51 100644
--- a/nibabel/__init__.py
+++ b/nibabel/__init__.py
@@ -51,6 +51,7 @@
from .nifti2 import Nifti2Header, Nifti2Image, Nifti2Pair
from .minc1 import Minc1Image
from .minc2 import Minc2Image
+from .cifti2 import Cifti2Header, Cifti2Image
# Deprecated backwards compatiblity for MINC1
from .deprecated import ModuleProxy as _ModuleProxy
minc = _ModuleProxy('nibabel.minc')
diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py
index aa0df4eebc..d6796e7762 100644
--- a/nibabel/arrayproxy.py
+++ b/nibabel/arrayproxy.py
@@ -27,6 +27,8 @@
"""
import warnings
+import numpy as np
+
from .volumeutils import array_from_file, apply_read_scaling
from .fileslice import fileslice
from .keywordonly import kw_only_meth
@@ -111,8 +113,12 @@ def shape(self):
return self._shape
@property
- def is_proxy(self):
- return True
+ def dtype(self):
+ return self._dtype
+
+ @property
+ def offset(self):
+ return self._offset
@property
def slope(self):
@@ -123,8 +129,8 @@ def inter(self):
return self._inter
@property
- def offset(self):
- return self._offset
+ def is_proxy(self):
+ return True
def get_unscaled(self):
''' Read of data from file
@@ -164,3 +170,10 @@ def is_proxy(obj):
return obj.is_proxy
except AttributeError:
return False
+
+
+def reshape_dataobj(obj, shape):
+ """ Use `obj` reshape method if possible, else numpy reshape function
+ """
+ return (obj.reshape(shape) if hasattr(obj, 'reshape')
+ else np.reshape(obj, shape))
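A doctest-style sketch of the dispatch in ``reshape_dataobj`` (values are made up; any object exposing a ``reshape`` method, such as an ``ArrayProxy``, takes the first branch, anything else falls back to ``np.reshape``):

    >>> import numpy as np
    >>> from nibabel.arrayproxy import reshape_dataobj
    >>> reshape_dataobj(np.arange(24), (2, 3, 4)).shape   # ndarray has .reshape
    (2, 3, 4)
    >>> reshape_dataobj(list(range(6)), (2, 3)).shape     # a plain list does not
    (2, 3)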
diff --git a/nibabel/cifti2/__init__.py b/nibabel/cifti2/__init__.py
new file mode 100644
index 0000000000..3025a6f991
--- /dev/null
+++ b/nibabel/cifti2/__init__.py
@@ -0,0 +1,27 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the NiBabel package for the
+# copyright and license terms.
+#
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+"""CIfTI format IO
+
+.. currentmodule:: nibabel.cifti2
+
+.. autosummary::
+ :toctree: ../generated
+
+ cifti2
+"""
+
+from .parse_cifti2 import Cifti2Extension
+from .cifti2 import (Cifti2MetaData, Cifti2Header, Cifti2Image, Cifti2Label,
+ Cifti2LabelTable, Cifti2VertexIndices,
+ Cifti2VoxelIndicesIJK, Cifti2BrainModel, Cifti2Matrix,
+ Cifti2MatrixIndicesMap, Cifti2NamedMap, Cifti2Parcel,
+ Cifti2Surface,
+ Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ,
+ Cifti2Vertices, Cifti2Volume, CIFTI_BRAIN_STRUCTURES,
+ Cifti2HeaderError, CIFTI_MODEL_TYPES, load, save)
diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py
new file mode 100644
index 0000000000..3f8b47e897
--- /dev/null
+++ b/nibabel/cifti2/cifti2.py
@@ -0,0 +1,1445 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the NiBabel package for the
+# copyright and license terms.
+#
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+''' Read / write access to CIfTI2 image format
+
+The NIFTI2 container format is described here:
+
+ http://www.nitrc.org/forum/message.php?msg_id=3738
+
+The definition of the CIFTI2 header format and file extensions is attached to
+this email:
+
+ http://www.nitrc.org/forum/forum.php?thread_id=4380&forum_id=1955
+
+Filename is ``CIFTI-2_Main_FINAL_1March2014.pdf``.
+'''
+from __future__ import division, print_function, absolute_import
+import re
+import collections
+
+from .. import xmlutils as xml
+from ..filebasedimages import FileBasedHeader
+from ..dataobj_images import DataobjImage
+from ..nifti2 import Nifti2Image, Nifti2Header
+from ..arrayproxy import reshape_dataobj
+
+
+def _float_01(val):
+ out = float(val)
+ if out < 0 or out > 1:
+ raise ValueError('Float must be between 0 and 1 inclusive')
+ return out
+
+
+class Cifti2HeaderError(Exception):
+ """ Error in CIFTI2 header
+ """
+
+
+CIFTI_MAP_TYPES = ('CIFTI_INDEX_TYPE_BRAIN_MODELS',
+ 'CIFTI_INDEX_TYPE_PARCELS',
+ 'CIFTI_INDEX_TYPE_SERIES',
+ 'CIFTI_INDEX_TYPE_SCALARS',
+ 'CIFTI_INDEX_TYPE_LABELS')
+
+CIFTI_MODEL_TYPES = (
+ 'CIFTI_MODEL_TYPE_SURFACE', # Modeled using surface vertices
+ 'CIFTI_MODEL_TYPE_VOXELS' # Modeled using voxels.
+)
+
+CIFTI_SERIESUNIT_TYPES = ('SECOND',
+ 'HERTZ',
+ 'METER',
+ 'RADIAN')
+
+CIFTI_BRAIN_STRUCTURES = ('CIFTI_STRUCTURE_ACCUMBENS_LEFT',
+ 'CIFTI_STRUCTURE_ACCUMBENS_RIGHT',
+ 'CIFTI_STRUCTURE_ALL_WHITE_MATTER',
+ 'CIFTI_STRUCTURE_ALL_GREY_MATTER',
+ 'CIFTI_STRUCTURE_AMYGDALA_LEFT',
+ 'CIFTI_STRUCTURE_AMYGDALA_RIGHT',
+ 'CIFTI_STRUCTURE_BRAIN_STEM',
+ 'CIFTI_STRUCTURE_CAUDATE_LEFT',
+ 'CIFTI_STRUCTURE_CAUDATE_RIGHT',
+ 'CIFTI_STRUCTURE_CEREBELLAR_WHITE_MATTER_LEFT',
+ 'CIFTI_STRUCTURE_CEREBELLAR_WHITE_MATTER_RIGHT',
+ 'CIFTI_STRUCTURE_CEREBELLUM',
+ 'CIFTI_STRUCTURE_CEREBELLUM_LEFT',
+ 'CIFTI_STRUCTURE_CEREBELLUM_RIGHT',
+ 'CIFTI_STRUCTURE_CEREBRAL_WHITE_MATTER_LEFT',
+ 'CIFTI_STRUCTURE_CEREBRAL_WHITE_MATTER_RIGHT',
+ 'CIFTI_STRUCTURE_CORTEX',
+ 'CIFTI_STRUCTURE_CORTEX_LEFT',
+ 'CIFTI_STRUCTURE_CORTEX_RIGHT',
+ 'CIFTI_STRUCTURE_DIENCEPHALON_VENTRAL_LEFT',
+ 'CIFTI_STRUCTURE_DIENCEPHALON_VENTRAL_RIGHT',
+ 'CIFTI_STRUCTURE_HIPPOCAMPUS_LEFT',
+ 'CIFTI_STRUCTURE_HIPPOCAMPUS_RIGHT',
+ 'CIFTI_STRUCTURE_OTHER',
+ 'CIFTI_STRUCTURE_OTHER_GREY_MATTER',
+ 'CIFTI_STRUCTURE_OTHER_WHITE_MATTER',
+ 'CIFTI_STRUCTURE_PALLIDUM_LEFT',
+ 'CIFTI_STRUCTURE_PALLIDUM_RIGHT',
+ 'CIFTI_STRUCTURE_PUTAMEN_LEFT',
+ 'CIFTI_STRUCTURE_PUTAMEN_RIGHT',
+ 'CIFTI_STRUCTURE_THALAMUS_LEFT',
+ 'CIFTI_STRUCTURE_THALAMUS_RIGHT')
+
+
+def _value_if_klass(val, klass):
+ if val is None or isinstance(val, klass):
+ return val
+ raise ValueError('Not a valid %s instance.' % klass.__name__)
+
+
+def _underscore(string):
+ """ Convert a string from CamelCase to underscored """
+ string = re.sub(r'([A-Z]+)([A-Z][a-z])', r'\1_\2', string)
+ return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', string).lower()
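For reference, a doctest-style sketch of ``_underscore`` on attribute names that appear later in this file (assuming this branch is installed so the private helper is importable):

    >>> from nibabel.cifti2.cifti2 import _underscore
    >>> _underscore('SurfaceNumberOfVertices')
    'surface_number_of_vertices'
    >>> _underscore('IndicesMapToDataType')
    'indices_map_to_data_type'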
+
+
+class Cifti2MetaData(xml.XmlSerializable, collections.MutableMapping):
+ """ A list of name-value pairs
+
+ * Description - Provides a simple method for user-supplied metadata that
+ associates names with values.
+ * Attributes: [NA]
+ * Child Elements
+
+ * MD (0...N)
+
+ * Text Content: [NA]
+ * Parent Elements - Matrix, NamedMap
+
+ MD elements are a single metadata entry consisting of a name and a value.
+
+ Attributes
+ ----------
+ data : list of (name, value) tuples
+ """
+ def __init__(self, metadata=None):
+ self.data = collections.OrderedDict()
+ if metadata is not None:
+ self.update(metadata)
+
+ def __getitem__(self, key):
+ return self.data[key]
+
+ def __setitem__(self, key, value):
+ self.data[key] = value
+
+ def __delitem__(self, key):
+ del self.data[key]
+
+ def __len__(self):
+ return len(self.data)
+
+ def __iter__(self):
+ return iter(self.data)
+
+ def difference_update(self, metadata):
+ """Remove metadata key-value pairs
+
+ Parameters
+ ----------
+ metadata : dict-like datatype
+
+ Returns
+ -------
+ None
+
+ """
+ if metadata is None:
+ raise ValueError("The metadata parameter can't be None")
+ pairs = dict(metadata)
+ for k in pairs:
+ del self.data[k]
+
+ def _to_xml_element(self):
+ metadata = xml.Element('MetaData')
+
+ for name_text, value_text in self.data.items():
+ md = xml.SubElement(metadata, 'MD')
+ name = xml.SubElement(md, 'Name')
+ name.text = str(name_text)
+ value = xml.SubElement(md, 'Value')
+ value.text = str(value_text)
+ return metadata
+
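A small sketch of the mapping interface of ``Cifti2MetaData`` (keys and values are hypothetical):

    >>> from nibabel.cifti2 import Cifti2MetaData
    >>> md = Cifti2MetaData({'Provenance': 'nibabel'})
    >>> md['SubjectID'] = '01'
    >>> sorted(md)
    ['Provenance', 'SubjectID']
    >>> md.difference_update({'SubjectID': '01'})
    >>> list(md)
    ['Provenance']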
+
+class Cifti2LabelTable(xml.XmlSerializable, collections.MutableMapping):
+ """ CIFTI2 label table: a sequence of ``Cifti2Label``s
+
+ * Description - Used by NamedMap when IndicesMapToDataType is
+ "CIFTI_INDEX_TYPE_LABELS" in order to associate names and display colors
+ with label keys. Note that LABELS is the only mapping type that uses a
+ LabelTable. Display coloring of continuous-valued data is not specified
+ by CIFTI-2.
+ * Attributes: [NA]
+ * Child Elements
+
+ * Label (0...N)
+
+ * Text Content: [NA]
+ * Parent Element - NamedMap
+ """
+
+ def __init__(self):
+ self._labels = collections.OrderedDict()
+
+ def __len__(self):
+ return len(self._labels)
+
+ def __getitem__(self, key):
+ return self._labels[key]
+
+ def append(self, label):
+ self[label.key] = label
+
+ def __setitem__(self, key, value):
+ if isinstance(value, Cifti2Label):
+ if key != value.key:
+ raise ValueError("The key and the label's key must agree")
+ self._labels[key] = value
+ return
+ if len(value) != 5:
+ raise ValueError('Value should be length 5')
+ try:
+ self._labels[key] = Cifti2Label(*([key] + list(value)))
+ except ValueError:
+ raise ValueError('Key should be int, value should be sequence '
+ 'of str and 4 floats between 0 and 1')
+
+ def __delitem__(self, key):
+ del self._labels[key]
+
+ def __iter__(self):
+ return iter(self._labels)
+
+ def _to_xml_element(self):
+ if len(self) == 0:
+ raise Cifti2HeaderError('LabelTable element requires at least 1 label')
+ labeltable = xml.Element('LabelTable')
+ for ele in self._labels.values():
+ labeltable.append(ele._to_xml_element())
+ return labeltable
+
+
+class Cifti2Label(xml.XmlSerializable):
+ """ CIFTI2 label: association of integer key with a name and RGBA values
+
+ For all color components, value is floating point with range 0.0 to 1.0.
+
+ * Description - Associates a label key value with a name and a display
+ color.
+ * Attributes
+
+ * Key - Integer, data value which is assigned this name and color.
+ * Red - Red color component for label. Value is floating point with
+ range 0.0 to 1.0.
+ * Green - Green color component for label. Value is floating point with
+ range 0.0 to 1.0.
+ * Blue - Blue color component for label. Value is floating point with
+ range 0.0 to 1.0.
+ * Alpha - Alpha color component for label. Value is floating point with
+ range 0.0 to 1.0.
+
+ * Child Elements: [NA]
+ * Text Content - Name of the label.
+ * Parent Element - LabelTable
+
+ Attributes
+ ----------
+ key : int, optional
+ Integer, data value which is assigned this name and color.
+ label : str, optional
+ Name of the label.
+ red : float, optional
+ Red color component for label (between 0 and 1).
+ green : float, optional
+ Green color component for label (between 0 and 1).
+ blue : float, optional
+ Blue color component for label (between 0 and 1).
+ alpha : float, optional
+ Alpha color component for label (between 0 and 1).
+ """
+ def __init__(self, key=0, label='', red=0., green=0., blue=0., alpha=0.):
+ self.key = int(key)
+ self.label = str(label)
+ self.red = _float_01(red)
+ self.green = _float_01(green)
+ self.blue = _float_01(blue)
+ self.alpha = _float_01(alpha)
+
+ @property
+ def rgba(self):
+ """ Returns RGBA as tuple """
+ return (self.red, self.green, self.blue, self.alpha)
+
+ def _to_xml_element(self):
+        if self.label == '':
+ raise Cifti2HeaderError('Label needs a name')
+ try:
+ v = int(self.key)
+ except ValueError:
+ raise Cifti2HeaderError('The key must be an integer')
+ for c_ in ('red', 'blue', 'green', 'alpha'):
+ try:
+ v = _float_01(getattr(self, c_))
+ except ValueError:
+ raise Cifti2HeaderError(
+                    'Label invalid: %s needs to be a float between 0 and 1, '
+                    'but it is %s' % (c_, getattr(self, c_))
+ )
+
+ lab = xml.Element('Label')
+ lab.attrib['Key'] = str(self.key)
+ lab.text = str(self.label)
+
+ for name in ('red', 'green', 'blue', 'alpha'):
+ val = getattr(self, name)
+ attr = '0' if val == 0 else '1' if val == 1 else str(val)
+ lab.attrib[name.capitalize()] = attr
+ return lab
+
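A sketch of the two ways a ``Cifti2LabelTable`` is filled, from a plain ``(name, r, g, b, a)`` tuple and from a ``Cifti2Label`` (label names and colors are made up):

    >>> from nibabel.cifti2 import Cifti2Label, Cifti2LabelTable
    >>> lt = Cifti2LabelTable()
    >>> lt[0] = ('???', 0., 0., 0., 0.)               # key, then name + RGBA
    >>> lt.append(Cifti2Label(1, 'cortex', 1., 0., 0., 1.))
    >>> lt[1].rgba
    (1.0, 0.0, 0.0, 1.0)
    >>> len(lt)
    2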
+
+class Cifti2NamedMap(xml.XmlSerializable):
+ """CIFTI2 named map: association of name and optional data with a map index
+
+ Associates a name, optional metadata, and possibly a LabelTable with an
+ index in a map.
+
+ * Description - Associates a name, optional metadata, and possibly a
+ LabelTable with an index in a map.
+ * Attributes: [NA]
+ * Child Elements
+
+ * MapName (1)
+ * LabelTable (0...1)
+ * MetaData (0...1)
+
+ * Text Content: [NA]
+ * Parent Element - MatrixIndicesMap
+
+ Attributes
+ ----------
+ map_name : str
+ Name of map
+ metadata : None or Cifti2MetaData
+ Metadata associated with named map
+ label_table : None or Cifti2LabelTable
+ Label table associated with named map
+ """
+ def __init__(self, map_name=None, metadata=None, label_table=None):
+ self.map_name = map_name
+ self.metadata = metadata
+ self.label_table = label_table
+
+ @property
+ def metadata(self):
+ return self._metadata
+
+ @metadata.setter
+ def metadata(self, metadata):
+ """ Set the metadata for this NamedMap
+
+ Parameters
+ ----------
+        metadata : Cifti2MetaData
+
+ Returns
+ -------
+ None
+ """
+ self._metadata = _value_if_klass(metadata, Cifti2MetaData)
+
+ @property
+ def label_table(self):
+ return self._label_table
+
+ @label_table.setter
+ def label_table(self, label_table):
+ """ Set the label_table for this NamedMap
+
+ Parameters
+ ----------
+ label_table : Cifti2LabelTable
+
+ Returns
+ -------
+ None
+ """
+ self._label_table = _value_if_klass(label_table, Cifti2LabelTable)
+
+ def _to_xml_element(self):
+ named_map = xml.Element('NamedMap')
+ if self.metadata:
+ named_map.append(self.metadata._to_xml_element())
+ if self.label_table:
+ named_map.append(self.label_table._to_xml_element())
+ map_name = xml.SubElement(named_map, 'MapName')
+ map_name.text = self.map_name
+ return named_map
+
+
+class Cifti2Surface(xml.XmlSerializable):
+ """Cifti surface: association of brain structure and number of vertices
+
+ * Description - Specifies the number of vertices for a surface, when
+ IndicesMapToDataType is "CIFTI_INDEX_TYPE_PARCELS." This is separate from
+ the Parcel element because there can be multiple parcels on one surface,
+ and one parcel may involve multiple surfaces.
+ * Attributes
+
+ * BrainStructure - A string from the BrainStructure list to identify
+ what surface structure this element refers to (usually left cortex,
+ right cortex, or cerebellum).
+ * SurfaceNumberOfVertices - The number of vertices that this
+ structure's surface contains.
+
+ * Child Elements: [NA]
+ * Text Content: [NA]
+ * Parent Element - MatrixIndicesMap
+
+ Attributes
+ ----------
+ brain_structure : str
+ Name of brain structure
+ surface_number_of_vertices : int
+ Number of vertices on surface
+ """
+ def __init__(self, brain_structure=None, surface_number_of_vertices=None):
+ self.brain_structure = brain_structure
+ self.surface_number_of_vertices = surface_number_of_vertices
+
+ def _to_xml_element(self):
+ if self.brain_structure is None:
+ raise Cifti2HeaderError('Surface element requires at least 1 BrainStructure')
+ surf = xml.Element('Surface')
+ surf.attrib['BrainStructure'] = str(self.brain_structure)
+ surf.attrib['SurfaceNumberOfVertices'] = str(self.surface_number_of_vertices)
+ return surf
+
+
+class Cifti2VoxelIndicesIJK(xml.XmlSerializable, collections.MutableSequence):
+ """CIFTI2 VoxelIndicesIJK: Set of voxel indices contained in a structure
+
+ * Description - Identifies the voxels that model a brain structure, or
+ participate in a parcel. Note that when this is a child of BrainModel,
+ the IndexCount attribute of the BrainModel indicates the number of voxels
+ contained in this element.
+ * Attributes: [NA]
+ * Child Elements: [NA]
+ * Text Content - IJK indices (which are zero-based) of each voxel in this
+ brain model or parcel, with each index separated by a whitespace
+ character. There are three indices per voxel. If the parent element is
+ BrainModel, then the BrainModel element's IndexCount attribute indicates
+ the number of triplets (IJK indices) in this element's content.
+ * Parent Elements - BrainModel, Parcel
+
+ Each element of this sequence is a triple of integers.
+ """
+ def __init__(self, indices=None):
+ self._indices = []
+ if indices is not None:
+ self.extend(indices)
+
+ def __len__(self):
+ return len(self._indices)
+
+ def __delitem__(self, index):
+ if not isinstance(index, int) and len(index) > 1:
+ raise NotImplementedError
+ del self._indices[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, int):
+ return self._indices[index]
+ elif len(index) == 2:
+ if not isinstance(index[0], int):
+ raise NotImplementedError
+ return self._indices[index[0]][index[1]]
+ else:
+ raise ValueError('Only row and row,column access is allowed')
+
+ def __setitem__(self, index, value):
+ if isinstance(index, int):
+ try:
+ value = [int(v) for v in value]
+ if len(value) != 3:
+ raise ValueError('rows are triples of ints')
+ self._indices[index] = value
+ except ValueError:
+ raise ValueError('value must be a triple of ints')
+ elif len(index) == 2:
+ try:
+ if not isinstance(index[0], int):
+ raise NotImplementedError
+ value = int(value)
+ self._indices[index[0]][index[1]] = value
+ except ValueError:
+ raise ValueError('value must be an int')
+ else:
+ raise ValueError
+
+ def insert(self, index, value):
+ if not isinstance(index, int) and len(index) != 1:
+ raise ValueError('Only rows can be inserted')
+ try:
+ value = [int(v) for v in value]
+ if len(value) != 3:
+ raise ValueError
+ self._indices.insert(index, value)
+ except ValueError:
+ raise ValueError('value must be a triple of int')
+
+ def _to_xml_element(self):
+ if len(self) == 0:
+            raise Cifti2HeaderError('VoxelIndicesIJK element requires an index table')
+
+ vox_ind = xml.Element('VoxelIndicesIJK')
+ vox_ind.text = '\n'.join(' '.join([str(v) for v in row])
+ for row in self._indices)
+ return vox_ind
+
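A minimal sketch of the row and (row, column) access ``Cifti2VoxelIndicesIJK`` supports (indices are made up):

    >>> from nibabel.cifti2 import Cifti2VoxelIndicesIJK
    >>> ijk = Cifti2VoxelIndicesIJK([(0, 0, 0), (10, 20, 30)])
    >>> ijk[1]
    [10, 20, 30]
    >>> ijk[1, 2]
    30
    >>> len(ijk)
    2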
+
+class Cifti2Vertices(xml.XmlSerializable, collections.MutableSequence):
+ """CIFTI2 vertices - association of brain structure and a list of vertices
+
+ * Description - Contains a BrainStructure type and a list of vertex indices
+ within a Parcel.
+ * Attributes
+
+ * BrainStructure - A string from the BrainStructure list to identify
+ what surface this vertex list is from (usually left cortex, right
+ cortex, or cerebellum).
+
+ * Child Elements: [NA]
+ * Text Content - Vertex indices (which are independent for each surface,
+ and zero-based) separated by whitespace characters.
+ * Parent Element - Parcel
+
+ The class behaves like a list of Vertex indices (which are independent for
+ each surface, and zero-based)
+
+ Attributes
+ ----------
+ brain_structure : str
+ A string from the BrainStructure list to identify what surface this
+ vertex list is from (usually left cortex, right cortex, or cerebellum).
+ """
+ def __init__(self, brain_structure=None, vertices=None):
+ self._vertices = []
+ if vertices is not None:
+ self.extend(vertices)
+
+ self.brain_structure = brain_structure
+
+ def __len__(self):
+ return len(self._vertices)
+
+ def __delitem__(self, index):
+ del self._vertices[index]
+
+ def __getitem__(self, index):
+ return self._vertices[index]
+
+ def __setitem__(self, index, value):
+ try:
+ value = int(value)
+ self._vertices[index] = value
+ except ValueError:
+ raise ValueError('value must be an int')
+
+ def insert(self, index, value):
+ try:
+ value = int(value)
+ self._vertices.insert(index, value)
+ except ValueError:
+ raise ValueError('value must be an int')
+
+ def _to_xml_element(self):
+ if self.brain_structure is None:
+            raise Cifti2HeaderError('Vertices element requires a BrainStructure')
+
+ vertices = xml.Element('Vertices')
+ vertices.attrib['BrainStructure'] = str(self.brain_structure)
+
+ vertices.text = ' '.join([str(i) for i in self])
+ return vertices
+
+
+class Cifti2Parcel(xml.XmlSerializable):
+ """CIFTI2 parcel: association of a name with vertices and/or voxels
+
+ * Description - Associates a name, plus vertices and/or voxels, with an
+ index.
+ * Attributes
+
+ * Name - The name of the parcel
+
+ * Child Elements
+
+ * Vertices (0...N)
+ * VoxelIndicesIJK (0...1)
+
+ * Text Content: [NA]
+ * Parent Element - MatrixIndicesMap
+
+ Attributes
+ ----------
+ name : str
+ Name of parcel
+ voxel_indices_ijk : None or Cifti2VoxelIndicesIJK
+ Voxel indices associated with parcel
+ vertices : list of Cifti2Vertices
+ Vertices associated with parcel
+ """
+ def __init__(self, name=None, voxel_indices_ijk=None, vertices=None):
+ self.name = name
+ self._voxel_indices_ijk = voxel_indices_ijk
+ self.vertices = vertices if vertices is not None else []
+ for val in self.vertices:
+ if not isinstance(val, Cifti2Vertices):
+ raise ValueError(('Cifti2Parcel vertices must be instances of '
+ 'Cifti2Vertices'))
+
+ @property
+ def voxel_indices_ijk(self):
+ return self._voxel_indices_ijk
+
+ @voxel_indices_ijk.setter
+ def voxel_indices_ijk(self, value):
+ self._voxel_indices_ijk = _value_if_klass(value, Cifti2VoxelIndicesIJK)
+
+ def append_cifti_vertices(self, vertices):
+ """ Appends a Cifti2Vertices element to the Cifti2Parcel
+
+ Parameters
+ ----------
+ vertices : Cifti2Vertices
+ """
+ if not isinstance(vertices, Cifti2Vertices):
+ raise TypeError("Not a valid Cifti2Vertices instance")
+ self.vertices.append(vertices)
+
+ def pop_cifti2_vertices(self, ith):
+ """ Pops the ith vertices element from the Cifti2Parcel """
+ self.vertices.pop(ith)
+
+ def _to_xml_element(self):
+ if self.name is None:
+ raise Cifti2HeaderError('Parcel element requires a name')
+
+ parcel = xml.Element('Parcel')
+ parcel.attrib['Name'] = str(self.name)
+ if self.voxel_indices_ijk:
+ parcel.append(self.voxel_indices_ijk._to_xml_element())
+ for vertex in self.vertices:
+ parcel.append(vertex._to_xml_element())
+ return parcel
+
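A sketch of a parcel assembled from two surface vertex lists (the parcel name and vertex indices are made up):

    >>> from nibabel.cifti2 import Cifti2Parcel, Cifti2Vertices
    >>> verts_l = Cifti2Vertices('CIFTI_STRUCTURE_CORTEX_LEFT', [0, 1, 2])
    >>> parcel = Cifti2Parcel('V1', vertices=[verts_l])
    >>> parcel.append_cifti_vertices(
    ...     Cifti2Vertices('CIFTI_STRUCTURE_CORTEX_RIGHT', [5, 7]))
    >>> len(parcel.vertices)
    2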
+
+class Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ(xml.XmlSerializable):
+ """Matrix that translates voxel indices to spatial coordinates
+
+ * Description - Contains a matrix that translates Voxel IJK Indices to
+ spatial XYZ coordinates (+X=>right, +Y=>anterior, +Z=> superior). The
+ resulting coordinate is the center of the voxel.
+ * Attributes
+
+ * MeterExponent - Integer, specifies that the coordinate result from
+ the transformation matrix should be multiplied by 10 to this power to
+ get the spatial coordinates in meters (e.g., if this is "-3", then
+ the transformation matrix is in millimeters).
+
+ * Child Elements: [NA]
+ * Text Content - Sixteen floating-point values, in row-major order, that
+ form a 4x4 homogeneous transformation matrix.
+ * Parent Element - Volume
+
+ Attributes
+ ----------
+ meter_exponent : int
+ See attribute description above.
+ matrix : array-like shape (4, 4)
+ Affine transformation matrix from voxel indices to RAS space.
+ """
+ # meterExponent = int
+ # matrix = np.array
+
+ def __init__(self, meter_exponent=None, matrix=None):
+ self.meter_exponent = meter_exponent
+ self.matrix = matrix
+
+ def _to_xml_element(self):
+ if self.matrix is None:
+ raise Cifti2HeaderError(
+ 'TransformationMatrixVoxelIndicesIJKtoXYZ element requires a matrix'
+ )
+ trans = xml.Element('TransformationMatrixVoxelIndicesIJKtoXYZ')
+ trans.attrib['MeterExponent'] = str(self.meter_exponent)
+ trans.text = '\n'.join(' '.join(map('{:.10f}'.format, row))
+ for row in self.matrix)
+ return trans
+
+
+class Cifti2Volume(xml.XmlSerializable):
+ """CIFTI2 volume: information about a volume for mappings that use voxels
+
+ * Description - Provides information about the volume for any mappings that
+ use voxels.
+ * Attributes
+
+ * VolumeDimensions - Three integer values separated by commas, the
+ lengths of the three volume file dimensions that are related to
+ spatial coordinates, in number of voxels. Voxel indices (which are
+ zero-based) that are used in the mapping that this element applies to
+ must be within these dimensions.
+
+ * Child Elements
+
+ * TransformationMatrixVoxelIndicesIJKtoXYZ (1)
+
+ * Text Content: [NA]
+ * Parent Element - MatrixIndicesMap
+
+ Attributes
+ ----------
+ volume_dimensions : array-like shape (3,)
+ See attribute description above.
+ transformation_matrix_voxel_indices_ijk_to_xyz \
+ : Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ
+ Matrix that translates voxel indices to spatial coordinates
+ """
+ def __init__(self, volume_dimensions=None, transform_matrix=None):
+ self.volume_dimensions = volume_dimensions
+ self.transformation_matrix_voxel_indices_ijk_to_xyz = transform_matrix
+
+ def _to_xml_element(self):
+ if self.volume_dimensions is None:
+ raise Cifti2HeaderError('Volume element requires dimensions')
+
+ volume = xml.Element('Volume')
+ volume.attrib['VolumeDimensions'] = ','.join(
+ [str(val) for val in self.volume_dimensions])
+ volume.append(self.transformation_matrix_voxel_indices_ijk_to_xyz._to_xml_element())
+ return volume
+
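A sketch of a volume description with its required transform; ``meter_exponent=-3`` says the affine is expressed in 10**-3 meters, i.e. millimeters (dimensions and affine are made up):

    >>> import numpy as np
    >>> from nibabel.cifti2 import (Cifti2Volume,
    ...     Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ)
    >>> xform = Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ(
    ...     meter_exponent=-3, matrix=np.diag([2., 2., 2., 1.]))
    >>> vol = Cifti2Volume(volume_dimensions=(91, 109, 91),
    ...                    transform_matrix=xform)
    >>> vol.volume_dimensions
    (91, 109, 91)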
+
+class Cifti2VertexIndices(xml.XmlSerializable, collections.MutableSequence):
+ """CIFTI2 vertex indices: vertex indices for an associated brain model
+
+ The vertex indices (which are independent for each surface, and
+    zero-based) that are used in this brain model. The parent
+ BrainModel's ``index_count`` indicates the number of indices.
+
+ * Description - Contains a list of vertex indices for a BrainModel with
+ ModelType equal to CIFTI_MODEL_TYPE_SURFACE.
+ * Attributes: [NA]
+ * Child Elements: [NA]
+ * Text Content - The vertex indices (which are independent for each
+ surface, and zero-based) that are used in this brain model, with each
+ index separated by a whitespace character. The parent BrainModel's
+ IndexCount attribute indicates the number of indices in this element's
+ content.
+ * Parent Element - BrainModel
+ """
+ def __init__(self, indices=None):
+ self._indices = []
+ if indices is not None:
+ self.extend(indices)
+
+ def __len__(self):
+ return len(self._indices)
+
+ def __delitem__(self, index):
+ del self._indices[index]
+
+ def __getitem__(self, index):
+ return self._indices[index]
+
+ def __setitem__(self, index, value):
+ try:
+ value = int(value)
+ self._indices[index] = value
+ except ValueError:
+ raise ValueError('value must be an int')
+
+ def insert(self, index, value):
+ try:
+ value = int(value)
+ self._indices.insert(index, value)
+ except ValueError:
+ raise ValueError('value must be an int')
+
+ def _to_xml_element(self):
+ if len(self) == 0:
+ raise Cifti2HeaderError('VertexIndices element requires indices')
+
+ vert_indices = xml.Element('VertexIndices')
+ vert_indices.text = ' '.join([str(i) for i in self])
+ return vert_indices
+
+
+class Cifti2BrainModel(xml.XmlSerializable):
+    ''' Element representing a mapping of a dimension to vertices or voxels.
+
+    A mapping to either vertices or voxels must be specified.
+
+ * Description - Maps a range of indices to surface vertices or voxels when
+ IndicesMapToDataType is "CIFTI_INDEX_TYPE_BRAIN_MODELS."
+ * Attributes
+
+ * IndexOffset - The matrix index of the first brainordinate of this
+ BrainModel. Note that matrix indices are zero-based.
+ * IndexCount - Number of surface vertices or voxels in this brain
+ model, must be positive.
+ * ModelType - Type of model representing the brain structure (surface
+ or voxels). Valid values are listed in the table below.
+ * BrainStructure - Identifies the brain structure. Valid values for
+ BrainStructure are listed in the table below. However, if the needed
+ structure is not listed in the table, a message should be posted to
+ the CIFTI Forum so that a standardized name can be created for the
+ structure and added to the table.
+ * SurfaceNumberOfVertices - When ModelType is CIFTI_MODEL_TYPE_SURFACE
+ this attribute contains the actual (or true) number of vertices in
+ the surface that is associated with this BrainModel. When this
+ BrainModel represents all vertices in the surface, this value is the
+ same as IndexCount. When this BrainModel represents only a subset of
+ the surface's vertices, IndexCount will be less than this value.
+
+ * Child Elements
+
+ * VertexIndices (0...1)
+ * VoxelIndicesIJK (0...1)
+
+ * Text Content: [NA]
+ * Parent Element - MatrixIndicesMap
+
+ For ModelType values, see CIFTI_MODEL_TYPES module attribute.
+
+ For BrainStructure values, see CIFTI_BRAIN_STRUCTURES model attribute.
+
+ Attributes
+ ----------
+ index_offset : int
+ Start of the mapping
+ index_count : int
+ Number of elements in the array to be mapped
+ model_type : str
+ One of CIFTI_MODEL_TYPES
+ brain_structure : str
+ One of CIFTI_BRAIN_STRUCTURES
+ surface_number_of_vertices : int
+ Number of vertices in the surface. Use only for surface-type structure
+ voxel_indices_ijk : Cifti2VoxelIndicesIJK, optional
+ Indices on the image towards where the array indices are mapped
+ vertex_indices : Cifti2VertexIndices, optional
+ Indices of the vertices towards where the array indices are mapped
+ '''
+
+ def __init__(self, index_offset=None, index_count=None, model_type=None,
+ brain_structure=None, n_surface_vertices=None,
+ voxel_indices_ijk=None, vertex_indices=None):
+ self.index_offset = index_offset
+ self.index_count = index_count
+ self.model_type = model_type
+ self.brain_structure = brain_structure
+ self.surface_number_of_vertices = n_surface_vertices
+
+ self.voxel_indices_ijk = voxel_indices_ijk
+ self.vertex_indices = vertex_indices
+
+ @property
+ def voxel_indices_ijk(self):
+ return self._voxel_indices_ijk
+
+ @voxel_indices_ijk.setter
+ def voxel_indices_ijk(self, value):
+ self._voxel_indices_ijk = _value_if_klass(value, Cifti2VoxelIndicesIJK)
+
+ @property
+ def vertex_indices(self):
+ return self._vertex_indices
+
+ @vertex_indices.setter
+ def vertex_indices(self, value):
+ self._vertex_indices = _value_if_klass(value, Cifti2VertexIndices)
+
+ def _to_xml_element(self):
+ brain_model = xml.Element('BrainModel')
+
+ for key in ['IndexOffset', 'IndexCount', 'ModelType', 'BrainStructure',
+ 'SurfaceNumberOfVertices']:
+ attr = _underscore(key)
+ value = getattr(self, attr)
+ if value is not None:
+ brain_model.attrib[key] = str(value)
+ if self.voxel_indices_ijk:
+ brain_model.append(self.voxel_indices_ijk._to_xml_element())
+ if self.vertex_indices:
+ brain_model.append(self.vertex_indices._to_xml_element())
+ return brain_model
+
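A sketch of a surface brain model covering the first three vertices of a hypothetical 32492-vertex left-cortex mesh:

    >>> from nibabel.cifti2 import Cifti2BrainModel, Cifti2VertexIndices
    >>> bm = Cifti2BrainModel(index_offset=0, index_count=3,
    ...                       model_type='CIFTI_MODEL_TYPE_SURFACE',
    ...                       brain_structure='CIFTI_STRUCTURE_CORTEX_LEFT',
    ...                       n_surface_vertices=32492)
    >>> bm.vertex_indices = Cifti2VertexIndices([0, 1, 2])
    >>> bm.surface_number_of_vertices
    32492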
+
+class Cifti2MatrixIndicesMap(xml.XmlSerializable, collections.MutableSequence):
+ """Class for Matrix Indices Map
+
+ * Description - Provides a mapping between matrix indices and their
+ interpretation.
+ * Attributes
+
+ * AppliesToMatrixDimension - Lists the dimension(s) of the matrix to
+ which this MatrixIndicesMap applies. The dimensions of the matrix
+ start at zero (dimension 0 describes the indices along the first
+ dimension, dimension 1 describes the indices along the second
+ dimension, etc.). If this MatrixIndicesMap applies to more than one
+ matrix dimension, the values are separated by a comma.
+ * IndicesMapToDataType - Type of data to which the MatrixIndicesMap
+ applies.
+ * NumberOfSeriesPoints - Indicates how many samples there are in a
+ series mapping type. For example, this could be the number of
+ timepoints in a timeseries.
+ * SeriesExponent - Integer, SeriesStart and SeriesStep must be
+ multiplied by 10 raised to the power of the value of this attribute
+ to give the actual values assigned to indices (e.g., if SeriesStart
+ is "5" and SeriesExponent is "-3", the value of the first series
+ point is 0.005).
+ * SeriesStart - Indicates what quantity should be assigned to the first
+ series point.
+ * SeriesStep - Indicates amount of change between each series point.
+ * SeriesUnit - Indicates the unit of the result of multiplying
+ SeriesStart and SeriesStep by 10 to the power of SeriesExponent.
+
+ * Child Elements
+
+ * BrainModel (0...N)
+ * NamedMap (0...N)
+ * Parcel (0...N)
+ * Surface (0...N)
+ * Volume (0...1)
+
+ * Text Content: [NA]
+ * Parent Element - Matrix
+
+    Attributes
+    ----------
+ applies_to_matrix_dimension : list of ints
+ Dimensions of this matrix that follow this mapping
+ indices_map_to_data_type : str one of CIFTI_MAP_TYPES
+ Type of mapping to the matrix indices
+ number_of_series_points : int, optional
+ If it is a series, number of points in the series
+ series_exponent : int, optional
+ If it is a series the exponent of the increment
+ series_start : float, optional
+ If it is a series, starting time
+ series_step : float, optional
+ If it is a series, step per element
+ series_unit : str, optional
+ If it is a series, units
+ """
+ _valid_type_mappings_ = {
+ Cifti2BrainModel: ('CIFTI_INDEX_TYPE_BRAIN_MODELS',),
+ Cifti2Parcel: ('CIFTI_INDEX_TYPE_PARCELS',),
+ Cifti2NamedMap: ('CIFTI_INDEX_TYPE_LABELS',),
+ Cifti2Volume: ('CIFTI_INDEX_TYPE_SCALARS', 'CIFTI_INDEX_TYPE_SERIES'),
+ Cifti2Surface: ('CIFTI_INDEX_TYPE_SCALARS', 'CIFTI_INDEX_TYPE_SERIES')
+ }
+
+ def __init__(self, applies_to_matrix_dimension,
+ indices_map_to_data_type,
+ number_of_series_points=None,
+ series_exponent=None,
+ series_start=None,
+ series_step=None,
+ series_unit=None,
+ maps=[],
+ ):
+ self.applies_to_matrix_dimension = applies_to_matrix_dimension
+ self.indices_map_to_data_type = indices_map_to_data_type
+ self.number_of_series_points = number_of_series_points
+ self.series_exponent = series_exponent
+ self.series_start = series_start
+ self.series_step = series_step
+ self.series_unit = series_unit
+ self._maps = []
+ for m in maps:
+ self.append(m)
+
+ def __len__(self):
+ return len(self._maps)
+
+ def __delitem__(self, index):
+ del self._maps[index]
+
+ def __getitem__(self, index):
+ return self._maps[index]
+
+ def __setitem__(self, index, value):
+ if (
+ isinstance(value, Cifti2Volume) and
+ (
+ self.volume is not None and
+ not isinstance(self._maps[index], Cifti2Volume)
+ )
+ ):
+ raise Cifti2HeaderError("Only one Volume can be in a MatrixIndicesMap")
+ self._maps[index] = value
+
+ def insert(self, index, value):
+ if (
+ isinstance(value, Cifti2Volume) and
+ self.volume is not None
+ ):
+ raise Cifti2HeaderError("Only one Volume can be in a MatrixIndicesMap")
+
+ self._maps.insert(index, value)
+
+ @property
+ def named_maps(self):
+ for p in self:
+ if isinstance(p, Cifti2NamedMap):
+ yield p
+
+ @property
+ def surfaces(self):
+ for p in self:
+ if isinstance(p, Cifti2Surface):
+ yield p
+
+ @property
+ def parcels(self):
+ for p in self:
+ if isinstance(p, Cifti2Parcel):
+ yield p
+
+ @property
+ def volume(self):
+ for p in self:
+ if isinstance(p, Cifti2Volume):
+ return p
+ return None
+
+ @volume.setter
+ def volume(self, volume):
+ if not isinstance(volume, Cifti2Volume):
+ raise ValueError("You can only set a volume with a volume")
+ for i, v in enumerate(self):
+ if isinstance(v, Cifti2Volume):
+ break
+ else:
+ self.append(volume)
+ return
+ self[i] = volume
+
+ @volume.deleter
+ def volume(self):
+ for i, v in enumerate(self):
+ if isinstance(v, Cifti2Volume):
+ break
+ else:
+ raise ValueError("No Cifti2Volume element")
+ del self[i]
+
+ @property
+ def brain_models(self):
+ for p in self:
+ if isinstance(p, Cifti2BrainModel):
+ yield p
+
+ def _to_xml_element(self):
+ if self.applies_to_matrix_dimension is None:
+ raise Cifti2HeaderError(
+                'MatrixIndicesMap element must be applied to at least 1 dimension'
+ )
+
+ mat_ind_map = xml.Element('MatrixIndicesMap')
+ dims_as_strings = [str(dim) for dim in self.applies_to_matrix_dimension]
+ mat_ind_map.attrib['AppliesToMatrixDimension'] = ','.join(dims_as_strings)
+ for key in ['IndicesMapToDataType', 'NumberOfSeriesPoints', 'SeriesExponent',
+ 'SeriesStart', 'SeriesStep', 'SeriesUnit']:
+ attr = _underscore(key)
+ value = getattr(self, attr)
+ if value is not None:
+ mat_ind_map.attrib[key] = str(value)
+ for map_ in self:
+ mat_ind_map.append(map_._to_xml_element())
+
+ return mat_ind_map
+
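A sketch of a SERIES mapping and the SeriesStart / SeriesExponent arithmetic described in the docstring above (timing values are made up; a start of 5 with exponent -3 means the first point is 0.005 of the series unit):

    >>> from nibabel.cifti2 import Cifti2MatrixIndicesMap
    >>> mim = Cifti2MatrixIndicesMap(
    ...     applies_to_matrix_dimension=[0],
    ...     indices_map_to_data_type='CIFTI_INDEX_TYPE_SERIES',
    ...     number_of_series_points=200, series_exponent=-3,
    ...     series_start=5, series_step=720, series_unit='SECOND')
    >>> mim.series_start * 10 ** mim.series_exponent   # first series point
    0.005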
+
+class Cifti2Matrix(xml.XmlSerializable, collections.MutableSequence):
+ """ CIFTI2 Matrix object
+
+ This is a list-like container where the elements are instances of
+ :class:`Cifti2MatrixIndicesMap`.
+
+ * Description: contains child elements that describe the meaning of the
+ values in the matrix.
+ * Attributes: [NA]
+ * Child Elements
+
+ * MetaData (0 .. 1)
+ * MatrixIndicesMap (1 .. N)
+
+ * Text Content: [NA]
+ * Parent Element: CIFTI
+
+ For each matrix (data) dimension, exactly one MatrixIndicesMap element must
+ list it in the AppliesToMatrixDimension attribute.
+ """
+ def __init__(self):
+ self._mims = []
+ self.metadata = None
+
+ @property
+ def metadata(self):
+ return self._meta
+
+ @metadata.setter
+ def metadata(self, meta):
+        """ Set the metadata for this Cifti2Matrix
+
+ Parameters
+ ----------
+ meta : Cifti2MetaData
+
+ Returns
+ -------
+ None
+ """
+ self._meta = _value_if_klass(meta, Cifti2MetaData)
+
+ def _get_indices_from_mim(self, mim):
+ applies_to_matrix_dimension = mim.applies_to_matrix_dimension
+ if not isinstance(
+ applies_to_matrix_dimension,
+ collections.Iterable
+ ):
+ applies_to_matrix_dimension = (int(applies_to_matrix_dimension),)
+ return applies_to_matrix_dimension
+
+ @property
+ def mapped_indices(self):
+ '''
+ List of matrix indices that are mapped
+ '''
+ mapped_indices = []
+ for v in self:
+ a2md = self._get_indices_from_mim(v)
+ mapped_indices += a2md
+ return mapped_indices
+
+ def get_index_map(self, index):
+ '''
+ Cifti2 Mapping class for a given index
+
+ Parameters
+ ----------
+ index : int
+ Index for which we want to obtain the mapping.
+ Must be in the mapped_indices sequence.
+
+ Returns
+ -------
+ cifti2_map : Cifti2MatrixIndicesMap
+ Returns the Cifti2MatrixIndicesMap corresponding to
+ the given index.
+ '''
+
+ for v in self:
+ a2md = self._get_indices_from_mim(v)
+ if index in a2md:
+ return v
+ else:
+ raise Cifti2HeaderError("Index not mapped")
+
+ def _validate_new_mim(self, value):
+ if value.applies_to_matrix_dimension is None:
+ raise Cifti2HeaderError(
+ "Cifti2MatrixIndicesMap needs to have "
+ "the applies_to_matrix_dimension attribute set"
+ )
+ a2md = self._get_indices_from_mim(value)
+ if not set(self.mapped_indices).isdisjoint(a2md):
+ raise Cifti2HeaderError(
+ "Indices in this Cifti2MatrixIndicesMap "
+ "already mapped in this matrix"
+ )
+
+ def __setitem__(self, key, value):
+ if not isinstance(value, Cifti2MatrixIndicesMap):
+ raise TypeError("Not a valid Cifti2MatrixIndicesMap instance")
+ self._validate_new_mim(value)
+ self._mims[key] = value
+
+ def __getitem__(self, key):
+ return self._mims[key]
+
+ def __delitem__(self, key):
+ del self._mims[key]
+
+ def __len__(self):
+ return len(self._mims)
+
+ def insert(self, index, value):
+ if not isinstance(value, Cifti2MatrixIndicesMap):
+ raise TypeError("Not a valid Cifti2MatrixIndicesMap instance")
+ self._validate_new_mim(value)
+ self._mims.insert(index, value)
+
+ def _to_xml_element(self):
+ # From the spec: "For each matrix dimension, exactly one
+ # MatrixIndicesMap element must list it in the AppliesToMatrixDimension
+ # attribute."
+ mat = xml.Element('Matrix')
+ if self.metadata:
+ mat.append(self.metadata._to_xml_element())
+ for mim in self._mims:
+ mat.append(mim._to_xml_element())
+ return mat
+
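A sketch of how maps attach to matrix dimensions and are looked up afterwards; a single map covering both dimensions, as a dense-by-dense (dconn-style) file would use, is assumed:

    >>> from nibabel.cifti2 import Cifti2Matrix, Cifti2MatrixIndicesMap
    >>> matrix = Cifti2Matrix()
    >>> matrix.append(Cifti2MatrixIndicesMap([0, 1],
    ...                                      'CIFTI_INDEX_TYPE_BRAIN_MODELS'))
    >>> matrix.mapped_indices
    [0, 1]
    >>> matrix.get_index_map(1) is matrix[0]
    True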
+
+class Cifti2Header(FileBasedHeader, xml.XmlSerializable):
+ ''' Class for CIFTI2 header extension '''
+
+ def __init__(self, matrix=None, version="2.0"):
+ FileBasedHeader.__init__(self)
+ xml.XmlSerializable.__init__(self)
+ if matrix is None:
+ matrix = Cifti2Matrix()
+ self.matrix = matrix
+ self.version = version
+
+ def _to_xml_element(self):
+ cifti = xml.Element('CIFTI')
+ cifti.attrib['Version'] = str(self.version)
+ mat_xml = self.matrix._to_xml_element()
+ if mat_xml is not None:
+ cifti.append(mat_xml)
+ return cifti
+
+ @classmethod
+ def may_contain_header(klass, binaryblock):
+ from .parse_cifti2 import _Cifti2AsNiftiHeader
+ return _Cifti2AsNiftiHeader.may_contain_header(binaryblock)
+
+ @property
+ def number_of_mapped_indices(self):
+ '''
+ Number of mapped indices
+ '''
+ return len(self.matrix)
+
+ @property
+ def mapped_indices(self):
+ '''
+ List of matrix indices that are mapped
+ '''
+ return self.matrix.mapped_indices
+
+ def get_index_map(self, index):
+ '''
+ Cifti2 Mapping class for a given index
+
+ Parameters
+ ----------
+ index : int
+ Index for which we want to obtain the mapping.
+ Must be in the mapped_indices sequence.
+
+ Returns
+ -------
+ cifti2_map : Cifti2MatrixIndicesMap
+ Returns the Cifti2MatrixIndicesMap corresponding to
+ the given index.
+ '''
+ return self.matrix.get_index_map(index)
+
+
+class Cifti2Image(DataobjImage):
+ """ Class for single file CIFTI2 format image
+ """
+ header_class = Cifti2Header
+ valid_exts = Nifti2Image.valid_exts
+ files_types = Nifti2Image.files_types
+ makeable = False
+ rw = True
+
+ def __init__(self,
+ dataobj=None,
+ header=None,
+ nifti_header=None,
+ extra=None,
+ file_map=None):
+ ''' Initialize image
+
+ The image is a combination of (dataobj, header), with optional metadata
+ in `nifti_header` (a NIfTI2 header). There may be more metadata in the
+ mapping `extra`. Filename / file-like objects can also go in the
+ `file_map` mapping.
+
+ Parameters
+ ----------
+ dataobj : object
+ Object containing image data. It should be some object that
+ returns an array from ``np.asanyarray``. It should have a
+ ``shape`` attribute or property.
+ header : Cifti2Header instance
+ Header with data for / from XML part of CIFTI2 format.
+ nifti_header : None or mapping or NIfTI2 header instance, optional
+ Metadata for NIfTI2 component of this format.
+ extra : None or mapping
+ Extra metadata not captured by `header` or `nifti_header`.
+ file_map : mapping, optional
+ Mapping giving file information for this image format.
+ '''
+ super(Cifti2Image, self).__init__(dataobj, header=header,
+ extra=extra, file_map=file_map)
+ self._nifti_header = Nifti2Header.from_header(nifti_header)
+ # if NIfTI header not specified, get data type from input array
+ if nifti_header is None:
+ if hasattr(dataobj, 'dtype'):
+ self._nifti_header.set_data_dtype(dataobj.dtype)
+ self.update_headers()
+
+ @property
+ def nifti_header(self):
+ return self._nifti_header
+
+ @classmethod
+ def from_file_map(klass, file_map):
+ """ Load a CIFTI2 image from a file_map
+
+ Parameters
+ ----------
+ file_map : file_map
+
+ Returns
+ -------
+ img : Cifti2Image
+ Returns a Cifti2Image
+ """
+ from .parse_cifti2 import _Cifti2AsNiftiImage, Cifti2Extension
+ nifti_img = _Cifti2AsNiftiImage.from_file_map(file_map)
+
+ # Get cifti2 header
+ for item in nifti_img.header.extensions:
+ if isinstance(item, Cifti2Extension):
+ cifti_header = item.get_content()
+ break
+ else:
+ raise ValueError('NIfTI2 header does not contain a CIFTI2 '
+ 'extension')
+
+ # Construct cifti image.
+        # Use array proxy object where possible
+ dataobj = nifti_img.dataobj
+ return Cifti2Image(reshape_dataobj(dataobj, dataobj.shape[4:]),
+ header=cifti_header,
+ nifti_header=nifti_img.header,
+ file_map=file_map)
+
+ @classmethod
+ def from_image(klass, img):
+ ''' Class method to create new instance of own class from `img`
+
+ Parameters
+ ----------
+ img : instance
+ In fact, an object with the API of :class:`DataobjImage`.
+
+ Returns
+ -------
+ cimg : instance
+ Image, of our own class
+ '''
+ if isinstance(img, klass):
+ return img
+ raise NotImplementedError
+
+ def to_file_map(self, file_map=None):
+ """ Write image to `file_map` or contained ``self.file_map``
+
+ Parameters
+ ----------
+ file_map : None or mapping, optional
+ files mapping. If None (default) use object's ``file_map``
+ attribute instead.
+
+ Returns
+ -------
+ None
+ """
+ from .parse_cifti2 import Cifti2Extension
+ self.update_headers()
+ header = self._nifti_header
+ extension = Cifti2Extension(content=self.header.to_xml())
+ header.extensions.append(extension)
+ data = reshape_dataobj(self.dataobj,
+ (1, 1, 1, 1) + self.dataobj.shape)
+ # If qform not set, reset pixdim values so Nifti2 does not complain
+ if header['qform_code'] == 0:
+ header['pixdim'][:4] = 1
+ img = Nifti2Image(data, None, header)
+ img.to_file_map(file_map or self.file_map)
+
+ def update_headers(self):
+ ''' Harmonize CIFTI2 and NIfTI headers with image data
+
+ >>> import numpy as np
+ >>> data = np.zeros((2,3,4))
+ >>> img = Cifti2Image(data)
+ >>> img.shape == (2, 3, 4)
+ True
+ >>> img.update_headers()
+ >>> img.nifti_header.get_data_shape() == (2, 3, 4)
+ True
+ '''
+ self._nifti_header.set_data_shape(self._dataobj.shape)
+
+ def get_data_dtype(self):
+ return self._nifti_header.get_data_dtype()
+
+ def set_data_dtype(self, dtype):
+ self._nifti_header.set_data_dtype(dtype)
+
+
+def load(filename):
+ """ Load cifti2 from `filename`
+
+ Parameters
+ ----------
+ filename : str
+ filename of image to be loaded
+
+ Returns
+ -------
+ img : Cifti2Image
+ cifti image instance
+
+ Raises
+ ------
+ ImageFileError: if `filename` doesn't look like cifti
+    IOError: if `filename` does not exist
+ """
+ return Cifti2Image.from_filename(filename)
+
+
+def save(img, filename):
+ """ Save cifti to `filename`
+
+ Parameters
+ ----------
+    img : Cifti2Image
+        image to be saved
+    filename : str
+        filename to which to save image
+ """
+ Cifti2Image.instance_to_filename(img, filename)
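A minimal round trip with the module-level helpers, assuming this branch is installed and that ``ones.dscalar.nii`` is an existing CIFTI-2 file (the filenames are hypothetical):

    >>> import nibabel as nib
    >>> img = nib.cifti2.load('ones.dscalar.nii')
    >>> data = img.get_data()        # array with the four NIfTI singleton axes dropped
    >>> hdr = img.header             # Cifti2Header, i.e. the XML part
    >>> nii_hdr = img.nifti_header   # underlying NIfTI-2 header
    >>> nib.cifti2.save(img, 'ones_copy.dscalar.nii')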
diff --git a/nibabel/cifti2/parse_cifti2.py b/nibabel/cifti2/parse_cifti2.py
new file mode 100644
index 0000000000..9bbd22a5ac
--- /dev/null
+++ b/nibabel/cifti2/parse_cifti2.py
@@ -0,0 +1,588 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the NiBabel package for the
+# copyright and license terms.
+#
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+from __future__ import division, print_function, absolute_import
+
+from distutils.version import LooseVersion
+
+import numpy as np
+
+from .cifti2 import (Cifti2MetaData, Cifti2Header, Cifti2Label,
+ Cifti2LabelTable, Cifti2VertexIndices,
+ Cifti2VoxelIndicesIJK, Cifti2BrainModel, Cifti2Matrix,
+ Cifti2MatrixIndicesMap, Cifti2NamedMap, Cifti2Parcel,
+ Cifti2Surface, Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ,
+ Cifti2Vertices, Cifti2Volume, CIFTI_BRAIN_STRUCTURES,
+ CIFTI_MODEL_TYPES, _underscore, Cifti2HeaderError)
+from .. import xmlutils as xml
+from ..spatialimages import HeaderDataError
+from ..externals.six import BytesIO
+from ..batteryrunners import Report
+from ..nifti1 import Nifti1Extension, extension_codes, intent_codes
+from ..nifti2 import Nifti2Header, Nifti2Image
+
+
+class Cifti2Extension(Nifti1Extension):
+ code = 32
+
+ def __init__(self, code=None, content=None):
+ Nifti1Extension.__init__(self, code=code or self.code, content=content)
+
+ def _unmangle(self, value):
+ parser = Cifti2Parser()
+ parser.parse(string=value)
+ self._content = parser.header
+ return self._content
+
+ def _mangle(self, value):
+ if not isinstance(value, Cifti2Header):
+ raise ValueError('Can only mangle a Cifti2Header.')
+ return value.to_xml()
+
+
+extension_codes.add_codes((
+ (Cifti2Extension.code, 'cifti', Cifti2Extension),))
+
+intent_codes.add_codes((
+ # The codes below appear on the CIFTI-2 standard
+ # http://www.nitrc.org/plugins/mwiki/index.php/cifti:ConnectivityMatrixFileFormats
+ # https://www.nitrc.org/forum/attachment.php?attachid=341&group_id=454&forum_id=1955
+ (3000, 'dense fiber/fan samples', (), 'NIFTI_INTENT_CONNECTIVITY_UNKNOWN'),
+ (3001, 'dense connectivity', (), 'NIFTI_INTENT_CONNECTIVITY_DENSE'),
+ (3002, 'dense data series/fiber fans', (),
+ 'NIFTI_INTENT_CONNECTIVITY_DENSE_SERIES'),
+ (3003, 'parcellated connectivity', (),
+ 'NIFTI_INTENT_CONNECTIVITY_PARCELLATED'),
+ (3004, 'parcellated data series', (),
+ "NIFTI_INTENT_CONNECTIVITY_PARCELLATED_SERIES"),
+ (3006, 'dense scalar', (),
+ 'NIFTI_INTENT_CONNECTIVITY_DENSE_SCALARS'),
+ (3007, 'dense label', (),
+ 'NIFTI_INTENT_CONNECTIVITY_DENSE_LABELS'),
+ (3008, 'parcellated scalar', (),
+ 'NIFTI_INTENT_CONNECTIVITY_PARCELLATED_SCALAR'),
+ (3009, 'parcellated dense connectivity', (),
+ 'NIFTI_INTENT_CONNECTIVITY_PARCELLATED_DENSE'),
+ (3010, 'dense parcellated connectivity', (),
+ 'NIFTI_INTENT_CONNECTIVITY_DENSE_PARCELLATED'),
+ (3011, 'parcellated connectivity series', (),
+ 'NIFTI_INTENT_CONNECTIVITY_PARCELLATED_PARCELLATED_SERIES'),
+ (3012, 'parcellated connectivity scalar', (),
+ 'NIFTI_INTENT_CONNECTIVITY_PARCELLATED_PARCELLATED_SCALAR')))
+
+
+class _Cifti2AsNiftiHeader(Nifti2Header):
+    ''' NIfTI2 header subclass for the NIfTI component of a CIFTI2 file '''
+
+ @classmethod
+ def _valid_intent_code(klass, intent_code):
+ """ Return True if `intent_code` matches our class `klass`
+ """
+ return intent_code >= 3000 and intent_code < 3100
+
+ @classmethod
+ def may_contain_header(klass, binaryblock):
+ if not super(_Cifti2AsNiftiHeader, klass).may_contain_header(binaryblock):
+ return False
+ hdr = klass(binaryblock=binaryblock[:klass.sizeof_hdr])
+ return klass._valid_intent_code(hdr.get_intent('code')[0])
+
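The CIFTI-2 intent codes registered above all fall in the 3000-3099 range, which is what this check encodes; a doctest-style illustration:

    >>> from nibabel.cifti2.parse_cifti2 import _Cifti2AsNiftiHeader
    >>> _Cifti2AsNiftiHeader._valid_intent_code(3006)   # dense scalar
    True
    >>> _Cifti2AsNiftiHeader._valid_intent_code(2005)   # an ordinary NIfTI intent
    False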
+ @staticmethod
+ def _chk_qfac(hdr, fix=False):
+ # Allow qfac of 0 without complaint for CIFTI2
+ rep = Report(HeaderDataError)
+ if hdr['pixdim'][0] in (-1, 0, 1):
+ return hdr, rep
+ rep.problem_level = 20
+ rep.problem_msg = 'pixdim[0] (qfac) should be 1 (default) or 0 or -1'
+ if fix:
+ hdr['pixdim'][0] = 1
+ rep.fix_msg = 'setting qfac to 1'
+ return hdr, rep
+
+ @staticmethod
+ def _chk_pixdims(hdr, fix=False):
+ rep = Report(HeaderDataError)
+ pixdims = hdr['pixdim']
+ spat_dims = pixdims[1:4]
+ if not np.any(spat_dims < 0):
+ return hdr, rep
+ rep.problem_level = 35
+ rep.problem_msg = 'pixdim[1,2,3] should be zero or positive'
+ if fix:
+ hdr['pixdim'][1:4] = np.abs(spat_dims)
+ rep.fix_msg = 'setting to abs of pixdim values'
+ return hdr, rep
+
+
+class _Cifti2AsNiftiImage(Nifti2Image):
+ header_class = _Cifti2AsNiftiHeader
+ files_types = (('image', '.nii'),)
+ valid_exts = ('.nii',)
+ makeable = False
+ rw = True
+
+ def __init__(self, dataobj, affine, header=None,
+ extra=None, file_map=None):
+ """Convert NIFTI-2 file to CIFTI2"""
+ super(_Cifti2AsNiftiImage, self).__init__(dataobj=dataobj,
+ affine=affine,
+ header=header,
+ extra=extra,
+ file_map=file_map)
+
+ # Get cifti header from extension
+ for extension in self.header.extensions:
+ if isinstance(extension, Cifti2Extension):
+ self.cifti_img = extension
+ break
+ else:
+ self.cifti_img = None
+
+ if self.cifti_img is None:
+ raise ValueError('Nifti2 header does not contain a CIFTI2 '
+ 'extension')
+ self.cifti_img.data = self.get_data()
+
+
+class Cifti2Parser(xml.XmlParser):
+ '''Class to parse an XML string into a CIFTI2 header object'''
+ def __init__(self, encoding=None, buffer_size=3500000, verbose=0):
+ super(Cifti2Parser, self).__init__(encoding=encoding,
+ buffer_size=buffer_size,
+ verbose=verbose)
+ self.fsm_state = []
+ self.struct_state = []
+
+ # where to write CDATA:
+ self.write_to = None
+ self.header = None
+
+ # Collecting char buffer fragments
+ self._char_blocks = None
+
+ __init__.__doc__ = xml.XmlParser.__init__.__doc__
+
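A sketch of driving the parser directly on an XML string; the fragment below is a minimal, hypothetical CIFTI-2 document (real files carry a fully populated Matrix):

    >>> from nibabel.cifti2.parse_cifti2 import Cifti2Parser
    >>> parser = Cifti2Parser()
    >>> parser.parse(string=b'<CIFTI Version="2.0"><Matrix/></CIFTI>')
    >>> hdr = parser.header          # Cifti2Header built from the XML
    >>> hdr.version
    '2.0'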
+ def StartElementHandler(self, name, attrs):
+ self.flush_chardata()
+ if self.verbose > 0:
+ print('Start element:\n\t', repr(name), attrs)
+
+ if name == 'CIFTI':
+ # create cifti2 image
+ self.header = Cifti2Header()
+ self.header.version = attrs['Version']
+ if LooseVersion(self.header.version) < LooseVersion('2'):
+ raise ValueError('Only CIFTI-2 files are supported')
+ self.fsm_state.append('CIFTI')
+ self.struct_state.append(self.header)
+
+ elif name == 'Matrix':
+ self.fsm_state.append('Matrix')
+ matrix = Cifti2Matrix()
+ parent = self.struct_state[-1]
+ if not isinstance(parent, Cifti2Header):
+ raise Cifti2HeaderError(
+ 'Matrix element can only be a child of the CIFTI2 Header element'
+ )
+ parent.matrix = matrix
+ self.struct_state.append(matrix)
+
+ elif name == 'MetaData':
+ self.fsm_state.append('MetaData')
+ meta = Cifti2MetaData()
+ parent = self.struct_state[-1]
+ if not isinstance(parent, (Cifti2Matrix, Cifti2NamedMap)):
+ raise Cifti2HeaderError(
+ 'MetaData element can only be a child of the CIFTI2 Matrix or NamedMap elements'
+ )
+
+ self.struct_state.append(meta)
+
+ elif name == 'MD':
+ pair = ['', '']
+ self.fsm_state.append('MD')
+ self.struct_state.append(pair)
+
+ elif name == 'Name':
+ self.write_to = 'Name'
+
+ elif name == 'Value':
+ self.write_to = 'Value'
+
+ elif name == 'MatrixIndicesMap':
+ self.fsm_state.append('MatrixIndicesMap')
+ dimensions = [int(value) for value in attrs["AppliesToMatrixDimension"].split(',')]
+ mim = Cifti2MatrixIndicesMap(
+ applies_to_matrix_dimension=dimensions,
+ indices_map_to_data_type=attrs["IndicesMapToDataType"])
+ for key, dtype in [("NumberOfSeriesPoints", int),
+ ("SeriesExponent", int),
+ ("SeriesStart", float),
+ ("SeriesStep", float),
+ ("SeriesUnit", str)]:
+ if key in attrs:
+ setattr(mim, _underscore(key), dtype(attrs[key]))
+ matrix = self.struct_state[-1]
+ if not isinstance(matrix, Cifti2Matrix):
+ raise Cifti2HeaderError(
+ 'MatrixIndicesMap element can only be a child of the CIFTI2 Matrix element'
+ )
+ matrix.append(mim)
+ self.struct_state.append(mim)
+
+ elif name == 'NamedMap':
+ self.fsm_state.append('NamedMap')
+ named_map = Cifti2NamedMap()
+ mim = self.struct_state[-1]
+ if not isinstance(mim, Cifti2MatrixIndicesMap):
+ raise Cifti2HeaderError(
+ 'NamedMap element can only be a child of the CIFTI2 MatrixIndicesMap element'
+ )
+ self.struct_state.append(named_map)
+ mim.append(named_map)
+
+ elif name == 'LabelTable':
+ named_map = self.struct_state[-1]
+ mim = self.struct_state[-2]
+ if mim.indices_map_to_data_type != "CIFTI_INDEX_TYPE_LABELS":
+ raise Cifti2HeaderError(
+ 'LabelTable element can only be a child of a MatrixIndicesMap '
+ 'with CIFTI_INDEX_TYPE_LABELS type'
+ )
+ lata = Cifti2LabelTable()
+ if not isinstance(named_map, Cifti2NamedMap):
+ raise Cifti2HeaderError(
+ 'LabelTable element can only be a child of the CIFTI2 NamedMap element'
+ )
+ self.fsm_state.append('LabelTable')
+ self.struct_state.append(lata)
+ named_map.label_table = lata
+
+ elif name == 'Label':
+ lata = self.struct_state[-1]
+ if not isinstance(lata, Cifti2LabelTable):
+ raise Cifti2HeaderError(
+ 'Label element can only be a child of the CIFTI2 LabelTable element'
+ )
+ label = Cifti2Label()
+ label.key = int(attrs["Key"])
+ label.red = float(attrs["Red"])
+ label.green = float(attrs["Green"])
+ label.blue = float(attrs["Blue"])
+ label.alpha = float(attrs["Alpha"])
+ self.write_to = 'Label'
+ self.fsm_state.append('Label')
+ self.struct_state.append(label)
+
+ elif name == "MapName":
+ named_map = self.struct_state[-1]
+ if not isinstance(named_map, Cifti2NamedMap):
+ raise Cifti2HeaderError(
+ 'MapName element can only be a child of the CIFTI2 NamedMap element'
+ )
+
+ self.fsm_state.append('MapName')
+ self.write_to = 'MapName'
+
+ elif name == "Surface":
+ surface = Cifti2Surface()
+ mim = self.struct_state[-1]
+ if not isinstance(mim, Cifti2MatrixIndicesMap):
+ raise Cifti2HeaderError(
+ 'Surface element can only be a child of the CIFTI2 MatrixIndicesMap element'
+ )
+ if mim.indices_map_to_data_type != "CIFTI_INDEX_TYPE_PARCELS":
+ raise Cifti2HeaderError(
+ 'Surface element can only be a child of a MatrixIndicesMap '
+ 'with CIFTI_INDEX_TYPE_PARCELS type'
+ )
+ surface.brain_structure = attrs["BrainStructure"]
+ surface.surface_number_of_vertices = int(attrs["SurfaceNumberOfVertices"])
+ mim.append(surface)
+
+ elif name == "Parcel":
+ parcel = Cifti2Parcel()
+ mim = self.struct_state[-1]
+ if not isinstance(mim, Cifti2MatrixIndicesMap):
+ raise Cifti2HeaderError(
+ 'Parcel element can only be a child of the CIFTI2 MatrixIndicesMap element'
+ )
+ parcel.name = attrs["Name"]
+ mim.append(parcel)
+ self.fsm_state.append('Parcel')
+ self.struct_state.append(parcel)
+
+ elif name == "Vertices":
+ vertices = Cifti2Vertices()
+ parcel = self.struct_state[-1]
+ if not isinstance(parcel, Cifti2Parcel):
+ raise Cifti2HeaderError(
+ 'Vertices element can only be a child of the CIFTI2 Parcel element'
+ )
+ vertices.brain_structure = attrs["BrainStructure"]
+ if vertices.brain_structure not in CIFTI_BRAIN_STRUCTURES:
+ raise Cifti2HeaderError(
+ 'BrainStructure for this Vertices element is not valid'
+ )
+ parcel.append_cifti_vertices(vertices)
+ self.fsm_state.append('Vertices')
+ self.struct_state.append(vertices)
+ self.write_to = 'Vertices'
+
+ elif name == "VoxelIndicesIJK":
+ parent = self.struct_state[-1]
+ if not isinstance(parent, (Cifti2Parcel, Cifti2BrainModel)):
+ raise Cifti2HeaderError(
+ 'VoxelIndicesIJK element can only be a child of the CIFTI2 '
+ 'Parcel or BrainModel elements'
+ )
+ parent.voxel_indices_ijk = Cifti2VoxelIndicesIJK()
+ self.write_to = 'VoxelIndices'
+
+ elif name == "Volume":
+ mim = self.struct_state[-1]
+ if not isinstance(mim, Cifti2MatrixIndicesMap):
+ raise Cifti2HeaderError(
+ 'Volume element can only be a child of the CIFTI2 MatrixIndicesMap element'
+ )
+ dimensions = tuple([int(val) for val in
+ attrs["VolumeDimensions"].split(',')])
+ volume = Cifti2Volume(volume_dimensions=dimensions)
+ mim.append(volume)
+ self.fsm_state.append('Volume')
+ self.struct_state.append(volume)
+
+ elif name == "TransformationMatrixVoxelIndicesIJKtoXYZ":
+ volume = self.struct_state[-1]
+ if not isinstance(volume, Cifti2Volume):
+ raise Cifti2HeaderError(
+ 'TransformationMatrixVoxelIndicesIJKtoXYZ element can only be a child '
+ 'of the CIFTI2 Volume element'
+ )
+ transform = Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ()
+ transform.meter_exponent = int(attrs["MeterExponent"])
+ volume.transformation_matrix_voxel_indices_ijk_to_xyz = transform
+ self.fsm_state.append('TransformMatrix')
+ self.struct_state.append(transform)
+ self.write_to = 'TransformMatrix'
+
+ elif name == "BrainModel":
+ model = Cifti2BrainModel()
+ mim = self.struct_state[-1]
+ if not isinstance(mim, Cifti2MatrixIndicesMap):
+ raise Cifti2HeaderError(
+ 'BrainModel element can only be a child '
+ 'of the CIFTI2 MatrixIndicesMap element'
+ )
+ if mim.indices_map_to_data_type != "CIFTI_INDEX_TYPE_BRAIN_MODELS":
+ raise Cifti2HeaderError(
+ 'BrainModel element can only be a child of a MatrixIndicesMap '
+ 'with CIFTI_INDEX_TYPE_BRAIN_MODELS type'
+ )
+ for key, dtype in [("IndexOffset", int),
+ ("IndexCount", int),
+ ("ModelType", str),
+ ("BrainStructure", str),
+ ("SurfaceNumberOfVertices", int)]:
+ if key in attrs:
+ setattr(model, _underscore(key), dtype(attrs[key]))
+ if model.brain_structure not in CIFTI_BRAIN_STRUCTURES:
+ raise Cifti2HeaderError(
+ 'BrainStructure for this BrainModel element is not valid'
+ )
+ if model.model_type not in CIFTI_MODEL_TYPES:
+ raise Cifti2HeaderError(
+ 'ModelType for this BrainModel element is not valid'
+ )
+ mim.append(model)
+ self.fsm_state.append('BrainModel')
+ self.struct_state.append(model)
+
+ elif name == "VertexIndices":
+ index = Cifti2VertexIndices()
+ model = self.struct_state[-1]
+ if not isinstance(model, Cifti2BrainModel):
+ raise Cifti2HeaderError(
+ 'VertexIndices element can only be a child '
+ 'of the CIFTI2 BrainModel element'
+ )
+ self.fsm_state.append('VertexIndices')
+ model.vertex_indices = index
+ self.struct_state.append(index)
+ self.write_to = "VertexIndices"
+
+ def EndElementHandler(self, name):
+ self.flush_chardata()
+ if self.verbose > 0:
+ print('End element:\n\t', repr(name))
+
+ if name == 'CIFTI':
+ # remove last element of the list
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == 'Matrix':
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == 'MetaData':
+ self.fsm_state.pop()
+ meta = self.struct_state.pop()
+ parent = self.struct_state[-1]
+ parent.metadata = meta
+
+ elif name == 'MD':
+ self.fsm_state.pop()
+ pair = self.struct_state.pop()
+ meta = self.struct_state[-1]
+ meta[pair[0]] = pair[1]
+
+ elif name == 'Name':
+ self.write_to = None
+
+ elif name == 'Value':
+ self.write_to = None
+
+ elif name == 'MatrixIndicesMap':
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == 'NamedMap':
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == 'LabelTable':
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == 'Label':
+ self.fsm_state.pop()
+ label = self.struct_state.pop()
+ lata = self.struct_state[-1]
+ lata.append(label)
+ self.write_to = None
+
+ elif name == "MapName":
+ self.fsm_state.pop()
+ self.write_to = None
+
+ elif name == "Parcel":
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == "Vertices":
+ self.fsm_state.pop()
+ self.struct_state.pop()
+ self.write_to = None
+
+ elif name == "VoxelIndicesIJK":
+ self.write_to = None
+
+ elif name == "Volume":
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == "TransformationMatrixVoxelIndicesIJKtoXYZ":
+ self.fsm_state.pop()
+ self.struct_state.pop()
+ self.write_to = None
+
+ elif name == "BrainModel":
+ self.fsm_state.pop()
+ self.struct_state.pop()
+
+ elif name == "VertexIndices":
+ self.fsm_state.pop()
+ self.struct_state.pop()
+ self.write_to = None
+
+ def CharacterDataHandler(self, data):
+ """ Collect character data chunks pending collation
+
+ The parser breaks the data up into chunks whose size depends on the
+ buffer_size of the parser. A large block of character data, with a
+ standard parser buffer_size (such as 8K), can easily span many calls to
+ this function. We therefore collect the chunks and process them when we
+ hit start or end tags.
+ """
+ if self._char_blocks is None:
+ self._char_blocks = []
+ self._char_blocks.append(data)
+
+ def flush_chardata(self):
+ """ Collate and process collected character data
+ """
+ if self._char_blocks is None:
+ return
+ # Just join the strings to get the data. Maybe there are some memory
+ # optimizations we could do by passing the list of strings to the
+ # read_data_block function.
+ data = ''.join(self._char_blocks)
+ # Reset the char collector
+ self._char_blocks = None
+ # Process data
+ if self.write_to == 'Name':
+ data = data.strip() # .decode('utf-8')
+ pair = self.struct_state[-1]
+ pair[0] = data
+
+ elif self.write_to == 'Value':
+ data = data.strip() # .decode('utf-8')
+ pair = self.struct_state[-1]
+ pair[1] = data
+
+ elif self.write_to == 'Vertices':
+ # conversion to numpy array
+ c = BytesIO(data.strip().encode('utf-8'))
+ vertices = self.struct_state[-1]
+ vertices.extend(np.genfromtxt(c, dtype=np.int))
+ c.close()
+
+ elif self.write_to == 'VoxelIndices':
+ # conversion to numpy array
+ c = BytesIO(data.strip().encode('utf-8'))
+ parent = self.struct_state[-1]
+ parent.voxel_indices_ijk.extend(np.genfromtxt(c, dtype=np.int).reshape(-1, 3))
+ c.close()
+
+ elif self.write_to == 'VertexIndices':
+ # conversion to numpy array
+ c = BytesIO(data.strip().encode('utf-8'))
+ index = self.struct_state[-1]
+ index.extend(np.genfromtxt(c, dtype=np.int))
+ c.close()
+
+ elif self.write_to == 'TransformMatrix':
+ # conversion to numpy array
+ c = BytesIO(data.strip().encode('utf-8'))
+ transform = self.struct_state[-1]
+ transform.matrix = np.genfromtxt(c, dtype=np.float)
+ c.close()
+
+ elif self.write_to == 'Label':
+ label = self.struct_state[-1]
+ label.label = data.strip()
+
+ elif self.write_to == 'MapName':
+ named_map = self.struct_state[-1]
+ named_map.map_name = data.strip() # .decode('utf-8')
+
+ @property
+ def pending_data(self):
+ " True if there is character data pending for processing "
+ return self._char_blocks is not None
+
+
+# class _Cifti2DenseDataSeriesNiftiHeader(_Cifti2AsNiftiHeader):
+#
+# @classmethod
+# def _valid_intent_code(klass, intent_code):
+# """ Return True if `intent_code` matches our class `klass`
+# """
+# return intent_code == 3002
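As an aside on the chunk-buffering pattern used by CharacterDataHandler / flush_chardata above: expat may deliver a single text node over several callbacks, so the handler only collates on the next start or end tag. A minimal standalone sketch of the same idea follows; the element name and text are made up, and this is not the nibabel parser itself.

    from xml.parsers.expat import ParserCreate

    class TinyHandler(object):
        """Buffer character-data chunks; collate them at element boundaries."""

        def __init__(self):
            self._char_blocks = None
            self._current = None
            self.texts = {}

        def StartElementHandler(self, name, attrs):
            self.flush_chardata()
            self._current = name

        def EndElementHandler(self, name):
            self.flush_chardata()

        def CharacterDataHandler(self, data):
            # expat may split one text node across many calls
            if self._char_blocks is None:
                self._char_blocks = []
            self._char_blocks.append(data)

        def flush_chardata(self):
            if self._char_blocks is None:
                return
            text = ''.join(self._char_blocks).strip()
            self._char_blocks = None
            if self._current and text:
                self.texts[self._current] = text

    handler = TinyHandler()
    parser = ParserCreate()
    parser.StartElementHandler = handler.StartElementHandler
    parser.EndElementHandler = handler.EndElementHandler
    parser.CharacterDataHandler = handler.CharacterDataHandler
    # Feed the document in pieces so the text can arrive in more than one chunk
    parser.Parse(b'<MapName>thick', False)
    parser.Parse(b'ness map</MapName>', True)
    print(handler.texts)   # {'MapName': 'thickness map'}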
diff --git a/nibabel/cifti2/tests/__init__.py b/nibabel/cifti2/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/nibabel/cifti2/tests/test_cifti2.py b/nibabel/cifti2/tests/test_cifti2.py
new file mode 100644
index 0000000000..9a2dcec728
--- /dev/null
+++ b/nibabel/cifti2/tests/test_cifti2.py
@@ -0,0 +1,361 @@
+""" Testing CIFTI2 objects
+"""
+import collections
+from xml.etree import ElementTree
+
+import numpy as np
+
+from nibabel import cifti2 as ci
+from nibabel.nifti2 import Nifti2Header
+from nibabel.cifti2.cifti2 import _float_01, _value_if_klass, Cifti2HeaderError
+
+from nose.tools import assert_true, assert_equal, assert_raises, assert_is_none
+
+from nibabel.tests.test_dataobj_images import TestDataobjAPI as _TDA
+
+
+def compare_xml_leaf(str1, str2):
+ x1 = ElementTree.fromstring(str1)
+ x2 = ElementTree.fromstring(str2)
+ if len(x1) > 0 or len(x2) > 0:
+ raise ValueError
+
+ test = (x1.tag == x2.tag) and (x1.attrib == x2.attrib) and (x1.text == x2.text)
+ print((x1.tag, x1.attrib, x1.text))
+ print((x2.tag, x2.attrib, x2.text))
+ return test
+
+
+def test_value_if_klass():
+ assert_equal(_value_if_klass(None, list), None)
+ assert_equal(_value_if_klass([1], list), [1])
+ assert_raises(ValueError, _value_if_klass, 1, list)
+
+
+def test_cifti2_metadata():
+ md = ci.Cifti2MetaData(metadata={'a': 'aval'})
+ assert_equal(len(md), 1)
+ assert_equal(list(iter(md)), ['a'])
+ assert_equal(md['a'], 'aval')
+ assert_equal(md.data, dict([('a', 'aval')]))
+
+ md = ci.Cifti2MetaData()
+ assert_equal(len(md), 0)
+ assert_equal(list(iter(md)), [])
+ assert_equal(md.data, {})
+ assert_raises(ValueError, md.difference_update, None)
+
+ md['a'] = 'aval'
+ assert_equal(md['a'], 'aval')
+ assert_equal(len(md), 1)
+ assert_equal(md.data, dict([('a', 'aval')]))
+
+ del md['a']
+ assert_equal(len(md), 0)
+
+ metadata_test = [('a', 'aval'), ('b', 'bval')]
+ md.update(metadata_test)
+ assert_equal(md.data, dict(metadata_test))
+
+ assert_equal(list(iter(md)), list(iter(collections.OrderedDict(metadata_test))))
+
+ md.update({'a': 'aval', 'b': 'bval'})
+ assert_equal(md.data, dict(metadata_test))
+
+ md.update({'a': 'aval', 'd': 'dval'})
+ assert_equal(md.data, dict(metadata_test + [('d', 'dval')]))
+
+ md.difference_update({'a': 'aval', 'd': 'dval'})
+ assert_equal(md.data, dict(metadata_test[1:]))
+
+ assert_raises(KeyError, md.difference_update, {'a': 'aval', 'd': 'dval'})
+ assert_equal(md.to_xml().decode('utf-8'),
+ '<MetaData><MD><Name>b</Name><Value>bval</Value></MD></MetaData>')
+
+
+def test__float_01():
+ assert_equal(_float_01(0), 0)
+ assert_equal(_float_01(1), 1)
+ assert_equal(_float_01('0'), 0)
+ assert_equal(_float_01('0.2'), 0.2)
+ assert_raises(ValueError, _float_01, 1.1)
+ assert_raises(ValueError, _float_01, -0.1)
+ assert_raises(ValueError, _float_01, 2)
+ assert_raises(ValueError, _float_01, -1)
+ assert_raises(ValueError, _float_01, 'foo')
+
+
+def test_cifti2_labeltable():
+ lt = ci.Cifti2LabelTable()
+ assert_equal(len(lt), 0)
+ assert_raises(ci.Cifti2HeaderError, lt.to_xml)
+ assert_raises(ci.Cifti2HeaderError, lt._to_xml_element)
+
+ label = ci.Cifti2Label(label='Test', key=0)
+ lt[0] = label
+ assert_equal(len(lt), 1)
+ assert_equal(dict(lt), {label.key: label})
+
+ lt.clear()
+ lt.append(label)
+ assert_equal(len(lt), 1)
+ assert_equal(dict(lt), {label.key: label})
+
+ lt.clear()
+ test_tuple = (label.label, label.red, label.green, label.blue, label.alpha)
+ lt[label.key] = test_tuple
+ assert_equal(len(lt), 1)
+ v = lt[label.key]
+ assert_equal(
+ (v.label, v.red, v.green, v.blue, v.alpha),
+ test_tuple
+ )
+
+ assert_raises(ValueError, lt.__setitem__, 1, label)
+ assert_raises(ValueError, lt.__setitem__, 0, test_tuple[:-1])
+ assert_raises(ValueError, lt.__setitem__, 0, ('foo', 1.1, 0, 0, 1))
+ assert_raises(ValueError, lt.__setitem__, 0, ('foo', 1.0, -1, 0, 1))
+ assert_raises(ValueError, lt.__setitem__, 0, ('foo', 1.0, 0, -0.1, 1))
+
+
+def test_cifti2_label():
+ lb = ci.Cifti2Label()
+ lb.label = 'Test'
+ lb.key = 0
+ assert_equal(lb.rgba, (0, 0, 0, 0))
+ assert_true(compare_xml_leaf(
+ lb.to_xml().decode('utf-8'),
+ ""
+ ))
+
+ lb.red = 0
+ lb.green = 0.1
+ lb.blue = 0.2
+ lb.alpha = 0.3
+ assert_equal(lb.rgba, (0, 0.1, 0.2, 0.3))
+
+ assert_true(compare_xml_leaf(
+ lb.to_xml().decode('utf-8'),
+ ""
+ ))
+
+ lb.red = 10
+ assert_raises(ci.Cifti2HeaderError, lb.to_xml)
+ lb.red = 0
+
+ lb.key = 'a'
+ assert_raises(ci.Cifti2HeaderError, lb.to_xml)
+ lb.key = 0
+
+
+def test_cifti2_parcel():
+ pl = ci.Cifti2Parcel()
+ assert_raises(ci.Cifti2HeaderError, pl.to_xml)
+ assert_raises(TypeError, pl.append_cifti_vertices, None)
+
+ assert_raises(ValueError, ci.Cifti2Parcel, **{'vertices': [1, 2, 3]})
+ pl = ci.Cifti2Parcel(name='region',
+ voxel_indices_ijk=ci.Cifti2VoxelIndicesIJK([[1, 2, 3]]),
+ vertices=[ci.Cifti2Vertices([0, 1, 2])])
+ pl.pop_cifti2_vertices(0)
+ assert_equal(len(pl.vertices), 0)
+ assert_equal(
+ pl.to_xml().decode('utf-8'),
+ '<Parcel Name="region"><VoxelIndicesIJK>1 2 3</VoxelIndicesIJK></Parcel>'
+ )
+
+
+def test_cifti2_vertices():
+ vs = ci.Cifti2Vertices()
+ assert_raises(ci.Cifti2HeaderError, vs.to_xml)
+ vs.brain_structure = 'CIFTI_STRUCTURE_OTHER'
+ assert_equal(
+ vs.to_xml().decode('utf-8'),
+ '<Vertices BrainStructure="CIFTI_STRUCTURE_OTHER" />'
+ )
+ assert_equal(len(vs), 0)
+ vs.extend(np.array([0, 1, 2]))
+ assert_equal(len(vs), 3)
+ assert_raises(ValueError, vs.__setitem__, 1, 'a')
+ assert_raises(ValueError, vs.insert, 1, 'a')
+ assert_equal(
+ vs.to_xml().decode('utf-8'),
+ '<Vertices BrainStructure="CIFTI_STRUCTURE_OTHER">0 1 2</Vertices>'
+ )
+
+ vs[0] = 10
+ assert_equal(vs[0], 10)
+ assert_equal(len(vs), 3)
+ vs = ci.Cifti2Vertices(vertices=[0, 1, 2])
+ assert_equal(len(vs), 3)
+
+
+def test_cifti2_transformationmatrixvoxelindicesijktoxyz():
+ tr = ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ()
+ assert_raises(ci.Cifti2HeaderError, tr.to_xml)
+
+
+def test_cifti2_surface():
+ s = ci.Cifti2Surface()
+ assert_raises(ci.Cifti2HeaderError, s.to_xml)
+
+
+def test_cifti2_volume():
+ vo = ci.Cifti2Volume()
+ assert_raises(ci.Cifti2HeaderError, vo.to_xml)
+
+
+def test_cifti2_vertexindices():
+ vi = ci.Cifti2VertexIndices()
+ assert_equal(len(vi), 0)
+ assert_raises(ci.Cifti2HeaderError, vi.to_xml)
+ vi.extend(np.array([0, 1, 2]))
+ assert_equal(len(vi), 3)
+ assert_equal(
+ vi.to_xml().decode('utf-8'),
+ '<VertexIndices>0 1 2</VertexIndices>'
+ )
+ assert_raises(ValueError, vi.__setitem__, 0, 'a')
+ vi[0] = 10
+ assert_equal(vi[0], 10)
+ assert_equal(len(vi), 3)
+
+
+def test_cifti2_voxelindicesijk():
+ vi = ci.Cifti2VoxelIndicesIJK()
+ assert_raises(ci.Cifti2HeaderError, vi.to_xml)
+
+ vi = ci.Cifti2VoxelIndicesIJK()
+ assert_equal(len(vi), 0)
+ assert_raises(ci.Cifti2HeaderError, vi.to_xml)
+ vi.extend(np.array([[0, 1, 2]]))
+ assert_equal(len(vi), 1)
+ assert_equal(vi[0], [0, 1, 2])
+ vi.append([3, 4, 5])
+ assert_equal(len(vi), 2)
+ vi.append([6, 7, 8])
+ assert_equal(len(vi), 3)
+ del vi[-1]
+ assert_equal(len(vi), 2)
+
+ assert_equal(vi[1], [3, 4, 5])
+ vi[1] = [3, 4, 6]
+ assert_equal(vi[1], [3, 4, 6])
+ assert_raises(ValueError, vi.__setitem__, 'a', [1, 2, 3])
+ assert_raises(TypeError, vi.__setitem__, [1, 2], [1, 2, 3])
+ assert_raises(ValueError, vi.__setitem__, 1, [2, 3])
+ assert_equal(vi[1, 1], 4)
+ assert_raises(ValueError, vi.__setitem__, [1, 1], 'a')
+ assert_equal(vi[0, 1:], [1, 2])
+ vi[0, 1] = 10
+ assert_equal(vi[0, 1], 10)
+ vi[0, 1] = 1
+
+ # test for vi[:, 0] and other slices
+ assert_raises(NotImplementedError, vi.__getitem__, (slice(None), 0))
+ assert_raises(NotImplementedError, vi.__setitem__, (slice(None), 0), 0)
+ assert_raises(NotImplementedError, vi.__delitem__, (slice(None), 0))
+ assert_raises(ValueError, vi.__getitem__, (0, 0, 0))
+ assert_raises(ValueError, vi.__setitem__, (0, 0, 0), 0)
+
+ assert_equal(
+ vi.to_xml().decode('utf-8'),
+ '<VoxelIndicesIJK>0 1 2\n3 4 6</VoxelIndicesIJK>'
+ )
+ assert_raises(TypeError, ci.Cifti2VoxelIndicesIJK, [0, 1])
+ vi = ci.Cifti2VoxelIndicesIJK([[1, 2, 3]])
+ assert_equal(len(vi), 1)
+
+
+def test_matrixindicesmap():
+ mim = ci.Cifti2MatrixIndicesMap(0, 'CIFTI_INDEX_TYPE_LABELS')
+ volume = ci.Cifti2Volume()
+ volume2 = ci.Cifti2Volume()
+ parcel = ci.Cifti2Parcel()
+
+ assert_is_none(mim.volume)
+ mim.append(volume)
+ mim.append(parcel)
+
+ assert_equal(mim.volume, volume)
+ assert_raises(ci.Cifti2HeaderError, mim.insert, 0, volume)
+ assert_raises(ci.Cifti2HeaderError, mim.__setitem__, 1, volume)
+
+ mim[0] = volume2
+ assert_equal(mim.volume, volume2)
+
+ del mim.volume
+ assert_is_none(mim.volume)
+ assert_raises(ValueError, delattr, mim, 'volume')
+
+ mim.volume = volume
+ assert_equal(mim.volume, volume)
+ mim.volume = volume2
+ assert_equal(mim.volume, volume2)
+
+ assert_raises(ValueError, setattr, mim, 'volume', parcel)
+
+
+def test_matrix():
+ m = ci.Cifti2Matrix()
+ assert_raises(TypeError, m, setattr, 'metadata', ci.Cifti2Parcel())
+ assert_raises(TypeError, m.__setitem__, 0, ci.Cifti2Parcel())
+ assert_raises(TypeError, m.insert, 0, ci.Cifti2Parcel())
+
+ mim_none = ci.Cifti2MatrixIndicesMap(None, 'CIFTI_INDEX_TYPE_LABELS')
+ mim_0 = ci.Cifti2MatrixIndicesMap(0, 'CIFTI_INDEX_TYPE_LABELS')
+ mim_1 = ci.Cifti2MatrixIndicesMap(1, 'CIFTI_INDEX_TYPE_LABELS')
+ mim_01 = ci.Cifti2MatrixIndicesMap([0, 1], 'CIFTI_INDEX_TYPE_LABELS')
+
+ assert_raises(ci.Cifti2HeaderError, m.insert, 0, mim_none)
+ assert_equal(m.mapped_indices, [])
+
+ h = ci.Cifti2Header(matrix=m)
+ assert_equal(m.mapped_indices, [])
+ m.insert(0, mim_0)
+ assert_equal(h.mapped_indices, [0])
+ assert_equal(h.number_of_mapped_indices, 1)
+ assert_raises(ci.Cifti2HeaderError, m.insert, 0, mim_0)
+ assert_raises(ci.Cifti2HeaderError, m.insert, 0, mim_01)
+ m[0] = mim_1
+ assert_equal(list(m.mapped_indices), [1])
+ m.insert(0, mim_0)
+ assert_equal(list(sorted(m.mapped_indices)), [0, 1])
+ assert_equal(h.number_of_mapped_indices, 2)
+ assert_equal(h.get_index_map(0), mim_0)
+ assert_equal(h.get_index_map(1), mim_1)
+ assert_raises(ci.Cifti2HeaderError, h.get_index_map, 2)
+
+
+def test_underscoring():
+ # Pairs taken from inflection tests
+ # https://github.com/jpvanhal/inflection/blob/663982e/test_inflection.py#L113-L125
+ pairs = (("Product", "product"),
+ ("SpecialGuest", "special_guest"),
+ ("ApplicationController", "application_controller"),
+ ("Area51Controller", "area51_controller"),
+ ("HTMLTidy", "html_tidy"),
+ ("HTMLTidyGenerator", "html_tidy_generator"),
+ ("FreeBSD", "free_bsd"),
+ ("HTML", "html"),
+ )
+
+ for camel, underscored in pairs:
+ assert_equal(ci.cifti2._underscore(camel), underscored)
+
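For reference, the conversion these pairs encode can be reproduced with two regular-expression passes. This is the inflection-style recipe and only a sketch; the actual `_underscore` helper in `cifti2.py` may be implemented differently.

    import re

    def underscore(word):
        """CamelCase (including acronym runs) to lowercase_with_underscores."""
        word = re.sub(r'([A-Z]+)([A-Z][a-z])', r'\1_\2', word)  # HTMLTidy -> HTML_Tidy
        word = re.sub(r'([a-z\d])([A-Z])', r'\1_\2', word)      # FreeBSD -> Free_BSD
        return word.replace('-', '_').lower()

    assert underscore('HTMLTidyGenerator') == 'html_tidy_generator'
    assert underscore('Area51Controller') == 'area51_controller'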
+
+class TestCifti2ImageAPI(_TDA):
+ """ Basic validation for Cifti2Image instances
+ """
+ # A callable returning an image from ``image_maker(data, header)``
+ image_maker = ci.Cifti2Image
+ # A callable returning a header from ``header_maker()``
+ header_maker = ci.Cifti2Header
+ # A callable returning a nifti header
+ ni_header_maker = Nifti2Header
+ example_shapes = ((2,), (2, 3), (2, 3, 4))
+ standard_extension = '.nii'
+
+ def make_imaker(self, arr, header=None, ni_header=None):
+ return lambda: self.image_maker(arr.copy(), header, ni_header)
diff --git a/nibabel/cifti2/tests/test_cifti2io.py b/nibabel/cifti2/tests/test_cifti2io.py
new file mode 100644
index 0000000000..8052f9ac4c
--- /dev/null
+++ b/nibabel/cifti2/tests/test_cifti2io.py
@@ -0,0 +1,454 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the NiBabel package for the
+# copyright and license terms.
+#
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+from __future__ import division, print_function, absolute_import
+
+from os.path import join as pjoin, dirname
+import io
+
+from distutils.version import LooseVersion
+
+import nibabel as nib
+from nibabel import cifti2 as ci
+from nibabel.cifti2.parse_cifti2 import _Cifti2AsNiftiHeader
+
+from nibabel.tmpdirs import InTemporaryDirectory
+from nibabel.tests.nibabel_data import get_nibabel_data, needs_nibabel_data
+from nibabel.tests.test_nifti2 import TestNifti2SingleHeader
+
+from numpy.testing import assert_array_almost_equal
+from nose.tools import (assert_true, assert_equal, assert_raises)
+
+NIBABEL_TEST_DATA = pjoin(dirname(nib.__file__), 'tests', 'data')
+NIFTI2_DATA = pjoin(NIBABEL_TEST_DATA, 'example_nifti2.nii.gz')
+
+CIFTI2_DATA = pjoin(get_nibabel_data(), 'nitest-cifti2')
+
+DATA_FILE1 = pjoin(CIFTI2_DATA, '')
+DATA_FILE2 = pjoin(CIFTI2_DATA,
+ 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dscalar.nii')
+DATA_FILE3 = pjoin(CIFTI2_DATA,
+ 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dtseries.nii')
+DATA_FILE4 = pjoin(CIFTI2_DATA,
+ 'Conte69.MyelinAndCorrThickness.32k_fs_LR.ptseries.nii')
+DATA_FILE5 = pjoin(CIFTI2_DATA,
+ 'Conte69.parcellations_VGD11b.32k_fs_LR.dlabel.nii')
+DATA_FILE6 = pjoin(CIFTI2_DATA, 'ones.dscalar.nii')
+datafiles = [DATA_FILE2, DATA_FILE3, DATA_FILE4, DATA_FILE5, DATA_FILE6]
+
+
+def test_read_nifti2():
+ # Error trying to read a CIFTI2 image from a NIfTI2-only image.
+ filemap = ci.Cifti2Image.make_file_map()
+ for k in filemap:
+ filemap[k].fileobj = io.open(NIFTI2_DATA)
+ assert_raises(ValueError, ci.Cifti2Image.from_file_map, filemap)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_internal():
+ img2 = ci.load(DATA_FILE6)
+ assert_true(isinstance(img2.header, ci.Cifti2Header))
+ assert_equal(img2.shape, (1, 91282))
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_and_proxies():
+ img2 = nib.load(DATA_FILE6)
+ assert_true(isinstance(img2.header, ci.Cifti2Header))
+ assert_equal(img2.shape, (1, 91282))
+ # While we cannot reshape arrayproxies, all images are in-memory
+ assert_true(img2.in_memory)
+ data = img2.get_data()
+ assert_true(data is img2.dataobj)
+ # Uncaching has no effect, images are always array images
+ img2.uncache()
+ assert_true(data is img2.get_data())
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_version():
+ for i, dat in enumerate(datafiles):
+ img = nib.load(dat)
+ assert_equal(LooseVersion(img.header.version), LooseVersion('2'))
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_readwritedata():
+ with InTemporaryDirectory():
+ for name in datafiles:
+ img = ci.load(name)
+ ci.save(img, 'test.nii')
+ img2 = ci.load('test.nii')
+ assert_equal(len(img.header.matrix),
+ len(img2.header.matrix))
+ # Order should be preserved in load/save
+ for mim1, mim2 in zip(img.header.matrix,
+ img2.header.matrix):
+ named_maps1 = [m_ for m_ in mim1
+ if isinstance(m_, ci.Cifti2NamedMap)]
+ named_maps2 = [m_ for m_ in mim2
+ if isinstance(m_, ci.Cifti2NamedMap)]
+ assert_equal(len(named_maps1), len(named_maps2))
+ for map1, map2 in zip(named_maps1, named_maps2):
+ assert_equal(map1.map_name, map2.map_name)
+ if map1.label_table is None:
+ assert_true(map2.label_table is None)
+ else:
+ assert_equal(len(map1.label_table),
+ len(map2.label_table))
+ assert_array_almost_equal(img.dataobj, img2.dataobj)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_nibabel_readwritedata():
+ with InTemporaryDirectory():
+ for name in datafiles:
+ img = nib.load(name)
+ nib.save(img, 'test.nii')
+ img2 = nib.load('test.nii')
+ assert_equal(len(img.header.matrix),
+ len(img2.header.matrix))
+ # Order should be preserved in load/save
+ for mim1, mim2 in zip(img.header.matrix,
+ img2.header.matrix):
+ named_maps1 = [m_ for m_ in mim1
+ if isinstance(m_, ci.Cifti2NamedMap)]
+ named_maps2 = [m_ for m_ in mim2
+ if isinstance(m_, ci.Cifti2NamedMap)]
+ assert_equal(len(named_maps1), len(named_maps2))
+ for map1, map2 in zip(named_maps1, named_maps2):
+ assert_equal(map1.map_name, map2.map_name)
+ if map1.label_table is None:
+ assert_true(map2.label_table is None)
+ else:
+ assert_equal(len(map1.label_table),
+ len(map2.label_table))
+ assert_array_almost_equal(img.dataobj, img2.dataobj)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_cifti2types():
+ """Check that we instantiate Cifti2 classes correctly, and that our
+ test files exercise all classes"""
+ counter = {ci.Cifti2LabelTable: 0,
+ ci.Cifti2Label: 0,
+ ci.Cifti2NamedMap: 0,
+ ci.Cifti2Surface: 0,
+ ci.Cifti2VoxelIndicesIJK: 0,
+ ci.Cifti2Vertices: 0,
+ ci.Cifti2Parcel: 0,
+ ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ: 0,
+ ci.Cifti2Volume: 0,
+ ci.Cifti2VertexIndices: 0,
+ ci.Cifti2BrainModel: 0,
+ ci.Cifti2MatrixIndicesMap: 0,
+ }
+
+ for name in datafiles:
+ hdr = ci.load(name).header
+ # Matrix and MetaData aren't conditional, so don't bother counting
+ assert_true(isinstance(hdr.matrix, ci.Cifti2Matrix))
+ assert_true(isinstance(hdr.matrix.metadata, ci.Cifti2MetaData))
+ for mim in hdr.matrix:
+ assert_true(isinstance(mim, ci.Cifti2MatrixIndicesMap))
+ counter[ci.Cifti2MatrixIndicesMap] += 1
+ for map_ in mim:
+ print(map_)
+ if isinstance(map_, ci.Cifti2BrainModel):
+ counter[ci.Cifti2BrainModel] += 1
+ if isinstance(map_.vertex_indices, ci.Cifti2VertexIndices):
+ counter[ci.Cifti2VertexIndices] += 1
+ if isinstance(map_.voxel_indices_ijk,
+ ci.Cifti2VoxelIndicesIJK):
+ counter[ci.Cifti2VoxelIndicesIJK] += 1
+ elif isinstance(map_, ci.Cifti2NamedMap):
+ counter[ci.Cifti2NamedMap] += 1
+ assert_true(isinstance(map_.metadata, ci.Cifti2MetaData))
+ if isinstance(map_.label_table, ci.Cifti2LabelTable):
+ counter[ci.Cifti2LabelTable] += 1
+ for label in map_.label_table:
+ assert_true(isinstance(map_.label_table[label],
+ ci.Cifti2Label))
+ counter[ci.Cifti2Label] += 1
+ elif isinstance(map_, ci.Cifti2Parcel):
+ counter[ci.Cifti2Parcel] += 1
+ if isinstance(map_.voxel_indices_ijk,
+ ci.Cifti2VoxelIndicesIJK):
+ counter[ci.Cifti2VoxelIndicesIJK] += 1
+ assert_true(isinstance(map_.vertices, list))
+ for vtcs in map_.vertices:
+ assert_true(isinstance(vtcs, ci.Cifti2Vertices))
+ counter[ci.Cifti2Vertices] += 1
+ elif isinstance(map_, ci.Cifti2Surface):
+ counter[ci.Cifti2Surface] += 1
+ elif isinstance(map_, ci.Cifti2Volume):
+ counter[ci.Cifti2Volume] += 1
+ if isinstance(map_.transformation_matrix_voxel_indices_ijk_to_xyz,
+ ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ):
+ counter[ci.Cifti2TransformationMatrixVoxelIndicesIJKtoXYZ] += 1
+
+ assert_equal(list(mim.named_maps),
+ [m_ for m_ in mim if isinstance(m_, ci.Cifti2NamedMap)])
+ assert_equal(list(mim.surfaces),
+ [m_ for m_ in mim if isinstance(m_, ci.Cifti2Surface)])
+ assert_equal(list(mim.parcels),
+ [m_ for m_ in mim if isinstance(m_, ci.Cifti2Parcel)])
+ assert_equal(list(mim.brain_models),
+ [m_ for m_ in mim if isinstance(m_, ci.Cifti2BrainModel)])
+ assert_equal([mim.volume] if mim.volume else [],
+ [m_ for m_ in mim if isinstance(m_, ci.Cifti2Volume)])
+
+ for klass, count in counter.items():
+ assert_true(count > 0, "No exercise of " + klass.__name__)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_geometry():
+ img = ci.Cifti2Image.from_filename(DATA_FILE6)
+ geometry_mapping = img.header.matrix.get_index_map(1)
+
+ # For every brain model in ones.dscalar.nii, this lists:
+ # brain structure name, number of grayordinates, first vertex or voxel, last vertex or voxel
+ expected_geometry = [('CIFTI_STRUCTURE_CORTEX_LEFT', 29696, 0, 32491),
+ ('CIFTI_STRUCTURE_CORTEX_RIGHT', 29716, 0, 32491),
+ ('CIFTI_STRUCTURE_ACCUMBENS_LEFT', 135, [49, 66, 28], [48, 72, 35]),
+ ('CIFTI_STRUCTURE_ACCUMBENS_RIGHT', 140, [40, 66, 29], [43, 66, 36]),
+ ('CIFTI_STRUCTURE_AMYGDALA_LEFT', 315, [55, 61, 21], [56, 58, 31]),
+ ('CIFTI_STRUCTURE_AMYGDALA_RIGHT', 332, [34, 62, 20], [36, 61, 31]),
+ ('CIFTI_STRUCTURE_BRAIN_STEM', 3472, [42, 41, 0], [46, 50, 36]),
+ ('CIFTI_STRUCTURE_CAUDATE_LEFT', 728, [50, 72, 32], [53, 60, 49]),
+ ('CIFTI_STRUCTURE_CAUDATE_RIGHT', 755, [40, 68, 33], [37, 62, 49]),
+ ('CIFTI_STRUCTURE_CEREBELLUM_LEFT', 8709, [49, 35, 4], [46, 37, 37]),
+ ('CIFTI_STRUCTURE_CEREBELLUM_RIGHT', 9144, [38, 35, 4], [44, 38, 36]),
+ ('CIFTI_STRUCTURE_DIENCEPHALON_VENTRAL_LEFT', 706, [52, 53, 26], [56, 49, 35]),
+ ('CIFTI_STRUCTURE_DIENCEPHALON_VENTRAL_RIGHT', 712, [39, 54, 26], [35, 49, 36]),
+ ('CIFTI_STRUCTURE_HIPPOCAMPUS_LEFT', 764, [55, 60, 21], [54, 44, 39]),
+ ('CIFTI_STRUCTURE_HIPPOCAMPUS_RIGHT', 795, [33, 60, 21], [38, 45, 39]),
+ ('CIFTI_STRUCTURE_PALLIDUM_LEFT', 297, [56, 59, 32], [55, 61, 39]),
+ ('CIFTI_STRUCTURE_PALLIDUM_RIGHT', 260, [36, 62, 32], [35, 62, 39]),
+ ('CIFTI_STRUCTURE_PUTAMEN_LEFT', 1060, [51, 66, 28], [58, 64, 43]),
+ ('CIFTI_STRUCTURE_PUTAMEN_RIGHT', 1010, [34, 66, 29], [31, 62, 43]),
+ ('CIFTI_STRUCTURE_THALAMUS_LEFT', 1288, [55, 47, 33], [52, 53, 46]),
+ ('CIFTI_STRUCTURE_THALAMUS_RIGHT', 1248, [32, 47, 34], [38, 55, 46])]
+ current_index = 0
+ for from_file, expected in zip(geometry_mapping.brain_models, expected_geometry):
+ assert_true(from_file.model_type in ("CIFTI_MODEL_TYPE_SURFACE", "CIFTI_MODEL_TYPE_VOXELS"))
+ assert_equal(from_file.brain_structure, expected[0])
+ assert_equal(from_file.index_offset, current_index)
+ assert_equal(from_file.index_count, expected[1])
+ current_index += from_file.index_count
+
+ if from_file.model_type == 'CIFTI_MODEL_TYPE_SURFACE':
+ assert_equal(from_file.voxel_indices_ijk, None)
+ assert_equal(len(from_file.vertex_indices), expected[1])
+ assert_equal(from_file.vertex_indices[0], expected[2])
+ assert_equal(from_file.vertex_indices[-1], expected[3])
+ assert_equal(from_file.surface_number_of_vertices, 32492)
+ else:
+ assert_equal(from_file.vertex_indices, None)
+ assert_equal(from_file.surface_number_of_vertices, None)
+ assert_equal(len(from_file.voxel_indices_ijk), expected[1])
+ assert_equal(from_file.voxel_indices_ijk[0], expected[2])
+ assert_equal(from_file.voxel_indices_ijk[-1], expected[3])
+ assert_equal(current_index, img.shape[1])
+
+ expected_affine = [[-2, 0, 0, 90],
+ [ 0, 2, 0, -126],
+ [ 0, 0, 2, -72],
+ [ 0, 0, 0, 1]]
+ expected_dimensions = (91, 109, 91)
+ assert_true((geometry_mapping.volume.transformation_matrix_voxel_indices_ijk_to_xyz.matrix ==
+ expected_affine).all())
+ assert_equal(geometry_mapping.volume.volume_dimensions, expected_dimensions)
+
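To make the intent of the geometry map explicit, here is a sketch of how such a mapping is typically consumed: building a lookup from brain structure to the column slice it occupies in the data matrix. It assumes the same ones.dscalar.nii test file is available on disk and uses the counts from the table above.

    import nibabel as nib

    img = nib.load('ones.dscalar.nii')                    # path is illustrative
    geometry_mapping = img.header.matrix.get_index_map(1)
    columns = {}
    for bm in geometry_mapping.brain_models:
        columns[bm.brain_structure] = slice(bm.index_offset,
                                            bm.index_offset + bm.index_count)
    left_cortex = img.get_data()[:, columns['CIFTI_STRUCTURE_CORTEX_LEFT']]
    assert left_cortex.shape == (1, 29696)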
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_parcels():
+ img = ci.Cifti2Image.from_filename(DATA_FILE4)
+ parcel_mapping = img.header.matrix.get_index_map(1)
+
+ expected_parcels = [('MEDIAL.WALL', ((719, 20, 28550), (810, 21, 28631))),
+ ('BA2_FRB08', ((516, 6757, 17888), (461, 6757, 17887))),
+ ('BA1_FRB08', ((211, 5029, 17974), (214, 3433, 17934))),
+ ('BA3b_FRB08', ((444, 3436, 18065), (397, 3436, 18065))),
+ ('BA4p_FRB08', ((344, 3445, 18164), (371, 3443, 18175))),
+ ('BA3a_FRB08', ((290, 3441, 18140), (289, 3440, 18140))),
+ ('BA4a_FRB08', ((471, 3446, 18181), (455, 3446, 19759))),
+ ('BA6_FRB08', ((1457, 2, 30951), (1400, 2, 30951))),
+ ('BA17_V1_FRB08', ((629, 23155, 25785), (635, 23155, 25759))),
+ ('BA45_FRB08', ((245, 10100, 18774), (214, 10103, 18907))),
+ ('BA44_FRB08', ((226, 10118, 19240), (273, 10119, 19270))),
+ ('hOc5_MT_FRB08', ((104, 15019, 23329), (80, 15023, 23376))),
+ ('BA18_V2_FRB08', ((702, 95, 25902), (651, 98, 25903))),
+ ('V3A_SHM07', ((82, 4, 25050), (82, 4, 25050))),
+ ('V3B_SHM07', ((121, 13398, 23303), (121, 13398, 23303))),
+ ('LO1_KPO10', ((54, 15007, 23543), (54, 15007, 23543))),
+ ('LO2_KPO10', ((79, 15013, 23636), (79, 15013, 23636))),
+ ('PITd_KPO10', ((53, 15018, 23769), (65, 15018, 23769))),
+ ('PITv_KPO10', ((72, 23480, 23974), (72, 23480, 23974))),
+ ('OP1_BSW08', ((470, 8421, 18790), (470, 8421, 18790))),
+ ('OP2_BSW08', ((67, 10, 31060), (67, 10, 31060))),
+ ('OP3_BSW08', ((119, 10137, 18652), (119, 10137, 18652))),
+ ('OP4_BSW08', ((191, 16613, 19429), (192, 16613, 19429))),
+ ('IPS1_SHM07', ((54, 11775, 14496), (54, 11775, 14496))),
+ ('IPS2_SHM07', ((71, 11771, 14587), (71, 11771, 14587))),
+ ('IPS3_SHM07', ((114, 11764, 14783), (114, 11764, 14783))),
+ ('IPS4_SHM07', ((101, 11891, 12653), (101, 11891, 12653))),
+ ('V7_SHM07', ((140, 11779, 14002), (140, 11779, 14002))),
+ ('V4v_SHM07', ((81, 23815, 24557), (90, 23815, 24557))),
+ ('V3d_KPO10', ((90, 23143, 25192), (115, 23143, 25192))),
+ ('14c_OFP03', ((22, 19851, 21311), (22, 19851, 21311))),
+ ('13a_OFP03', ((20, 20963, 21154), (20, 20963, 21154))),
+ ('47s_OFP03', ((211, 10182, 20343), (211, 10182, 20343))),
+ ('14r_OFP03', ((54, 21187, 21324), (54, 21187, 21324))),
+ ('13m_OFP03', ((103, 20721, 21075), (103, 20721, 21075))),
+ ('13l_OFP03', ((101, 20466, 20789), (101, 20466, 20789))),
+ ('32pl_OFP03', ((14, 19847, 21409), (14, 19847, 21409))),
+ ('25_OFP03', ((8, 19844, 27750), (8, 19844, 27750))),
+ ('47m_OFP03', ((200, 10174, 20522), (200, 10174, 20522))),
+ ('47l_OFP03', ((142, 10164, 19969), (160, 10164, 19969))),
+ ('Iai_OFP03', ((153, 10188, 20199), (153, 10188, 20199))),
+ ('10r_OFP03', ((138, 19811, 28267), (138, 19811, 28267))),
+ ('11m_OFP03', ((92, 20850, 21165), (92, 20850, 21165))),
+ ('11l_OFP03', ((200, 20275, 21029), (200, 20275, 21029))),
+ ('47r_OFP03', ((259, 10094, 20535), (259, 10094, 20535))),
+ ('10m_OFP03', ((102, 19825, 21411), (102, 19825, 21411))),
+ ('Iam_OFP03', ((15, 20346, 20608), (15, 20346, 20608))),
+ ('Ial_OFP03', ((89, 10194, 11128), (89, 10194, 11128))),
+ ('24_OFP03', ((39, 19830, 28279), (36, 19830, 28279))),
+ ('Iapm_OFP03', ((7, 20200, 20299), (7, 20200, 20299))),
+ ('10p_OFP03', ((480, 19780, 28640), (480, 19780, 28640))),
+ ('V6_PHG06', ((72, 12233, 12869), (72, 12233, 12869))),
+ ('ER_FRB08', ((103, 21514, 26470), (103, 21514, 26470))),
+ ('13b_OFP03', ((60, 21042, 21194), (71, 21040, 21216)))]
+
+ assert_equal(img.shape[1], len(expected_parcels))
+ assert_equal(len(list(parcel_mapping.parcels)), len(expected_parcels))
+
+ for (name, expected_surfaces), parcel in zip(expected_parcels, parcel_mapping.parcels):
+ assert_equal(parcel.name, name)
+ assert_equal(len(parcel.vertices), 2)
+ for vertices, orientation, (length, first_element, last_element) in zip(parcel.vertices, ('LEFT', 'RIGHT'),
+ expected_surfaces):
+ assert_equal(len(vertices), length)
+ assert_equal(vertices[0], first_element)
+ assert_equal(vertices[-1], last_element)
+ assert_equal(vertices.brain_structure, 'CIFTI_STRUCTURE_CORTEX_%s' % orientation)
+
+
+@needs_nibabel_data('nitest-cifti2')
+def test_read_scalar():
+ img = ci.Cifti2Image.from_filename(DATA_FILE2)
+ scalar_mapping = img.header.matrix.get_index_map(0)
+
+ expected_names = ('MyelinMap_BC_decurv', 'corrThickness')
+ assert_equal(img.shape[0], len(expected_names))
+ assert_equal(len(list(scalar_mapping.named_maps)), len(expected_names))
+
+ expected_meta = [('PaletteColorMapping', '
+ >>> import os
+ >>> import nibabel as nib
+ >>> from nibabel.testing import data_path
+ >>> img_fname = os.path.join(data_path, 'example4d.nii.gz')
+
+ >>> img = nib.load(img_fname) # This is a proxy image
+ >>> nib.is_proxy(img.dataobj)
+ True
+
+ The array is not yet cached by a call to "get_data", so:
+
+ >>> img.in_memory
+ False
+
+ After we call ``get_data`` using the default `caching` == 'fill', the
+ cache contains a reference to the returned array ``data``:
+
+ >>> data = img.get_data()
+ >>> img.in_memory
+ True
+
+ We modify an element in the returned data array:
+
+ >>> data[0, 0, 0, 0]
+ 0
+ >>> data[0, 0, 0, 0] = 99
+ >>> data[0, 0, 0, 0]
+ 99
+
+ The next time we call 'get_data', the method returns the cached
+ reference to the (modified) array:
+
+ >>> data_again = img.get_data()
+ >>> data_again is data
+ True
+ >>> data_again[0, 0, 0, 0]
+ 99
+
+ If you had *initially* used `caching` == 'unchanged' then the returned
+ ``data`` array would have been loaded from file, but not cached, and:
+
+ >>> img = nib.load(img_fname) # a proxy image again
+ >>> data = img.get_data(caching='unchanged')
+ >>> img.in_memory
+ False
+ >>> data[0, 0, 0] = 99
+ >>> data_again = img.get_data(caching='unchanged')
+ >>> data_again is data
+ False
+ >>> data_again[0, 0, 0, 0]
+ 0
+ """
+ if caching not in ('fill', 'unchanged'):
+ raise ValueError('caching value should be "fill" or "unchanged"')
+ if self._data_cache is not None:
+ return self._data_cache
+ data = np.asanyarray(self._dataobj)
+ if caching == 'fill':
+ self._data_cache = data
+ return data
+
+ @property
+ def in_memory(self):
+ """ True when array data is in memory
+ """
+ return (isinstance(self._dataobj, np.ndarray) or
+ self._data_cache is not None)
+
+ def uncache(self):
+ """ Delete any cached read of data from proxied data
+
+ Remember there are two types of images:
+
+ * *array images* where the data ``img.dataobj`` is an array
+ * *proxy images* where the data ``img.dataobj`` is a proxy object
+
+ If you call ``img.get_data()`` on a proxy image, the result of reading
+ from the proxy gets cached inside the image object, and this cache is
+ what gets returned from the next call to ``img.get_data()``. If you
+ modify the returned data, as in::
+
+ data = img.get_data()
+ data[:] = 42
+
+ then the next call to ``img.get_data()`` returns the modified array,
+ whether the image is an array image or a proxy image::
+
+ assert np.all(img.get_data() == 42)
+
+ When you uncache an array image, this has no effect on the return of
+ ``img.get_data()``, but when you uncache a proxy image, the result of
+ ``img.get_data()`` returns to its original value.
+ """
+ self._data_cache = None
+
+ @property
+ def shape(self):
+ return self._dataobj.shape
+
+ @deprecate_with_version('get_shape method is deprecated.\n'
+ 'Please use the ``img.shape`` property '
+ 'instead.',
+ '1.2', '3.0')
+ def get_shape(self):
+ """ Return shape for image
+ """
+ return self.shape
diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py
index 3517845137..7cc5b10648 100644
--- a/nibabel/filebasedimages.py
+++ b/nibabel/filebasedimages.py
@@ -8,6 +8,7 @@
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
''' Common interface for any image format--volume or surface, binary or xml.'''
+from copy import deepcopy
from six import string_types
from .fileholders import FileHolder
from .filename_parser import (types_filenames, TypesFilenamesError,
@@ -55,7 +56,7 @@ def copy(self):
The copy should not be affected by any changes to the original
object.
'''
- raise NotImplementedError
+ return deepcopy(self)
class FileBasedImage(object):
@@ -188,11 +189,7 @@ def __init__(self, header=None, extra=None, file_map=None):
file_map : mapping, optional
mapping giving file information for this image format
'''
-
- if header or self.header_class:
- self._header = self.header_class.from_header(header)
- else:
- self._header = None
+ self._header = self.header_class.from_header(header)
if extra is None:
extra = {}
self.extra = extra
@@ -230,10 +227,9 @@ def get_filename(self):
-------
fname : None or str
Returns None if there is no filename, or a filename string.
- If an image may have several filenames assoctiated with it
- (e.g Analyze ``.img, .hdr`` pair) then we return the more
- characteristic filename (the ``.img`` filename in the case of
- Analyze')
+ If an image may have several filenames associated with it (e.g.
+ Analyze ``.img, .hdr`` pair) then we return the more characteristic
+ filename (the ``.img`` filename in the case of Analyze)
'''
# which filename is returned depends on the ordering of the
# 'files_types' class attribute - we return the name
diff --git a/nibabel/imageclasses.py b/nibabel/imageclasses.py
index dca230f67c..f136a070be 100644
--- a/nibabel/imageclasses.py
+++ b/nibabel/imageclasses.py
@@ -9,6 +9,7 @@
''' Define supported image classes and names '''
from .analyze import AnalyzeImage
+from .cifti2 import Cifti2Image
from .freesurfer import MGHImage
from .gifti import GiftiImage
from .minc1 import Minc1Image
@@ -26,7 +27,8 @@
# Ordered by the load/save priority.
-all_image_classes = [Nifti1Pair, Nifti1Image, Nifti2Pair, Nifti2Image,
+all_image_classes = [Nifti1Pair, Nifti1Image, Nifti2Pair,
+ Cifti2Image, Nifti2Image, # Cifti2 before Nifti2
Spm2AnalyzeImage, Spm99AnalyzeImage, AnalyzeImage,
Minc1Image, Minc2Image, MGHImage,
PARRECImage, GiftiImage]
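Because CIFTI-2 files reuse the .nii extension, the ordering above matters: Cifti2Image gets the first chance to claim a file and, as far as the sniffing in parse_cifti2 goes (an assumption, not shown here), only does so when the NIfTI-2 header carries a CIFTI-2 intent code, so plain NIfTI files still load as before. A usage sketch with illustrative file names:

    import nibabel as nib

    img = nib.load('ones.dscalar.nii')      # CIFTI-2 intent code present
    print(type(img).__name__)               # Cifti2Image

    img = nib.load('example4d.nii.gz')      # ordinary NIfTI-1
    print(type(img).__name__)               # Nifti1Image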
diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py
index d2c413d97d..1dc576498b 100644
--- a/nibabel/loadsave.py
+++ b/nibabel/loadsave.py
@@ -42,7 +42,8 @@ def load(filename, **kwargs):
for image_klass in all_image_classes:
is_valid, sniff = image_klass.path_maybe_image(filename, sniff)
if is_valid:
- return image_klass.from_filename(filename, **kwargs)
+ img = image_klass.from_filename(filename, **kwargs)
+ return img
raise ImageFileError('Cannot work out file type of "%s"' %
filename)
@@ -105,6 +106,10 @@ def save(img, filename):
# Inline imports, as this module really shouldn't reference any image type
from .nifti1 import Nifti1Image, Nifti1Pair
from .nifti2 import Nifti2Image, Nifti2Pair
+
+ klass = None
+ converted = None
+
if type(img) == Nifti1Image and lext in ('.img', '.hdr'):
klass = Nifti1Pair
elif type(img) == Nifti2Image and lext in ('.img', '.hdr'):
@@ -119,8 +124,23 @@ def save(img, filename):
if not valid_klasses: # if list is empty
raise ImageFileError('Cannot work out file type of "%s"' %
filename)
- klass = valid_klasses[0]
- converted = klass.from_image(img)
+
+ # Got a list of valid extensions, but that's no guarantee
+ # the file conversion will work. So, try each image
+ # in order...
+ for klass in valid_klasses:
+ try:
+ converted = klass.from_image(img)
+ break
+ except Exception as e:
+ err = e
+ # ... and if none of them work, raise an error.
+ if converted is None:
+ raise err
+
+ # Here, we either have a klass or a converted image.
+ if converted is None:
+ converted = klass.from_image(img)
converted.to_filename(filename)
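The control flow added to save() above is a first-success-wins conversion loop; note the exception is stashed in a separate name because Python 3 unbinds the ``except ... as e`` variable at the end of the block. Stripped of the surrounding filename logic, the pattern is roughly this sketch (function name is hypothetical):

    def convert_with_fallback(img, valid_klasses):
        """Try each candidate class in priority order; re-raise the last failure."""
        err = None
        for klass in valid_klasses:
            try:
                return klass.from_image(img)
            except Exception as exc:        # conversion can fail for many reasons
                err = exc
        raise err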
diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py
index d3653065fd..10369800b8 100644
--- a/nibabel/nifti1.py
+++ b/nibabel/nifti1.py
@@ -232,18 +232,6 @@
(2003, 'rgb vector', (), "NIFTI_INTENT_RGB_VECTOR"),
(2004, 'rgba vector', (), "NIFTI_INTENT_RGBA_VECTOR"),
(2005, 'shape', (), "NIFTI_INTENT_SHAPE"),
- # The codes below appear on the CIFTI page, but don't appear to have
- # reached the nifti standard as of 19 August 2013
- # https://www.nitrc.org/plugins/mwiki/index.php/cifti:ConnectivityMatrixFileFormats
- (3001, 'dense connectivity', (), 'NIFTI_INTENT_CONNECTIVITY_DENSE'),
- (3002, 'dense time connectivity', (),
- 'NIFTI_INTENT_CONNECTIVITY_DENSE_TIME'),
- (3003, 'parcellated connectivity', (),
- 'NIFTI_INTENT_CONNECTIVITY_PARCELLATED'),
- (3004, 'parcellated time connectivity', (),
- "NIFTI_INTENT_CONNECTIVITY_PARCELLATED_TIME"),
- (3005, 'trajectory connectivity', (),
- 'NIFTI_INTENT_CONNECTIVITY_CONNECTIVITY_TRAJECTORY'),
), fields=('code', 'label', 'parameters', 'niistring'))
@@ -482,9 +470,8 @@ def _mangle(self, dataset):
(10, "jimdiminfo", Nifti1Extension),
(12, "workflow_fwds", Nifti1Extension),
(14, "freesurfer", Nifti1Extension),
- (16, "pypickle", Nifti1Extension)
-),
- fields=('code', 'label', 'handler'))
+ (16, "pypickle", Nifti1Extension),
+), fields=('code', 'label', 'handler'))
class Nifti1Extensions(list):
diff --git a/nibabel/nifti2.py b/nibabel/nifti2.py
index 93c8ebfe5e..45e834b29a 100644
--- a/nibabel/nifti2.py
+++ b/nibabel/nifti2.py
@@ -11,10 +11,6 @@
Format described here:
https://www.nitrc.org/forum/message.php?msg_id=3738
-
-Stuff about the CIFTI file format here:
-
- https://www.nitrc.org/plugins/mwiki/index.php/cifti:ConnectivityMatrixFileFormats
'''
import numpy as np
diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py
index d910f7bb22..78d3c9a9dd 100644
--- a/nibabel/spatialimages.py
+++ b/nibabel/spatialimages.py
@@ -135,7 +135,8 @@
import numpy as np
-from .filebasedimages import FileBasedHeader, FileBasedImage
+from .filebasedimages import FileBasedHeader
+from .dataobj_images import DataobjImage
from .filebasedimages import ImageFileError # flake8: noqa; for back-compat
from .viewers import OrthoSlicer3D
from .volumeutils import shape_zoom_affine
@@ -319,7 +320,7 @@ class ImageDataError(Exception):
pass
-class SpatialImage(FileBasedImage):
+class SpatialImage(DataobjImage):
''' Template class for volumetric (3D/4D) images '''
header_class = SpatialHeader
@@ -327,7 +328,7 @@ def __init__(self, dataobj, affine, header=None,
extra=None, file_map=None):
''' Initialize image
- The image is a combination of (array, affine matrix, header), with
+ The image is a combination of (array-like, affine matrix, header), with
optional metadata in `extra`, and filename / file-like objects
contained in the `file_map` mapping.
@@ -350,9 +351,8 @@ def __init__(self, dataobj, affine, header=None,
file_map : mapping, optional
mapping giving file information for this image format
'''
- super(SpatialImage, self).__init__(header=header, extra=extra,
+ super(SpatialImage, self).__init__(dataobj, header=header, extra=extra,
file_map=file_map)
- self._dataobj = dataobj
if not affine is None:
# Check that affine is array-like 4,4. Maybe this is too strict at
# this abstract level, but so far I think all image formats we know
@@ -370,20 +370,8 @@ def __init__(self, dataobj, affine, header=None,
self._header.set_data_dtype(dataobj.dtype)
# make header correspond with image and affine
self.update_header()
- self._load_cache = None
self._data_cache = None
- @property
- @deprecate_with_version('_data attribute not part of public API. '
- 'please use "dataobj" property instead.',
- '2.0', '4.0')
- def _data(self):
- return self._dataobj
-
- @property
- def dataobj(self):
- return self._dataobj
-
@property
def affine(self):
return self._affine
@@ -410,7 +398,7 @@ def update_header(self):
if hdr.get_data_shape() != shape:
hdr.set_data_shape(shape)
# If the affine is not None, and it is different from the main affine
- # in the header, update the heaader
+ # in the header, update the header
if self._affine is None:
return
if np.allclose(self._affine, hdr.get_best_affine()):
@@ -437,190 +425,6 @@ def __str__(self):
'metadata:',
'%s' % self._header))
- def get_data(self, caching='fill'):
- """ Return image data from image with any necessary scalng applied
-
- The image ``dataobj`` property can be an array proxy or an array. An
- array proxy is an object that knows how to load the image data from
- disk. An image with an array proxy ``dataobj`` is a *proxy image*; an
- image with an array in ``dataobj`` is an *array image*.
-
- The default behavior for ``get_data()`` on a proxy image is to read the
- data from the proxy, and store in an internal cache. Future calls to
- ``get_data`` will return the cached array. This is the behavior
- selected with `caching` == "fill".
-
- Once the data has been cached and returned from an array proxy, if you
- modify the returned array, you will also modify the cached array
- (because they are the same array). Regardless of the `caching` flag,
- this is always true of an array image.
-
- Parameters
- ----------
- caching : {'fill', 'unchanged'}, optional
- See the Notes section for a detailed explanation. This argument
- specifies whether the image object should fill in an internal
- cached reference to the returned image data array. "fill" specifies
- that the image should fill an internal cached reference if
- currently empty. Future calls to ``get_data`` will return this
- cached reference. You might prefer "fill" to save the image object
- from having to reload the array data from disk on each call to
- ``get_data``. "unchanged" means that the image should not fill in
- the internal cached reference if the cache is currently empty. You
- might prefer "unchanged" to "fill" if you want to make sure that
- the call to ``get_data`` does not create an extra (cached)
- reference to the returned array. In this case it is easier for
- Python to free the memory from the returned array.
-
- Returns
- -------
- data : array
- array of image data
-
- See also
- --------
- uncache: empty the array data cache
-
- Notes
- -----
- All images have a property ``dataobj`` that represents the image array
- data. Images that have been loaded from files usually do not load the
- array data from file immediately, in order to reduce image load time
- and memory use. For these images, ``dataobj`` is an *array proxy*; an
- object that knows how to load the image array data from file.
-
- By default (`caching` == "fill"), when you call ``get_data`` on a
- proxy image, we load the array data from disk, store (cache) an
- internal reference to this array data, and return the array. The next
- time you call ``get_data``, you will get the cached reference to the
- array, so we don't have to load the array data from disk again.
-
- Array images have a ``dataobj`` property that already refers to an
- array in memory, so there is no benefit to caching, and the `caching`
- keywords have no effect.
-
- For proxy images, you may not want to fill the cache after reading the
- data from disk because the cache will hold onto the array memory until
- the image object is deleted, or you use the image ``uncache`` method.
- If you don't want to fill the cache, then always use
- ``get_data(caching='unchanged')``; in this case ``get_data`` will not
- fill the cache (store the reference to the array) if the cache is empty
- (no reference to the array). If the cache is full, "unchanged" leaves
- the cache full and returns the cached array reference.
-
- The cache can effect the behavior of the image, because if the cache is
- full, or you have an array image, then modifying the returned array
- will modify the result of future calls to ``get_data()``. For example
- you might do this:
-
- >>> import os
- >>> import nibabel as nib
- >>> from nibabel.testing import data_path
- >>> img_fname = os.path.join(data_path, 'example4d.nii.gz')
-
- >>> img = nib.load(img_fname) # This is a proxy image
- >>> nib.is_proxy(img.dataobj)
- True
-
- The array is not yet cached by a call to "get_data", so:
-
- >>> img.in_memory
- False
-
- After we call ``get_data`` using the default `caching` == 'fill', the
- cache contains a reference to the returned array ``data``:
-
- >>> data = img.get_data()
- >>> img.in_memory
- True
-
- We modify an element in the returned data array:
-
- >>> data[0, 0, 0, 0]
- 0
- >>> data[0, 0, 0, 0] = 99
- >>> data[0, 0, 0, 0]
- 99
-
- The next time we call 'get_data', the method returns the cached
- reference to the (modified) array:
-
- >>> data_again = img.get_data()
- >>> data_again is data
- True
- >>> data_again[0, 0, 0, 0]
- 99
-
- If you had *initially* used `caching` == 'unchanged' then the returned
- ``data`` array would have been loaded from file, but not cached, and:
-
- >>> img = nib.load(img_fname) # a proxy image again
- >>> data = img.get_data(caching='unchanged')
- >>> img.in_memory
- False
- >>> data[0, 0, 0] = 99
- >>> data_again = img.get_data(caching='unchanged')
- >>> data_again is data
- False
- >>> data_again[0, 0, 0, 0]
- 0
- """
- if caching not in ('fill', 'unchanged'):
- raise ValueError('caching value should be "fill" or "unchanged"')
- if self._data_cache is not None:
- return self._data_cache
- data = np.asanyarray(self._dataobj)
- if caching == 'fill':
- self._data_cache = data
- return data
-
- @property
- def in_memory(self):
- """ True when array data is in memory
- """
- return (isinstance(self._dataobj, np.ndarray)
- or self._data_cache is not None)
-
- def uncache(self):
- """ Delete any cached read of data from proxied data
-
- Remember there are two types of images:
-
- * *array images* where the data ``img.dataobj`` is an array
- * *proxy images* where the data ``img.dataobj`` is a proxy object
-
- If you call ``img.get_data()`` on a proxy image, the result of reading
- from the proxy gets cached inside the image object, and this cache is
- what gets returned from the next call to ``img.get_data()``. If you
- modify the returned data, as in::
-
- data = img.get_data()
- data[:] = 42
-
- then the next call to ``img.get_data()`` returns the modified array,
- whether the image is an array image or a proxy image::
-
- assert np.all(img.get_data() == 42)
-
- When you uncache an array image, this has no effect on the return of
- ``img.get_data()``, but when you uncache a proxy image, the result of
- ``img.get_data()`` returns to its original value.
- """
- self._data_cache = None
-
- @property
- def shape(self):
- return self._dataobj.shape
-
- @deprecate_with_version('get_shape method is deprecated.\n'
- 'Please use the ``img.shape`` property '
- 'instead.',
- '1.2', '3.0')
- def get_shape(self):
- """ Return shape for image
- """
- return self.shape
-
def get_data_dtype(self):
return self._header.get_data_dtype()
diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py
index da89808e4f..48a40b36c1 100644
--- a/nibabel/tests/test_analyze.py
+++ b/nibabel/tests/test_analyze.py
@@ -130,6 +130,9 @@ def test_checks(self):
hdr = hdr_t.copy()
hdr['bitpix'] = 0
assert_equal(self._dxer(hdr), 'bitpix does not match datatype')
+
+ def test_pixdim_checks(self):
+ hdr_t = self.header_class()
for i in (1, 2, 3):
hdr = hdr_t.copy()
hdr['pixdim'][i] = -1
@@ -176,7 +179,10 @@ def test_log_checks(self):
assert_equal(message, 'bitpix does not match datatype; '
'setting bitpix to match datatype')
assert_raises(*raiser)
+
+ def test_pixdim_log_checks(self):
# pixdim positive
+ HC = self.header_class
hdr = HC()
hdr['pixdim'][1] = -2 # severity 35
fhdr, message, raiser = self.log_chk(hdr, 35)
@@ -232,18 +238,21 @@ def test_logger_error(self):
# Make a new logger
str_io = StringIO()
logger = logging.getLogger('test.logger')
- logger.setLevel(30) # defaultish level
logger.addHandler(logging.StreamHandler(str_io))
- # Prepare an error
- hdr['pixdim'][1] = 0 # severity 30
+ # Prepare a defect: bitpix not matching data type
+ hdr['datatype'] = 16 # float32
+ hdr['bitpix'] = 16 # severity 10
+ logger.setLevel(10)
log_cache = imageglobals.logger, imageglobals.error_level
try:
# Check log message appears in new logger
imageglobals.logger = logger
hdr.copy().check_fix()
- assert_equal(str_io.getvalue(), PIXDIM0_MSG + '\n')
+ assert_equal(str_io.getvalue(),
+ 'bitpix does not match datatype; '
+ 'setting bitpix to match datatype\n')
# Check that error_level in fact causes error to be raised
- imageglobals.error_level = 30
+ imageglobals.error_level = 10
assert_raises(HeaderDataError, hdr.copy().check_fix)
finally:
imageglobals.logger, imageglobals.error_level = log_cache
diff --git a/nibabel/tests/test_arrayproxy.py b/nibabel/tests/test_arrayproxy.py
index 86f7c7f9b5..e381f52ad9 100644
--- a/nibabel/tests/test_arrayproxy.py
+++ b/nibabel/tests/test_arrayproxy.py
@@ -17,7 +17,7 @@
import numpy as np
-from ..arrayproxy import ArrayProxy, is_proxy
+from ..arrayproxy import ArrayProxy, is_proxy, reshape_dataobj
from ..nifti1 import Nifti1Header
from numpy.testing import assert_array_equal, assert_array_almost_equal
@@ -158,6 +158,33 @@ class NP(object):
assert_false(is_proxy(NP()))
+def test_reshape_dataobj():
+ # Test function that reshapes using method if possible
+ shape = (1, 2, 3, 4)
+ hdr = FunkyHeader(shape)
+ bio = BytesIO()
+ prox = ArrayProxy(bio, hdr)
+ arr = np.arange(np.prod(shape), dtype=prox.dtype).reshape(shape)
+ bio.write(b'\x00' * prox.offset + arr.tostring(order='F'))
+ assert_array_equal(prox, arr)
+ assert_array_equal(reshape_dataobj(prox, (2, 3, 4)),
+ np.reshape(arr, (2, 3, 4)))
+ assert_equal(prox.shape, shape)
+ assert_equal(arr.shape, shape)
+ assert_array_equal(reshape_dataobj(arr, (2, 3, 4)),
+ np.reshape(arr, (2, 3, 4)))
+ assert_equal(arr.shape, shape)
+
+ class ArrGiver(object):
+
+ def __array__(self):
+ return arr
+
+ assert_array_equal(reshape_dataobj(ArrGiver(), (2, 3, 4)),
+ np.reshape(arr, (2, 3, 4)))
+ assert_equal(arr.shape, shape)
+
+
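The reshape_dataobj helper exercised here matters for CIFTI-2 because the data matrix rides in the trailing dimensions of a NIfTI-2 dataset behind leading singleton axes (at least, that is the layout this sketch assumes); dropping them looks like:

    import numpy as np
    from nibabel.arrayproxy import reshape_dataobj

    dataobj = np.zeros((1, 1, 1, 1, 2, 91282))            # illustrative shape
    matrix = reshape_dataobj(dataobj, dataobj.shape[4:])  # also works for proxies
    assert matrix.shape == (2, 91282)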
def test_get_unscaled():
# Test fetch of raw array
class FunkyHeader2(FunkyHeader):
diff --git a/nibabel/tests/test_dataobj_images.py b/nibabel/tests/test_dataobj_images.py
new file mode 100644
index 0000000000..4c40ff9f17
--- /dev/null
+++ b/nibabel/tests/test_dataobj_images.py
@@ -0,0 +1,40 @@
+""" Testing dataobj_images module
+"""
+
+import numpy as np
+
+from nibabel.filebasedimages import FileBasedHeader
+from nibabel.dataobj_images import DataobjImage
+
+from nibabel.tests.test_image_api import DataInterfaceMixin
+from nibabel.tests.test_filebasedimages import TestFBImageAPI as _TFI
+
+
+class DoNumpyImage(DataobjImage):
+ header_class = FileBasedHeader
+ valid_exts = ('.npy',)
+ files_types = (('image', '.npy'),)
+
+ @classmethod
+ def from_file_map(klass, file_map):
+ with file_map['image'].get_prepare_fileobj('rb') as fobj:
+ arr = np.load(fobj)
+ return klass(arr)
+
+ def to_file_map(self, file_map=None):
+ file_map = self.file_map if file_map is None else file_map
+ with file_map['image'].get_prepare_fileobj('wb') as fobj:
+ np.save(fobj, self.dataobj)
+
+ def get_data_dtype(self):
+ return self.dataobj.dtype
+
+ def set_data_dtype(self, dtype):
+ self._dataobj = self._dataobj.astype(dtype)
+
+
+class TestDataobjAPI(_TFI, DataInterfaceMixin):
+ """ Validation for DataobjImage instances
+ """
+ # A callable returning an image from ``image_maker(data, header)``
+ image_maker = DoNumpyImage
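A quick round-trip sketch using the DoNumpyImage shim defined above; the file name is illustrative and the to_filename / from_filename calls come from the FileBasedImage API.

    import numpy as np
    from nibabel.tmpdirs import InTemporaryDirectory

    arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4))
    img = DoNumpyImage(arr)
    with InTemporaryDirectory():
        img.to_filename('arr.npy')
        img2 = DoNumpyImage.from_filename('arr.npy')
        assert np.array_equal(img2.get_data(), arr)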
diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py
new file mode 100644
index 0000000000..469c60d803
--- /dev/null
+++ b/nibabel/tests/test_filebasedimages.py
@@ -0,0 +1,109 @@
+""" Testing filebasedimages module
+"""
+
+from itertools import product
+
+import numpy as np
+
+from nibabel.filebasedimages import FileBasedHeader, FileBasedImage
+
+from nibabel.tests.test_image_api import GenericImageAPI
+
+from nose.tools import (assert_true, assert_false, assert_equal,
+ assert_not_equal)
+
+
+class FBNumpyImage(FileBasedImage):
+ header_class = FileBasedHeader
+ valid_exts = ('.npy',)
+ files_types = (('image', '.npy'),)
+
+ def __init__(self, arr, header=None, extra=None, file_map=None):
+ super(FBNumpyImage, self).__init__(header, extra, file_map)
+ self.arr = arr
+
+ @property
+ def shape(self):
+ return self.arr.shape
+
+ def get_data(self):
+ return self.arr
+
+ @classmethod
+ def from_file_map(klass, file_map):
+ with file_map['image'].get_prepare_fileobj('rb') as fobj:
+ arr = np.load(fobj)
+ return klass(arr)
+
+ def to_file_map(self, file_map=None):
+ file_map = self.file_map if file_map is None else file_map
+ with file_map['image'].get_prepare_fileobj('wb') as fobj:
+ np.save(fobj, self.arr)
+
+ def get_data_dtype(self):
+ return self.arr.dtype
+
+ def set_data_dtype(self, dtype):
+ self.arr = self.arr.astype(dtype)
+
+
+class TestFBImageAPI(GenericImageAPI):
+ """ Validation for FileBasedImage instances
+ """
+ # A callable returning an image from ``image_maker(data, header)``
+ image_maker = FBNumpyImage
+ # A callable returning a header from ``header_maker()``
+ header_maker = FileBasedHeader
+ # Example shapes for created images
+ example_shapes = ((2,), (2, 3), (2, 3, 4), (2, 3, 4, 5))
+ example_dtypes = (np.int8, np.uint16, np.int32, np.float32)
+ can_save = True
+ standard_extension = '.npy'
+
+ def make_imaker(self, arr, header=None):
+ return lambda: self.image_maker(arr, header)
+
+ def obj_params(self):
+ # Create new images
+ for shape, dtype in product(self.example_shapes, self.example_dtypes):
+ arr = np.arange(np.prod(shape), dtype=dtype).reshape(shape)
+ hdr = self.header_maker()
+ func = self.make_imaker(arr.copy(), hdr)
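+ # These images wrap plain in-memory arrays, so is_proxy is always False.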
+ params = dict(
+ dtype=dtype,
+ data=arr,
+ shape=shape,
+ is_proxy=False)
+ yield func, params
+
+
+def test_filebased_header():
+ # Test basic behavior of the default FileBasedHeader
+
+ class H(FileBasedHeader):
+
+ def __init__(self, seq=None):
+ if seq is None:
+ seq = []
+ self.a_list = list(seq)
+
+ in_list = [1, 3, 2]
+ hdr = H(in_list)
+ hdr_c = hdr.copy()
+ assert_equal(hdr_c.a_list, hdr.a_list)
+ # Copy is independent of original
+ hdr_c.a_list[0] = 99
+ assert_not_equal(hdr_c.a_list, hdr.a_list)
+ # From header does a copy
+ hdr2 = H.from_header(hdr)
+ assert_true(isinstance(hdr2, H))
+ assert_equal(hdr2.a_list, hdr.a_list)
+ hdr2.a_list[0] = 42
+ assert_not_equal(hdr2.a_list, hdr.a_list)
+ # Default header input to from_header gives a new empty header
+ hdr3 = H.from_header()
+ assert_true(isinstance(hdr3, H))
+ assert_equal(hdr3.a_list, [])
+ hdr4 = H.from_header(None)
+ assert_true(isinstance(hdr4, H))
+ assert_equal(hdr4.a_list, [])
diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py
index 765e4410f2..81fbfec0eb 100644
--- a/nibabel/tests/test_files_interface.py
+++ b/nibabel/tests/test_files_interface.py
@@ -89,7 +89,7 @@ def test_round_trip_spatialimages():
data = np.arange(24, dtype='i4').reshape((2, 3, 4))
aff = np.eye(4)
klasses = [klass for klass in all_image_classes
- if klass.rw and issubclass(klass, SpatialImage)]
+ if klass.rw and klass.makeable and issubclass(klass, SpatialImage)]
for klass in klasses:
file_map = klass.make_file_map()
for key in file_map:
diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py
index a76cb2737b..c2f177ff79 100644
--- a/nibabel/tests/test_image_api.py
+++ b/nibabel/tests/test_image_api.py
@@ -47,7 +47,8 @@
from ..tmpdirs import InTemporaryDirectory
from .test_api_validators import ValidateAPI
-from .test_helpers import bytesio_round_trip, assert_data_similar
+from .test_helpers import (bytesio_round_trip, bytesio_filemap,
+ assert_data_similar)
from .test_minc1 import EXAMPLE_IMAGES as MINC1_EXAMPLE_IMAGES
from .test_minc2 import EXAMPLE_IMAGES as MINC2_EXAMPLE_IMAGES
from .test_parrec import EXAMPLE_IMAGES as PARREC_EXAMPLE_IMAGES
@@ -100,38 +101,10 @@ def obj_params(self):
"""
raise NotImplementedError
- def validate_affine(self, imaker, params):
- # Check affine API
- img = imaker()
- assert_almost_equal(img.affine, params['affine'], 6)
- assert_equal(img.affine.dtype, np.float64)
- img.affine[0, 0] = 1.5
- assert_equal(img.affine[0, 0], 1.5)
- # Read only
- assert_raises(AttributeError, setattr, img, 'affine', np.eye(4))
-
- def validate_affine_deprecated(self, imaker, params):
- # Check deprecated affine API
- img = imaker()
- with clear_and_catch_warnings() as w:
- warnings.simplefilter('always', DeprecationWarning)
- assert_almost_equal(img.get_affine(), params['affine'], 6)
- assert_equal(len(w), 1)
- assert_equal(img.get_affine().dtype, np.float64)
- aff = img.get_affine()
- aff[0, 0] = 1.5
- assert_true(aff is img.get_affine())
-
def validate_header(self, imaker, params):
# Check header API
img = imaker()
hdr = img.header # we can fetch it
- # Change shape in header, check this changes img.header
- shape = hdr.get_data_shape()
- new_shape = (shape[0] + 1,) + shape[1:]
- hdr.set_data_shape(new_shape)
- assert_true(img.header is hdr)
- assert_equal(img.header.get_data_shape(), new_shape)
# Read only
assert_raises(AttributeError, setattr, img, 'header', hdr)
@@ -144,25 +117,50 @@ def validate_header_deprecated(self, imaker, params):
assert_equal(len(w), 1)
assert_true(hdr is img.header)
- def validate_shape(self, imaker, params):
- # Validate shape
+ def validate_filenames(self, imaker, params):
+ # Validate the filename, file_map interface
+ if not self.can_save:
+ raise SkipTest
img = imaker()
- # Same as expected shape
- assert_equal(img.shape, params['shape'])
- # Same as array shape if passed
- if 'data' in params:
- assert_equal(img.shape, params['data'].shape)
- assert_equal(img.shape, img.get_data().shape)
- # Read only
- assert_raises(AttributeError, setattr, img, 'shape', np.eye(4))
+ img.set_data_dtype(np.float32) # to avoid rounding in load / save
+ # Make sure the object does not have a file_map
+ img.file_map = None
+ # The bytesio_round_trip helper tests bytesio load / save via file_map
+ rt_img = bytesio_round_trip(img)
+ assert_array_equal(img.shape, rt_img.shape)
+ assert_almost_equal(img.get_data(), rt_img.get_data())
+ # Give the image a file map
+ klass = type(img)
+ rt_img.file_map = bytesio_filemap(klass)
+ # This object can now be saved and loaded from its own file_map
+ rt_img.to_file_map()
+ rt_rt_img = klass.from_file_map(rt_img.file_map)
+ assert_almost_equal(img.get_data(), rt_rt_img.get_data())
+ # get_ / set_ filename
+ fname = 'an_image' + self.standard_extension
+ img.set_filename(fname)
+ assert_equal(img.get_filename(), fname)
+ assert_equal(img.file_map['image'].filename, fname)
+ # to_ / from_ filename
+ fname = 'another_image' + self.standard_extension
+ with InTemporaryDirectory():
+ img.to_filename(fname)
+ rt_img = img.__class__.from_filename(fname)
+ assert_array_equal(img.shape, rt_img.shape)
+ assert_almost_equal(img.get_data(), rt_img.get_data())
+ del rt_img # to allow windows to delete the directory
- def validate_shape_deprecated(self, imaker, params):
- # Check deprecated get_shape API
- with clear_and_catch_warnings() as w:
- warnings.simplefilter('always', DeprecationWarning)
- img = imaker()
- assert_equal(img.get_shape(), params['shape'])
- assert_equal(len(w), 1)
+ def validate_no_slicing(self, imaker, params):
+ img = imaker()
+ assert_raises(TypeError, img.__getitem__, 'string')
+ assert_raises(TypeError, img.__getitem__, slice(None))
+
+
+class GetSetDtypeMixin(object):
+ """ Adds dtype tests
+
+ Add this one if your image has ``get_data_dtype`` and ``set_data_dtype``.
+ """
def validate_dtype(self, imaker, params):
# data / storage dtype
@@ -182,9 +180,17 @@ def validate_dtype(self, imaker, params):
rt_img = bytesio_round_trip(img)
assert_equal(rt_img.get_data_dtype().type, np.float32)
- def validate_data(self, imaker, params):
+
+class DataInterfaceMixin(GetSetDtypeMixin):
+ """ Test dataobj interface for images with array backing
+
+ Use this mixin if your image has a ``dataobj`` property that contains an
+ array or an array-like thing.
+ """
+ def validate_data_interface(self, imaker, params):
# Check get data returns array, and caches
img = imaker()
+ assert_equal(img.shape, img.dataobj.shape)
assert_data_similar(img.dataobj, params)
if params['is_proxy']:
assert_false(isinstance(img.dataobj, np.ndarray))
@@ -243,6 +249,8 @@ def validate_data(self, imaker, params):
img.uncache()
assert_array_equal(get_data_func(), 42)
assert_true(img.in_memory)
+ # Data shape is same as image shape
+ assert_equal(img.shape, img.get_data().shape)
# dataobj is read only
fake_data = np.zeros(img.shape).astype(img.get_data_dtype())
assert_raises(AttributeError, setattr, img, 'dataobj', fake_data)
@@ -262,37 +270,80 @@ def validate_data_deprecated(self, imaker, params):
fake_data = np.zeros(img.shape).astype(img.get_data_dtype())
assert_raises(AttributeError, setattr, img, '_data', fake_data)
- def validate_filenames(self, imaker, params):
- # Validate the filename, file_map interface
- if not self.can_save:
- raise SkipTest
+ def validate_shape(self, imaker, params):
+ # Validate shape
img = imaker()
- img.set_data_dtype(np.float32) # to avoid rounding in load / save
- # The bytesio_round_trip helper tests bytesio load / save via file_map
- rt_img = bytesio_round_trip(img)
- assert_array_equal(img.shape, rt_img.shape)
- assert_almost_equal(img.get_data(), rt_img.get_data())
- # get_ / set_ filename
- fname = 'an_image' + self.standard_extension
- img.set_filename(fname)
- assert_equal(img.get_filename(), fname)
- assert_equal(img.file_map['image'].filename, fname)
- # to_ / from_ filename
- fname = 'another_image' + self.standard_extension
- with InTemporaryDirectory():
- img.to_filename(fname)
- rt_img = img.__class__.from_filename(fname)
- assert_array_equal(img.shape, rt_img.shape)
- assert_almost_equal(img.get_data(), rt_img.get_data())
- del rt_img # to allow windows to delete the directory
+ # Same as expected shape
+ assert_equal(img.shape, params['shape'])
+ # Same as array shape if passed
+ if 'data' in params:
+ assert_equal(img.shape, params['data'].shape)
+ # Read only
+ assert_raises(AttributeError, setattr, img, 'shape', np.eye(4))
- def validate_no_slicing(self, imaker, params):
+ def validate_shape_deprecated(self, imaker, params):
+ # Check deprecated get_shape API
+ with clear_and_catch_warnings() as w:
+ warnings.simplefilter('always', DeprecationWarning)
+ img = imaker()
+ assert_equal(img.get_shape(), params['shape'])
+ assert_equal(len(w), 1)
+
+
+class HeaderShapeMixin(object):
+ """ Tests that header shape can be set and got
+
+ Add this one if your header supports ``get_data_shape`` and
+ ``set_data_shape``.
+ """
+
+ def validate_header_shape(self, imaker, params):
+ # Change shape in header, check this changes img.header
img = imaker()
- assert_raises(TypeError, img.__getitem__, 'string')
- assert_raises(TypeError, img.__getitem__, slice(None))
+ hdr = img.header
+ shape = hdr.get_data_shape()
+ new_shape = (shape[0] + 1,) + shape[1:]
+ hdr.set_data_shape(new_shape)
+ assert_true(img.header is hdr)
+ assert_equal(img.header.get_data_shape(), new_shape)
+
+
+class AffineMixin(object):
+ """ Adds test of affine property, method
+
+ Add this one if your image has an ``affine`` property. If so, it should
+ (for now) also have a ``get_affine`` method returning the same result.
+ """
-class LoadImageAPI(GenericImageAPI):
+ def validate_affine(self, imaker, params):
+ # Check affine API
+ img = imaker()
+ assert_almost_equal(img.affine, params['affine'], 6)
+ assert_equal(img.affine.dtype, np.float64)
+ img.affine[0, 0] = 1.5
+ assert_equal(img.affine[0, 0], 1.5)
+ # Read only
+ assert_raises(AttributeError, setattr, img, 'affine', np.eye(4))
+
+ def validate_affine_deprecated(self, imaker, params):
+ # Check deprecated affine API
+ img = imaker()
+ with clear_and_catch_warnings() as w:
+ warnings.simplefilter('always', DeprecationWarning)
+ assert_almost_equal(img.get_affine(), params['affine'], 6)
+ assert_equal(len(w), 1)
+ assert_equal(img.get_affine().dtype, np.float64)
+ aff = img.get_affine()
+ aff[0, 0] = 1.5
+ assert_true(aff is img.get_affine())
+
+
+class LoadImageAPI(GenericImageAPI,
+ DataInterfaceMixin,
+ AffineMixin,
+ GetSetDtypeMixin,
+ HeaderShapeMixin):
# Callable returning an image from a filename
loader = None
# Sequence of dictionaries, where dictionaries have keys
@@ -324,9 +375,6 @@ class MakeImageAPI(LoadImageAPI):
# Example shapes for created images
example_shapes = ((2,), (2, 3), (2, 3, 4), (2, 3, 4, 5))
- def img_from_arr_aff(self, arr, aff, header=None):
- return self.image_maker(arr, aff, header)
-
def obj_params(self):
# Return any obj_params from superclass
for func, params in super(MakeImageAPI, self).obj_params():
diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py
index b6ffdf094e..b0f801b626 100644
--- a/nibabel/tests/test_nifti1.py
+++ b/nibabel/tests/test_nifti1.py
@@ -186,17 +186,25 @@ def test_slope_inter(self):
assert_array_equal([hdr['scl_slope'], hdr['scl_inter']],
raw_values)
- def test_nifti_qsform_checks(self):
- # qfac, qform, sform checks
- # qfac
- HC = self.header_class
- hdr = HC()
+ def test_nifti_qfac_checks(self):
+ # Test qfac is 1 or -1
+ hdr = self.header_class()
+ # 1, -1 OK
+ hdr['pixdim'][0] = 1
+ self.log_chk(hdr, 0)
+ hdr['pixdim'][0] = -1
+ self.log_chk(hdr, 0)
+ # 0 is not
hdr['pixdim'][0] = 0
fhdr, message, raiser = self.log_chk(hdr, 20)
assert_equal(fhdr['pixdim'][0], 1)
assert_equal(message,
'pixdim[0] (qfac) should be 1 '
'(default) or -1; setting qfac to 1')
+
+ def test_nifti_qsform_checks(self):
+ # qform, sform checks
+ HC = self.header_class
# qform, sform
hdr = HC()
hdr['qform_code'] = -1
diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py
index 91f40773c7..8c7afd9ea4 100644
--- a/nibabel/tests/test_nifti2.py
+++ b/nibabel/tests/test_nifti2.py
@@ -78,11 +78,6 @@ class TestNifti2Image(TestNifti1Image):
image_class = Nifti2Image
-class TestNifti2Image(TestNifti1Image):
- # Run analyze-flavor spatialimage tests
- image_class = Nifti2Image
-
-
class TestNifti2Pair(TestNifti1Pair):
# Run analyze-flavor spatialimage tests
image_class = Nifti2Pair
diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py
index b6fd1a004c..fce79dcc60 100644
--- a/nibabel/tests/test_proxy_api.py
+++ b/nibabel/tests/test_proxy_api.py
@@ -31,6 +31,7 @@
from os.path import join as pjoin
import warnings
+from itertools import product
from io import BytesIO
import numpy as np
@@ -58,7 +59,7 @@
from numpy.testing import (assert_almost_equal, assert_array_equal)
-from ..testing import data_path as DATA_PATH
+from ..testing import data_path as DATA_PATH, assert_dt_equal
from ..tmpdirs import InTemporaryDirectory
@@ -121,7 +122,7 @@ def validate_asarray(self, pmaker, params):
prox, fio, hdr = pmaker()
out = np.asarray(prox)
assert_array_equal(out, params['arr_out'])
- assert_equal(out.dtype.type, params['dtype_out'])
+ assert_dt_equal(out.dtype, params['dtype_out'])
# Shape matches expected shape
assert_equal(out.shape, params['shape'])
@@ -194,72 +195,81 @@ def obj_params(self):
offsets = (self.header_class().get_data_offset(),)
else:
offsets = (0, 16)
- for shape in self.shapes:
+ slopes = (1., 2.) if self.has_slope else (1.,)
+ inters = (0., 10.) if self.has_inter else (0.,)
+ dtypes = (np.uint8, np.int16, np.float32)
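+ # Exercise every combination of shape, dtype, offset, slope and inter.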
+ for shape, dtype, offset, slope, inter in product(self.shapes,
+ dtypes,
+ offsets,
+ slopes,
+ inters):
n_els = np.prod(shape)
- for dtype in (np.uint8, np.int16, np.float32):
- dt = np.dtype(dtype).newbyteorder(self.data_endian)
- arr = np.arange(n_els, dtype=dt).reshape(shape)
- data = arr.tostring(order=self.array_order)
- for offset in offsets:
- slopes = (1., 2.) if self.has_slope else (1.,)
- inters = (0., 10.) if self.has_inter else (0.,)
- for slope in slopes:
- for inter in inters:
- hdr = self.header_class()
- hdr.set_data_dtype(dtype)
- hdr.set_data_shape(shape)
- if self.settable_offset:
- hdr.set_data_offset(offset)
- if (slope, inter) == (1, 0): # No scaling applied
- # dtype from array
- dtype_out = dtype
- else: # scaling or offset applied
- # out dtype predictable from apply_read_scaling
- # and datatypes of slope, inter
- hdr.set_slope_inter(slope, inter)
- s, i = hdr.get_slope_inter()
- tmp = apply_read_scaling(arr,
- 1. if s is None else s,
- 0. if i is None else i)
- dtype_out = tmp.dtype.type
-
- def sio_func():
- fio = BytesIO()
- fio.truncate(0)
- fio.seek(offset)
- fio.write(data)
- # Use a copy of the header to avoid changing
- # global header in test functions.
- new_hdr = hdr.copy()
- return (self.proxy_class(fio, new_hdr),
- fio,
- new_hdr)
- params = dict(
- dtype=dtype,
- dtype_out=dtype_out,
- arr=arr.copy(),
- arr_out=arr * slope + inter,
- shape=shape,
- offset=offset,
- slope=slope,
- inter=inter)
- yield sio_func, params
- # Same with filenames
- with InTemporaryDirectory():
- fname = 'data.bin'
-
- def fname_func():
- with open(fname, 'wb') as fio:
- fio.seek(offset)
- fio.write(data)
- # Use a copy of the header to avoid changing
- # global header in test functions.
- new_hdr = hdr.copy()
- return (self.proxy_class(fname, new_hdr),
- fname,
- new_hdr)
- params = params.copy()
- yield fname_func, params
+ dtype = np.dtype(dtype).newbyteorder(self.data_endian)
+ arr = np.arange(n_els, dtype=dtype).reshape(shape)
+ data = arr.tostring(order=self.array_order)
+ hdr = self.header_class()
+ hdr.set_data_dtype(dtype)
+ hdr.set_data_shape(shape)
+ if self.settable_offset:
+ hdr.set_data_offset(offset)
+ if (slope, inter) == (1, 0): # No scaling applied
+ # dtype from array
+ dtype_out = dtype
+ else: # scaling or offset applied
+ # out dtype predictable from apply_read_scaling
+ # and datatypes of slope, inter
+ hdr.set_slope_inter(slope, inter)
+ s, i = hdr.get_slope_inter()
+ tmp = apply_read_scaling(arr,
+ 1. if s is None else s,
+ 0. if i is None else i)
+ dtype_out = tmp.dtype.type
+
+ def sio_func():
+ fio = BytesIO()
+ fio.truncate(0)
+ fio.seek(offset)
+ fio.write(data)
+ # Use a copy of the header to avoid changing
+ # global header in test functions.
+ new_hdr = hdr.copy()
+ return (self.proxy_class(fio, new_hdr),
+ fio,
+ new_hdr)
+
+ params = dict(
+ dtype=dtype,
+ dtype_out=dtype_out,
+ arr=arr.copy(),
+ arr_out=arr * slope + inter,
+ shape=shape,
+ offset=offset,
+ slope=slope,
+ inter=inter)
+ yield sio_func, params
+ # Same with filenames
+ with InTemporaryDirectory():
+ fname = 'data.bin'
+
+ def fname_func():
+ with open(fname, 'wb') as fio:
+ fio.seek(offset)
+ fio.write(data)
+ # Use a copy of the header to avoid changing
+ # global header in test functions.
+ new_hdr = hdr.copy()
+ return (self.proxy_class(fname, new_hdr),
+ fname,
+ new_hdr)
+ params = params.copy()
+ yield fname_func, params
+
+ def validate_dtype(self, pmaker, params):
+ # Read-only dtype attribute
+ prox, fio, hdr = pmaker()
+ assert_dt_equal(prox.dtype, params['dtype'])
+ assert_raises(AttributeError,
+ prox.__setattr__, 'dtype', np.dtype(prox.dtype))
def validate_slope_inter_offset(self, pmaker, params):
# Check slope, inter, offset
diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py
index 5ea346dab9..fa73e3b510 100644
--- a/nibabel/tests/test_wrapstruct.py
+++ b/nibabel/tests/test_wrapstruct.py
@@ -43,6 +43,69 @@
INTEGER_TYPES = np.sctypes['int'] + np.sctypes['uint']
+def log_chk(hdr, level):
+ """ Utility method to check header checking / logging
+
+ Asserts that a log entry appears during ``hdr.check_fix`` when the logging
+ level is below `level`.
+
+ Parameters
+ ----------
+ hdr : instance
+ Instance of header class, with methods ``copy`` and ``check_fix``. The
+ header has some minor error (defect) which can be detected with
+ ``check_fix``.
+ level : int
+ Level (severity) of defect present in `hdr`. When the logging threshold
+ is at or below `level`, a message appears in the log (we test that this
+ happens).
+
+ Returns
+ -------
+ hdrc : instance
+ Header, with defect corrected.
+ message : str
+ Message generated in log when defect was detected.
+ raiser : tuple
+ Tuple of error type, callable, arguments that will raise an exception
+ when the defect is detected. Can be empty. Check with ``if raiser !=
+ (): assert_raises(*raiser)``.
+ """
+ str_io = StringIO()
+ logger = logging.getLogger('test.logger')
+ handler = logging.StreamHandler(str_io)
+ logger.addHandler(handler)
+ str_io.truncate(0)
+ hdrc = hdr.copy()
+ if level == 0: # Should never log or raise error
+ logger.setLevel(0)
+ hdrc.check_fix(logger=logger, error_level=0)
+ assert_equal(str_io.getvalue(), '')
+ logger.removeHandler(handler)
+ return hdrc, '', ()
+ # Non zero defect level, test above and below threshold.
+ # Set error level above defect level to prevent exception when defect
+ # detected.
+ e_lev = level + 1
+ # Logging level above threshold, no log.
+ logger.setLevel(level + 1)
+ hdrc.check_fix(logger=logger, error_level=e_lev)
+ assert_equal(str_io.getvalue(), '')
+ # Logging level below threshold, log appears, store logged message
+ logger.setLevel(level - 1)
+ hdrc = hdr.copy()
+ hdrc.check_fix(logger=logger, error_level=e_lev)
+ assert_true(str_io.getvalue() != '')
+ message = str_io.getvalue().strip()
+ logger.removeHandler(handler)
+ # When error level == level, check_fix should raise an error
+ hdrc2 = hdr.copy()
+ raiser = (HeaderDataError,
+ hdrc2.check_fix,
+ logger,
+ level)
+ return hdrc, message, raiser
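+
+# A minimal usage sketch, mirroring the check suggested in the docstring above
+# (severity 20 as used for the qfac checks in test_nifti1):
+#
+#     fhdr, message, raiser = log_chk(hdr, 20)
+#     if raiser != ():
+#         assert_raises(*raiser)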
+
class _TestWrapStructBase(TestCase):
''' Class implements base tests for binary headers
@@ -161,40 +224,7 @@ def test_structarr(self):
assert_raises(AttributeError, hdr.__setattr__, 'structarr', 0)
def log_chk(self, hdr, level):
- # utility method to check header checking / logging
- # If level == 0, this header should always be OK
- str_io = StringIO()
- logger = logging.getLogger('test.logger')
- handler = logging.StreamHandler(str_io)
- logger.addHandler(handler)
- str_io.truncate(0)
- hdrc = hdr.copy()
- if level == 0: # Should never log or raise error
- logger.setLevel(0)
- hdrc.check_fix(logger=logger, error_level=0)
- assert_equal(str_io.getvalue(), '')
- logger.removeHandler(handler)
- return hdrc, '', ()
- # Non zero level, test above and below threshold
- # Logging level above threshold, no log
- logger.setLevel(level + 1)
- e_lev = level + 1
- hdrc.check_fix(logger=logger, error_level=e_lev)
- assert_equal(str_io.getvalue(), '')
- # Logging level below threshold, log appears
- logger.setLevel(level + 1)
- logger.setLevel(level - 1)
- hdrc = hdr.copy()
- hdrc.check_fix(logger=logger, error_level=e_lev)
- assert_true(str_io.getvalue() != '')
- message = str_io.getvalue().strip()
- logger.removeHandler(handler)
- hdrc2 = hdr.copy()
- raiser = (HeaderDataError,
- hdrc2.check_fix,
- logger,
- level)
- return hdrc, message, raiser
+ return log_chk(hdr, level)
def assert_no_log_err(self, hdr):
""" Assert that no logging or errors result from this `hdr`
diff --git a/nibabel/wrapstruct.py b/nibabel/wrapstruct.py
index 8d940ef68d..990a7e7f4e 100644
--- a/nibabel/wrapstruct.py
+++ b/nibabel/wrapstruct.py
@@ -346,7 +346,15 @@ def get(self, k, d=None):
return (k in self.keys()) and self._structarr[k] or d
def check_fix(self, logger=None, error_level=None):
- ''' Check structured data with checks '''
+ ''' Check structured data with checks
+
+ Parameters
+ ----------
+ logger : None or logging.Logger
+ error_level : None or int
+ Level of error severity at which to raise error. Any error of
+ severity >= `error_level` will cause an exception.
+ '''
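+ # Problems with severity >= error_level raise an exception; less severe
+ # problems are only logged.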
if logger is None:
logger = imageglobals.logger
if error_level is None:
diff --git a/nibabel/xmlutils.py b/nibabel/xmlutils.py
index 40e2162907..08794a7550 100644
--- a/nibabel/xmlutils.py
+++ b/nibabel/xmlutils.py
@@ -27,7 +27,8 @@ def _to_xml_element(self):
def to_xml(self, enc='utf-8'):
""" Output should be an xml string with the given encoding.
(default: utf-8)"""
- return tostring(self._to_xml_element(), enc)
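+ # _to_xml_element may return None when there is nothing to serialize;
+ # return an empty string in that case instead of calling tostring(None).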
+ ele = self._to_xml_element()
+ return '' if ele is None else tostring(ele, enc)
class XmlBasedHeader(FileBasedHeader, XmlSerializable):
@@ -47,7 +48,7 @@ class XmlParser(object):
'EndElementHandler',
'CharacterDataHandler']
- def __init__(self, encoding=None, buffer_size=35000000, verbose=0):
+ def __init__(self, encoding='utf-8', buffer_size=35000000, verbose=0):
"""
Parameters
----------
diff --git a/setup.py b/setup.py
index 4d3c088fb4..a0f98c4395 100755
--- a/setup.py
+++ b/setup.py
@@ -84,6 +84,8 @@ def main(**extra_args):
'nibabel.externals.tests',
'nibabel.gifti',
'nibabel.gifti.tests',
+ 'nibabel.cifti2',
+ 'nibabel.cifti2.tests',
'nibabel.nicom',
'nibabel.freesurfer',
'nibabel.freesurfer.tests',