Class for reading AFNI BRIK/HEAD datasets

See https://afni.nimh.nih.gov/pub/dist/doc/program_help/README.attributes.html
for information on what is required to have a valid BRIK/HEAD dataset.

Some notes on the AFNI BRIK/HEAD format:

* In the AFNI HEAD file, the first two values of the attribute DATASET_RANK
  determine the shape of the data array stored in the corresponding BRIK file.
  The first value, DATASET_RANK[0], must be set to 3 denoting a 3D image. The
  second value, DATASET_RANK[1], determines how many "sub-bricks" (in AFNI
  parlance) / volumes there are along the fourth (traditionally, but not
  exclusively) time axis. Thus, DATASET_RANK[1] will (at least as far as I (RM)
  am aware) always be >= 1. This permits sub-brick indexing common in AFNI
  programs (e.g., example4d+orig'[0]').
"""
1526from __future__ import print_function , division
1627
1930import re
2031
2132import numpy as np
33+ from six import string_types
2234
2335from .arrayproxy import ArrayProxy
24- from .fileholders import FileHolder
25- from .filename_parser import (types_filenames , TypesFilenamesError )
2636from .fileslice import strided_scalar
2737from .keywordonly import kw_only_meth
2838from .spatialimages import (
6474
class AFNIImageError(ImageDataError):
    """Error raised when reading AFNI BRIK files fails."""
6978
class AFNIHeaderError(HeaderDataError):
    """Error raised when reading an AFNI HEAD file fails."""
7482
# Byte offset of the image data within a BRIK file; presumably BRIK files
# carry no embedded header, so data starts at byte 0 — TODO confirm against
# the AFNI format documentation.
DATA_OFFSET = 0
@@ -107,7 +115,6 @@ def _unpack_var(var):
107115
108116 err_msg = ('Please check HEAD file to ensure it is AFNI compliant. '
109117 'Offending attribute:\n %s' % var )
110-
111118 atype , aname = TYPE_RE .findall (var ), NAME_RE .findall (var )
112119 if len (atype ) != 1 :
113120 raise AFNIHeaderError ('Invalid attribute type entry in HEAD file. '
@@ -116,22 +123,20 @@ def _unpack_var(var):
116123 raise AFNIHeaderError ('Invalid attribute name entry in HEAD file. '
117124 '%s' % err_msg )
118125 atype = _attr_dic .get (atype [0 ], str )
119- attr = ' ' .join (var .strip ().split ( ' \n ' )[3 :])
126+ attr = ' ' .join (var .strip ().splitlines ( )[3 :])
120127 if atype is not str :
121128 try :
122129 attr = [atype (f ) for f in attr .split ()]
123130 except ValueError :
124131 raise AFNIHeaderError ('Failed to read variable from HEAD file due '
125132 'to improper type casting. %s' % err_msg )
126- if len (attr ) == 1 :
127- attr = attr [0 ]
128133 else :
129134 # AFNI string attributes will always start with open single quote and
130135 # end with a tilde (NUL). These attributes CANNOT contain tildes (so
131136 # stripping is safe), but can contain single quotes (so we replace)
132137 attr = attr .replace ('\' ' , '' , 1 ).rstrip ('~' )
133138
134- return aname [0 ], attr
139+ return aname [0 ], attr [ 0 ] if len ( attr ) == 1 else attr
135140
136141
137142def _get_datatype (info ):
@@ -158,7 +163,6 @@ def _get_datatype(info):
158163 bt = _dtype_dict .get (bt , None )
159164 if bt is None :
160165 raise AFNIImageError ('Can\' t deduce image data type.' )
161-
162166 return np .dtype (bo + bt )
163167
164168
@@ -186,20 +190,23 @@ def parse_AFNI_header(fobj):
186190 >>> print(info['BRICK_TYPES'])
187191 [1, 1, 1]
188192 """
189- from six import string_types
190-
191193 # edge case for being fed a filename instead of a file object
192194 if isinstance (fobj , string_types ):
193195 with open (fobj , 'rt' ) as src :
194196 return parse_AFNI_header (src )
195197 # unpack variables in HEAD file
196198 head = fobj .read ().split ('\n \n ' )
197199 info = {key : value for key , value in map (_unpack_var , head )}
198-
199200 return info
200201
201202
202203class AFNIArrayProxy (ArrayProxy ):
204+ """
205+ Attributes
206+ ----------
207+ scaling : np.ndarray
208+ Scaling factor (one factor per volume/subbrick) for data. Default: None
209+ """
203210 @kw_only_meth (2 )
204211 def __init__ (self , file_like , header , mmap = True , keep_file_open = None ):
205212 """
@@ -210,7 +217,7 @@ def __init__(self, file_like, header, mmap=True, keep_file_open=None):
210217 file_like : file-like object
211218 File-like object or filename. If file-like object, should implement
212219 at least ``read`` and ``seek``.
213- header : AFNIHeader object
220+ header : `` AFNIHeader`` object
214221 mmap : {True, False, 'c', 'r'}, optional, keyword only
215222 `mmap` controls the use of numpy memory mapping for reading data.
216223 If False, do not try numpy ``memmap`` for data array. If one of
@@ -244,11 +251,8 @@ def scaling(self):
244251
245252 def __array__ (self ):
246253 raw_data = self .get_unscaled ()
247- # apply volume specific scaling (may change datatype!)
248- if self ._scaling is not None :
249- return raw_data * self ._scaling
250-
251- return raw_data
254+ # datatype may change if applying self._scaling
255+ return raw_data if self .scaling is None else raw_data * self .scaling
252256
253257 def __getitem__ (self , slicer ):
254258 raw_data = super (AFNIArrayProxy , self ).__getitem__ (slicer )
@@ -258,7 +262,6 @@ def __getitem__(self, slicer):
258262 fake_data = strided_scalar (self ._shape )
259263 _ , scaling = np .broadcast_arrays (fake_data , scaling )
260264 raw_data = raw_data * scaling [slicer ]
261-
262265 return raw_data
263266
264267
@@ -320,15 +323,14 @@ def _calc_data_shape(self):
320323 dset_rank = self .info ['DATASET_RANK' ]
321324 shape = tuple (self .info ['DATASET_DIMENSIONS' ][:dset_rank [0 ]])
322325 n_vols = dset_rank [1 ]
323-
324326 return shape + (n_vols ,)
325327
326328 def _calc_zooms (self ):
327329 """
328330 Get image zooms from header data
329331
330332 Spatial axes are first three indices, time axis is last index. If
331- dataset is not a time series the last index will be zero.
333+ dataset is not a time series the last value will be zero.
332334
333335 Returns
334336 -------
@@ -338,7 +340,6 @@ def _calc_zooms(self):
338340 t_step = self .info .get ('TAXIS_FLOATS' , (0 , 0 ,))
339341 if len (t_step ) > 0 :
340342 t_step = (t_step [1 ],)
341-
342343 return xyz_step + t_step
343344
344345 def get_space (self ):
@@ -352,7 +353,6 @@ def get_space(self):
352353 """
353354 listed_space = self .info .get ('TEMPLATE_SPACE' , 0 )
354355 space = space_codes .space [listed_space ]
355-
356356 return space
357357
358358 def get_affine (self ):
@@ -374,7 +374,6 @@ def get_affine(self):
374374 affine = np .asarray (self .info ['IJK_TO_DICOM_REAL' ]).reshape (3 , 4 )
375375 affine = np .row_stack ((affine * [[- 1 ], [- 1 ], [1 ]],
376376 [0 , 0 , 0 , 1 ]))
377-
378377 return affine
379378
380379 def get_data_scaling (self ):
@@ -394,11 +393,15 @@ def get_data_scaling(self):
394393 scale = np .ones (self .info ['DATASET_RANK' ][1 ])
395394 floatfacs = np .atleast_1d (floatfacs )
396395 scale [floatfacs .nonzero ()] = floatfacs [floatfacs .nonzero ()]
397-
398396 return scale
399397
400398 def get_slope_inter (self ):
401- """Use `self.get_data_scaling()` instead"""
399+ """
400+ Use `self.get_data_scaling()` instead
401+
402+ Holdover because ``AFNIArrayProxy`` (inheriting from ``ArrayProxy``)
403+ requires this functionality so as to not error.
404+ """
402405 return None , None
403406
404407 def get_data_offset (self ):
@@ -423,7 +426,6 @@ def get_volume_labels(self):
423426 labels = self .info .get ('BRICK_LABS' , None )
424427 if labels is not None :
425428 labels = labels .split ('~' )
426-
427429 return labels
428430
429431
@@ -435,7 +437,7 @@ class AFNIImage(SpatialImage):
435437
436438 Examples
437439 --------
438- >>> brik = load(os.path.join(datadir, 'example4d+orig.BRIK.gz'))
440+ >>> brik = nib. load(os.path.join(datadir, 'example4d+orig.BRIK.gz'))
439441 >>> brik.shape
440442 (33, 41, 25, 3)
441443 >>> brik.affine
@@ -451,7 +453,7 @@ class AFNIImage(SpatialImage):
451453 header_class = AFNIHeader
452454 valid_exts = ('.brik' , '.head' )
453455 files_types = (('image' , '.brik' ), ('header' , '.head' ))
454- _compressed_suffixes = ('.gz' , '.bz2' )
456+ _compressed_suffixes = ('.gz' , '.bz2' , '.Z' )
455457 makeable = False
456458 rw = False
457459 ImageArrayProxy = AFNIArrayProxy
@@ -510,7 +512,13 @@ def filespec_to_file_map(klass, filespec):
510512 """
511513 Make `file_map` from filename `filespec`
512514
513- Deals with idiosyncracies of AFNI BRIK / HEAD formats
515+ AFNI BRIK files can be compressed, but HEAD files cannot - see
516+ afni.nimh.nih.gov/pub/dist/doc/program_help/README.compression.html.
517+ Thus, if you have AFNI files my_image.HEAD and my_image.BRIK.gz and you
518+ want to load the AFNI BRIK / HEAD pair, you can specify:
519+ * The HEAD filename - e.g., my_image.HEAD
520+ * The BRIK filename w/o compressed extension - e.g., my_image.BRIK
521+ * The full BRIK filename - e.g., my_image.BRIK.gz
514522
515523 Parameters
516524 ----------
@@ -529,18 +537,10 @@ def filespec_to_file_map(klass, filespec):
529537 If `filespec` is not recognizable as being a filename for this
530538 image type.
531539 """
532- # copied from filebasedimages.py
533- try :
534- filenames = types_filenames (
535- filespec , klass .files_types ,
536- trailing_suffixes = klass ._compressed_suffixes )
537- except TypesFilenamesError :
538- raise ImageFileError (
539- 'Filespec "{0}" does not look right for class {1}' .format (
540- filespec , klass ))
541- file_map = {}
540+ file_map = super (AFNIImage , klass ).filespec_to_file_map (filespec )
542541 # check for AFNI-specific BRIK/HEAD compression idiosyncracies
543- for key , fname in filenames .items ():
542+ for key , fholder in file_map .items ():
543+ fname = fholder .filename
544544 if key == 'header' and not os .path .exists (fname ):
545545 for ext in klass ._compressed_suffixes :
546546 fname = fname [:- len (ext )] if fname .endswith (ext ) else fname
@@ -549,7 +549,7 @@ def filespec_to_file_map(klass, filespec):
549549 if os .path .exists (fname + ext ):
550550 fname += ext
551551 break
552- file_map [key ] = FileHolder ( filename = fname )
552+ file_map [key ]. filename = fname
553553 return file_map
554554
555555 load = from_filename
0 commit comments