# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-
+""" Class for reading AFNI BRIK/HEAD datasets
+
+See https://afni.nimh.nih.gov/pub/dist/doc/program_help/README.attributes.html
+for more information on what is required to have a valid BRIK/HEAD dataset.
+
+Examples
+--------
+.. testsetup::
+    # change directory to provide relative paths for doctests
+    >>> filepath = os.path.dirname(os.path.realpath(__file__))
+    >>> datadir = os.path.realpath(os.path.join(filepath, 'tests/data'))
+    >>> os.chdir(datadir)
+"""
from __future__ import print_function, division

from copy import deepcopy
-import os.path as op
+import os
import re

import numpy as np


class AFNIError(Exception):
-    """ Error when reading AFNI files
-    """
+    """Error when reading AFNI files"""


DATA_OFFSET = 0
@@ -66,17 +77,29 @@ class AFNIError(Exception):


def _unpack_var(var):
-    """ Parses key, value pair from `var`
+    """
+    Parses key : value pair from `var`

    Parameters
    ----------
    var : str
-        Example: 'type = integer-attribute\nname = BRICK_TYPES\ncount = 1\n1\n'
+        Entry from HEAD file

    Returns
    -------
    (key, value)
-        Example: ('BRICK_TYPES', [1])
+        Parsed entry from HEAD file
+
+    Examples
+    --------
+    >>> var = "type = integer-attribute\\nname = BRICK_TYPES\\ncount = 1\\n1\\n"
+    >>> name, attr = _unpack_var(var)
+    >>> print(name, attr)
+    BRICK_TYPES 1
+    >>> var = "type = string-attribute\\nname = TEMPLATE_SPACE\\ncount = 5\\n'ORIG~"
+    >>> name, attr = _unpack_var(var)
+    >>> print(name, attr)
+    TEMPLATE_SPACE ORIG
    """
    # data type and key
    atype = TYPE_RE.findall(var)[0]
@@ -94,12 +117,23 @@ def _unpack_var(var):


def _get_datatype(info):
-    """ Gets datatype from `info` header information
+    """
+    Gets datatype of BRIK file associated with HEAD file yielding `info`
+
+    Parameters
+    ----------
+    info : dict
+        As obtained by `parse_AFNI_header()`
+
+    Returns
+    -------
+    np.dtype
+        Datatype of BRIK file associated with HEAD
    """
    bo = info['BYTEORDER_STRING']
    bt = info['BRICK_TYPES']
    if isinstance(bt, list):
-        if len(np.unique(bt)) > 1:
+        if np.unique(bt).size > 1:
            raise AFNIError('Can\'t load dataset with multiple data types.')
        else:
            bt = bt[0]
@@ -112,7 +146,8 @@ def _get_datatype(info):


def parse_AFNI_header(fobj):
-    """ Parses HEAD file for relevant information
+    """
+    Parses `fobj` to extract information from HEAD file

    Parameters
    ----------
@@ -121,19 +156,33 @@ def parse_AFNI_header(fobj):

    Returns
    -------
-    all_info : dict
-        Contains all the information from the HEAD file
+    info : dict
+        Dictionary containing AFNI-style key:value pairs from HEAD file
+
+    Examples
+    --------
+    >>> info = parse_AFNI_header('example4d+orig.HEAD')
+    >>> print(info['BYTEORDER_STRING'])
+    LSB_FIRST
+    >>> print(info['BRICK_TYPES'])
+    [1, 1, 1]
    """
+    # edge case for being fed a filename instead of a file object
+    if isinstance(fobj, str):
+        with open(fobj, 'r') as src:
+            return parse_AFNI_header(src)
+    # unpack variables in HEAD file
    head = fobj.read().split('\n\n')
-    all_info = {key: value for key, value in map(_unpack_var, head)}
+    info = {key: value for key, value in map(_unpack_var, head)}

-    return all_info
+    return info


class AFNIArrayProxy(ArrayProxy):
    @kw_only_meth(2)
    def __init__(self, file_like, header, mmap=True, keep_file_open=None):
-        """ Initialize AFNI array proxy
+        """
+        Initialize AFNI array proxy

        Parameters
        ----------
@@ -192,15 +241,23 @@ def __getitem__(self, slicer):


class AFNIHeader(SpatialHeader):
-    """ Class for AFNI header
-    """
+    """Class for AFNI header"""
    def __init__(self, info):
        """
        Parameters
        ----------
        info : dict
-            Information from AFNI HEAD file (as obtained by
-            `parse_AFNI_header()`)
+            Information from HEAD file as obtained by `parse_AFNI_header()`
+
+        Examples
+        --------
+        >>> header = AFNIHeader(parse_AFNI_header('example4d+orig.HEAD'))
+        >>> header.get_data_dtype()
+        dtype('int16')
+        >>> header.get_zooms()
+        (3.0, 3.0, 3.0, 3.0)
+        >>> header.get_data_shape()
+        (33, 41, 25, 3)
        """
        self.info = info
        dt = _get_datatype(self.info)
@@ -225,7 +282,7 @@ def copy(self):
        return AFNIHeader(deepcopy(self.info))

    def _calc_data_shape(self):
-        """ Calculate the output shape of the image data
+        """Calculate the output shape of the image data

        Returns length 3 tuple for 3D image, length 4 tuple for 4D.

@@ -256,21 +313,21 @@ def _calc_zooms(self):
        return xyz_step + t_step

    def get_orient(self):
-        """ Returns orientation of data
+        """Returns orientation of data

-        Three letter string of {('L','R'),('P','A'),('I','S')} specifying
+        Three letter string of {('L','R'), ('P','A'), ('I','S')} specifying
        data orientation

        Returns
        -------
        orient : str
        """
-        orient = [_orient_dict[f][0] for f in self.info['ORIENT_SPECIFIC']]
+        orient = [_orient_dict[f] for f in self.info['ORIENT_SPECIFIC']]

        return ''.join(orient)

    def get_space(self):
-        """ Returns space of dataset
+        """Returns space of dataset

        Returns
        -------
@@ -283,8 +340,17 @@ def get_space(self):

    def get_affine(self):
        """ Returns affine of dataset
+
+        Examples
+        --------
+        >>> header = AFNIHeader(parse_AFNI_header('example4d+orig.HEAD'))
+        >>> header.get_affine()
+        array([[ -3.    ,  -0.    ,  -0.    ,  49.5   ],
+               [ -0.    ,  -3.    ,  -0.    ,  82.312 ],
+               [  0.    ,   0.    ,   3.    , -52.3511],
+               [  0.    ,   0.    ,   0.    ,   1.    ]])
        """
-        # AFNI default is RAI/DICOM order (i.e., RAI are - axis)
+        # AFNI default is RAI-/DICOM order (i.e., RAI are - axis)
        # need to flip RA sign to align with nibabel RAS+ system
        affine = np.asarray(self.info['IJK_TO_DICOM_REAL']).reshape(3, 4)
        affine = np.row_stack((affine * [[-1], [-1], [1]],
@@ -294,6 +360,11 @@ def get_affine(self):

    def get_data_scaling(self):
        """ AFNI applies volume-specific data scaling
+
+        Examples
+        --------
+        >>> header = AFNIHeader(parse_AFNI_header('example4d+orig.HEAD'))
+        >>> header.get_data_scaling()
        """
        floatfacs = self.info.get('BRICK_FLOAT_FACS', None)
        if floatfacs is None or not np.any(floatfacs):
@@ -305,19 +376,25 @@ def get_data_scaling(self):
        return scale

    def get_slope_inter(self):
-        """ Use `self.get_data_scaling()` instead
-        """
+        """Use `self.get_data_scaling()` instead"""
        return None, None

    def get_data_offset(self):
+        """Data offset in BRIK file"""
        return DATA_OFFSET

    def get_volume_labels(self):
-        """ Returns volume labels
+        """Returns volume labels

        Returns
        -------
        labels : list of str
+
+        Examples
+        --------
+        >>> header = AFNIHeader(parse_AFNI_header('example4d+orig.HEAD'))
+        >>> header.get_volume_labels()
+        ['#0', '#1', '#2']
        """
        labels = self.info.get('BRICK_LABS', None)
        if labels is not None:
@@ -327,7 +404,23 @@ def get_volume_labels(self):


class AFNIImage(SpatialImage):
-    """ AFNI image file
+    """AFNI Image file
+
+    Can be loaded from either the BRIK or HEAD file (but MUST specify one!)
+
+    Examples
+    --------
+    >>> brik = load('example4d+orig.BRIK')
+    >>> brik.shape
+    (33, 41, 25, 3)
+    >>> brik.affine
+    array([[ -3.    ,  -0.    ,  -0.    ,  49.5   ],
+           [ -0.    ,  -3.    ,  -0.    ,  82.312 ],
+           [  0.    ,   0.    ,   3.    , -52.3511],
+           [  0.    ,   0.    ,   0.    ,   1.    ]])
+    >>> head = load('example4d+orig.HEAD')
+    >>> (head.get_data() == brik.get_data()).all()
+    True
    """

    header_class = AFNIHeader
@@ -346,7 +439,7 @@ def from_file_map(klass, file_map, mmap=True):
        ----------
        file_map : dict
            dict with keys ``image, header`` and values being fileholder
-            objects for the respective REC and PAR files.
+            objects for the respective BRIK and HEAD files
        mmap : {True, False, 'c', 'r'}, optional, keyword only
            `mmap` controls the use of numpy memory mapping for reading image
            array data. If False, do not try numpy ``memmap`` for data array.
@@ -372,7 +465,7 @@ def from_filename(klass, filename, mmap=True):
        ----------
        file_map : dict
            dict with keys ``image, header`` and values being fileholder
-            objects for the respective REC and PAR files.
+            objects for the respective BRIK and HEAD files
        mmap : {True, False, 'c', 'r'}, optional, keyword only
            `mmap` controls the use of numpy memory mapping for reading image
            array data. If False, do not try numpy ``memmap`` for data array.
@@ -385,16 +478,16 @@ def from_filename(klass, filename, mmap=True):
        # only BRIK can be compressed, but `filespec_to_file_map` doesn't
        # handle that case; remove potential compression suffixes from HEAD
        head_fname = file_map['header'].filename
-        if not op.exists(head_fname):
+        if not os.path.exists(head_fname):
            for ext in klass._compressed_suffixes:
                head_fname = re.sub(ext, '', head_fname)
            file_map['header'].filename = head_fname
        # if HEAD is read in and BRIK is compressed, function won't detect the
        # compressed format; check for these cases
-        if not op.exists(file_map['image'].filename):
+        if not os.path.exists(file_map['image'].filename):
            for ext in klass._compressed_suffixes:
                im_ext = file_map['image'].filename + ext
-                if op.exists(im_ext):
+                if os.path.exists(im_ext):
                    file_map['image'].filename = im_ext
                    break
        return klass.from_file_map(file_map,
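
For reference, a minimal usage sketch of the reader touched by this diff. It assumes the module is importable as nibabel.brikhead and that a dataset pair scan+orig.HEAD / scan+orig.BRIK exists on disk; the import path and filenames are hypothetical placeholders, not part of the change above.

# Minimal usage sketch; import path and filenames are assumed, see note above.
from nibabel.brikhead import AFNIImage, parse_AFNI_header

# An AFNI dataset can be opened from either of its two files.
img = AFNIImage.from_filename('scan+orig.BRIK')
print(img.shape)    # spatial dimensions plus number of sub-bricks
print(img.affine)   # RAS+ affine derived from IJK_TO_DICOM_REAL

# Header metadata parsed from the companion HEAD file.
with open('scan+orig.HEAD', 'r') as fobj:
    info = parse_AFNI_header(fobj)
print(info['BRICK_TYPES'])               # per-volume datatype codes
print(img.header.get_volume_labels())    # sub-brick labels, e.g. ['#0', '#1']
print(img.header.get_data_scaling())     # BRICK_FLOAT_FACS scaling, or None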