From 7a07d5a9613f61b71decfb9e7e61cf2c2dce1785 Mon Sep 17 00:00:00 2001
From: Jonathan Daniel
Date: Tue, 26 May 2020 18:24:26 +0300
Subject: [PATCH 1/2] RF: Use triple double-quoted strings in docstrings

''' -> """
+ some style fixes in doc/source/dicom/derivations
---
 bin/nib-nifti-dx | 2 +-
 doc/source/dicom/derivations/dicom_mosaic.py | 6 +-
 .../dicom/derivations/spm_dicom_orient.py | 82 +++++------
 doc/tools/apigen.py | 40 +++---
 nibabel/analyze.py | 92 ++++++-------
 nibabel/batteryrunners.py | 40 +++---
 nibabel/cifti2/cifti2.py | 54 ++++----
 nibabel/cifti2/parse_cifti2.py | 4 +-
 nibabel/cmdline/nifti_dx.py | 2 +-
 nibabel/data.py | 42 +++---
 nibabel/dataobj_images.py | 14 +-
 nibabel/ecat.py | 42 +++---
 nibabel/environment.py | 8 +-
 nibabel/eulerangles.py | 28 ++--
 nibabel/externals/tests/test_netcdf.py | 2 +-
 nibabel/filebasedimages.py | 48 +++----
 nibabel/fileholders.py | 18 +--
 nibabel/filename_parser.py | 14 +-
 nibabel/freesurfer/mghformat.py | 118 ++++++++--------
 nibabel/freesurfer/tests/test_mghformat.py | 6 +-
 nibabel/funcs.py | 20 +--
 nibabel/imageclasses.py | 2 +-
 nibabel/loadsave.py | 8 +-
 nibabel/minc1.py | 16 +--
 nibabel/minc2.py | 12 +-
 nibabel/nicom/csareader.py | 20 +--
 nibabel/nicom/dicomreaders.py | 14 +-
 nibabel/nicom/dicomwrappers.py | 2 +-
 nibabel/nicom/dwiparams.py | 12 +-
 nibabel/nicom/structreader.py | 18 +--
 nibabel/nicom/tests/data_pkgs.py | 2 +-
 nibabel/nifti1.py | 128 +++++++++---------
 nibabel/nifti2.py | 18 +--
 nibabel/orientations.py | 18 +--
 nibabel/pkg_info.py | 8 +-
 nibabel/quaternions.py | 56 ++++----
 nibabel/spatialimages.py | 48 +++----
 nibabel/spm2analyze.py | 10 +-
 nibabel/spm99analyze.py | 36 ++---
 nibabel/testing/__init__.py | 2 +-
 nibabel/tests/test_analyze.py | 4 +-
 nibabel/tests/test_batteryrunners.py | 4 +-
 nibabel/tests/test_data.py | 2 +-
 nibabel/tests/test_endiancodes.py | 2 +-
 nibabel/tests/test_euler.py | 6 +-
 nibabel/tests/test_filename_parser.py | 2 +-
 nibabel/tests/test_funcs.py | 2 +-
 nibabel/tests/test_image_load_save.py | 2 +-
 nibabel/tests/test_image_types.py | 2 +-
 nibabel/tests/test_nifti1.py | 2 +-
 nibabel/tests/test_nifti2.py | 2 +-
 nibabel/tests/test_openers.py | 2 +-
 nibabel/tests/test_orientations.py | 2 +-
 nibabel/tests/test_quaternions.py | 4 +-
 nibabel/tests/test_recoder.py | 2 +-
 nibabel/tests/test_scaling.py | 2 +-
 nibabel/tests/test_spm2analyze.py | 2 +-
 nibabel/tests/test_trackvis.py | 2 +-
 nibabel/tests/test_volumeutils.py | 8 +-
 nibabel/tests/test_wrapstruct.py | 16 +--
 nibabel/tmpdirs.py | 8 +-
 nibabel/trackvis.py | 26 ++--
 nibabel/tripwire.py | 2 +-
 nibabel/volumeutils.py | 66 ++++-----
 nibabel/wrapstruct.py | 76 +++++------
 nisext/sexts.py | 8 +-
 nisext/testers.py | 20 +--
 tools/bisect_nose.py | 2 +-
 tools/gitwash_dumper.py | 10 +-
 tools/profile | 30 ++--
 70 files changed, 715 insertions(+), 715 deletions(-)

diff --git a/bin/nib-nifti-dx b/bin/nib-nifti-dx
index d317585286..b395ee1d9a 100755
--- a/bin/nib-nifti-dx
+++ b/bin/nib-nifti-dx
@@ -7,7 +7,7 @@
 #   copyright and license terms.
 # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-''' Print nifti diagnostics for header files '''
+""" Print nifti diagnostics for header files """
 
 from nibabel.cmdline.nifti_dx import main
 
diff --git a/doc/source/dicom/derivations/dicom_mosaic.py b/doc/source/dicom/derivations/dicom_mosaic.py
index b4c173c2b2..074c5491c1 100644
--- a/doc/source/dicom/derivations/dicom_mosaic.py
+++ b/doc/source/dicom/derivations/dicom_mosaic.py
@@ -1,4 +1,4 @@
-''' Just showing the mosaic simplification '''
+""" Just showing the mosaic simplification """
 
 from sympy import Matrix, Symbol, symbols, simplify
 
@@ -19,12 +19,12 @@ def numbered_vector(nrows, symbol_prefix):
     'md_{cols} md_{rows} rd_{cols} rd_{rows}')
 
 md_adj = Matrix((mdc - 1, mdr - 1, 0)) / -2
-rd_adj = Matrix((rdc - 1 , rdr - 1, 0)) / -2
+rd_adj = Matrix((rdc - 1, rdr - 1, 0)) / -2
 
 adj = -(RS * md_adj) + RS * rd_adj
 adj.simplify()
 
-Q = RS[:,:2] * Matrix((
+Q = RS[:, :2] * Matrix((
     (mdc - rdc) / 2,
     (mdr - rdr) / 2))
 
diff --git a/doc/source/dicom/derivations/spm_dicom_orient.py b/doc/source/dicom/derivations/spm_dicom_orient.py
index 381ffdb3ef..936e807ce1 100644
--- a/doc/source/dicom/derivations/spm_dicom_orient.py
+++ b/doc/source/dicom/derivations/spm_dicom_orient.py
@@ -1,11 +1,11 @@
-''' Symbolic versions of the DICOM orientation mathemeatics.
+""" Symbolic versions of the DICOM orientation mathematics.
 
 Notes on the SPM orientation machinery.
 
 There are symbolic versions of the code in ``spm_dicom_convert``,
 ``write_volume`` subfunction, around line 509 in the version I have
 (SPM8, late 2009 vintage).
-'''
+"""
 
 import numpy as np
 
@@ -16,21 +16,21 @@
 # The code below is general (independent of SPMs code)
 def numbered_matrix(nrows, ncols, symbol_prefix):
     return Matrix(nrows, ncols, lambda i, j: Symbol(
-        symbol_prefix + '_{%d%d}' % (i+1, j+1)))
+        symbol_prefix + '_{%d%d}' % (i + 1, j + 1)))
 
 
 def numbered_vector(nrows, symbol_prefix):
     return Matrix(nrows, 1, lambda i, j: Symbol(
-        symbol_prefix + '_{%d}' % (i+1)))
+        symbol_prefix + '_{%d}' % (i + 1)))
 
 # premultiplication matrix to go from 0 based to 1 based indexing
 one_based = eye(4)
-one_based[:3,3] = (1,1,1)
+one_based[:3, 3] = (1, 1, 1)
 # premult for swapping row and column indices
 row_col_swap = eye(4)
-row_col_swap[:,0] = eye(4)[:,1]
-row_col_swap[:,1] = eye(4)[:,0]
+row_col_swap[:, 0] = eye(4)[:, 1]
+row_col_swap[:, 1] = eye(4)[:, 0]
 
 # various worming matrices
 orient_pat = numbered_matrix(3, 2, 'F')
@@ -40,47 +40,49 @@ def numbered_vector(nrows, symbol_prefix):
 pos_pat_N = numbered_vector(3, 'T^N')
 pixel_spacing = symbols((r'\Delta{r}', r'\Delta{c}'))
 NZ = Symbol('N')
-slice_spacing = Symbol('\Delta{s}')
+slice_spacing = Symbol(r'\Delta{s}')
 
 R3 = orient_pat * np.diag(pixel_spacing)
 R = zeros(4, 2)
-R[:3,:] = R3
+R[:3, :] = R3
 
 # The following is specific to the SPM algorithm.
x1 = ones(4, 1) y1 = ones(4, 1) -y1[:3,:] = pos_pat_0 +y1[:3, :] = pos_pat_0 to_inv = zeros(4, 4) -to_inv[:,0] = x1 -to_inv[:,1] = symbols('a b c d') -to_inv[0,2] = 1 -to_inv[1,3] = 1 +to_inv[:, 0] = x1 +to_inv[:, 1] = symbols('a b c d') +to_inv[0, 2] = 1 +to_inv[1, 3] = 1 inv_lhs = zeros(4, 4) -inv_lhs[:,0] = y1 -inv_lhs[:,1] = symbols('e f g h') -inv_lhs[:,2:] = R +inv_lhs[:, 0] = y1 +inv_lhs[:, 1] = symbols('e f g h') +inv_lhs[:, 2:] = R + def spm_full_matrix(x2, y2): - rhs = to_inv[:,:] - rhs[:,1] = x2 - lhs = inv_lhs[:,:] - lhs[:,1] = y2 + rhs = to_inv[:, :] + rhs[:, 1] = x2 + lhs = inv_lhs[:, :] + lhs[:, 1] = y2 return lhs * rhs.inv() + # single slice case orient = zeros(3, 3) -orient[:3,:2] = orient_pat -orient[:,2] = orient_cross -x2_ss = Matrix((0,0,1,0)) +orient[:3, :2] = orient_pat +orient[:, 2] = orient_cross +x2_ss = Matrix((0, 0, 1, 0)) y2_ss = zeros(4, 1) -y2_ss[:3,:] = orient * Matrix((0,0,slice_spacing)) +y2_ss[:3, :] = orient * Matrix((0, 0, slice_spacing)) A_ss = spm_full_matrix(x2_ss, y2_ss) # many slice case -x2_ms = Matrix((1,1,NZ,1)) +x2_ms = Matrix((1, 1, NZ, 1)) y2_ms = ones(4, 1) -y2_ms[:3,:] = pos_pat_N +y2_ms[:3, :] = pos_pat_N A_ms = spm_full_matrix(x2_ms, y2_ms) # End of SPM algorithm @@ -92,22 +94,22 @@ def spm_full_matrix(x2, y2): single_aff = eye(4) rot = orient rot_scale = rot * np.diag(pixel_spacing[:] + (slice_spacing,)) -single_aff[:3,:3] = rot_scale -single_aff[:3,3] = pos_pat_0 +single_aff[:3, :3] = rot_scale +single_aff[:3, 3] = pos_pat_0 # For multi-slice case, we have the start and the end slice position # patient. This gives us the third column of the affine, because, # ``pat_pos_N = aff * [[0,0,ZN-1,1]].T multi_aff = eye(4) -multi_aff[:3,:2] = R3 -trans_z_N = Matrix((0,0, NZ-1, 1)) +multi_aff[:3, :2] = R3 +trans_z_N = Matrix((0, 0, NZ - 1, 1)) multi_aff[:3, 2] = missing_r_col multi_aff[:3, 3] = pos_pat_0 est_pos_pat_N = multi_aff * trans_z_N -eqns = tuple(est_pos_pat_N[:3,0] - pos_pat_N) -solved = sympy.solve(eqns, tuple(missing_r_col)) -multi_aff_solved = multi_aff[:,:] -multi_aff_solved[:3,2] = multi_aff_solved[:3,2].subs(solved) +eqns = tuple(est_pos_pat_N[:3, 0] - pos_pat_N) +solved = sympy.solve(eqns, tuple(missing_r_col)) +multi_aff_solved = multi_aff[:, :] +multi_aff_solved[:3, 2] = multi_aff_solved[:3, 2].subs(solved) # Check that SPM gave us the same result A_ms_0based = A_ms * one_based @@ -121,10 +123,10 @@ def spm_full_matrix(x2, y2): A_i = single_aff nz_trans = eye(4) NZT = Symbol('d') -nz_trans[2,3] = NZT +nz_trans[2, 3] = NZT A_j = A_i * nz_trans -IPP_i = A_i[:3,3] -IPP_j = A_j[:3,3] +IPP_i = A_i[:3, 3] +IPP_j = A_j[:3, 3] # SPM does it with the inner product of the vectors spm_z = IPP_j.T * orient_cross @@ -135,11 +137,13 @@ def spm_full_matrix(x2, y2): ipp_sum_div = sum(IPP_j) / sum(orient_cross) ipp_sum_div = sympy.simplify(ipp_sum_div) + # Dump out the formulae here to latex for the RST docs def my_latex(expr): S = sympy.latex(expr) return S[1:-1] + print('Latex stuff') print(' R = ' + my_latex(to_inv)) print(' ') @@ -159,4 +163,4 @@ def my_latex(expr): print() print(' T^j = ' + my_latex(IPP_j)) print() -print(' T^j \cdot \mathbf{c} = ' + my_latex(spm_z)) +print(r' T^j \cdot \mathbf{c} = ' + my_latex(spm_z)) diff --git a/doc/tools/apigen.py b/doc/tools/apigen.py index 4be721733d..05498c69a9 100644 --- a/doc/tools/apigen.py +++ b/doc/tools/apigen.py @@ -29,8 +29,8 @@ class ApiDocWriter(object): - ''' Class for automatic detection and parsing of API docs - to Sphinx-parsable reST format''' + """ Class for automatic detection and 
parsing of API docs + to Sphinx-parsable reST format""" # only separating first two levels rst_section_levels = ['*', '=', '-', '~', '^'] @@ -42,7 +42,7 @@ def __init__(self, module_skip_patterns=None, other_defines=True ): - ''' Initialize package for parsing + """ Initialize package for parsing Parameters ---------- @@ -70,7 +70,7 @@ def __init__(self, other_defines : {True, False}, optional Whether to include classes and functions that are imported in a particular module but not defined there. - ''' + """ if package_skip_patterns is None: package_skip_patterns = ['\\.tests$'] if module_skip_patterns is None: @@ -85,7 +85,7 @@ def get_package_name(self): return self._package_name def set_package_name(self, package_name): - ''' Set package_name + """ Set package_name >>> docwriter = ApiDocWriter('sphinx') >>> import sphinx @@ -95,7 +95,7 @@ def set_package_name(self, package_name): >>> import docutils >>> docwriter.root_path == docutils.__path__[0] True - ''' + """ # It's also possible to imagine caching the module parsing here self._package_name = package_name root_module = self._import(package_name) @@ -106,7 +106,7 @@ def set_package_name(self, package_name): 'get/set package_name') def _import(self, name): - ''' Import namespace package ''' + """ Import namespace package """ mod = __import__(name) components = name.split('.') for comp in components[1:]: @@ -114,7 +114,7 @@ def _import(self, name): return mod def _get_object_name(self, line): - ''' Get second token in line + """ Get second token in line >>> docwriter = ApiDocWriter('sphinx') >>> docwriter._get_object_name(" def func(): ") 'func' @@ -122,14 +122,14 @@ def _get_object_name(self, line): 'Klass' >>> docwriter._get_object_name(" class Klass: ") 'Klass' - ''' + """ name = line.split()[1].split('(')[0].strip() # in case we have classes which are not derived from object # ie. old style classes return name.rstrip(':') def _uri2path(self, uri): - ''' Convert uri to absolute filepath + """ Convert uri to absolute filepath Parameters ---------- @@ -155,7 +155,7 @@ def _uri2path(self, uri): True >>> docwriter._uri2path('sphinx.does_not_exist') - ''' + """ if uri == self.package_name: return os.path.join(self.root_path, '__init__.py') path = uri.replace(self.package_name + '.', '') @@ -171,7 +171,7 @@ def _uri2path(self, uri): return path def _path2uri(self, dirpath): - ''' Convert directory path to uri ''' + """ Convert directory path to uri """ package_dir = self.package_name.replace('.', os.path.sep) relpath = dirpath.replace(self.root_path, package_dir) if relpath.startswith(os.path.sep): @@ -179,7 +179,7 @@ def _path2uri(self, dirpath): return relpath.replace(os.path.sep, '.') def _parse_module(self, uri): - ''' Parse module defined in *uri* ''' + """ Parse module defined in *uri* """ filename = self._uri2path(uri) if filename is None: print(filename, 'erk') @@ -233,7 +233,7 @@ def _parse_module_with_import(self, uri): return functions, classes def _parse_lines(self, linesource): - ''' Parse lines of text for functions and classes ''' + """ Parse lines of text for functions and classes """ functions = [] classes = [] for line in linesource: @@ -254,7 +254,7 @@ def _parse_lines(self, linesource): return functions, classes def generate_api_doc(self, uri): - '''Make autodoc documentation template string for a module + """Make autodoc documentation template string for a module Parameters ---------- @@ -267,7 +267,7 @@ def generate_api_doc(self, uri): Module name, table of contents. body : string Function and class docstrings. 
- ''' + """ # get the names of all classes and functions functions, classes = self._parse_module_with_import(uri) if not len(functions) and not len(classes) and DEBUG: @@ -317,7 +317,7 @@ def generate_api_doc(self, uri): return head, body def _survives_exclude(self, matchstr, match_type): - ''' Returns True if *matchstr* does not match patterns + """ Returns True if *matchstr* does not match patterns ``self.package_name`` removed from front of string if present @@ -336,7 +336,7 @@ def _survives_exclude(self, matchstr, match_type): >>> dw.module_skip_patterns.append('^\\.badmod$') >>> dw._survives_exclude('sphinx.badmod', 'module') False - ''' + """ if match_type == 'module': patterns = self.module_skip_patterns elif match_type == 'package': @@ -359,7 +359,7 @@ def _survives_exclude(self, matchstr, match_type): return True def discover_modules(self): - ''' Return module sequence discovered from ``self.package_name`` + """ Return module sequence discovered from ``self.package_name`` Parameters @@ -381,7 +381,7 @@ def discover_modules(self): >>> 'sphinx.util' in dw.discover_modules() False >>> - ''' + """ modules = [self.package_name] # raw directory parsing for dirpath, dirnames, filenames in os.walk(self.root_path): diff --git a/nibabel/analyze.py b/nibabel/analyze.py index 219aa42993..53e01db64c 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Read / write access to the basic Mayo Analyze format +""" Read / write access to the basic Mayo Analyze format =========================== The Analyze header format @@ -80,7 +80,7 @@ zooms, in particular, negative X zooms. We did not do this because the image can be loaded with and without a default flip, so the saved zoom will not constrain the affine. 
-''' +""" import numpy as np @@ -171,11 +171,11 @@ class AnalyzeHeader(LabeledWrapStruct): - ''' Class for basic analyze header + """ Class for basic analyze header Implements zoom-only setting of affine transform, and no image scaling - ''' + """ # Copies of module-level definitions template_dtype = header_dtype _data_type_codes = data_type_codes @@ -194,7 +194,7 @@ def __init__(self, binaryblock=None, endianness=None, check=True): - ''' Initialize header from binary data block + """ Initialize header from binary data block Parameters ---------- @@ -247,12 +247,12 @@ def __init__(self, >>> hdr4 = AnalyzeHeader(binblock3) >>> hdr4.endianness == swapped_code True - ''' + """ super(AnalyzeHeader, self).__init__(binaryblock, endianness, check) @classmethod def guessed_endian(klass, hdr): - ''' Guess intended endianness from mapping-like ``hdr`` + """ Guess intended endianness from mapping-like ``hdr`` Parameters ---------- @@ -323,7 +323,7 @@ def guessed_endian(klass, hdr): >>> hdr_data['dim'][0] = 1 >>> AnalyzeHeader.guessed_endian(hdr_data) == native_code True - ''' + """ dim0 = int(hdr['dim'][0]) if dim0 == 0: if hdr['sizeof_hdr'].byteswap() == klass.sizeof_hdr: @@ -335,8 +335,8 @@ def guessed_endian(klass, hdr): @classmethod def default_structarr(klass, endianness=None): - ''' Return header data for empty header with given endianness - ''' + """ Return header data for empty header with given endianness + """ hdr_data = super(AnalyzeHeader, klass).default_structarr(endianness) hdr_data['sizeof_hdr'] = klass.sizeof_hdr hdr_data['dim'] = 1 @@ -348,7 +348,7 @@ def default_structarr(klass, endianness=None): @classmethod def from_header(klass, header=None, check=True): - ''' Class method to create header from another header + """ Class method to create header from another header Parameters ---------- @@ -362,7 +362,7 @@ def from_header(klass, header=None, check=True): ------- hdr : header instance fresh header instance of our own class - ''' + """ # own type, return copy if type(header) == klass: obj = header.copy() @@ -407,7 +407,7 @@ def from_header(klass, header=None, check=True): return obj def _clean_after_mapping(self): - ''' Set format-specific stuff after converting header from mapping + """ Set format-specific stuff after converting header from mapping This routine cleans up Analyze-type headers that have had their fields set from an Analyze map returned by the ``as_analyze_map`` method. @@ -422,13 +422,13 @@ def _clean_after_mapping(self): magic to "n+1", when it should be "ni1" for the pair header. This method is for that kind of case - so the specific header can set fields like magic correctly, even though the mapping has given a wrong value. - ''' + """ # All current Nifti etc fields that are present in the Analyze header # have the same meaning as they do for Analyze. pass def raw_data_from_fileobj(self, fileobj): - ''' Read unscaled data array from `fileobj` + """ Read unscaled data array from `fileobj` Parameters ---------- @@ -439,14 +439,14 @@ def raw_data_from_fileobj(self, fileobj): ------- arr : ndarray unscaled data array - ''' + """ dtype = self.get_data_dtype() shape = self.get_data_shape() offset = self.get_data_offset() return array_from_file(shape, dtype, fileobj, offset) def data_from_fileobj(self, fileobj): - ''' Read scaled data array from `fileobj` + """ Read scaled data array from `fileobj` Use this routine to get the scaled image data from an image file `fileobj`, given a header `self`. 
"Scaled" means, with any header @@ -469,7 +469,7 @@ def data_from_fileobj(self, fileobj): data. Raw Analyze files don't have scale factors or intercepts, but this routine also works with formats based on Analyze, that do have scaling, such as SPM analyze formats and NIfTI. - ''' + """ # read unscaled data data = self.raw_data_from_fileobj(fileobj) # get scalings from header. Value of None means not present in header @@ -480,7 +480,7 @@ def data_from_fileobj(self, fileobj): return apply_read_scaling(data, slope, inter) def data_to_fileobj(self, data, fileobj, rescale=True): - ''' Write `data` to `fileobj`, maybe rescaling data, modifying `self` + """ Write `data` to `fileobj`, maybe rescaling data, modifying `self` In writing the data, we match the header to the written data, by setting the header scaling factors, iff `rescale` is True. Thus we @@ -510,7 +510,7 @@ def data_to_fileobj(self, data, fileobj, rescale=True): >>> hdr.data_to_fileobj(data, str_io) >>> data.astype(np.float64).tobytes('F') == str_io.getvalue() True - ''' + """ data = np.asanyarray(data) shape = self.get_data_shape() if data.shape != shape: @@ -532,16 +532,16 @@ def data_to_fileobj(self, data, fileobj, rescale=True): self.set_slope_inter(*get_slope_inter(arr_writer)) def get_data_dtype(self): - ''' Get numpy dtype for data + """ Get numpy dtype for data For examples see ``set_data_dtype`` - ''' + """ code = int(self._structarr['datatype']) dtype = self._data_type_codes.dtype[code] return dtype.newbyteorder(self.endianness) def set_data_dtype(self, datatype): - ''' Set numpy dtype for data from code or dtype or type + """ Set numpy dtype for data from code or dtype or type Examples -------- @@ -564,7 +564,7 @@ def set_data_dtype(self, datatype): Traceback (most recent call last): ... HeaderDataError: data dtype "" known but not supported - ''' + """ dt = datatype if dt not in self._data_type_codes: try: @@ -585,7 +585,7 @@ def set_data_dtype(self, datatype): self._structarr['bitpix'] = dtype.itemsize * 8 def get_data_shape(self): - ''' Get shape of data + """ Get shape of data Examples -------- @@ -600,7 +600,7 @@ def get_data_shape(self): >>> hdr.get_zooms() (1.0, 1.0, 1.0) - ''' + """ dims = self._structarr['dim'] ndims = dims[0] if ndims == 0: @@ -608,7 +608,7 @@ def get_data_shape(self): return tuple(int(d) for d in dims[1:ndims + 1]) def set_data_shape(self, shape): - ''' Set shape of data + """ Set shape of data If ``ndims == len(shape)`` then we set zooms for dimensions higher than ``ndims`` to 1.0 @@ -617,7 +617,7 @@ def set_data_shape(self, shape): ---------- shape : sequence sequence of integers specifying data array shape - ''' + """ dims = self._structarr['dim'] ndims = len(shape) dims[:] = 1 @@ -637,7 +637,7 @@ def set_data_shape(self, shape): self._structarr['pixdim'][ndims + 1:] = 1.0 def get_base_affine(self): - ''' Get affine from basic (shared) header fields + """ Get affine from basic (shared) header fields Note that we get the translations from the center of the image. 
@@ -654,7 +654,7 @@ def get_base_affine(self): [ 0., 2., 0., -4.], [ 0., 0., 1., -3.], [ 0., 0., 0., 1.]]) - ''' + """ hdr = self._structarr dims = hdr['dim'] ndim = dims[0] @@ -665,7 +665,7 @@ def get_base_affine(self): get_best_affine = get_base_affine def get_zooms(self): - ''' Get zooms from header + """ Get zooms from header Returns ------- @@ -683,7 +683,7 @@ def get_zooms(self): >>> hdr.set_zooms((3, 4)) >>> hdr.get_zooms() (3.0, 4.0) - ''' + """ hdr = self._structarr dims = hdr['dim'] ndim = dims[0] @@ -693,10 +693,10 @@ def get_zooms(self): return tuple(pixdims[1:ndim + 1]) def set_zooms(self, zooms): - ''' Set zooms into header fields + """ Set zooms into header fields See docstring for ``get_zooms`` for examples - ''' + """ hdr = self._structarr dims = hdr['dim'] ndim = dims[0] @@ -754,7 +754,7 @@ def set_data_offset(self, offset): self._structarr['vox_offset'] = offset def get_data_offset(self): - ''' Return offset into data file to read data + """ Return offset into data file to read data Examples -------- @@ -764,18 +764,18 @@ def get_data_offset(self): >>> hdr['vox_offset'] = 12 >>> hdr.get_data_offset() 12 - ''' + """ return int(self._structarr['vox_offset']) def get_slope_inter(self): - ''' Get scalefactor and intercept + """ Get scalefactor and intercept These are not implemented for basic Analyze - ''' + """ return None, None def set_slope_inter(self, slope, inter=None): - ''' Set slope and / or intercept into header + """ Set slope and / or intercept into header Set slope and intercept for image data, such that, if the image data is ``arr``, then the scaled image data will be ``(arr * @@ -791,7 +791,7 @@ def set_slope_inter(self, slope, inter=None): If float, value must be NaN or 1.0 or we raise a ``HeaderTypeError`` inter : None or float, optional If float, value must be 0.0 or we raise a ``HeaderTypeError`` - ''' + """ if ((slope in (None, 1) or np.isnan(slope)) and (inter in (None, 0) or np.isnan(inter))): return @@ -800,13 +800,13 @@ def set_slope_inter(self, slope, inter=None): @classmethod def _get_checks(klass): - ''' Return sequence of check functions for this class ''' + """ Return sequence of check functions for this class """ return (klass._chk_sizeof_hdr, klass._chk_datatype, klass._chk_bitpix, klass._chk_pixdims) - ''' Check functions in format expected by BatteryRunner class ''' + """ Check functions in format expected by BatteryRunner class """ @classmethod def _chk_sizeof_hdr(klass, hdr, fix=False): @@ -933,7 +933,7 @@ def set_data_dtype(self, dtype): @classmethod def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): - ''' Class method to create image from mapping in ``file_map`` + """ Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has @@ -965,7 +965,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): Returns ------- img : AnalyzeImage instance - ''' + """ if mmap not in (True, False, 'c', 'r'): raise ValueError("mmap should be one of {True, False, 'c', 'r'}") hdr_fh, img_fh = klass._get_fileholders(file_map) @@ -997,14 +997,14 @@ def _get_fileholders(file_map): return file_map['header'], file_map['image'] def to_file_map(self, file_map=None): - ''' Write image to `file_map` or contained ``self.file_map`` + """ Write image to `file_map` or contained ``self.file_map`` Parameters ---------- file_map : None or mapping, optional files mapping. 
If None (default) use object's ``file_map`` attribute instead - ''' + """ if file_map is None: file_map = self.file_map data = np.asanyarray(self.dataobj) diff --git a/nibabel/batteryrunners.py b/nibabel/batteryrunners.py index b77c8b8858..78a887fb56 100644 --- a/nibabel/batteryrunners.py +++ b/nibabel/batteryrunners.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Battery runner classes and Report classes +""" Battery runner classes and Report classes These classes / objects are for generic checking / fixing batteries @@ -105,14 +105,14 @@ def chk_pixdims(hdr, fix=True): rep.fix_msg = 'setting to abs of pixdim values' return hdr, rep -''' +""" class BatteryRunner(object): - ''' Class to run set of checks ''' + """ Class to run set of checks """ def __init__(self, checks): - ''' Initialize instance from sequence of `checks` + """ Initialize instance from sequence of `checks` Parameters ---------- @@ -126,11 +126,11 @@ def __init__(self, checks): >>> def chk(obj, fix=False): # minimal check ... return obj, Report() >>> btrun = BatteryRunner((chk,)) - ''' + """ self._checks = checks def check_only(self, obj): - ''' Run checks on `obj` returning reports + """ Run checks on `obj` returning reports Parameters ---------- @@ -142,7 +142,7 @@ def check_only(self, obj): reports : sequence sequence of report objects reporting on result of running checks (without fixes) on `obj` - ''' + """ reports = [] for check in self._checks: obj, rep = check(obj, False) @@ -150,7 +150,7 @@ def check_only(self, obj): return reports def check_fix(self, obj): - ''' Run checks, with fixes, on `obj` returning `obj`, reports + """ Run checks, with fixes, on `obj` returning `obj`, reports Parameters ---------- @@ -163,7 +163,7 @@ def check_fix(self, obj): possibly modified or replaced `obj`, after fixes reports : sequence sequence of reports on checks, fixes - ''' + """ reports = [] for check in self._checks: obj, report = check(obj, True) @@ -181,7 +181,7 @@ def __init__(self, problem_level=0, problem_msg='', fix_msg=''): - ''' Initialize report with values + """ Initialize report with values Parameters ---------- @@ -207,7 +207,7 @@ def __init__(self, >>> rep = Report(TypeError, 10) >>> rep.problem_level 10 - ''' + """ self.error = error self.problem_level = problem_level self.problem_msg = problem_msg @@ -223,7 +223,7 @@ def __getstate__(self): return self.error, self.problem_level, self.problem_msg, self.fix_msg def __eq__(self, other): - ''' are two BatteryRunner-like objects equal? + """ are two BatteryRunner-like objects equal? 
Parameters ---------- @@ -239,7 +239,7 @@ def __eq__(self, other): >>> rep3 = Report(problem_level=20) >>> rep == rep3 False - ''' + """ return self.__getstate__() == other.__getstate__() def __ne__(self, other): @@ -250,19 +250,19 @@ def __ne__(self, other): return not self == other def __str__(self): - ''' Printable string for object ''' + """ Printable string for object """ return self.__dict__.__str__() @property def message(self): - ''' formatted message string, including fix message if present - ''' + """ formatted message string, including fix message if present + """ if self.fix_msg: return '; '.join((self.problem_msg, self.fix_msg)) return self.problem_msg def log_raise(self, logger, error_level=40): - ''' Log problem, raise error if problem >= `error_level` + """ Log problem, raise error if problem >= `error_level` Parameters ---------- @@ -270,14 +270,14 @@ def log_raise(self, logger, error_level=40): log object, implementing ``log`` method error_level : int, optional If ``self.problem_level`` >= `error_level`, raise error - ''' + """ logger.log(self.problem_level, self.message) if self.problem_level and self.problem_level >= error_level: if self.error: raise self.error(self.problem_msg) def write_raise(self, stream, error_level=40, log_level=30): - ''' Write report to `stream` + """ Write report to `stream` Parameters ---------- @@ -289,7 +289,7 @@ def write_raise(self, stream, error_level=40, log_level=30): log_level : int, optional Such that if `log_level` is >= ``self.problem_level`` we write the report to `stream`, otherwise we write nothing. - ''' + """ if self.problem_level >= log_level: stream.write('Level %s: %s\n' % (self.problem_level, self.message)) diff --git a/nibabel/cifti2/cifti2.py b/nibabel/cifti2/cifti2.py index c83c991573..bd86ebfaa7 100644 --- a/nibabel/cifti2/cifti2.py +++ b/nibabel/cifti2/cifti2.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Read / write access to CIFTI-2 image format +""" Read / write access to CIFTI-2 image format Format of the NIFTI2 container format described here: @@ -15,7 +15,7 @@ Definition of the CIFTI-2 header format and file extensions can be found at: http://www.nitrc.org/projects/cifti -''' +""" import re from collections.abc import MutableSequence, MutableMapping, Iterable from collections import OrderedDict @@ -786,7 +786,7 @@ def _to_xml_element(self): class Cifti2BrainModel(xml.XmlSerializable): - ''' Element representing a mapping of the dimension to vertex or voxels. + """ Element representing a mapping of the dimension to vertex or voxels. Mapping to vertices of voxels must be specified. 
@@ -840,7 +840,7 @@ class Cifti2BrainModel(xml.XmlSerializable): Indices on the image towards where the array indices are mapped vertex_indices : Cifti2VertexIndices, optional Indices of the vertices towards where the array indices are mapped - ''' + """ def __init__(self, index_offset=None, index_count=None, model_type=None, brain_structure=None, n_surface_vertices=None, @@ -1126,9 +1126,9 @@ def _get_indices_from_mim(self, mim): @property def mapped_indices(self): - ''' + """ List of matrix indices that are mapped - ''' + """ mapped_indices = [] for v in self: a2md = self._get_indices_from_mim(v) @@ -1136,7 +1136,7 @@ def mapped_indices(self): return mapped_indices def get_index_map(self, index): - ''' + """ Cifti2 Mapping class for a given index Parameters @@ -1150,7 +1150,7 @@ def get_index_map(self, index): cifti2_map : Cifti2MatrixIndicesMap Returns the Cifti2MatrixIndicesMap corresponding to the given index. - ''' + """ for v in self: a2md = self._get_indices_from_mim(v) @@ -1205,7 +1205,7 @@ def _to_xml_element(self): return mat def get_axis(self, index): - ''' + """ Generates the Cifti2 axis for a given dimension Parameters @@ -1216,7 +1216,7 @@ def get_axis(self, index): Returns ------- axis : :class:`.cifti2_axes.Axis` - ''' + """ from . import cifti2_axes return cifti2_axes.from_index_mapping(self.get_index_map(index)) @@ -1238,7 +1238,7 @@ def get_data_shape(self): class Cifti2Header(FileBasedHeader, xml.XmlSerializable): - ''' Class for CIFTI-2 header extension ''' + """ Class for CIFTI-2 header extension """ def __init__(self, matrix=None, version="2.0"): FileBasedHeader.__init__(self) @@ -1263,20 +1263,20 @@ def may_contain_header(klass, binaryblock): @property def number_of_mapped_indices(self): - ''' + """ Number of mapped indices - ''' + """ return len(self.matrix) @property def mapped_indices(self): - ''' + """ List of matrix indices that are mapped - ''' + """ return self.matrix.mapped_indices def get_index_map(self, index): - ''' + """ Cifti2 Mapping class for a given index Parameters @@ -1290,11 +1290,11 @@ def get_index_map(self, index): cifti2_map : Cifti2MatrixIndicesMap Returns the Cifti2MatrixIndicesMap corresponding to the given index. - ''' + """ return self.matrix.get_index_map(index) def get_axis(self, index): - ''' + """ Generates the Cifti2 axis for a given dimension Parameters @@ -1305,12 +1305,12 @@ def get_axis(self, index): Returns ------- axis : :class:`.cifti2_axes.Axis` - ''' + """ return self.matrix.get_axis(index) @classmethod def from_axes(cls, axes): - ''' + """ Creates a new Cifti2 header based on the Cifti2 axes Parameters @@ -1322,7 +1322,7 @@ def from_axes(cls, axes): ------- header : Cifti2Header new header describing the rows/columns in a format consistent with Cifti2 - ''' + """ from . import cifti2_axes return cifti2_axes.to_header(axes) @@ -1342,7 +1342,7 @@ def __init__(self, nifti_header=None, extra=None, file_map=None): - ''' Initialize image + """ Initialize image The image is a combination of (dataobj, header), with optional metadata in `nifti_header` (a NIfTI2 header). There may be more metadata in the @@ -1365,7 +1365,7 @@ def __init__(self, Extra metadata not captured by `header` or `nifti_header`. file_map : mapping, optional Mapping giving file information for this image format. 
- ''' + """ if not isinstance(header, Cifti2Header) and header: header = Cifti2Header.from_axes(header) super(Cifti2Image, self).__init__(dataobj, header=header, @@ -1423,7 +1423,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): @classmethod def from_image(klass, img): - ''' Class method to create new instance of own class from `img` + """ Class method to create new instance of own class from `img` Parameters ---------- @@ -1434,7 +1434,7 @@ def from_image(klass, img): ------- cimg : instance Image, of our own class - ''' + """ if isinstance(img, klass): return img raise NotImplementedError @@ -1474,7 +1474,7 @@ def to_file_map(self, file_map=None): img.to_file_map(file_map or self.file_map) def update_headers(self): - ''' Harmonize NIfTI headers with image data + """ Harmonize NIfTI headers with image data >>> import numpy as np >>> data = np.zeros((2,3,4)) @@ -1484,7 +1484,7 @@ def update_headers(self): >>> img.update_headers() >>> img.nifti_header.get_data_shape() == (2, 3, 4) True - ''' + """ self._nifti_header.set_data_shape(self._dataobj.shape) def get_data_dtype(self): diff --git a/nibabel/cifti2/parse_cifti2.py b/nibabel/cifti2/parse_cifti2.py index 8c3d40cd56..50d2c3a5c0 100644 --- a/nibabel/cifti2/parse_cifti2.py +++ b/nibabel/cifti2/parse_cifti2.py @@ -76,7 +76,7 @@ def _mangle(self, value): class _Cifti2AsNiftiHeader(Nifti2Header): - ''' Class for Cifti2 header extension ''' + """ Class for Cifti2 header extension """ @classmethod def _valid_intent_code(klass, intent_code): @@ -126,7 +126,7 @@ class _Cifti2AsNiftiImage(Nifti2Image): class Cifti2Parser(xml.XmlParser): - '''Class to parse an XML string into a CIFTI-2 header object''' + """Class to parse an XML string into a CIFTI-2 header object""" def __init__(self, encoding=None, buffer_size=3500000, verbose=0): super(Cifti2Parser, self).__init__(encoding=encoding, buffer_size=buffer_size, diff --git a/nibabel/cmdline/nifti_dx.py b/nibabel/cmdline/nifti_dx.py index e478b5a5c2..259c24d97d 100644 --- a/nibabel/cmdline/nifti_dx.py +++ b/nibabel/cmdline/nifti_dx.py @@ -7,7 +7,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Print nifti diagnostics for header files ''' +""" Print nifti diagnostics for header files """ import sys from optparse import OptionParser diff --git a/nibabel/data.py b/nibabel/data.py index cbf35819c1..793b6b310c 100644 --- a/nibabel/data.py +++ b/nibabel/data.py @@ -32,10 +32,10 @@ class BomberError(DataError, AttributeError): class Datasource(object): - ''' Simple class to add base path to relative path ''' + """ Simple class to add base path to relative path """ def __init__(self, base_path): - ''' Initialize datasource + """ Initialize datasource Parameters ---------- @@ -49,11 +49,11 @@ def __init__(self, base_path): >>> fname = repo.get_filename('somedir', 'afile.txt') >>> fname == pjoin('a', 'path', 'somedir', 'afile.txt') True - ''' + """ self.base_path = base_path def get_filename(self, *path_parts): - ''' Prepend base path to `*path_parts` + """ Prepend base path to `*path_parts` We make no check whether the returned path exists. @@ -67,11 +67,11 @@ def get_filename(self, *path_parts): result of ``os.path.join(*path_parts), with ``self.base_path`` prepended - ''' + """ return pjoin(self.base_path, *path_parts) def list_files(self, relative=True): - ''' Recursively list the files in the data source directory. + """ Recursively list the files in the data source directory. 
Parameters ---------- @@ -84,7 +84,7 @@ def list_files(self, relative=True): file_list: list of strings List of the paths of all the files in the data source. - ''' + """ out_list = list() for base, dirs, files in os.walk(self.base_path): if relative: @@ -95,12 +95,12 @@ def list_files(self, relative=True): class VersionedDatasource(Datasource): - ''' Datasource with version information in config file + """ Datasource with version information in config file - ''' + """ def __init__(self, base_path, config_filename=None): - ''' Initialize versioned datasource + """ Initialize versioned datasource We assume that there is a configuration file with version information in datasource directory tree. @@ -120,7 +120,7 @@ def __init__(self, base_path, config_filename=None): config_filaname : None or str relative path to configuration file containing version - ''' + """ Datasource.__init__(self, base_path) if config_filename is None: config_filename = 'config.ini' @@ -153,7 +153,7 @@ def _cfg_value(fname, section='DATA', value='path'): def get_data_path(): - ''' Return specified or guessed locations of NIPY data files + """ Return specified or guessed locations of NIPY data files The algorithm is to return paths, extracted from strings, where strings are found in the following order: @@ -193,7 +193,7 @@ def get_data_path(): * https://www.debian.org/doc/packaging-manuals/python-policy/ap-packaging_tools.html#s-distutils * https://www.mail-archive.com/debian-python@lists.debian.org/msg05084.html - ''' + """ paths = [] try: var = os.environ['NIPY_DATA_PATH'] @@ -217,7 +217,7 @@ def get_data_path(): def find_data_dir(root_dirs, *names): - ''' Find relative path given path prefixes to search + """ Find relative path given path prefixes to search We raise a DataError if we can't find the relative path @@ -234,7 +234,7 @@ def find_data_dir(root_dirs, *names): data_dir : str full path (root path added to `*names` above) - ''' + """ ds_relative = pjoin(*names) for path in root_dirs: pth = pjoin(path, ds_relative) @@ -246,7 +246,7 @@ def find_data_dir(root_dirs, *names): def make_datasource(pkg_def, **kwargs): - ''' Return datasource defined by `pkg_def` as found in `data_path` + """ Return datasource defined by `pkg_def` as found in `data_path` `data_path` is the only allowed keyword argument. @@ -280,7 +280,7 @@ def make_datasource(pkg_def, **kwargs): ------- datasource : ``VersionedDatasource`` An initialized ``VersionedDatasource`` instance - ''' + """ if any(key for key in kwargs if key != 'data_path'): raise ValueError('Unexpected keyword argument(s)') data_path = kwargs.get('data_path') @@ -305,14 +305,14 @@ def make_datasource(pkg_def, **kwargs): class Bomber(object): - ''' Class to raise an informative error when used ''' + """ Class to raise an informative error when used """ def __init__(self, name, msg): self.name = name self.msg = msg def __getattr__(self, attr_name): - ''' Raise informative error accessing not-found attributes ''' + """ Raise informative error accessing not-found attributes """ raise BomberError( 'Trying to access attribute "%s" ' 'of non-existent data "%s"\n\n%s\n' % @@ -320,7 +320,7 @@ def __getattr__(self, attr_name): def datasource_or_bomber(pkg_def, **options): - ''' Return a viable datasource or a Bomber + """ Return a viable datasource or a Bomber This is to allow module level creation of datasource objects. 
We create the objects, so that, if the data exist, and are the correct @@ -341,7 +341,7 @@ def datasource_or_bomber(pkg_def, **options): Returns ------- ds : datasource or ``Bomber`` instance - ''' + """ unix_relpath = pkg_def['relpath'] version = pkg_def.get('min version') pkg_hint = pkg_def.get('install hint', DEFAULT_INSTALL_HINT) diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index 4d86810d5d..e0e3d52849 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -15,10 +15,10 @@ class DataobjImage(FileBasedImage): - ''' Template class for images that have dataobj data stores''' + """ Template class for images that have dataobj data stores""" def __init__(self, dataobj, header=None, extra=None, file_map=None): - ''' Initialize dataobj image + """ Initialize dataobj image The datobj image is a combination of (dataobj, header), with optional metadata in `extra`, and filename / file-like objects contained in the @@ -37,7 +37,7 @@ def __init__(self, dataobj, header=None, extra=None, file_map=None): metadata of this image type file_map : mapping, optional mapping giving file information for this image format - ''' + """ super(DataobjImage, self).__init__(header=header, extra=extra, file_map=file_map) self._dataobj = dataobj @@ -415,7 +415,7 @@ def get_shape(self): @classmethod def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): - ''' Class method to create image from mapping in ``file_map`` + """ Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has @@ -447,12 +447,12 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): Returns ------- img : DataobjImage instance - ''' + """ raise NotImplementedError @classmethod def from_filename(klass, filename, *, mmap=True, keep_file_open=None): - '''Class method to create image from filename `filename` + """Class method to create image from filename `filename` .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has @@ -481,7 +481,7 @@ def from_filename(klass, filename, *, mmap=True, keep_file_open=None): Returns ------- img : DataobjImage instance - ''' + """ if mmap not in (True, False, 'c', 'r'): raise ValueError("mmap should be one of {True, False, 'c', 'r'}") file_map = klass.filespec_to_file_map(filename) diff --git a/nibabel/ecat.py b/nibabel/ecat.py index e5af36cb1f..a917a25f79 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -291,8 +291,8 @@ def guessed_endian(klass, hdr): @classmethod def default_structarr(klass, endianness=None): - ''' Return header data for empty header with given endianness - ''' + """ Return header data for empty header with given endianness + """ hdr_data = super(EcatHeader, klass).default_structarr(endianness) hdr_data['magic_number'] = 'MATRIX72' hdr_data['sw_version'] = 74 @@ -323,7 +323,7 @@ def get_filetype(self): @classmethod def _get_checks(klass): - ''' Return sequence of check functions for this class ''' + """ Return sequence of check functions for this class """ return () @@ -597,7 +597,7 @@ def _get_frame_offset(self, frame=0): return int(self._mlist[frame][1] * BLOCK_SIZE) def _get_oriented_data(self, raw_data, orientation=None): - ''' + """ Get data oriented following ``patient_orientation`` header field. If the ``orientation`` parameter is given, return data according to this orientation. 
@@ -605,7 +605,7 @@ def _get_oriented_data(self, raw_data, orientation=None):
         :param raw_data: Numpy array containing the raw data
         :param orientation: None (default), 'neurological' or 'radiological'
         :rtype: Numpy array containing the oriented data
-        '''
+        """
         if orientation is None:
             orientation = self._header['patient_orientation']
         elif orientation == 'neurological':
@@ -624,7 +624,7 @@ def _get_oriented_data(self, raw_data, orientation=None):
         return raw_data
 
     def raw_data_from_fileobj(self, frame=0, orientation=None):
-        '''
+        """
         Get raw data from file object.
 
         :param frame: Time frame index from where to fetch data
@@ -632,7 +632,7 @@ def raw_data_from_fileobj(self, frame=0, orientation=None):
         :rtype: Numpy array containing (possibly oriented) raw data
 
         .. seealso:: data_from_fileobj
-        '''
+        """
         dtype = self._get_data_dtype(frame)
         if self._header.endianness is not native_code:
             dtype = dtype.newbyteorder(self._header.endianness)
@@ -644,7 +644,7 @@ def raw_data_from_fileobj(self, frame=0, orientation=None):
         return raw_data
 
     def data_from_fileobj(self, frame=0, orientation=None):
-        '''
+        """
         Read scaled data from file for a given frame
 
         :param frame: Time frame index from where to fetch data
@@ -652,7 +652,7 @@ def data_from_fileobj(self, frame=0, orientation=None):
         :rtype: Numpy array containing (possibly oriented) raw data
 
         .. seealso:: raw_data_from_fileobj
-        '''
+        """
         header = self._header
         subhdr = self.subheaders[frame]
         raw_data = self.raw_data_from_fileobj(frame, orientation)
@@ -663,11 +663,11 @@
 
 
 class EcatImageArrayProxy(object):
-    ''' Ecat implemention of array proxy protocol
+    """ Ecat implementation of array proxy protocol
 
     The array proxy allows us to freeze the passed fileobj and header
     such that it returns the expected data array.
-    '''
+    """
 
     def __init__(self, subheader):
         self._subheader = subheader
@@ -689,7 +689,7 @@ def is_proxy(self):
         return True
 
     def __array__(self, dtype=None):
-        ''' Read of data from file
+        """ Read of data from file
 
         This reads ALL FRAMES into one array, can be memory expensive.
 
@@ -705,7 +705,7 @@ def __array__(self, dtype=None):
         -------
         array
             Scaled image data with type `dtype`.
- ''' + """ # dtype=None is interpreted as float64 data = np.empty(self.shape) frame_mapping = get_frame_order(self._subheader._mlist) @@ -837,13 +837,13 @@ def get_frame_affine(self, frame): return self._subheader.get_frame_affine(frame=frame) def get_frame(self, frame, orientation=None): - ''' + """ Get full volume for a time frame :param frame: Time frame index from where to fetch data :param orientation: None (default), 'neurological' or 'radiological' :rtype: Numpy array containing (possibly oriented) raw data - ''' + """ return self._subheader.data_from_fileobj(frame, orientation) def get_data_dtype(self, frame): @@ -915,21 +915,21 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): return img def _get_empty_dir(self): - ''' + """ Get empty directory entry of the form [numAvail, nextDir, previousDir, numUsed] - ''' + """ return np.array([31, 2, 0, 0], dtype=np.int32) def _write_data(self, data, stream, pos, dtype=None, endianness=None): - ''' + """ Write data to ``stream`` using an array_writer :param data: Numpy array containing the dat :param stream: The file-like object to write the data to :param pos: The position in the stream to write the data to :param endianness: Endianness code of the data to write - ''' + """ if dtype is None: dtype = data.dtype @@ -941,7 +941,7 @@ def _write_data(self, data, stream, pos, dtype=None, endianness=None): dtype).to_fileobj(stream) def to_file_map(self, file_map=None): - ''' Write ECAT7 image to `file_map` or contained ``self.file_map`` + """ Write ECAT7 image to `file_map` or contained ``self.file_map`` The format consist of: @@ -950,7 +950,7 @@ def to_file_map(self, file_map=None): - For every frame (3D volume in 4D data) - A subheader (size = frame_offset) - Frame data (3D volume) - ''' + """ if file_map is None: file_map = self.file_map diff --git a/nibabel/environment.py b/nibabel/environment.py index 95cb08700c..768b4de34b 100644 --- a/nibabel/environment.py +++ b/nibabel/environment.py @@ -1,8 +1,8 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Settings from the system environment relevant to NIPY -''' +""" import os from os.path import join as pjoin @@ -66,7 +66,7 @@ def get_nipy_user_dir(): def get_nipy_system_dir(): - r''' Get systemwide NIPY configuration file directory + r""" Get systemwide NIPY configuration file directory On posix systems this will be ``/etc/nipy``. On Windows, the directory is less useful, but by default it will be @@ -87,7 +87,7 @@ def get_nipy_system_dir(): Examples -------- >>> pth = get_nipy_system_dir() - ''' + """ if os.name == 'nt': return r'C:\etc\nipy' if os.name == 'posix': diff --git a/nibabel/eulerangles.py b/nibabel/eulerangles.py index 0928cd39d3..11a10bbe2b 100644 --- a/nibabel/eulerangles.py +++ b/nibabel/eulerangles.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Module implementing Euler angle rotations and their conversions +""" Module implementing Euler angle rotations and their conversions See: @@ -81,7 +81,7 @@ The convention of rotation around ``z``, followed by rotation around ``y``, followed by rotation around ``x``, is known (confusingly) as "xyz", pitch-roll-yaw, Cardan angles, or Tait-Bryan angles. 
-''' +""" import math @@ -94,7 +94,7 @@ def euler2mat(z=0, y=0, x=0): - ''' Return matrix for rotations around z, y and x axes + """ Return matrix for rotations around z, y and x axes Uses the z, then y, then x convention above @@ -165,7 +165,7 @@ def euler2mat(z=0, y=0, x=0): curl your fingers; the direction your fingers curl is the direction of rotation). Therefore, the rotations are counterclockwise if looking along the axis of rotation from positive to negative. - ''' + """ Ms = [] if z: cosz = math.cos(z) @@ -191,7 +191,7 @@ def euler2mat(z=0, y=0, x=0): def mat2euler(M, cy_thresh=None): - ''' Discover Euler angle vector from 3x3 matrix + """ Discover Euler angle vector from 3x3 matrix Uses the conventions above. @@ -241,7 +241,7 @@ def mat2euler(M, cy_thresh=None): The code appears to be licensed (from the website) as "can be used without restrictions". - ''' + """ M = np.asarray(M) if cy_thresh is None: try: @@ -264,7 +264,7 @@ def mat2euler(M, cy_thresh=None): def euler2quat(z=0, y=0, x=0): - ''' Return quaternion corresponding to these Euler angles + """ Return quaternion corresponding to these Euler angles Uses the z, then y, then x convention above @@ -294,7 +294,7 @@ def euler2quat(z=0, y=0, x=0): 3. Apply quaternion multiplication formula - https://en.wikipedia.org/wiki/Quaternions#Hamilton_product - to formulae from 2.) to give formula for combined rotations. - ''' + """ z = z / 2.0 y = y / 2.0 x = x / 2.0 @@ -311,7 +311,7 @@ def euler2quat(z=0, y=0, x=0): def quat2euler(q): - ''' Return Euler angles corresponding to quaternion `q` + """ Return Euler angles corresponding to quaternion `q` Parameters ---------- @@ -333,14 +333,14 @@ def quat2euler(q): combining parts of the ``quat2mat`` and ``mat2euler`` functions, but the reduction in computation is small, and the code repetition is large. - ''' + """ # delayed import to avoid cyclic dependencies from . import quaternions as nq return mat2euler(nq.quat2mat(q)) def euler2angle_axis(z=0, y=0, x=0): - ''' Return angle, axis corresponding to these Euler angles + """ Return angle, axis corresponding to these Euler angles Uses the z, then y, then x convention above @@ -367,14 +367,14 @@ def euler2angle_axis(z=0, y=0, x=0): 1.5 >>> np.allclose(vec, [0, 1, 0]) True - ''' + """ # delayed import to avoid cyclic dependencies from . import quaternions as nq return nq.quat2angle_axis(euler2quat(z, y, x)) def angle_axis2euler(theta, vector, is_normalized=False): - ''' Convert angle, axis pair to Euler angles + """ Convert angle, axis pair to Euler angles Parameters ---------- @@ -405,7 +405,7 @@ def angle_axis2euler(theta, vector, is_normalized=False): combining parts of the ``angle_axis2mat`` and ``mat2euler`` functions, but the reduction in computation is small, and the code repetition is large. - ''' + """ # delayed import to avoid cyclic dependencies from . import quaternions as nq M = nq.angle_axis2mat(theta, vector, is_normalized) diff --git a/nibabel/externals/tests/test_netcdf.py b/nibabel/externals/tests/test_netcdf.py index f85393be4e..08a336d26f 100644 --- a/nibabel/externals/tests/test_netcdf.py +++ b/nibabel/externals/tests/test_netcdf.py @@ -1,4 +1,4 @@ -''' Tests for netcdf ''' +""" Tests for netcdf """ import os from os.path import join as pjoin, dirname diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 90bbd8e652..fdc8a00e7f 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -6,7 +6,7 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Common interface for any image format--volume or surface, binary or xml.''' +""" Common interface for any image format--volume or surface, binary or xml.""" import io from copy import deepcopy @@ -22,7 +22,7 @@ class ImageFileError(Exception): class FileBasedHeader(object): - ''' Template class to implement header protocol ''' + """ Template class to implement header protocol """ @classmethod def from_header(klass, header=None): @@ -51,16 +51,16 @@ def __ne__(self, other): return not self == other def copy(self): - ''' Copy object to independent representation + """ Copy object to independent representation The copy should not be affected by any changes to the original object. - ''' + """ return deepcopy(self) class FileBasedImage(object): - ''' + """ Abstract image class with interface for loading/saving images from disk. The class doesn't define any image properties. @@ -162,7 +162,7 @@ class FileBasedImage(object): carry the position at which a write (with ``to_files``) should place the data. The ``file_map`` contents should therefore be such, that this will work: - ''' + """ header_class = FileBasedHeader _meta_sniff_len = 0 files_types = (('image', None),) @@ -173,7 +173,7 @@ class FileBasedImage(object): rw = True # Used in test code def __init__(self, header=None, extra=None, file_map=None): - ''' Initialize image + """ Initialize image The image is a combination of (header), with optional metadata in `extra`, and filename / file-like objects @@ -188,7 +188,7 @@ def __init__(self, header=None, extra=None, file_map=None): metadata of this image type file_map : mapping, optional mapping giving file information for this image format - ''' + """ self._header = self.header_class.from_header(header) if extra is None: extra = {} @@ -203,8 +203,8 @@ def header(self): return self._header def __getitem__(self): - ''' No slicing or dictionary interface for images - ''' + """ No slicing or dictionary interface for images + """ raise TypeError("Cannot slice image objects.") @deprecate_with_version('get_header method is deprecated.\n' @@ -217,7 +217,7 @@ def get_header(self): return self.header def get_filename(self): - ''' Fetch the image filename + """ Fetch the image filename Parameters ---------- @@ -230,7 +230,7 @@ def get_filename(self): If an image may have several filenames associated with it (e.g. Analyze ``.img, .hdr`` pair) then we return the more characteristic filename (the ``.img`` filename in the case of Analyze') - ''' + """ # which filename is returned depends on the ordering of the # 'files_types' class attribute - we return the name # corresponding to the first in that tuple @@ -238,7 +238,7 @@ def get_filename(self): return self.file_map[characteristic_type].filename def set_filename(self, filename): - ''' Sets the files in the object from a given filename + """ Sets the files in the object from a given filename The different image formats may check whether the filename has an extension characteristic of the format, and raise an error if @@ -251,7 +251,7 @@ def set_filename(self, filename): this will be the only filename set into the image ``.file_map`` attribute. Otherwise, the image instance will try and guess the other filenames from this given filename. 
- ''' + """ self.file_map = self.__class__.filespec_to_file_map(filename) @classmethod @@ -317,7 +317,7 @@ def filespec_to_files(klass, filespec): return klass.filespec_to_file_map(filespec) def to_filename(self, filename): - ''' Write image to files implied by filename string + """ Write image to files implied by filename string Parameters ---------- @@ -329,7 +329,7 @@ def to_filename(self, filename): Returns ------- None - ''' + """ self.file_map = self.filespec_to_file_map(filename) self.to_file_map() @@ -350,7 +350,7 @@ def to_files(self, file_map=None): @classmethod def make_file_map(klass, mapping=None): - ''' Class method to make files holder for this image type + """ Class method to make files holder for this image type Parameters ---------- @@ -366,7 +366,7 @@ def make_file_map(klass, mapping=None): sequence klass.files_types, and values of type FileHolder, where FileHolder objects have default values, other than those given by `mapping` - ''' + """ if mapping is None: mapping = {} file_map = {} @@ -383,7 +383,7 @@ def make_file_map(klass, mapping=None): @classmethod def instance_to_filename(klass, img, filename): - ''' Save `img` in our own format, to name implied by `filename` + """ Save `img` in our own format, to name implied by `filename` This is a class method @@ -393,13 +393,13 @@ def instance_to_filename(klass, img, filename): filename : str Filename, implying name to which to save image. - ''' + """ img = klass.from_image(img) img.to_filename(filename) @classmethod def from_image(klass, img): - ''' Class method to create new instance of own class from `img` + """ Class method to create new instance of own class from `img` Parameters ---------- @@ -410,7 +410,7 @@ def from_image(klass, img): ------- cimg : ``spatialimage`` instance Image, of our own class - ''' + """ raise NotImplementedError() @classmethod @@ -514,7 +514,7 @@ def path_maybe_image(klass, filename, sniff=None, sniff_max=1024): class SerializableImage(FileBasedImage): - ''' + """ Abstract image class for (de)serializing images to/from byte strings. The class doesn't define any image properties. @@ -562,7 +562,7 @@ class SerializableImage(FileBasedImage): images) currently do not support this interface. For multi-file images, ``to_bytes()`` and ``from_bytes()`` must be overridden, and any encoding details should be documented. - ''' + """ @classmethod def from_bytes(klass, bytestring): diff --git a/nibabel/fileholders.py b/nibabel/fileholders.py index 35cfd3c348..c996725991 100644 --- a/nibabel/fileholders.py +++ b/nibabel/fileholders.py @@ -6,7 +6,7 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Fileholder class ''' +""" Fileholder class """ from copy import copy @@ -18,14 +18,14 @@ class FileHolderError(Exception): class FileHolder(object): - ''' class to contain filename, fileobj and file position - ''' + """ class to contain filename, fileobj and file position + """ def __init__(self, filename=None, fileobj=None, pos=0): - ''' Initialize FileHolder instance + """ Initialize FileHolder instance Parameters ---------- @@ -37,13 +37,13 @@ def __init__(self, pos : int, optional position in filename or fileobject at which to start reading or writing data; defaults to 0 - ''' + """ self.filename = filename self.fileobj = fileobj self.pos = pos def get_prepare_fileobj(self, *args, **kwargs): - ''' Return fileobj if present, or return fileobj from filename + """ Return fileobj if present, or return fileobj from filename Set position to that given in self.pos @@ -62,7 +62,7 @@ def get_prepare_fileobj(self, *args, **kwargs): fileobj : file-like object object has position set (via ``fileobj.seek()``) to ``self.pos`` - ''' + """ if self.fileobj is not None: obj = ImageOpener(self.fileobj) # for context manager obj.seek(self.pos) @@ -99,7 +99,7 @@ def file_like(self): def copy_file_map(file_map): - r''' Copy mapping of fileholders given by `file_map` + r""" Copy mapping of fileholders given by `file_map` Parameters ---------- @@ -111,7 +111,7 @@ def copy_file_map(file_map): fm_copy : dict Copy of `file_map`, using shallow copy of ``FileHolder``\s - ''' + """ fm_copy = {} for key, fh in file_map.items(): fm_copy[key] = copy(fh) diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index ed04610fdd..5d84a9d6dc 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Create filename pairs, triplets etc, with expected extensions ''' +""" Create filename pairs, triplets etc, with expected extensions """ import os import pathlib @@ -50,7 +50,7 @@ def types_filenames(template_fname, types_exts, trailing_suffixes=('.gz', '.bz2'), enforce_extensions=True, match_case=False): - ''' Return filenames with standard extensions from template name + """ Return filenames with standard extensions from template name The typical case is returning image and header filenames for an Analyze image, that expects an 'image' file type with extension ``.img``, @@ -108,7 +108,7 @@ def types_filenames(template_fname, types_exts, ... enforce_extensions=False) >>> tfns == {'t1': '/path/test.funny', 't2': '/path/test.ext2'} True - ''' + """ template_fname = _stringify_path(template_fname) if not isinstance(template_fname, str): raise TypesFilenamesError('Need file name as input ' @@ -175,7 +175,7 @@ def parse_filename(filename, types_exts, trailing_suffixes, match_case=False): - '''Split filename into fileroot, extension, trailing suffix; guess type. + """Split filename into fileroot, extension, trailing suffix; guess type. 
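A short sketch of the `FileHolder` API shown above (the `BytesIO` payload is an arbitrary placeholder):

    from io import BytesIO
    from nibabel.fileholders import FileHolder, copy_file_map

    fh = FileHolder(fileobj=BytesIO(b'payload'), pos=0)
    with fh.get_prepare_fileobj() as fobj:   # fobj positioned at fh.pos
        first = fobj.read(3)                 # b'pay'
    fm_copy = copy_file_map({'image': fh})   # shallow copies of each holder
    assert fm_copy['image'].fileobj is fh.fileobj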
Parameters ---------- @@ -217,7 +217,7 @@ def parse_filename(filename, ('/path/fname', 'ext2', None, 't2') >>> parse_filename('/path/fnameext2.gz', types_exts, ('.gz',)) ('/path/fname', 'ext2', '.gz', 't2') - ''' + """ filename = _stringify_path(filename) ignored = None @@ -256,7 +256,7 @@ def _iendswith(whole, end): def splitext_addext(filename, addexts=('.gz', '.bz2'), match_case=False): - ''' Split ``/pth/fname.ext.gz`` into ``/pth/fname, .ext, .gz`` + """ Split ``/pth/fname.ext.gz`` into ``/pth/fname, .ext, .gz`` where ``.gz`` may be any of passed `addext` trailing suffixes. @@ -286,7 +286,7 @@ def splitext_addext(filename, ('fname', '.ext', '') >>> splitext_addext('fname.ext.foo', ('.foo', '.bar')) ('fname', '.ext', '.foo') - ''' + """ filename = _stringify_path(filename) if match_case: diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 3d85ad4ea4..72a754efe8 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -6,10 +6,10 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Header and image reading / writing functions for MGH image format +""" Header and image reading / writing functions for MGH image format Author: Krish Subramaniam -''' +""" from os.path import splitext import numpy as np @@ -81,11 +81,11 @@ class MGHError(Exception): class MGHHeader(LabeledWrapStruct): - ''' Class for MGH format header + """ Class for MGH format header The header also consists of the footer data which MGH places after the data chunk. - ''' + """ # Copies of module-level definitions template_dtype = hf_dtype _hdrdtype = header_dtype @@ -95,7 +95,7 @@ class MGHHeader(LabeledWrapStruct): def __init__(self, binaryblock=None, check=True): - ''' Initialize header from binary data block + """ Initialize header from binary data block Parameters ---------- @@ -105,7 +105,7 @@ def __init__(self, check : bool, optional Whether to check content of header in initialization. Default is True. - ''' + """ min_size = self._hdrdtype.itemsize full_size = self.template_dtype.itemsize if binaryblock is not None and len(binaryblock) >= min_size: @@ -138,8 +138,8 @@ def _get_checks(klass): @classmethod def from_header(klass, header=None, check=True): - ''' Class method to create MGH header from another MGH header - ''' + """ Class method to create MGH header from another MGH header + """ # own type, return copy if type(header) == klass: obj = header.copy() @@ -152,9 +152,9 @@ def from_header(klass, header=None, check=True): @classmethod def from_fileobj(klass, fileobj, check=True): - ''' + """ classmethod for loading a MGH fileobject - ''' + """ # We need the following hack because MGH data stores header information # after the data chunk too. We read the header initially, deduce the # dimensions from the header, skip over and then read the footer @@ -172,12 +172,12 @@ def from_fileobj(klass, fileobj, check=True): return klass(hdr_str + ftr_str, check=check) def get_affine(self): - ''' Get the affine transform from the header information. + """ Get the affine transform from the header information. MGH format doesn't store the transform directly. Instead it's gleaned from the zooms ( delta ), direction cosines ( Mdc ), RAS centers ( Pxyz_c ) and the dimensions. 
- ''' + """ hdr = self._structarr MdcD = hdr['Mdc'].T * hdr['delta'] vol_center = MdcD.dot(hdr['dims'][:3]) / 2 @@ -187,14 +187,14 @@ def get_affine(self): get_best_affine = get_affine def get_vox2ras(self): - '''return the get_affine() - ''' + """return the get_affine() + """ return self.get_affine() def get_vox2ras_tkr(self): - ''' Get the vox2ras-tkr transform. See "Torig" here: + """ Get the vox2ras-tkr transform. See "Torig" here: https://surfer.nmr.mgh.harvard.edu/fswiki/CoordinateSystems - ''' + """ ds = self._structarr['delta'] ns = self._structarr['dims'][:3] * ds / 2.0 v2rtkr = np.array([[-ds[0], 0, 0, ns[0]], @@ -204,22 +204,22 @@ def get_vox2ras_tkr(self): return v2rtkr def get_ras2vox(self): - '''return the inverse get_affine() - ''' + """return the inverse get_affine() + """ return np.linalg.inv(self.get_affine()) def get_data_dtype(self): - ''' Get numpy dtype for MGH data + """ Get numpy dtype for MGH data For examples see ``set_data_dtype`` - ''' + """ code = int(self._structarr['type']) dtype = self._data_type_codes.numpy_dtype[code] return dtype def set_data_dtype(self, datatype): - ''' Set numpy dtype for data from code or dtype or type - ''' + """ Set numpy dtype for data from code or dtype or type + """ try: code = self._data_type_codes[datatype] except KeyError: @@ -227,7 +227,7 @@ def set_data_dtype(self, datatype): self._structarr['type'] = code def _ndims(self): - ''' Get dimensionality of data + """ Get dimensionality of data MGH does not encode dimensionality explicitly, so an image where the fourth dimension is 1 is treated as three-dimensional. @@ -235,11 +235,11 @@ def _ndims(self): Returns ------- ndims : 3 or 4 - ''' + """ return 3 + (self._structarr['dims'][3] > 1) def get_zooms(self): - ''' Get zooms from header + """ Get zooms from header Returns the spacing of voxels in the x, y, and z dimensions. For four-dimensional files, a fourth zoom is included, equal to the @@ -254,13 +254,13 @@ def get_zooms(self): tuple of header zoom values .. _mghformat: https://surfer.nmr.mgh.harvard.edu/fswiki/FsTutorial/MghFormat#line-82 - ''' + """ # Do not return time zoom (TR) if 3D image tzoom = (self['tr'],) if self._ndims() > 3 else () return tuple(self._structarr['delta']) + tzoom def set_zooms(self, zooms): - ''' Set zooms into header fields + """ Set zooms into header fields Sets the spacing of voxels in the x, y, and z dimensions. 
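The affine construction in `get_affine` can be spelled out as a standalone sketch, using the header field names shown above:

    import numpy as np

    def mgh_affine(Mdc, delta, dims, Pxyz_c):
        # Direction cosines scaled by voxel sizes, as in get_affine()
        MdcD = np.asarray(Mdc).T * np.asarray(delta)
        vol_center = MdcD.dot(np.asarray(dims)[:3]) / 2.0
        aff = np.eye(4)
        aff[:3, :3] = MdcD
        aff[:3, 3] = np.asarray(Pxyz_c) - vol_center  # RAS center -> Pxyz_c
        return aff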
For four-dimensional files, a temporal zoom (repetition time, or TR, in @@ -271,7 +271,7 @@ def set_zooms(self, zooms): zooms : sequence sequence of floats specifying spatial and (optionally) temporal zooms - ''' + """ hdr = self._structarr zooms = np.asarray(zooms) ndims = self._ndims() @@ -289,8 +289,8 @@ def set_zooms(self, zooms): hdr['tr'] = zooms[3] def get_data_shape(self): - ''' Get shape of data - ''' + """ Get shape of data + """ shape = tuple(self._structarr['dims']) # If last dimension (nframes) is 1, remove it because # we want to maintain 3D and it's redundant @@ -299,13 +299,13 @@ def get_data_shape(self): return shape def set_data_shape(self, shape): - ''' Set shape of data + """ Set shape of data Parameters ---------- shape : sequence sequence of integers specifying data array shape - ''' + """ shape = tuple(shape) if len(shape) > 4: raise ValueError("Shape may be at most 4 dimensional") @@ -313,29 +313,29 @@ def set_data_shape(self, shape): self._structarr['delta'] = 1 def get_data_bytespervox(self): - ''' Get the number of bytes per voxel of the data - ''' + """ Get the number of bytes per voxel of the data + """ return int(self._data_type_codes.bytespervox[ int(self._structarr['type'])]) def get_data_size(self): - ''' Get the number of bytes the data chunk occupies. - ''' + """ Get the number of bytes the data chunk occupies. + """ return self.get_data_bytespervox() * np.prod(self._structarr['dims']) def get_data_offset(self): - ''' Return offset into data file to read data - ''' + """ Return offset into data file to read data + """ return DATA_OFFSET def get_footer_offset(self): - ''' Return offset where the footer resides. + """ Return offset where the footer resides. Occurs immediately after the data chunk. - ''' + """ return self.get_data_offset() + self.get_data_size() def data_from_fileobj(self, fileobj): - ''' Read data array from `fileobj` + """ Read data array from `fileobj` Parameters ---------- @@ -346,7 +346,7 @@ def data_from_fileobj(self, fileobj): ------- arr : ndarray data array - ''' + """ dtype = self.get_data_dtype() shape = self.get_data_shape() offset = self.get_data_offset() @@ -364,10 +364,10 @@ def guessed_endian(klass, mapping): @classmethod def default_structarr(klass, endianness=None): - ''' Return header data for empty header + """ Return header data for empty header Ignores byte order; always big endian - ''' + """ if endianness is not None and endian_codes[endianness] != '>': raise ValueError('MGHHeader must always be big endian') structarr = super(MGHHeader, @@ -381,15 +381,15 @@ def default_structarr(klass, endianness=None): return structarr def _set_affine_default(self): - ''' If goodRASFlag is 0, set the default affine - ''' + """ If goodRASFlag is 0, set the default affine + """ self._structarr['goodRASFlag'] = 1 self._structarr['delta'] = 1 self._structarr['Mdc'] = [[-1, 0, 0], [0, 0, 1], [0, -1, 0]] self._structarr['Pxyz_c'] = 0 def writehdr_to(self, fileobj): - ''' Write header to fileobj + """ Write header to fileobj Write starts at the beginning. @@ -401,7 +401,7 @@ def writehdr_to(self, fileobj): Returns ------- None - ''' + """ hdr_nofooter = np.ndarray((), dtype=self._hdrdtype, buffer=self.binaryblock) # goto the very beginning of the file-like obj @@ -409,7 +409,7 @@ def writehdr_to(self, fileobj): fileobj.write(hdr_nofooter.tobytes()) def writeftr_to(self, fileobj): - ''' Write footer to fileobj + """ Write footer to fileobj Footer data is located after the data chunk. So move there and write. 
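The offset bookkeeping above is simple arithmetic; a minimal sketch:

    from nibabel.freesurfer.mghformat import MGHHeader

    hdr = MGHHeader()
    hdr.set_data_shape((4, 5, 6))
    # footer_offset = data_offset + bytes_per_voxel * prod(dims)
    expected = hdr.get_data_offset() + hdr.get_data_bytespervox() * 4 * 5 * 6
    assert hdr.get_footer_offset() == expected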
@@ -421,7 +421,7 @@ def writeftr_to(self, fileobj): Returns ------- None - ''' + """ ftr_loc_in_hdr = len(self.binaryblock) - self._ftrdtype.itemsize ftr_nd = np.ndarray((), dtype=self._ftrdtype, buffer=self.binaryblock, offset=ftr_loc_in_hdr) @@ -429,11 +429,11 @@ def writeftr_to(self, fileobj): fileobj.write(ftr_nd.tobytes()) def copy(self): - ''' Return copy of structure ''' + """ Return copy of structure """ return self.__class__(self.binaryblock, check=False) def as_byteswapped(self, endianness=None): - ''' Return new object with given ``endianness`` + """ Return new object with given ``endianness`` If big endian, returns a copy of the object. Otherwise raises ValueError. @@ -448,7 +448,7 @@ def as_byteswapped(self, endianness=None): wstr : ``MGHHeader`` ``MGHHeader`` object - ''' + """ if endianness is None or endian_codes[endianness] != '>': raise ValueError('Cannot byteswap MGHHeader - ' 'must always be big endian') @@ -537,7 +537,7 @@ def filespec_to_file_map(klass, filespec): @classmethod def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): - ''' Class method to create image from mapping in ``file_map`` + """ Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has @@ -569,7 +569,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): Returns ------- img : MGHImage instance - ''' + """ if mmap not in (True, False, 'c', 'r'): raise ValueError("mmap should be one of {True, False, 'c', 'r'}") img_fh = file_map['image'] @@ -584,14 +584,14 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): return img def to_file_map(self, file_map=None): - ''' Write image to `file_map` or contained ``self.file_map`` + """ Write image to `file_map` or contained ``self.file_map`` Parameters ---------- file_map : None or mapping, optional files mapping. If None (default) use object's ``file_map`` attribute instead - ''' + """ if file_map is None: file_map = self.file_map data = np.asanyarray(self.dataobj) @@ -605,7 +605,7 @@ def to_file_map(self, file_map=None): self.file_map = file_map def _write_data(self, mghfile, data, header): - ''' Utility routine to write image + """ Utility routine to write image Parameters ---------- @@ -616,7 +616,7 @@ def _write_data(self, mghfile, data, header): array to write header : analyze-type header object header - ''' + """ shape = header.get_data_shape() if data.shape != shape: raise HeaderDataError('Data should be shape (%s)' % diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index e1cfc56b18..f05d85f905 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -'''Tests for mghformat reading writing''' +"""Tests for mghformat reading writing""" import os import io @@ -162,9 +162,9 @@ def test_set_zooms(): def bad_dtype_mgh(): - ''' This function raises an MGHError exception because + """ This function raises an MGHError exception because uint16 is not a valid MGH datatype. - ''' + """ # try to write an unsigned short and make sure it # raises MGHError v = np.ones((7, 13, 3, 22)).astype(np.uint16) diff --git a/nibabel/funcs.py b/nibabel/funcs.py index 2f293c4434..21246d8ec6 100644 --- a/nibabel/funcs.py +++ b/nibabel/funcs.py @@ -7,7 +7,7 @@ # copyright and license terms. 
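A hedged usage sketch for writing an MGH image (the output filename is a placeholder; `.mgz` selects the gzipped variant):

    import numpy as np
    import nibabel as nib

    data = np.arange(60, dtype=np.float32).reshape(3, 4, 5)
    img = nib.MGHImage(data, np.eye(4))
    img.to_filename('tiny.mgz')              # written to the current directory
    assert nib.load('tiny.mgz').shape == (3, 4, 5)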
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Processor functions for images ''' +""" Processor functions for images """ import numpy as np from .orientations import io_orientation, OrientationError @@ -15,7 +15,7 @@ def squeeze_image(img): - ''' Return image, remove axes length 1 at end of image shape + """ Return image, remove axes length 1 at end of image shape For example, an image may have shape (10,20,30,1,1). In this case squeeze will result in an image with shape (10,20,30). See doctests @@ -65,7 +65,7 @@ def squeeze_image(img): >>> img2 = squeeze_image(img) >>> img2.shape == (1, 1, 5, 1, 2) True - ''' + """ klass = img.__class__ shape = img.shape slen = len(shape) @@ -87,7 +87,7 @@ def squeeze_image(img): def concat_images(images, check_affines=True, axis=None): - r''' Concatenate images in list to single image, along specified dimension + r""" Concatenate images in list to single image, along specified dimension Parameters ---------- @@ -107,7 +107,7 @@ def concat_images(images, check_affines=True, axis=None): concat_img : ``SpatialImage`` New image resulting from concatenating `images` across last dimension - ''' + """ images = [load(img) if not hasattr(img, 'get_data') else img for img in images] n_imgs = len(images) @@ -155,7 +155,7 @@ def concat_images(images, check_affines=True, axis=None): def four_to_three(img): - ''' Create 3D images from 4D image by slicing over last axis + """ Create 3D images from 4D image by slicing over last axis Parameters ---------- @@ -168,7 +168,7 @@ def four_to_three(img): ------- imgs : list list of 3D images - ''' + """ arr = np.asanyarray(img.dataobj) header = img.header affine = img.affine @@ -184,7 +184,7 @@ def four_to_three(img): def as_closest_canonical(img, enforce_diag=False): - ''' Return `img` with data reordered to be closest to canonical + """ Return `img` with data reordered to be closest to canonical Canonical order is the ordering of the output axes. @@ -204,7 +204,7 @@ def as_closest_canonical(img, enforce_diag=False): orientation. We modify the affine accordingly. If `img` is already has the correct data ordering, we just return `img` unmodified. - ''' + """ # Get the image class to transform the data for us img = img.as_reoriented(io_orientation(img.affine)) @@ -216,6 +216,6 @@ def as_closest_canonical(img, enforce_diag=False): def _aff_is_diag(aff): - ''' Utility function returning True if affine is nearly diagonal ''' + """ Utility function returning True if affine is nearly diagonal """ rzs_aff = aff[:3, :3] return np.allclose(rzs_aff, np.diag(np.diag(rzs_aff))) diff --git a/nibabel/imageclasses.py b/nibabel/imageclasses.py index c1a0b7133a..1d33db8ed1 100644 --- a/nibabel/imageclasses.py +++ b/nibabel/imageclasses.py @@ -6,7 +6,7 @@ # copyright and license terms. 
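A minimal round trip through the processor functions above (a sketch, not taken from the test suite):

    import numpy as np
    import nibabel as nib
    from nibabel.funcs import concat_images, four_to_three

    img4d = nib.Nifti1Image(np.zeros((2, 3, 4, 5)), np.eye(4))
    vols = four_to_three(img4d)       # five 3D volumes, affines preserved
    img_back = concat_images(vols)    # re-stacked along a new last axis
    assert img_back.shape == (2, 3, 4, 5)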
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Define supported image classes and names ''' +""" Define supported image classes and names """ from .analyze import AnalyzeImage from .brikhead import AFNIImage diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index f8c3e3be0b..85713aa24b 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -21,7 +21,7 @@ def load(filename, **kwargs): - r''' Load file given filename, guessing at file type + r""" Load file given filename, guessing at file type Parameters ---------- @@ -34,7 +34,7 @@ def load(filename, **kwargs): ------- img : ``SpatialImage`` Image of guessed type - ''' + """ filename = _stringify_path(filename) # Check file exists and is not empty @@ -83,7 +83,7 @@ def guessed_image_type(filename): def save(img, filename): - ''' Save an image to file adapting format to `filename` + """ Save an image to file adapting format to `filename` Parameters ---------- @@ -95,7 +95,7 @@ def save(img, filename): Returns ------- None - ''' + """ filename = _stringify_path(filename) # Save the type as expected diff --git a/nibabel/minc1.py b/nibabel/minc1.py index 6dfe7dde67..ebf883d7b8 100644 --- a/nibabel/minc1.py +++ b/nibabel/minc1.py @@ -42,12 +42,12 @@ class MincError(Exception): class Minc1File(object): - ''' Class to wrap MINC1 format opened netcdf object + """ Class to wrap MINC1 format opened netcdf object Although it has some of the same methods as a ``Header``, we use this only when reading a MINC file, to pull out useful header information, and for the method of reading the data out - ''' + """ def __init__(self, mincfile): self._mincfile = mincfile @@ -118,12 +118,12 @@ def get_affine(self): return aff def _get_valid_range(self): - ''' Return valid range for image data + """ Return valid range for image data The valid range can come from the image 'valid_range' or image 'valid_min' and 'valid_max', or, failing that, from the data type range - ''' + """ ddt = self.get_data_dtype() info = np.iinfo(ddt.type) try: @@ -238,11 +238,11 @@ def get_scaled_data(self, sliceobj=()): class MincImageArrayProxy(object): - ''' MINC implementation of array proxy protocol + """ MINC implementation of array proxy protocol The array proxy allows us to freeze the passed fileobj and header such that it returns the expected data array. - ''' + """ def __init__(self, minc_file): self.minc_file = minc_file @@ -308,12 +308,12 @@ def may_contain_header(klass, binaryblock): class Minc1Image(SpatialImage): - ''' Class for MINC1 format images + """ Class for MINC1 format images The MINC1 image class uses the default header type, rather than a specific MINC header type - and reads the relevant information from the MINC file on load. 
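Basic usage of `load` and `save` as described above ('example.nii' is a placeholder path):

    import nibabel as nib

    # load() guesses the format from the file itself;
    # save() picks the output format from the filename extension.
    img = nib.load('example.nii')
    nib.save(img, 'example.mgz')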
- ''' + """ header_class = Minc1Header _meta_sniff_len = 4 valid_exts = ('.mnc',) diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 90b039d8da..ba5d91d47a 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -40,12 +40,12 @@ def __init__(self, var): class Minc2File(Minc1File): - ''' Class to wrap MINC2 format file + """ Class to wrap MINC2 format file Although it has some of the same methods as a ``Header``, we use this only when reading a MINC2 file, to pull out useful header information, and for the method of reading the data out - ''' + """ def __init__(self, mincfile): self._mincfile = mincfile @@ -86,11 +86,11 @@ def get_data_shape(self): return self._image.shape def _get_valid_range(self): - ''' Return valid range for image data + """ Return valid range for image data The valid range can come from the image 'valid_range' or failing that, from the data type range - ''' + """ ddt = self.get_data_dtype() info = np.iinfo(ddt.type) try: @@ -144,12 +144,12 @@ def may_contain_header(klass, binaryblock): class Minc2Image(Minc1Image): - ''' Class for MINC2 images + """ Class for MINC2 images The MINC2 image class uses the default header type, rather than a specific MINC header type - and reads the relevant information from the MINC file on load. - ''' + """ # MINC2 does not do compressed whole files _compressed_suffixes = () header_class = Minc2Header diff --git a/nibabel/nicom/csareader.py b/nibabel/nicom/csareader.py index 1764e2878c..6ef089e301 100644 --- a/nibabel/nicom/csareader.py +++ b/nibabel/nicom/csareader.py @@ -1,6 +1,6 @@ -''' CSA header reader from SPM spec +""" CSA header reader from SPM spec -''' +""" import numpy as np from .structreader import Unpacker @@ -30,7 +30,7 @@ class CSAReadError(CSAError): def get_csa_header(dcm_data, csa_type='image'): - ''' Get CSA header information from DICOM header + """ Get CSA header information from DICOM header Return None if the header does not contain CSA information of the specified `csa_type` @@ -49,7 +49,7 @@ def get_csa_header(dcm_data, csa_type='image'): csa_info : None or dict Parsed CSA field of `csa_type` or None, if we cannot find the CSA information. - ''' + """ csa_type = csa_type.lower() if csa_type == 'image': element_offset = 0x10 @@ -72,7 +72,7 @@ def get_csa_header(dcm_data, csa_type='image'): def read(csa_str): - ''' Read CSA header from string `csa_str` + """ Read CSA header from string `csa_str` Parameters ---------- @@ -85,7 +85,7 @@ def read(csa_str): header information as dict, where `header` has fields (at least) ``type, n_tags, tags``. ``header['tags']`` is also a dictionary with one key, value pair for each tag in the header. 
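A usage sketch for the CSA reader (assuming `pydicom` is installed; 'mosaic.dcm' is a placeholder Siemens file name):

    import pydicom
    from nibabel.nicom import csareader

    dcm = pydicom.dcmread('mosaic.dcm')
    csa = csareader.get_csa_header(dcm, 'image')   # None if no CSA element
    print(csa is not None and csareader.is_mosaic(csa))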
- ''' + """ csa_len = len(csa_str) csa_dict = {'tags': {}} hdr_id = csa_str[:4] @@ -185,7 +185,7 @@ def get_vector(csa_dict, tag_name, n): def is_mosaic(csa_dict): - ''' Return True if the data is of Mosaic type + """ Return True if the data is of Mosaic type Parameters ---------- @@ -197,7 +197,7 @@ def is_mosaic(csa_dict): tf : bool True if the `dcm_data` appears to be of Siemens mosaic type, False otherwise - ''' + """ if csa_dict is None: return False if get_acq_mat_txt(csa_dict) is None: @@ -244,7 +244,7 @@ def get_ice_dims(csa_dict): def nt_str(s): - ''' Strip string to first null + """ Strip string to first null Parameters ---------- @@ -254,7 +254,7 @@ def nt_str(s): ------- sdash : str s stripped to first occurence of null (0) - ''' + """ zero_pos = s.find(b'\x00') if zero_pos == -1: return s diff --git a/nibabel/nicom/dicomreaders.py b/nibabel/nicom/dicomreaders.py index ad8d9c6b64..dee8b507d5 100644 --- a/nibabel/nicom/dicomreaders.py +++ b/nibabel/nicom/dicomreaders.py @@ -16,7 +16,7 @@ class DicomReadError(Exception): def mosaic_to_nii(dcm_data): - ''' Get Nifti file from Siemens + """ Get Nifti file from Siemens Parameters ---------- @@ -27,7 +27,7 @@ def mosaic_to_nii(dcm_data): ------- img : ``Nifti1Image`` Nifti image object - ''' + """ dcm_w = wrapper_from_data(dcm_data) if not dcm_w.is_mosaic: raise DicomReadError('data does not appear to be in mosaic format') @@ -45,7 +45,7 @@ def read_mosaic_dwi_dir(dicom_path, globber='*.dcm', dicom_kwargs=None): def read_mosaic_dir(dicom_path, globber='*.dcm', check_is_dwi=False, dicom_kwargs=None): - ''' Read all Siemens mosaic DICOMs in directory, return arrays, params + """ Read all Siemens mosaic DICOMs in directory, return arrays, params Parameters ---------- @@ -74,7 +74,7 @@ def read_mosaic_dir(dicom_path, unit_gradients : (N, 3) array gradient directions of unit length for each acquisition. (nan, nan, nan) if we did not find diffusion information. 
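A hedged sketch of `read_mosaic_dir` (the directory path is a placeholder, and the four-value return follows the docstring above; gradient rows are NaN when no diffusion information is found):

    from nibabel.nicom.dicomreaders import read_mosaic_dir

    data, affine, b_values, gradients = read_mosaic_dir('/path/to/dicoms')
    print(data.shape, b_values.shape, gradients.shape)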
- ''' + """ if dicom_kwargs is None: dicom_kwargs = {} full_globber = pjoin(dicom_path, globber) @@ -116,7 +116,7 @@ def read_mosaic_dir(dicom_path, def slices_to_series(wrappers): - ''' Sort sequence of slice wrappers into series + """ Sort sequence of slice wrappers into series This follows the SPM model fairly closely @@ -130,7 +130,7 @@ def slices_to_series(wrappers): series : sequence sequence of sequences of wrapper objects, where each sequence is wrapper objects comprising a series, sorted into slice order - ''' + """ # first pass volume_lists = [wrappers[0:1]] for dw in wrappers[1:]: @@ -171,7 +171,7 @@ def _instance_sorter(s): def _third_pass(wrappers): - ''' What we do when there are not unique zs in a slice set ''' + """ What we do when there are not unique zs in a slice set """ inos = [s.instance_number for s in wrappers] msg_fmt = ('Plausibly matching slices, but where some have ' 'the same apparent slice location, and %s; ' diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index f37d0323a8..b718b980aa 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -600,7 +600,7 @@ def image_orient_patient(self): @one_time def voxel_sizes(self): - ''' Get i, j, k voxel sizes ''' + """ Get i, j, k voxel sizes """ try: pix_measures = self.shared.PixelMeasuresSequence[0] except AttributeError: diff --git a/nibabel/nicom/dwiparams.py b/nibabel/nicom/dwiparams.py index e9d05c0d57..6146897377 100644 --- a/nibabel/nicom/dwiparams.py +++ b/nibabel/nicom/dwiparams.py @@ -1,4 +1,4 @@ -''' Process diffusion imaging parameters +""" Process diffusion imaging parameters * ``q`` is a vector in Q space * ``b`` is a b value @@ -18,13 +18,13 @@ B ~ (q_est . q_est.T) / norm(q_est) -''' +""" import numpy as np import numpy.linalg as npl def B2q(B, tol=None): - ''' Estimate q vector from input B matrix `B` + """ Estimate q vector from input B matrix `B` We require that the input `B` is symmetric positive definite. @@ -47,7 +47,7 @@ def B2q(B, tol=None): ------- q : (3,) vector Estimated q vector from B matrix `B` - ''' + """ B = np.asarray(B) if not np.allclose(B - B.T, 0): raise ValueError('B matrix is not symmetric enough') @@ -68,7 +68,7 @@ def B2q(B, tol=None): def nearest_pos_semi_def(B): - ''' Least squares positive semi-definite tensor estimation + """ Least squares positive semi-definite tensor estimation Reference: Niethammer M, San Jose Estepar R, Bouix S, Shenton M, Westin CF. On diffusion tensor estimation. Conf Proc IEEE Eng Med @@ -92,7 +92,7 @@ def nearest_pos_semi_def(B): array([[ 0.75, 0. , 0. ], [ 0. , 0.75, 0. ], [ 0. , 0. , 0. ]]) - ''' + """ B = np.asarray(B) vals, vecs = npl.eigh(B) # indices of eigenvalues in descending order diff --git a/nibabel/nicom/structreader.py b/nibabel/nicom/structreader.py index c40975b168..644f50d345 100644 --- a/nibabel/nicom/structreader.py +++ b/nibabel/nicom/structreader.py @@ -1,4 +1,4 @@ -''' Stream-like reader for packed data ''' +""" Stream-like reader for packed data """ from struct import Struct @@ -6,7 +6,7 @@ class Unpacker(object): - ''' Class to unpack values from buffer object + """ Class to unpack values from buffer object The buffer object is usually a string. 
Caches compiled :mod:`struct` format strings so that repeated unpacking with the same format @@ -26,10 +26,10 @@ class Unpacker(object): True >>> upk.ptr 7 - ''' + """ def __init__(self, buf, ptr=0, endian=None): - ''' Initialize unpacker + """ Initialize unpacker Parameters ---------- @@ -43,14 +43,14 @@ def __init__(self, buf, ptr=0, endian=None): behavior of ``struct`` - assuming system endian unless you specify the byte order specifically in the format string passed to ``unpack`` - ''' + """ self.buf = buf self.ptr = ptr self.endian = endian self._cache = {} def unpack(self, fmt): - ''' Unpack values from contained buffer + """ Unpack values from contained buffer Unpacks values from ``self.buf`` and updates ``self.ptr`` to the position after the read data. @@ -64,7 +64,7 @@ def unpack(self, fmt): ------- values : tuple values as unpacked from ``self.buf`` according to `fmt` - ''' + """ # try and get a struct corresponding to the format string from # the cache pkst = self._cache.get(fmt) @@ -89,7 +89,7 @@ def unpack(self, fmt): return values def read(self, n_bytes=-1): - ''' Return byte string of length `n_bytes` at current position + """ Return byte string of length `n_bytes` at current position Returns sub-string from ``self.buf`` and updates ``self.ptr`` to the position after the read data. @@ -103,7 +103,7 @@ def read(self, n_bytes=-1): Returns ------- s : byte string - ''' + """ start = self.ptr if n_bytes == -1: end = len(self.buf) diff --git a/nibabel/nicom/tests/data_pkgs.py b/nibabel/nicom/tests/data_pkgs.py index 56c135fd5b..2424666a72 100644 --- a/nibabel/nicom/tests/data_pkgs.py +++ b/nibabel/nicom/tests/data_pkgs.py @@ -1,4 +1,4 @@ -''' Data packages for DICOM testing ''' +""" Data packages for DICOM testing """ from ... import data as nibd diff --git a/nibabel/nifti1.py b/nibabel/nifti1.py index dbaf4a97b6..202decd8e0 100644 --- a/nibabel/nifti1.py +++ b/nibabel/nifti1.py @@ -6,10 +6,10 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Read / write access to NIfTI1 image format +""" Read / write access to NIfTI1 image format NIfTI1 format defined at http://nifti.nimh.nih.gov/nifti-1/ -''' +""" import warnings from io import BytesIO @@ -355,7 +355,7 @@ def __ne__(self, other): return not self == other def write_to(self, fileobj, byteswap): - ''' Write header extensions to fileobj + """ Write header extensions to fileobj Write starts at fileobj current file position. @@ -369,7 +369,7 @@ def write_to(self, fileobj, byteswap): Returns ------- None - ''' + """ extstart = fileobj.tell() rawsize = self.get_sizeondisk() # write esize and ecode first @@ -526,7 +526,7 @@ def __cmp__(self, other): return cmp(list(self), list(other)) def write_to(self, fileobj, byteswap): - ''' Write header extensions to fileobj + """ Write header extensions to fileobj Write starts at fileobj current file position. @@ -540,13 +540,13 @@ def write_to(self, fileobj, byteswap): Returns ------- None - ''' + """ for e in self: e.write_to(fileobj, byteswap) @classmethod def from_fileobj(klass, fileobj, size, byteswap): - '''Read header extensions from a fileobj + """Read header extensions from a fileobj Parameters ---------- @@ -562,7 +562,7 @@ def from_fileobj(klass, fileobj, size, byteswap): ------- An extension list. This list might be empty in case not extensions were present in fileobj. - ''' + """ # make empty extension list extensions = klass() # assume the file pointer is at the beginning of any extensions. 
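A short sketch of attaching an extension, as handled by the reading and writing code above ('comment' is a standard NIfTI ecode; the payload is arbitrary):

    import numpy as np
    import nibabel as nib
    from nibabel.nifti1 import Nifti1Extension

    img = nib.Nifti1Image(np.zeros((2, 3, 4)), np.eye(4))
    # The 'comment' ecode carries free-form bytes, written with the header
    img.header.extensions.append(Nifti1Extension('comment', b'free text'))
    assert len(img.header.extensions) == 1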
@@ -611,7 +611,7 @@ def from_fileobj(klass, fileobj, size, byteswap): class Nifti1Header(SpmAnalyzeHeader): - ''' Class for NIfTI1 header + """ Class for NIfTI1 header The NIfTI1 header has many more coded fields than the simpler Analyze variants. NIfTI1 headers also have extensions. @@ -622,7 +622,7 @@ class Nifti1Header(SpmAnalyzeHeader): data, extension reading, and writing the correct magic string. This class handles the header-preceding-data case. - ''' + """ # Copies of module level definitions template_dtype = header_dtype _data_type_codes = data_type_codes @@ -661,18 +661,18 @@ def __init__(self, endianness=None, check=True, extensions=()): - ''' Initialize header from binary data block and extensions - ''' + """ Initialize header from binary data block and extensions + """ super(Nifti1Header, self).__init__(binaryblock, endianness, check) self.extensions = self.exts_klass(extensions) def copy(self): - ''' Return copy of header + """ Return copy of header Take reference to extensions as well as copy of header contents - ''' + """ return self.__class__( self.binaryblock, self.endianness, @@ -726,7 +726,7 @@ def write_to(self, fileobj): self.extensions.write_to(fileobj, byteswap) def get_best_affine(self): - ''' Select best of available transforms ''' + """ Select best of available transforms """ hdr = self._structarr if hdr['sform_code'] != 0: return self.get_sform() @@ -736,7 +736,7 @@ def get_best_affine(self): @classmethod def default_structarr(klass, endianness=None): - ''' Create empty header binary block with given endianness ''' + """ Create empty header binary block with given endianness """ hdr_data = super(Nifti1Header, klass).default_structarr(endianness) if klass.is_single: hdr_data['magic'] = klass.single_magic @@ -746,7 +746,7 @@ def default_structarr(klass, endianness=None): @classmethod def from_header(klass, header=None, check=True): - ''' Class method to create header from another header + """ Class method to create header from another header Extend Analyze header copy by copying extensions from other Nifti types. @@ -763,14 +763,14 @@ def from_header(klass, header=None, check=True): ------- hdr : header instance fresh header instance of our own class - ''' + """ new_hdr = super(Nifti1Header, klass).from_header(header, check) if isinstance(header, Nifti1Header): new_hdr.extensions[:] = header.extensions[:] return new_hdr def get_data_shape(self): - ''' Get shape of data + """ Get shape of data Examples -------- @@ -793,7 +793,7 @@ def get_data_shape(self): Allows for freesurfer hack for 7th order icosahedron surface described in `issue 309`_, load_nifti.m_, and `save_nifti.m `_. - ''' + """ shape = super(Nifti1Header, self).get_data_shape() # Apply freesurfer hack for large vectors if shape[:3] == (-1, 1, 1): @@ -809,7 +809,7 @@ def get_data_shape(self): return shape def set_data_shape(self, shape): - ''' Set shape of data # noqa + """ Set shape of data # noqa If ``ndims == len(shape)`` then we set zooms for dimensions higher than ``ndims`` to 1.0 @@ -858,7 +858,7 @@ def set_data_shape(self, shape): .. _load_nifti.m: https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/load_nifti.m#L86-L89 .. 
_standard header: http://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h - ''' + """ hdr = self._structarr shape = tuple(shape) @@ -883,10 +883,10 @@ def set_data_shape(self, shape): super(Nifti1Header, self).set_data_shape(shape) def get_qform_quaternion(self): - ''' Compute quaternion from b, c, d of quaternion + """ Compute quaternion from b, c, d of quaternion Fills a value by assuming this is a unit quaternion - ''' + """ hdr = self._structarr bcd = [hdr['quatern_b'], hdr['quatern_c'], hdr['quatern_d']] # Adjust threshold to precision of stored values in header @@ -934,7 +934,7 @@ def get_qform(self, coded=False): return out def set_qform(self, affine, code=None, strip_shears=True): - ''' Set qform header values from 4x4 affine + """ Set qform header values from 4x4 affine Parameters ---------- @@ -989,7 +989,7 @@ def set_qform(self, affine, code=None, strip_shears=True): >>> hdr.set_qform(None) >>> int(hdr['qform_code']) 0 - ''' + """ hdr = self._structarr old_code = hdr['qform_code'] if code is None: @@ -1067,7 +1067,7 @@ def get_sform(self, coded=False): return out def set_sform(self, affine, code=None): - ''' Set sform transform from 4x4 affine + """ Set sform transform from 4x4 affine Parameters ---------- @@ -1108,7 +1108,7 @@ def set_sform(self, affine, code=None): >>> hdr.set_sform(None) >>> int(hdr['sform_code']) 0 - ''' + """ hdr = self._structarr old_code = hdr['sform_code'] if code is None: @@ -1129,7 +1129,7 @@ def set_sform(self, affine, code=None): hdr['srow_z'][:] = affine[2, :] def get_slope_inter(self): - ''' Get data scaling (slope) and DC offset (intercept) from header data + """ Get data scaling (slope) and DC offset (intercept) from header data Returns ------- @@ -1160,7 +1160,7 @@ def get_slope_inter(self): Traceback (most recent call last): ... HeaderDataError: Valid slope but invalid intercept inf - ''' + """ # Note that we are returning float (float64) scalefactors and # intercepts, although they are stored as in nifti1 as float32. slope = float(self['scl_slope']) @@ -1173,7 +1173,7 @@ def get_slope_inter(self): return slope, inter def set_slope_inter(self, slope, inter=None): - ''' Set slope and / or intercept into header + """ Set slope and / or intercept into header Set slope and intercept for image data, such that, if the image data is ``arr``, then the scaled image data will be ``(arr * @@ -1192,7 +1192,7 @@ def set_slope_inter(self, slope, inter=None): Intercept. If None, implies `inter` of NaN. If `slope` is None or NaN then `inter` should be None or NaN. 
Values of Inf or -Inf raise HeaderDataError - ''' + """ if slope is None: slope = np.nan if inter is None: @@ -1207,7 +1207,7 @@ def set_slope_inter(self, slope, inter=None): self._structarr['scl_inter'] = inter def get_dim_info(self): - ''' Gets NIfTI MRI slice etc dimension information + """ Gets NIfTI MRI slice etc dimension information Returns ------- @@ -1231,7 +1231,7 @@ def get_dim_info(self): -------- See set_dim_info function - ''' + """ hdr = self._structarr info = int(hdr['dim_info']) freq = info & 3 @@ -1242,7 +1242,7 @@ def get_dim_info(self): slice - 1 if slice else None) def set_dim_info(self, freq=None, phase=None, slice=None): - ''' Sets nifti MRI slice etc dimension information + """ Sets nifti MRI slice etc dimension information Parameters ---------- @@ -1274,7 +1274,7 @@ def set_dim_info(self, freq=None, phase=None, slice=None): Notes ----- This is stored in one byte in the header - ''' + """ for inp in (freq, phase, slice): # Don't use == on None to avoid a FutureWarning in python3 if inp is not None and inp not in (0, 1, 2): @@ -1289,7 +1289,7 @@ def set_dim_info(self, freq=None, phase=None, slice=None): self._structarr['dim_info'] = info def get_intent(self, code_repr='label'): - ''' Get intent code, parameters and name + """ Get intent code, parameters and name Parameters ---------- @@ -1314,7 +1314,7 @@ def get_intent(self, code_repr='label'): ('t test', (10.0,), 'some score') >>> hdr.get_intent('code') (3, (10.0,), 'some score') - ''' + """ hdr = self._structarr recoder = self._field_recoders['intent_code'] code = int(hdr['intent_code']) @@ -1334,7 +1334,7 @@ def get_intent(self, code_repr='label'): return label, tuple(params), name def set_intent(self, code, params=(), name='', allow_unknown=False): - ''' Set the intent code, parameters and name + """ Set the intent code, parameters and name If parameters are not specified, assumed to be all zero. Each intent code has a set number of parameters associated. If you @@ -1382,7 +1382,7 @@ def set_intent(self, code, params=(), name='', allow_unknown=False): >>> hdr.set_intent(9999, allow_unknown=True) # unknown code >>> hdr.get_intent() ('unknown code 9999', (), '') - ''' + """ hdr = self._structarr known_intent = code in intent_codes if not known_intent: @@ -1407,7 +1407,7 @@ def set_intent(self, code, params=(), name='', allow_unknown=False): hdr['intent_p%d' % (i + 1)] = param def get_slice_duration(self): - ''' Get slice duration + """ Get slice duration Returns ------- @@ -1426,7 +1426,7 @@ def get_slice_duration(self): ----- The NIfTI1 spec appears to require the slice dimension to be defined for slice_duration to have meaning. 
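A minimal sketch tying `set_dim_info` to `slice_duration`, per the requirement noted above:

    import nibabel as nib

    hdr = nib.Nifti1Header()
    hdr.set_dim_info(freq=0, phase=1, slice=2)
    assert hdr.get_dim_info() == (0, 1, 2)
    hdr.set_slice_duration(0.1)   # legal only once the slice dimension is set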
- ''' + """ _, _, slice_dim = self.get_dim_info() if slice_dim is None: raise HeaderDataError('Slice dimension must be set ' @@ -1434,7 +1434,7 @@ def get_slice_duration(self): return float(self._structarr['slice_duration']) def set_slice_duration(self, duration): - ''' Set slice duration + """ Set slice duration Parameters ---------- @@ -1444,7 +1444,7 @@ def set_slice_duration(self, duration): Examples -------- See ``get_slice_duration`` - ''' + """ _, _, slice_dim = self.get_dim_info() if slice_dim is None: raise HeaderDataError('Slice dimension must be set ' @@ -1452,8 +1452,8 @@ def set_slice_duration(self, duration): self._structarr['slice_duration'] = duration def get_n_slices(self): - ''' Return the number of slices - ''' + """ Return the number of slices + """ _, _, slice_dim = self.get_dim_info() if slice_dim is None: raise HeaderDataError('Slice dimension not set in header ' @@ -1468,7 +1468,7 @@ def get_n_slices(self): return slice_len def get_slice_times(self): - ''' Get slice times from slice timing information + """ Get slice times from slice timing information Returns ------- @@ -1490,7 +1490,7 @@ def get_slice_times(self): >>> slice_times = hdr.get_slice_times() >>> np.allclose(slice_times, [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6]) True - ''' + """ hdr = self._structarr slice_len = self.get_n_slices() duration = self.get_slice_duration() @@ -1514,7 +1514,7 @@ def get_slice_times(self): (None,) * (slice_len - slice_end - 1)) def set_slice_times(self, slice_times): - ''' Set slice times into *hdr* + """ Set slice times into *hdr* Parameters ---------- @@ -1536,7 +1536,7 @@ def set_slice_times(self, slice_times): 1 >>> int(hdr['slice_end']) 5 - ''' + """ # Check if number of slices matches header hdr = self._structarr slice_len = self.get_n_slices() @@ -1597,7 +1597,7 @@ def set_slice_times(self, slice_times): hdr['slice_code'] = slice_order_codes.code[label] def _slice_time_order(self, slabel, n_slices): - ''' Supporting function to give time order of slices from label ''' + """ Supporting function to give time order of slices from label """ if slabel == 'sequential increasing': sp_ind_time_order = list(range(n_slices)) elif slabel == 'sequential decreasing': @@ -1637,18 +1637,18 @@ def set_xyzt_units(self, xyz=None, t=None): self.structarr['xyzt_units'] = xyz_code + t_code def _clean_after_mapping(self): - ''' Set format-specific stuff after converting header from mapping + """ Set format-specific stuff after converting header from mapping Clean up header after it has been initialized from an ``as_analyze_map`` method of another header type See :meth:`nibabel.analyze.AnalyzeHeader._clean_after_mapping` for a more detailed description. - ''' + """ self._structarr['magic'] = (self.single_magic if self.is_single else self.pair_magic) - ''' Checks only below here ''' + """ Checks only below here """ @classmethod def _get_checks(klass): @@ -1751,7 +1751,7 @@ def may_contain_header(klass, binaryblock): class Nifti1PairHeader(Nifti1Header): - ''' Class for NIfTI1 pair header ''' + """ Class for NIfTI1 pair header """ # Signal whether this is single (header + data) file is_single = False @@ -1774,7 +1774,7 @@ def __init__(self, dataobj, affine, header=None, if header is None and affine is not None: self._affine2header() # Copy docstring - __init__.__doc__ = analyze.AnalyzeImage.__init__.__doc__ + ''' + __init__.__doc__ = analyze.AnalyzeImage.__init__.__doc__ + """ Notes ----- @@ -1786,10 +1786,10 @@ def __init__(self, dataobj, affine, header=None, space to which the affine is pointing. 
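Putting the slice-timing pieces together, a sketch reusing the values from the doctests above:

    import nibabel as nib

    hdr = nib.Nifti1Header()
    hdr.set_dim_info(slice=2)
    hdr.set_data_shape((3, 4, 7))
    hdr.set_slice_duration(0.1)
    hdr.set_slice_times([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
    assert hdr.get_value_label('slice_code') == 'sequential increasing'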
The :meth:`set_sform` and :meth:`set_qform` methods can be used to update the codes after an image has been created - see those methods, and the :ref:`manual - ` for more details. ''' + ` for more details. """ def update_header(self): - ''' Harmonize header with image data and affine + """ Harmonize header with image data and affine See AnalyzeImage.update_header for more examples @@ -1803,7 +1803,7 @@ def update_header(self): True >>> np.all(hdr.get_sform() == affine) True - ''' + """ super(Nifti1Pair, self).update_header() hdr = self._header hdr['magic'] = hdr.pair_magic @@ -1843,7 +1843,7 @@ def get_qform(self, coded=False): return self._header.get_qform(coded) def set_qform(self, affine, code=None, strip_shears=True, **kwargs): - ''' Set qform header values from 4x4 affine + """ Set qform header values from 4x4 affine Parameters ---------- @@ -1892,7 +1892,7 @@ def set_qform(self, affine, code=None, strip_shears=True, **kwargs): True >>> int(code) 3 - ''' + """ update_affine = kwargs.pop('update_affine', True) if kwargs: raise TypeError('Unexpected keyword argument(s) %s' % kwargs) @@ -1930,7 +1930,7 @@ def get_sform(self, coded=False): return self._header.get_sform(coded) def set_sform(self, affine, code=None, **kwargs): - ''' Set sform transform from 4x4 affine + """ Set sform transform from 4x4 affine Parameters ---------- @@ -1981,7 +1981,7 @@ def set_sform(self, affine, code=None, **kwargs): True >>> int(code) 3 - ''' + """ update_affine = kwargs.pop('update_affine', True) if kwargs: raise TypeError('Unexpected keyword argument(s) %s' % kwargs) @@ -2040,7 +2040,7 @@ def _get_fileholders(file_map): return file_map['image'], file_map['image'] def update_header(self): - ''' Harmonize header with image data and affine ''' + """ Harmonize header with image data and affine """ super(Nifti1Image, self).update_header() hdr = self._header hdr['magic'] = hdr.single_magic diff --git a/nibabel/nifti2.py b/nibabel/nifti2.py index 45e834b29a..8c58569d96 100644 --- a/nibabel/nifti2.py +++ b/nibabel/nifti2.py @@ -6,12 +6,12 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Read / write access to NIfTI2 image format +""" Read / write access to NIfTI2 image format Format described here: https://www.nitrc.org/forum/message.php?msg_id=3738 -''' +""" import numpy as np @@ -141,7 +141,7 @@ class Nifti2Header(Nifti1Header): quaternion_threshold = -np.finfo(np.float64).eps * 3 def get_data_shape(self): - ''' Get shape of data + """ Get shape of data Examples -------- @@ -161,11 +161,11 @@ def get_data_shape(self): ----- Does not use Nifti1 freesurfer hack for large vectors described in :meth:`Nifti1Header.set_data_shape` - ''' + """ return AnalyzeHeader.get_data_shape(self) def set_data_shape(self, shape): - ''' Set shape of data + """ Set shape of data If ``ndims == len(shape)`` then we set zooms for dimensions higher than ``ndims`` to 1.0 @@ -179,17 +179,17 @@ def set_data_shape(self, shape): ----- Does not apply nifti1 Freesurfer hack for long vectors (see :meth:`Nifti1Header.set_data_shape`) - ''' + """ AnalyzeHeader.set_data_shape(self, shape) @classmethod def default_structarr(klass, endianness=None): - ''' Create empty header binary block with given endianness ''' + """ Create empty header binary block with given endianness """ hdr_data = super(Nifti2Header, klass).default_structarr(endianness) hdr_data['eol_check'] = (13, 10, 26, 10) return hdr_data - ''' Checks only below here ''' + """ Checks only below here """ @classmethod def _get_checks(klass): @@ -229,7 +229,7 @@ def may_contain_header(klass, binaryblock): class Nifti2PairHeader(Nifti2Header): - ''' Class for NIfTI2 pair header ''' + """ Class for NIfTI2 pair header """ # Signal whether this is single (header + data) file is_single = False diff --git a/nibabel/orientations.py b/nibabel/orientations.py index 9005e54540..baf08c549e 100644 --- a/nibabel/orientations.py +++ b/nibabel/orientations.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Utilities for calculating and applying affine orientations ''' +""" Utilities for calculating and applying affine orientations """ import numpy as np @@ -20,7 +20,7 @@ class OrientationError(Exception): def io_orientation(affine, tol=None): - ''' Orientation of input axes in terms of output axes for `affine` + """ Orientation of input axes in terms of output axes for `affine` Valid for an affine transformation from ``p`` dimensions to ``q`` dimensions (``affine.shape == (q + 1, p + 1)``). @@ -50,7 +50,7 @@ def io_orientation(affine, tol=None): input axis is in the same direction as the corresponding output axis and -1 if it is in the opposite direction. If a row is [np.nan, np.nan], which can happen when p > q, then this row should be considered dropped. - ''' + """ affine = np.asarray(affine) q, p = affine.shape[0] - 1, affine.shape[1] - 1 # extract the underlying rotation, zoom, shear matrix @@ -93,7 +93,7 @@ def io_orientation(affine, tol=None): def ornt_transform(start_ornt, end_ornt): - '''Return the orientation that transforms from `start_ornt` to `end_ornt`. + """Return the orientation that transforms from `start_ornt` to `end_ornt`. Parameters ---------- @@ -107,7 +107,7 @@ def ornt_transform(start_ornt, end_ornt): ------- orientations : (p, 2) ndarray The orientation that will transform the `start_ornt` to the `end_ornt`. 
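The orientation utilities above compose as follows; a minimal sketch mirroring the `as_closest_canonical` recipe:

    import numpy as np
    from nibabel.orientations import (apply_orientation, inv_ornt_aff,
                                      io_orientation)

    affine = np.diag([-2.0, 2.0, 2.0, 1.0])    # first axis flipped
    ornt = io_orientation(affine)              # [[0, -1], [1, 1], [2, 1]]
    arr = np.zeros((3, 4, 5))
    t_arr = apply_orientation(arr, ornt)       # axis 0 reversed
    t_aff = affine.dot(inv_ornt_aff(ornt, arr.shape))  # affine for t_arr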
- ''' + """ start_ornt = np.asarray(start_ornt) end_ornt = np.asarray(end_ornt) if start_ornt.shape != end_ornt.shape: @@ -132,7 +132,7 @@ def ornt_transform(start_ornt, end_ornt): def apply_orientation(arr, ornt): - ''' Apply transformations implied by `ornt` to the first + """ Apply transformations implied by `ornt` to the first n axes of the array `arr` Parameters @@ -151,7 +151,7 @@ def apply_orientation(arr, ornt): ------- t_arr : ndarray data array `arr` transformed according to ornt - ''' + """ t_arr = np.asarray(arr) ornt = np.asarray(ornt) n = ornt.shape[0] @@ -174,7 +174,7 @@ def apply_orientation(arr, ornt): def inv_ornt_aff(ornt, shape): - ''' Affine transform reversing transforms implied in `ornt` + """ Affine transform reversing transforms implied in `ornt` Imagine you have an array ``arr`` of shape `shape`, and you apply the transforms implied by `ornt` (more below), to get ``tarr``. @@ -209,7 +209,7 @@ def inv_ornt_aff(ornt, shape): influence the output space, and is thus effectively dropped from the output space. In that case one ``tarr`` coordinate maps to many ``arr`` coordinates, we can't invert the transform, and we raise an error - ''' + """ ornt = np.asarray(ornt) if np.any(np.isnan(ornt)): raise OrientationError("We cannot invert orientation transform") diff --git a/nibabel/pkg_info.py b/nibabel/pkg_info.py index 7be15315d2..43b39f4e89 100644 --- a/nibabel/pkg_info.py +++ b/nibabel/pkg_info.py @@ -63,7 +63,7 @@ def cmp_pkg_version(version_str, pkg_version_str=__version__): def pkg_commit_hash(pkg_path=None): - ''' Get short form of commit hash + """ Get short form of commit hash Versioneer placed a ``_version.py`` file in the package directory. This file gets updated on installation or ``git archive``. @@ -83,7 +83,7 @@ def pkg_commit_hash(pkg_path=None): Where we got the hash from - description hash_str : str short form of hash - ''' + """ versions = _version.get_versions() hash_str = versions['full-revisionid'][:7] if hasattr(_version, 'version_json'): @@ -98,7 +98,7 @@ def pkg_commit_hash(pkg_path=None): def get_pkg_info(pkg_path): - ''' Return dict describing the context of this package + """ Return dict describing the context of this package Parameters ---------- @@ -109,7 +109,7 @@ def get_pkg_info(pkg_path): ------- context : dict with named parameters of interest - ''' + """ src, hsh = pkg_commit_hash() import numpy return dict( diff --git a/nibabel/quaternions.py b/nibabel/quaternions.py index adc2367238..cd3646853d 100644 --- a/nibabel/quaternions.py +++ b/nibabel/quaternions.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' +""" Functions to operate on, or return, quaternions. 
The module also includes functions for the closely related angle, axis @@ -23,7 +23,7 @@ >>> M = quat2mat(q) # from this module >>> vec = np.array([1, 2, 3]).reshape((3,1)) # column vector >>> tvec = np.dot(M, vec) -''' +""" import math import numpy as np @@ -33,7 +33,7 @@ def fillpositive(xyz, w2_thresh=None): - ''' Compute unit quaternion from last 3 values + """ Compute unit quaternion from last 3 values Parameters ---------- @@ -80,7 +80,7 @@ def fillpositive(xyz, w2_thresh=None): True >>> np.dot(wxyz, wxyz) 1.0 - ''' + """ # Check inputs (force error if < 3 values) if len(xyz) != 3: raise ValueError('xyz should have length 3') @@ -104,7 +104,7 @@ def fillpositive(xyz, w2_thresh=None): def quat2mat(q): - ''' Calculate rotation matrix corresponding to quaternion + """ Calculate rotation matrix corresponding to quaternion Parameters ---------- @@ -135,7 +135,7 @@ def quat2mat(q): >>> M = quat2mat([0, 1, 0, 0]) # 180 degree rotn around axis 0 >>> np.allclose(M, np.diag([1, -1, -1])) True - ''' + """ w, x, y, z = q Nq = w * w + x * x + y * y + z * z if Nq < FLOAT_EPS: @@ -153,7 +153,7 @@ def quat2mat(q): def mat2quat(M): - ''' Calculate quaternion corresponding to given rotation matrix + """ Calculate quaternion corresponding to given rotation matrix Parameters ---------- @@ -195,7 +195,7 @@ def mat2quat(M): >>> np.allclose(q, [0, 1, 0, 0]) # 180 degree rotn around axis 0 True - ''' + """ # Qyx refers to the contribution of the y input vector component to # the x output vector component. Qyx is therefore the same as # M[0,1]. The notation is from the Wikipedia article. @@ -219,7 +219,7 @@ def mat2quat(M): def mult(q1, q2): - ''' Multiply two quaternions + """ Multiply two quaternions Parameters ---------- @@ -233,7 +233,7 @@ def mult(q1, q2): Notes ----- See : https://en.wikipedia.org/wiki/Quaternions#Hamilton_product - ''' + """ w1, x1, y1, z1 = q1 w2, x2, y2, z2 = q2 w = w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2 @@ -244,7 +244,7 @@ def mult(q1, q2): def conjugate(q): - ''' Conjugate of quaternion + """ Conjugate of quaternion Parameters ---------- @@ -255,12 +255,12 @@ def conjugate(q): ------- conjq : array shape (4,) w, i, j, k of conjugate of `q` - ''' + """ return np.array(q) * np.array([1.0, -1, -1, -1]) def norm(q): - ''' Return norm of quaternion + """ Return norm of quaternion Parameters ---------- @@ -271,17 +271,17 @@ def norm(q): ------- n : scalar quaternion norm - ''' + """ return np.dot(q, q) def isunit(q): - ''' Return True is this is very nearly a unit quaternion ''' + """ Return True is this is very nearly a unit quaternion """ return np.allclose(norm(q), 1) def inverse(q): - ''' Return multiplicative inverse of quaternion `q` + """ Return multiplicative inverse of quaternion `q` Parameters ---------- @@ -292,17 +292,17 @@ def inverse(q): ------- invq : array shape (4,) w, i, j, k of quaternion inverse - ''' + """ return conjugate(q) / norm(q) def eye(): - ''' Return identity quaternion ''' + """ Return identity quaternion """ return np.array([1.0, 0, 0, 0]) def rotate_vector(v, q): - ''' Apply transformation in quaternion `q` to vector `v` + """ Apply transformation in quaternion `q` to vector `v` Parameters ---------- @@ -321,14 +321,14 @@ def rotate_vector(v, q): See: https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation#Describing_rotations_with_quaternions - ''' + """ varr = np.zeros((4,)) varr[1:] = v return mult(q, mult(varr, conjugate(q)))[1:] def nearly_equivalent(q1, q2, rtol=1e-5, atol=1e-8): - ''' Returns True if `q1` and `q2` give near equivalent 
transforms + """ Returns True if `q1` and `q2` give near equivalent transforms `q1` may be nearly numerically equal to `q2`, or nearly equal to `q2` * -1 (because a quaternion multiplied by -1 gives the same transform). @@ -354,7 +354,7 @@ def nearly_equivalent(q1, q2, rtol=1e-5, atol=1e-8): True >>> nearly_equivalent(q1, [-1, 0, 0, 0]) True - ''' + """ q1 = np.array(q1) q2 = np.array(q2) if np.allclose(q1, q2, rtol, atol): @@ -363,7 +363,7 @@ def nearly_equivalent(q1, q2, rtol=1e-5, atol=1e-8): def angle_axis2quat(theta, vector, is_normalized=False): - ''' Quaternion for rotation of angle `theta` around `vector` + """ Quaternion for rotation of angle `theta` around `vector` Parameters ---------- @@ -389,7 +389,7 @@ def angle_axis2quat(theta, vector, is_normalized=False): Notes ----- Formula from http://mathworld.wolfram.com/EulerParameters.html - ''' + """ vector = np.array(vector) if not is_normalized: # Cannot divide in-place because input vector may be integer type, @@ -403,7 +403,7 @@ def angle_axis2quat(theta, vector, is_normalized=False): def angle_axis2mat(theta, vector, is_normalized=False): - ''' Rotation matrix of angle `theta` around `vector` + """ Rotation matrix of angle `theta` around `vector` Parameters ---------- @@ -423,7 +423,7 @@ def angle_axis2mat(theta, vector, is_normalized=False): Notes ----- From: https://en.wikipedia.org/wiki/Rotation_matrix#Axis_and_angle - ''' + """ x, y, z = vector if not is_normalized: n = math.sqrt(x * x + y * y + z * z) @@ -441,7 +441,7 @@ def angle_axis2mat(theta, vector, is_normalized=False): def quat2angle_axis(quat, identity_thresh=None): - ''' Convert quaternion to rotation of angle around axis + """ Convert quaternion to rotation of angle around axis Parameters ---------- @@ -479,7 +479,7 @@ def quat2angle_axis(quat, identity_thresh=None): A quaternion for which x, y, z are all equal to 0, is an identity rotation. In this case we return a 0 angle and an arbitrary vector, here [1, 0, 0] - ''' + """ w, x, y, z = quat vec = np.asarray([x, y, z]) if identity_thresh is None: diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py index 0a140ec589..19dd2a0247 100644 --- a/nibabel/spatialimages.py +++ b/nibabel/spatialimages.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' A simple spatial image class +""" A simple spatial image class The image class maintains the association between a 3D (or greater) array, and an affine transform that maps voxel coordinates to some world space. @@ -132,7 +132,7 @@ >>> np.all(img3.get_fdata(dtype=np.float32) == data) True -''' +""" import numpy as np @@ -147,15 +147,15 @@ class HeaderDataError(Exception): - ''' Class to indicate error in getting or setting header data ''' + """ Class to indicate error in getting or setting header data """ class HeaderTypeError(Exception): - ''' Class to indicate error in parameters into header functions ''' + """ Class to indicate error in parameters into header functions """ class SpatialHeader(FileBasedHeader): - ''' Template class to implement header protocol ''' + """ Template class to implement header protocol """ default_x_flip = True data_layout = 'F' @@ -202,11 +202,11 @@ def __ne__(self, other): return not self == other def copy(self): - ''' Copy object to independent representation + """ Copy object to independent representation The copy should not be affected by any changes to the original object. 
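A round-trip sketch through the angle/axis helpers above:

    import numpy as np
    from nibabel import quaternions as nq

    q = nq.angle_axis2quat(np.pi / 2, [1, 0, 0])   # 90 degrees about x
    M = nq.quat2mat(q)
    theta, vec = nq.quat2angle_axis(nq.mat2quat(M))
    assert np.allclose(theta, np.pi / 2)
    assert np.allclose(vec, [1, 0, 0])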
- ''' + """ return self.__class__(self._dtype, self._shape, self._zooms) def get_data_dtype(self): @@ -252,7 +252,7 @@ def get_base_affine(self): get_best_affine = get_base_affine def data_to_fileobj(self, data, fileobj, rescale=True): - ''' Write array data `data` as binary to `fileobj` + """ Write array data `data` as binary to `fileobj` Parameters ---------- @@ -263,13 +263,13 @@ def data_to_fileobj(self, data, fileobj, rescale=True): rescale : {True, False}, optional Whether to try and rescale data to match output dtype specified by header. For this minimal header, `rescale` has no effect - ''' + """ data = np.asarray(data) dtype = self.get_data_dtype() fileobj.write(data.astype(dtype).tobytes(order=self.data_layout)) def data_from_fileobj(self, fileobj): - ''' Read binary image data from `fileobj` ''' + """ Read binary image data from `fileobj` """ dtype = self.get_data_dtype() shape = self.get_data_shape() data_size = int(np.prod(shape) * dtype.itemsize) @@ -309,7 +309,7 @@ def supported_np_types(obj): class Header(SpatialHeader): - '''Alias for SpatialHeader; kept for backwards compatibility.''' + """Alias for SpatialHeader; kept for backwards compatibility.""" @deprecate_with_version('Header class is deprecated.\n' 'Please use SpatialHeader instead.' @@ -324,10 +324,10 @@ class ImageDataError(Exception): class SpatialFirstSlicer(object): - ''' Slicing interface that returns a new image with an updated affine + """ Slicing interface that returns a new image with an updated affine Checks that an image's first three axes are spatial - ''' + """ def __init__(self, img): # Local import to avoid circular import on module load from .imageclasses import spatial_axes_first @@ -350,7 +350,7 @@ def __getitem__(self, slicer): return self.img.__class__(dataobj.copy(), affine, self.img.header) def check_slicing(self, slicer, return_spatial=False): - ''' Canonicalize slicers and check for scalar indices in spatial dims + """ Canonicalize slicers and check for scalar indices in spatial dims Parameters ---------- @@ -365,7 +365,7 @@ def check_slicing(self, slicer, return_spatial=False): slicer : object Validated slicer object that will slice image's `dataobj` without collapsing spatial dimensions - ''' + """ slicer = canonical_slicers(slicer, self.img.shape) # We can get away with this because we've checked the image's # first three axes are spatial. 
@@ -417,13 +417,13 @@ def slice_affine(self, slicer): class SpatialImage(DataobjImage): - ''' Template class for volumetric (3D/4D) images ''' + """ Template class for volumetric (3D/4D) images """ header_class = SpatialHeader ImageSlicer = SpatialFirstSlicer def __init__(self, dataobj, affine, header=None, extra=None, file_map=None): - ''' Initialize image + """ Initialize image The image is a combination of (array-like, affine matrix, header), with optional metadata in `extra`, and filename / file-like objects @@ -447,7 +447,7 @@ def __init__(self, dataobj, affine, header=None, metadata of this image type file_map : mapping, optional mapping giving file information for this image format - ''' + """ super(SpatialImage, self).__init__(dataobj, header=header, extra=extra, file_map=file_map) if affine is not None: @@ -474,7 +474,7 @@ def affine(self): return self._affine def update_header(self): - ''' Harmonize header with image data and affine + """ Harmonize header with image data and affine >>> data = np.zeros((2,3,4)) >>> affine = np.diag([1.0,2.0,3.0,1.0]) @@ -486,7 +486,7 @@ def update_header(self): True >>> img.header.get_zooms() (1.0, 2.0, 3.0) - ''' + """ hdr = self._header shape = self._dataobj.shape # We need to update the header if the data shape has changed. It's a @@ -539,7 +539,7 @@ def get_affine(self): @classmethod def from_image(klass, img): - ''' Class method to create new instance of own class from `img` + """ Class method to create new instance of own class from `img` Parameters ---------- @@ -551,7 +551,7 @@ def from_image(klass, img): ------- cimg : ``spatialimage`` instance Image, of our own class - ''' + """ return klass(img.dataobj, img.affine, klass.header_class.from_header(img.header), @@ -578,11 +578,11 @@ def slicer(self): return self.ImageSlicer(self) def __getitem__(self, idx): - ''' No slicing or dictionary interface for images + """ No slicing or dictionary interface for images Use the slicer attribute to perform cropping and subsampling at your own risk. - ''' + """ raise TypeError( "Cannot slice image objects; consider using `img.slicer[slice]` " "to generate a sliced image (see documentation for caveats) or " diff --git a/nibabel/spm2analyze.py b/nibabel/spm2analyze.py index 1b53238b44..6786b19a0c 100644 --- a/nibabel/spm2analyze.py +++ b/nibabel/spm2analyze.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Read / write access to SPM2 version of analyze image format ''' +""" Read / write access to SPM2 version of analyze image format """ import numpy as np from . import spm99analyze as spm99 # module import @@ -23,20 +23,20 @@ class Spm2AnalyzeHeader(spm99.Spm99AnalyzeHeader): - ''' Class for SPM2 variant of basic Analyze header + """ Class for SPM2 variant of basic Analyze header SPM2 variant adds the following to basic Analyze format: * voxel origin; * slope scaling of data; * reading - but not writing - intercept of data. 
- ''' + """ # Copies of module level definitions template_dtype = header_dtype def get_slope_inter(self): - ''' Get data scaling (slope) and intercept from header data + """ Get data scaling (slope) and intercept from header data Uses the algorithm from SPM2 spm_vol_ana.m by John Ashburner @@ -95,7 +95,7 @@ def get_slope_inter(self): >>> hdr['glmin'] = 10 >>> np.allclose(hdr.get_slope_inter(), [0.6/100, 0.2-0.6/100*10]) True - ''' + """ # get scaling factor from 'scl_slope' (funused1) slope = float(self['scl_slope']) if np.isfinite(slope) and slope: diff --git a/nibabel/spm99analyze.py b/nibabel/spm99analyze.py index 2ae780ebde..4d14724807 100644 --- a/nibabel/spm99analyze.py +++ b/nibabel/spm99analyze.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Read / write access to SPM99 version of analyze image format ''' +""" Read / write access to SPM99 version of analyze image format """ import warnings import numpy as np @@ -19,7 +19,7 @@ from .optpkg import optional_package have_scipy = optional_package('scipy')[1] -''' Support subtle variations of SPM version of Analyze ''' +""" Support subtle variations of SPM version of Analyze """ header_key_dtd = analyze.header_key_dtd # funused1 in dime subfield is scalefactor image_dimension_dtd = analyze.image_dimension_dtd[:] @@ -39,7 +39,7 @@ class SpmAnalyzeHeader(analyze.AnalyzeHeader): - ''' Basic scaling Spm Analyze header ''' + """ Basic scaling Spm Analyze header """ # Copies of module level definitions template_dtype = header_dtype @@ -49,17 +49,17 @@ class SpmAnalyzeHeader(analyze.AnalyzeHeader): @classmethod def default_structarr(klass, endianness=None): - ''' Create empty header binary block with given endianness ''' + """ Create empty header binary block with given endianness """ hdr_data = super(SpmAnalyzeHeader, klass).default_structarr(endianness) hdr_data['scl_slope'] = 1 return hdr_data def get_slope_inter(self): - ''' Get scalefactor and intercept + """ Get scalefactor and intercept If scalefactor is 0.0 return None to indicate no scalefactor. Intercept is always None because SPM99 analyze cannot store intercepts. - ''' + """ slope = self._structarr['scl_slope'] # Return invalid slopes as None if np.isnan(slope) or slope in (0, -np.inf, np.inf): @@ -67,7 +67,7 @@ def get_slope_inter(self): return slope, None def set_slope_inter(self, slope, inter=None): - ''' Set slope and / or intercept into header + """ Set slope and / or intercept into header Set slope and intercept for image data, such that, if the image data is ``arr``, then the scaled image data will be ``(arr * @@ -85,7 +85,7 @@ def set_slope_inter(self, slope, inter=None): inter : None or float, optional intercept. Must be None, NaN or 0, because SPM99 cannot store intercepts. - ''' + """ if slope is None: slope = np.nan if slope in (0, -np.inf, np.inf): @@ -98,16 +98,16 @@ def set_slope_inter(self, slope, inter=None): class Spm99AnalyzeHeader(SpmAnalyzeHeader): - ''' Class for SPM99 variant of basic Analyze header + """ Class for SPM99 variant of basic Analyze header SPM99 variant adds the following to basic Analyze format: * voxel origin; * slope scaling of data. - ''' + """ def get_origin_affine(self): - ''' Get affine from header, using SPM origin field if sensible + """ Get affine from header, using SPM origin field if sensible The default translations are got from the ``origin`` field, if set, or from the center of the image otherwise. 
@@ -137,7 +137,7 @@ def get_origin_affine(self): [ 0., 2., 0., -4.], [ 0., 0., 1., -3.], [ 0., 0., 0., 1.]]) - ''' + """ hdr = self._structarr zooms = hdr['pixdim'][1:4].copy() if self.default_x_flip: @@ -159,7 +159,7 @@ def get_origin_affine(self): get_best_affine = get_origin_affine def set_origin_from_affine(self, affine): - ''' Set SPM origin to header from affine matrix. + """ Set SPM origin to header from affine matrix. The ``origin`` field was read but not written by SPM99 and 2. It was used for storing a central voxel coordinate, that could be used in @@ -201,7 +201,7 @@ def set_origin_from_affine(self, affine): [ 0., 2., 0., -6.], [ 0., 0., 1., -4.], [ 0., 0., 0., 1.]]) - ''' + """ if affine.shape != (4, 4): raise ValueError('Need 4x4 affine to set') hdr = self._structarr @@ -244,7 +244,7 @@ class Spm99AnalyzeImage(analyze.AnalyzeImage): @classmethod def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): - ''' Class method to create image from mapping in ``file_map`` + """ Class method to create image from mapping in ``file_map`` .. deprecated:: 2.4.1 ``keep_file_open='auto'`` is redundant with `False` and has @@ -277,7 +277,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): ------- img : Spm99AnalyzeImage instance - ''' + """ ret = super(Spm99AnalyzeImage, klass).from_file_map( file_map, mmap=mmap, keep_file_open=keep_file_open) try: @@ -313,7 +313,7 @@ def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None): return ret def to_file_map(self, file_map=None): - ''' Write image to `file_map` or contained ``self.file_map`` + """ Write image to `file_map` or contained ``self.file_map`` Extends Analyze ``to_file_map`` method by writing ``mat`` file @@ -322,7 +322,7 @@ def to_file_map(self, file_map=None): file_map : None or mapping, optional files mapping. If None (default) use object's ``file_map`` attribute instead - ''' + """ if file_map is None: file_map = self.file_map super(Spm99AnalyzeImage, self).to_file_map(file_map) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 52055ebcc3..71f9e84db2 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Utilities for testing ''' +""" Utilities for testing """ import re import os diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index b092a2334c..a71ba3339f 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -6,11 +6,11 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Test Analyze headers +""" Test Analyze headers See test_wrapstruct.py for tests of the wrapped structarr-ness of the Analyze header -''' +""" import os import re diff --git a/nibabel/tests/test_batteryrunners.py b/nibabel/tests/test_batteryrunners.py index 69f18b75ac..586f277150 100644 --- a/nibabel/tests/test_batteryrunners.py +++ b/nibabel/tests/test_batteryrunners.py @@ -6,8 +6,8 @@ # copyright and license terms. 
 #
 ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-''' Tests for BatteryRunner and Report objects
-'''
+""" Tests for BatteryRunner and Report objects
+"""

 from io import StringIO
diff --git a/nibabel/tests/test_data.py b/nibabel/tests/test_data.py
index e5d5000438..57d5b36f38 100644
--- a/nibabel/tests/test_data.py
+++ b/nibabel/tests/test_data.py
@@ -1,6 +1,6 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-''' Tests for data module '''
+""" Tests for data module """
 import os
 from os.path import join as pjoin
 from os import environ as env
diff --git a/nibabel/tests/test_endiancodes.py b/nibabel/tests/test_endiancodes.py
index 94c9ea0344..a9af11f052 100644
--- a/nibabel/tests/test_endiancodes.py
+++ b/nibabel/tests/test_endiancodes.py
@@ -6,7 +6,7 @@
 # copyright and license terms.
 #
 ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-''' Tests for endiancodes module '''
+""" Tests for endiancodes module """

 import sys
diff --git a/nibabel/tests/test_euler.py b/nibabel/tests/test_euler.py
index 915e65e552..836444b3ba 100644
--- a/nibabel/tests/test_euler.py
+++ b/nibabel/tests/test_euler.py
@@ -6,7 +6,7 @@
 # copyright and license terms.
 #
 ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-''' Tests for Euler angles '''
+""" Tests for Euler angles """

 import math

 import numpy as np
@@ -20,7 +20,7 @@

 FLOAT_EPS = np.finfo(np.float).eps

-# Example rotations '''
+# Example rotations
 eg_rots = []
 params = np.arange(-pi * 2, pi * 2.5, pi / 2)
 for x in params:
@@ -135,7 +135,7 @@ def sympy_euler2quat(z=0, y=0, x=0):


 def crude_mat2euler(M):
-    ''' The simplest possible - ignoring atan2 instability '''
+    """ The simplest possible - ignoring atan2 instability """
     r11, r12, r13, r21, r22, r23, r31, r32, r33 = M.flat
     return math.atan2(-r12, r11), math.asin(r13), math.atan2(-r23, r33)
diff --git a/nibabel/tests/test_filename_parser.py b/nibabel/tests/test_filename_parser.py
index b0abc6d608..e53d6ebd29 100644
--- a/nibabel/tests/test_filename_parser.py
+++ b/nibabel/tests/test_filename_parser.py
@@ -6,7 +6,7 @@
 # copyright and license terms.
 #
 ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-''' Tests for filename container '''
+""" Tests for filename container """
 from ..filename_parser import (types_filenames, TypesFilenamesError,
                                parse_filename, splitext_addext)
diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py
index db196995e0..f6f7b59d34 100644
--- a/nibabel/tests/test_funcs.py
+++ b/nibabel/tests/test_funcs.py
@@ -6,7 +6,7 @@
 # copyright and license terms.
 #
 ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
-''' Test for image funcs '''
+""" Test for image funcs """

 import numpy as np
diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py
index 429144108c..b06d95c041 100644
--- a/nibabel/tests/test_image_load_save.py
+++ b/nibabel/tests/test_image_load_save.py
@@ -6,7 +6,7 @@
 # copyright and license terms.
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Tests for loader function ''' +""" Tests for loader function """ from io import BytesIO import shutil diff --git a/nibabel/tests/test_image_types.py b/nibabel/tests/test_image_types.py index 632e23224d..a19289037f 100644 --- a/nibabel/tests/test_image_types.py +++ b/nibabel/tests/test_image_types.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Tests for is_image / may_contain_header functions ''' +""" Tests for is_image / may_contain_header functions """ import copy from os.path import dirname, basename, join as pjoin diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 0494b8fe98..901c94ccf5 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Tests for nifti reading package ''' +""" Tests for nifti reading package """ import os import warnings import struct diff --git a/nibabel/tests/test_nifti2.py b/nibabel/tests/test_nifti2.py index ca6e7d8125..83fefbc74b 100644 --- a/nibabel/tests/test_nifti2.py +++ b/nibabel/tests/test_nifti2.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Tests for nifti2 reading package ''' +""" Tests for nifti2 reading package """ import os import numpy as np diff --git a/nibabel/tests/test_openers.py b/nibabel/tests/test_openers.py index 85a8f4a0a7..02744fd866 100644 --- a/nibabel/tests/test_openers.py +++ b/nibabel/tests/test_openers.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Test for openers module ''' +""" Test for openers module """ import os import contextlib from gzip import GzipFile diff --git a/nibabel/tests/test_orientations.py b/nibabel/tests/test_orientations.py index 6272ffdb37..24a22a086b 100644 --- a/nibabel/tests/test_orientations.py +++ b/nibabel/tests/test_orientations.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Testing for orientations module ''' +""" Testing for orientations module """ import numpy as np import warnings diff --git a/nibabel/tests/test_quaternions.py b/nibabel/tests/test_quaternions.py index cb24c7d0ce..ac76023584 100644 --- a/nibabel/tests/test_quaternions.py +++ b/nibabel/tests/test_quaternions.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Test quaternion calculations ''' +""" Test quaternion calculations """ import numpy as np from numpy import pi @@ -18,7 +18,7 @@ from .. import quaternions as nq from .. import eulerangles as nea -# Example rotations ''' +# Example rotations """ eg_rots = [] params = (-pi, pi, pi / 2) zs = np.arange(*params) diff --git a/nibabel/tests/test_recoder.py b/nibabel/tests/test_recoder.py index d6206df978..713e192707 100644 --- a/nibabel/tests/test_recoder.py +++ b/nibabel/tests/test_recoder.py @@ -6,7 +6,7 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Tests recoder class ''' +""" Tests recoder class """ import numpy as np diff --git a/nibabel/tests/test_scaling.py b/nibabel/tests/test_scaling.py index f314e6b572..9ef8dd3bad 100644 --- a/nibabel/tests/test_scaling.py +++ b/nibabel/tests/test_scaling.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Test for scaling / rounding in volumeutils module ''' +""" Test for scaling / rounding in volumeutils module """ import numpy as np diff --git a/nibabel/tests/test_spm2analyze.py b/nibabel/tests/test_spm2analyze.py index a88d3cafd4..582f6b70bd 100644 --- a/nibabel/tests/test_spm2analyze.py +++ b/nibabel/tests/test_spm2analyze.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Tests for SPM2 header stuff ''' +""" Tests for SPM2 header stuff """ import numpy as np diff --git a/nibabel/tests/test_trackvis.py b/nibabel/tests/test_trackvis.py index 562b2a128b..3591878be2 100644 --- a/nibabel/tests/test_trackvis.py +++ b/nibabel/tests/test_trackvis.py @@ -1,4 +1,4 @@ -''' Testing trackvis module ''' +""" Testing trackvis module """ from functools import partial diff --git a/nibabel/tests/test_volumeutils.py b/nibabel/tests/test_volumeutils.py index 823350a3cd..f84878f55c 100644 --- a/nibabel/tests/test_volumeutils.py +++ b/nibabel/tests/test_volumeutils.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Test for volumeutils module ''' +""" Test for volumeutils module """ import os from os.path import exists @@ -221,7 +221,7 @@ def test_array_from_file_mmap(): def buf_chk(in_arr, out_buf, in_buf, offset): - ''' Write contents of in_arr into fileobj, read back, check same ''' + """ Write contents of in_arr into fileobj, read back, check same """ instr = b' ' * offset + in_arr.tobytes(order='F') out_buf.write(instr) out_buf.flush() @@ -1279,7 +1279,7 @@ def run(self): def _calculate_scale(data, out_dtype, allow_intercept): - ''' Calculate scaling and optional intercept for data + """ Calculate scaling and optional intercept for data Copy of the deprecated volumeutils.calculate_scale, to preserve tests @@ -1303,7 +1303,7 @@ def _calculate_scale(data, out_dtype, allow_intercept): mx : None or float minimum of finite value in data, or None if this will not be used to threshold data - ''' + """ # Code here is a compatibility shell around arraywriters refactor in_dtype = data.dtype out_dtype = np.dtype(out_dtype) diff --git a/nibabel/tests/test_wrapstruct.py b/nibabel/tests/test_wrapstruct.py index a75cf0548b..fc63556edc 100644 --- a/nibabel/tests/test_wrapstruct.py +++ b/nibabel/tests/test_wrapstruct.py @@ -6,7 +6,7 @@ # copyright and license terms. 
# ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Test binary header objects +""" Test binary header objects This is a root testing class, used in the Analyze and other tests as a framework for all the tests common to the Analyze types @@ -22,7 +22,7 @@ With deprecation warnings _field_recoders -> field_recoders -''' +""" import logging import numpy as np @@ -107,10 +107,10 @@ def log_chk(hdr, level): class _TestWrapStructBase(BaseTestCase): - ''' Class implements base tests for binary headers + """ Class implements base tests for binary headers It serves as a base class for other binary header tests - ''' + """ header_class = None def get_bad_bb(self): @@ -193,12 +193,12 @@ def test_mappingness(self): def test_endianness_ro(self): # endianness is a read only property - ''' Its use in initialization tested in the init tests. + """ Its use in initialization tested in the init tests. Endianness gives endian interpretation of binary data. It is read only because the only common use case is to set the endianness on initialization (or occasionally byteswapping the data) - but this is done via via the as_byteswapped method - ''' + """ hdr = self.header_class() with pytest.raises(AttributeError): hdr.endianness = '<' @@ -369,11 +369,11 @@ def default_structarr(klass, endianness=None): @classmethod def _get_checks(klass): - ''' Return sequence of check functions for this class ''' + """ Return sequence of check functions for this class """ return (klass._chk_integer, klass._chk_string) - ''' Check functions in format expected by BatteryRunner class ''' + """ Check functions in format expected by BatteryRunner class """ @staticmethod def _chk_integer(hdr, fix=False): rep = Report(HeaderDataError) diff --git a/nibabel/tmpdirs.py b/nibabel/tmpdirs.py index 2636d8acb7..5ae4097c29 100644 --- a/nibabel/tmpdirs.py +++ b/nibabel/tmpdirs.py @@ -6,8 +6,8 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Contexts for *with* statement providing temporary directories -''' +""" Contexts for *with* statement providing temporary directories +""" import os import shutil from tempfile import template, mkdtemp @@ -49,7 +49,7 @@ def __exit__(self, exc, value, tb): class InTemporaryDirectory(TemporaryDirectory): - ''' Create, return, and change directory to a temporary directory + """ Create, return, and change directory to a temporary directory Examples -------- @@ -63,7 +63,7 @@ class InTemporaryDirectory(TemporaryDirectory): False >>> os.getcwd() == my_cwd True - ''' + """ def __enter__(self): self._pwd = os.getcwd() diff --git a/nibabel/trackvis.py b/nibabel/trackvis.py index 09d7602117..3b46336bd8 100644 --- a/nibabel/trackvis.py +++ b/nibabel/trackvis.py @@ -103,7 +103,7 @@ class DataError(Exception): 'nibabel.streamlines.load, instead.', since='2.5.0', until='4.0.0') def read(fileobj, as_generator=False, points_space=None, strict=True): - ''' Read trackvis file from `fileobj`, return `streamlines`, `header` + """ Read trackvis file from `fileobj`, return `streamlines`, `header` Parameters ---------- @@ -152,7 +152,7 @@ def read(fileobj, as_generator=False, points_space=None, strict=True): coordinates, ``x, y, z``, where ``x`` is the floating point voxel coordinate along the first image axis, multiplied by the voxel size for that axis. 
- ''' + """ fileobj = ImageOpener(fileobj) hdr_str = fileobj.read(header_2_dtype.itemsize) # try defaulting to version 2 format @@ -262,7 +262,7 @@ def track_gen(): since='2.5.0', until='4.0.0') def write(fileobj, streamlines, hdr_mapping=None, endianness=None, points_space=None): - ''' Write header and `streamlines` to trackvis file `fileobj` + """ Write header and `streamlines` to trackvis file `fileobj` The parameters from the streamlines override conflicting parameters in the `hdr_mapping` information. In particular, the number of @@ -356,7 +356,7 @@ def write(fileobj, streamlines, hdr_mapping=None, endianness=None, This information comes from some helpful replies on the trackvis forum about `interpreting point coordiantes `_ - ''' + """ stream_iter = iter(streamlines) try: streams0 = next(stream_iter) @@ -511,7 +511,7 @@ def _check_hdr_points_space(hdr, points_space): def _hdr_from_mapping(hdr=None, mapping=None, endianness=native_code): - ''' Fill `hdr` from mapping `mapping`, with given endianness ''' + """ Fill `hdr` from mapping `mapping`, with given endianness """ if hdr is None: # passed a valid mapping as header? Copy and return if isinstance(mapping, np.ndarray): @@ -546,7 +546,7 @@ def _hdr_from_mapping(hdr=None, mapping=None, endianness=native_code): 'nibabel.streamlines.TrkFile.create_empty_header, instead.', since='2.5.0', until='4.0.0') def empty_header(endianness=None, version=2): - ''' Empty trackvis header + """ Empty trackvis header Parameters ---------- @@ -583,7 +583,7 @@ def empty_header(endianness=None, version=2): missing. We make no attempt to fill it with sensible defaults on the basis that, if the information is missing, it is better to be explicit. - ''' + """ if version == 1: dt = header_1_dtype elif version == 2: @@ -603,7 +603,7 @@ def empty_header(endianness=None, version=2): 'nibabel.streamlines.trk.get_affine_trackvis_to_rasmm, instead.', since='2.5.0', until='4.0.0') def aff_from_hdr(trk_hdr, atleast_v2=True): - ''' Return voxel to mm affine from trackvis header + """ Return voxel to mm affine from trackvis header Affine is mapping from voxel space to Nifti (RAS) output coordinate system convention; x: Left -> Right, y: Posterior -> Anterior, z: @@ -636,7 +636,7 @@ def aff_from_hdr(trk_hdr, atleast_v2=True): allow negative voxel sizes (needed for recording axis flips) and sets the origin field to 0. In future, we'll raise an error rather than try and estimate the affine from version 1 fields - ''' + """ if trk_hdr['version'] == 2: aff = trk_hdr['vox_to_ras'] if aff[3, 3] != 0: @@ -683,7 +683,7 @@ def aff_from_hdr(trk_hdr, atleast_v2=True): 'nibabel.streamlines.TrkFile.affine_to_rasmm property, instead.', since='2.5.0', until='4.0.0') def aff_to_hdr(affine, trk_hdr, pos_vox=True, set_order=True): - ''' Set affine `affine` into trackvis header `trk_hdr` + """ Set affine `affine` into trackvis header `trk_hdr` Affine is mapping from voxel space to Nifti RAS) output coordinate system convention; x: Left -> Right, y: Posterior -> Anterior, z: @@ -723,7 +723,7 @@ def aff_to_hdr(affine, trk_hdr, pos_vox=True, set_order=True): reliably. It turns out that negative flips upset trackvis (the application). The application also ignores the origin field, and may not use the 'image_orientation_patient' field. 
- ''' + """ try: version = trk_hdr['version'] except (KeyError, ValueError): # dict or structured array @@ -765,7 +765,7 @@ class TrackvisFileError(Exception): class TrackvisFile(object): - ''' Convenience class to encapsulate trackvis file information + """ Convenience class to encapsulate trackvis file information Parameters ---------- @@ -792,7 +792,7 @@ class TrackvisFile(object): Affine expressing relationship of voxels in an image to mm in RAS mm space. If 'points_space' is not None, you can use this to give the relationship between voxels, rasmm and voxmm space (above). - ''' + """ @deprecate_with_version('TrackvisFile is deprecated; please use ' 'nibabel.streamlines.TrkFile, instead.', diff --git a/nibabel/tripwire.py b/nibabel/tripwire.py index e31cfe7258..abb54268d4 100644 --- a/nibabel/tripwire.py +++ b/nibabel/tripwire.py @@ -47,5 +47,5 @@ def __init__(self, msg): self._msg = msg def __getattr__(self, attr_name): - ''' Raise informative error accessing attributes ''' + """ Raise informative error accessing attributes """ raise TripWireError(self._msg) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index c3a82db418..606e06f52f 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -6,7 +6,7 @@ # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -''' Utility functions for analyze-like formats ''' +""" Utility functions for analyze-like formats """ import sys import warnings @@ -42,7 +42,7 @@ class Recoder(object): - ''' class to return canonical code(s) from code or aliases + """ class to return canonical code(s) from code or aliases The concept is a lot easier to read in the implementation and tests than it is to explain, so... @@ -73,10 +73,10 @@ class Recoder(object): >>> # indexing the object directly >>> recodes[2] 2 - ''' + """ def __init__(self, codes, fields=('code',), map_maker=OrderedDict): - ''' Create recoder object + """ Create recoder object ``codes`` give a sequence of code, alias sequences ``fields`` are names by which the entries in these sequences can be @@ -103,7 +103,7 @@ def __init__(self, codes, fields=('code',), map_maker=OrderedDict): Default is ``dict``. ``map_maker()`` generates an empty mapping. The mapping need only implement ``__getitem__, __setitem__, keys, values``. 
- ''' + """ self.fields = tuple(fields) self.field1 = {} # a placeholder for the check below for name in fields: @@ -115,7 +115,7 @@ def __init__(self, codes, fields=('code',), map_maker=OrderedDict): self.add_codes(codes) def add_codes(self, code_syn_seqs): - ''' Add codes to object + """ Add codes to object Parameters ---------- @@ -139,7 +139,7 @@ def add_codes(self, code_syn_seqs): True >>> print(rc.value_set()) # set is actually ordered OrderedSet([2, 1, 3]) - ''' + """ for code_syns in code_syn_seqs: # Add all the aliases for alias in code_syns: @@ -149,7 +149,7 @@ def add_codes(self, code_syn_seqs): self.__dict__[field_name][alias] = code_syns[field_ind] def __getitem__(self, key): - ''' Return value from field1 dictionary (first column of values) + """ Return value from field1 dictionary (first column of values) Returns same value as ``obj.field1[key]`` and, with the default initializing ``fields`` argument of fields=('code',), @@ -158,7 +158,7 @@ def __getitem__(self, key): >>> codes = ((1, 'one'), (2, 'two')) >>> Recoder(codes)['two'] 2 - ''' + """ return self.field1[key] def __contains__(self, key): @@ -171,7 +171,7 @@ def __contains__(self, key): return True def keys(self): - ''' Return all available code and alias values + """ Return all available code and alias values Returns same value as ``obj.field1.keys()`` and, with the default initializing ``fields`` argument of fields=('code',), @@ -181,11 +181,11 @@ def keys(self): >>> k = Recoder(codes).keys() >>> set(k) == set([1, 2, 'one', 'repeat value', 'two']) True - ''' + """ return self.field1.keys() def value_set(self, name=None): - ''' Return OrderedSet of possible returned values for column + """ Return OrderedSet of possible returned values for column By default, the column is the first column. @@ -206,7 +206,7 @@ def value_set(self, name=None): >>> rc = Recoder(codes, fields=('code', 'label')) >>> rc.value_set('label') == set(('one', 'two', 'repeat value')) True - ''' + """ if name is None: d = self.field1 else: @@ -274,7 +274,7 @@ def __getitem__(self, key): def pretty_mapping(mapping, getterfunc=None): - ''' Make pretty string from mapping + """ Make pretty string from mapping Adjusts text column to print values on basis of longest key. Probably only sensible if keys are mainly strings. @@ -320,7 +320,7 @@ def pretty_mapping(mapping, getterfunc=None): >>> print(pretty_mapping(C(), getter)) short_field : 0 longer_field : method string - ''' + """ if getterfunc is None: getterfunc = lambda obj, key: obj[key] lens = [len(str(name)) for name in mapping] @@ -334,7 +334,7 @@ def pretty_mapping(mapping, getterfunc=None): def make_dt_codes(codes_seqs): - ''' Create full dt codes Recoder instance from datatype codes + """ Create full dt codes Recoder instance from datatype codes Include created numpy dtype (from numpy type) and opposite endian numpy dtype @@ -354,7 +354,7 @@ def make_dt_codes(codes_seqs): of the corresponding code, name, type, dtype, or swapped dtype. You can also index with ``niistring`` values if codes_seqs had sequences of length 4 instead of 3. 
- ''' + """ fields = ['code', 'label', 'type'] len0 = len(codes_seqs[0]) if len0 not in (3, 4): @@ -378,7 +378,7 @@ def make_dt_codes(codes_seqs): '1.2', '3.0') def can_cast(in_type, out_type, has_intercept=False, has_slope=False): - ''' Return True if we can safely cast ``in_type`` to ``out_type`` + """ Return True if we can safely cast ``in_type`` to ``out_type`` Parameters ---------- @@ -424,7 +424,7 @@ def can_cast(in_type, out_type, has_intercept=False, has_slope=False): False >>> can_cast(np.int16, np.uint8, True, True) # doctest: +SKIP True - ''' + """ in_dtype = np.dtype(in_type) # Whether we can cast depends on the data, and we've only got the type. # Let's assume integers use all of their range but floats etc not @@ -448,7 +448,7 @@ def _is_compressed_fobj(fobj): def array_from_file(shape, in_dtype, infile, offset=0, order='F', mmap=True): - ''' Get array from file with specified shape, dtype and file offset + """ Get array from file with specified shape, dtype and file offset Parameters ---------- @@ -489,7 +489,7 @@ def array_from_file(shape, in_dtype, infile, offset=0, order='F', mmap=True): >>> arr2 = array_from_file((1,2,3), arr.dtype, bio, 10) >>> np.all(arr == arr2) True - ''' + """ if mmap not in (True, False, 'c', 'r', 'r+'): raise ValueError("mmap value should be one of True, False, 'c', " "'r', 'r+'") @@ -542,7 +542,7 @@ def array_from_file(shape, in_dtype, infile, offset=0, order='F', mmap=True): def array_to_file(data, fileobj, out_dtype=None, offset=0, intercept=0.0, divslope=1.0, mn=None, mx=None, order='F', nan2zero=True): - ''' Helper function for writing arrays to file objects + """ Helper function for writing arrays to file objects Writes arrays as scaled by `intercept` and `divslope`, and clipped at (prescaling) `mn` minimum, and `mx` maximum. @@ -621,7 +621,7 @@ def array_to_file(data, fileobj, out_dtype=None, offset=0, >>> array_to_file(data, sio, np.float, order='C') >>> sio.getvalue() == data.tobytes('C') True - ''' + """ # Shield special case div_none = divslope is None if not np.all( @@ -1015,7 +1015,7 @@ def working_type(in_type, slope=1.0, inter=0.0): '1.2', '3.0') def calculate_scale(data, out_dtype, allow_intercept): - ''' Calculate scaling and optional intercept for data + """ Calculate scaling and optional intercept for data Parameters ---------- @@ -1037,7 +1037,7 @@ def calculate_scale(data, out_dtype, allow_intercept): mx : None or float minimum of finite value in data, or None if this will not be used to threshold data - ''' + """ # Code here is a compatibility shell around arraywriters refactor in_dtype = data.dtype out_dtype = np.dtype(out_dtype) @@ -1063,7 +1063,7 @@ def calculate_scale(data, out_dtype, allow_intercept): '1.2', '3.0') def scale_min_max(mn, mx, out_type, allow_intercept): - ''' Return scaling and intercept min, max of data, given output type + """ Return scaling and intercept min, max of data, given output type Returns ``scalefactor`` and ``intercept`` to best fit data with given ``mn`` and ``mx`` min and max values into range of data type @@ -1124,7 +1124,7 @@ def scale_min_max(mn, mx, out_type, allow_intercept): The large integers lead to python long types as max / min for type. To contain the rounding error, we need to use the maximum numpy float types when casting to float. 
- ''' + """ if mn > mx: raise ValueError('min value > max value') info = type_info(out_type) @@ -1361,7 +1361,7 @@ def _ftype4scaled_finite(tst_arr, slope, inter, direction='read', def finite_range(arr, check_nan=False): - ''' Get range (min, max) or range and flag (min, max, has_nan) from `arr` + """ Get range (min, max) or range and flag (min, max, has_nan) from `arr` Parameters ---------- @@ -1405,7 +1405,7 @@ def finite_range(arr, check_nan=False): Traceback (most recent call last): ... TypeError: Can only handle numeric types - ''' + """ arr = np.asarray(arr) if arr.size == 0: return (np.inf, -np.inf) + (False,) * check_nan @@ -1457,7 +1457,7 @@ def finite_range(arr, check_nan=False): def shape_zoom_affine(shape, zooms, x_flip=True): - ''' Get affine implied by given shape and zooms + """ Get affine implied by given shape and zooms We get the translations from the center of the image (implied by `shape`). @@ -1492,7 +1492,7 @@ def shape_zoom_affine(shape, zooms, x_flip=True): [ 0., 2., 0., -4.], [ 0., 0., 1., -3.], [ 0., 0., 0., 1.]]) - ''' + """ shape = np.asarray(shape) zooms = np.array(zooms) # copy because of flip below ndims = len(shape) @@ -1519,7 +1519,7 @@ def shape_zoom_affine(shape, zooms, x_flip=True): def rec2dict(rec): - ''' Convert recarray to dictionary + """ Convert recarray to dictionary Also converts scalar values to scalars @@ -1539,7 +1539,7 @@ def rec2dict(rec): >>> d = rec2dict(r) >>> d == {'x': 0, 's': b''} True - ''' + """ dct = {} for key in rec.dtype.fields: val = rec[key] diff --git a/nibabel/wrapstruct.py b/nibabel/wrapstruct.py index d6dbde6290..4eabe2504a 100644 --- a/nibabel/wrapstruct.py +++ b/nibabel/wrapstruct.py @@ -129,7 +129,7 @@ def __init__(self, binaryblock=None, endianness=None, check=True): - ''' Initialize WrapStruct from binary data block + """ Initialize WrapStruct from binary data block Parameters ---------- @@ -153,7 +153,7 @@ def __init__(self, >>> wstr1['integer'] = 1 >>> wstr1['integer'] array(1, dtype=int16) - ''' + """ if binaryblock is None: self._structarr = self.__class__.default_structarr(endianness) return @@ -176,7 +176,7 @@ def __init__(self, @classmethod def from_fileobj(klass, fileobj, endianness=None, check=True): - ''' Return read structure with given or guessed endiancode + """ Return read structure with given or guessed endiancode Parameters ---------- @@ -189,13 +189,13 @@ def from_fileobj(klass, fileobj, endianness=None, check=True): ------- wstr : WrapStruct object WrapStruct object initialized from data in fileobj - ''' + """ raw_str = fileobj.read(klass.template_dtype.itemsize) return klass(raw_str, endianness, check) @property def binaryblock(self): - ''' binary block of data as string + """ binary block of data as string Returns ------- @@ -208,11 +208,11 @@ def binaryblock(self): >>> wstr = WrapStruct() >>> len(wstr.binaryblock) 2 - ''' + """ return self._structarr.tobytes() def write_to(self, fileobj): - ''' Write structure to fileobj + """ Write structure to fileobj Write starts at fileobj current file position. @@ -233,12 +233,12 @@ def write_to(self, fileobj): >>> wstr.write_to(str_io) >>> wstr.binaryblock == str_io.getvalue() True - ''' + """ fileobj.write(self.binaryblock) @property def endianness(self): - ''' endian code of binary data + """ endian code of binary data The endianness code gives the current byte order interpretation of the binary data. 
@@ -256,13 +256,13 @@ def endianness(self): read only because the only common use case is to set the endianness on initialization, or occasionally byteswapping the data - but this is done via the as_byteswapped method - ''' + """ if self._structarr.dtype.isnative: return native_code return swapped_code def copy(self): - ''' Return copy of structure + """ Return copy of structure >>> wstr = WrapStruct() >>> wstr['integer'] = 3 @@ -271,11 +271,11 @@ def copy(self): False >>> wstr2['integer'] array(3, dtype=int16) - ''' + """ return self.__class__(self.binaryblock, self.endianness, check=False) def __eq__(self, other): - ''' equality between two structures defined by binaryblock + """ equality between two structures defined by binaryblock Examples -------- @@ -286,7 +286,7 @@ def __eq__(self, other): >>> wstr3 = WrapStruct(endianness=swapped_code) >>> wstr == wstr3 True - ''' + """ this_end = self.endianness this_bb = self.binaryblock try: @@ -303,18 +303,18 @@ def __ne__(self, other): return not self == other def __getitem__(self, item): - ''' Return values from structure data + """ Return values from structure data Examples -------- >>> wstr = WrapStruct() >>> wstr['integer'] == 0 True - ''' + """ return self._structarr[item] def __setitem__(self, item, value): - ''' Set values in structured data + """ Set values in structured data Examples -------- @@ -322,31 +322,31 @@ def __setitem__(self, item, value): >>> wstr['integer'] = 3 >>> wstr['integer'] array(3, dtype=int16) - ''' + """ self._structarr[item] = value def __iter__(self): return iter(self.keys()) def keys(self): - ''' Return keys from structured data''' + """ Return keys from structured data""" return list(self.template_dtype.names) def values(self): - ''' Return values from structured data''' + """ Return values from structured data""" data = self._structarr return [data[key] for key in self.template_dtype.names] def items(self): - ''' Return items from structured data''' + """ Return items from structured data""" return zip(self.keys(), self.values()) def get(self, k, d=None): - ''' Return value for the key k if present or d otherwise''' + """ Return value for the key k if present or d otherwise""" return self._structarr[k] if k in self.keys() else d def check_fix(self, logger=None, error_level=None): - ''' Check structured data with checks + """ Check structured data with checks Parameters ---------- @@ -354,7 +354,7 @@ def check_fix(self, logger=None, error_level=None): error_level : None or int Level of error severity at which to raise error. Any error of severity >= `error_level` will cause an exception. 
- ''' + """ if logger is None: logger = imageglobals.logger if error_level is None: @@ -366,7 +366,7 @@ def check_fix(self, logger=None, error_level=None): @classmethod def diagnose_binaryblock(klass, binaryblock, endianness=None): - ''' Run checks over binary data, return string ''' + """ Run checks over binary data, return string """ wstr = klass(binaryblock, endianness=endianness, check=False) battrun = BatteryRunner(klass._get_checks()) reports = battrun.check_only(wstr) @@ -375,7 +375,7 @@ def diagnose_binaryblock(klass, binaryblock, endianness=None): @classmethod def guessed_endian(self, mapping): - ''' Guess intended endianness from mapping-like ``mapping`` + """ Guess intended endianness from mapping-like ``mapping`` Parameters ---------- @@ -387,13 +387,13 @@ def guessed_endian(self, mapping): ------- endianness : {'<', '>'} Guessed endianness of binary data in ``wstr`` - ''' + """ raise NotImplementedError @classmethod def default_structarr(klass, endianness=None): - ''' Return structured array for default structure with given endianness - ''' + """ Return structured array for default structure with given endianness + """ dt = klass.template_dtype if endianness is not None: endianness = endian_codes[endianness] @@ -402,7 +402,7 @@ def default_structarr(klass, endianness=None): @property def structarr(self): - ''' Structured data, with data fields + """ Structured data, with data fields Examples -------- @@ -412,17 +412,17 @@ def structarr(self): Traceback (most recent call last): ... AttributeError: can't set attribute - ''' + """ return self._structarr def __str__(self): - ''' Return string representation for printing ''' + """ Return string representation for printing """ summary = "%s object, endian='%s'" % (self.__class__, self.endianness) return '\n'.join([summary, pretty_mapping(self)]) def as_byteswapped(self, endianness=None): - ''' return new byteswapped object with given ``endianness`` + """ return new byteswapped object with given ``endianness`` Guaranteed to make a copy even if endianness is the same as the current endianness. @@ -468,7 +468,7 @@ def as_byteswapped(self, endianness=None): True >>> nbs_wstr is wstr False - ''' + """ current = self.endianness if endianness is None: if current == native_code: @@ -484,7 +484,7 @@ def as_byteswapped(self, endianness=None): @classmethod def _get_checks(klass): - ''' Return sequence of check functions for this class ''' + """ Return sequence of check functions for this class """ return () @@ -494,7 +494,7 @@ class LabeledWrapStruct(WrapStruct): _field_recoders = {} # for recoding values for str def get_value_label(self, fieldname): - ''' Returns label for coded field + """ Returns label for coded field A coded field is an int field containing codes that stand for discrete values that also have string labels. 
@@ -527,7 +527,7 @@ def get_value_label(self, fieldname):
         >>> hdr['datatype'] = 2
         >>> hdr.get_value_label('datatype')
         'two'
-        '''
+        """
         if fieldname not in self._field_recoders:
             raise ValueError('%s not a coded field' % fieldname)
         code = int(self._structarr[fieldname])
@@ -537,7 +537,7 @@ def get_value_label(self, fieldname):
             return '<unknown code {0}>'.format(code)

     def __str__(self):
-        ''' Return string representation for printing '''
+        """ Return string representation for printing """
         summary = "%s object, endian='%s'" % (self.__class__, self.endianness)

         def _getter(obj, key):
diff --git a/nisext/sexts.py b/nisext/sexts.py
index c8090f5eac..9ca3519f45 100644
--- a/nisext/sexts.py
+++ b/nisext/sexts.py
@@ -1,4 +1,4 @@
-''' Distutils / setuptools helpers '''
+""" Distutils / setuptools helpers """

 import os
 from os.path import join as pjoin, split as psplit, splitext
@@ -48,7 +48,7 @@ def get_comrec_build(pkg_dir, build_cmd=build_py):
     package for an example.
     """
     class MyBuildPy(build_cmd):
-        ''' Subclass to write commit data into installation tree '''
+        """ Subclass to write commit data into installation tree """
         def run(self):
             build_cmd.run(self)
             import subprocess
@@ -88,7 +88,7 @@ def package_check(pkg_name, version=None,
                   messages=None,
                   setuptools_args=None
                   ):
-    ''' Check if package `pkg_name` is present and has good enough version
+    """ Check if package `pkg_name` is present and has good enough version

     Has two modes of operation.  If `setuptools_args` is None (the default),
     raise an error for missing non-optional dependencies and log warnings for
@@ -130,7 +130,7 @@ def package_check(pkg_name, version=None,
        If None, raise errors / warnings for missing non-optional / optional
        dependencies.  If dict fill key values ``install_requires`` and
        ``extras_require`` for non-optional and optional dependencies.
-    '''
+    """
     setuptools_mode = not setuptools_args is None
     optional_tf = bool(optional)
     if version_getter is None:
diff --git a/nisext/testers.py b/nisext/testers.py
index e0ca4a040a..f324d272b4 100644
--- a/nisext/testers.py
+++ b/nisext/testers.py
@@ -1,4 +1,4 @@
-''' Test package information in various install settings
+""" Test package information in various install settings

 The routines here install the package from source directories, zips or eggs, and
 check these installations by running tests, checking version information,
@@ -27,7 +27,7 @@
     bdist-egg-tests:
         $(PYTHON) -c 'from nisext.testers import bdist_egg_tests; bdist_egg_tests("nibabel")'

-'''
+"""

 import os
@@ -94,7 +94,7 @@ def back_tick(cmd, ret_err=False, as_str=True):


 def run_mod_cmd(mod_name, pkg_path, cmd, script_dir=None, print_location=True):
-    ''' Run command in own process in anonymous path
+    """ Run command in own process in anonymous path

     Parameters
     ----------
@@ -116,7 +116,7 @@ def run_mod_cmd(mod_name, pkg_path, cmd, script_dir=None, print_location=True):
        stdout as str
     stderr : str
        stderr as str
-    '''
+    """
     if script_dir is None:
         paths_add = ''
     else:
@@ -162,11 +162,11 @@ def run_mod_cmd(mod_name, pkg_path, cmd, script_dir=None, print_location=True):


 def zip_extract_all(fname, path=None):
-    ''' Extract all members from zipfile
+    """ Extract all members from zipfile

     Deals with situation where the directory is stored in the zipfile as a
     name, as well as files that have to go into this directory.
- ''' + """ zf = zipfile.ZipFile(fname) members = zf.namelist() # Remove members that are just bare directories @@ -241,7 +241,7 @@ def install_from_zip(zip_fname, install_path, pkg_finder=None, def contexts_print_info(mod_name, repo_path, install_path): - ''' Print result of get_info from different installation routes + """ Print result of get_info from different installation routes Runs installation from: @@ -260,7 +260,7 @@ def contexts_print_info(mod_name, repo_path, install_path): path to location of git repository install_path : str path into which to install temporary installations - ''' + """ site_pkgs_path = os.path.join(install_path, PY_LIB_SDIR) # first test archive pwd = os.path.abspath(os.getcwd()) @@ -282,7 +282,7 @@ def contexts_print_info(mod_name, repo_path, install_path): def info_from_here(mod_name): - ''' Run info context checks starting in working directory + """ Run info context checks starting in working directory Runs checks from current working directory, installing temporary installations into a new temporary directory @@ -291,7 +291,7 @@ def info_from_here(mod_name): ---------- mod_name : str package name that will be installed, and tested - ''' + """ repo_path = os.path.abspath(os.getcwd()) install_path = tempfile.mkdtemp() try: diff --git a/tools/bisect_nose.py b/tools/bisect_nose.py index 8a1b2cc2fe..3f9092564b 100755 --- a/tools/bisect_nose.py +++ b/tools/bisect_nose.py @@ -66,7 +66,7 @@ def main(): parser = ArgumentParser(description=DESCRIP, epilog=EPILOG, formatter_class=RawDescriptionHelpFormatter) - parser.add_argument('test_path', type=str, + parser.add_argument('test_path', type=str, help='Path to test') parser.add_argument('--error-txt', type=str, help='regular expression for error of interest') diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py index 038fe936fe..86b0a74221 100755 --- a/tools/gitwash_dumper.py +++ b/tools/gitwash_dumper.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -''' Checkout gitwash repo into directory and do search replace on name ''' +""" Checkout gitwash repo into directory and do search replace on name """ import os from os.path import join as pjoin @@ -51,9 +51,9 @@ def cp_files(in_path, globs, out_path): def filename_search_replace(sr_pairs, filename, backup=False): - ''' Search and replace for expressions in files + """ Search and replace for expressions in files - ''' + """ in_txt = open(filename, 'rt').read(-1) out_txt = in_txt[:] for in_exp, out_exp in sr_pairs: @@ -152,13 +152,13 @@ def make_link_targets(proj_name, out_links.close() -USAGE = ''' +USAGE = """ If not set with options, the repository name is the same as the If not set with options, the main github user is the same as the -repository name.''' +repository name.""" GITWASH_CENTRAL = 'git://github.com/matthew-brett/gitwash.git' diff --git a/tools/profile b/tools/profile index c4ae6d6a35..b17ac454cb 100755 --- a/tools/profile +++ b/tools/profile @@ -11,17 +11,15 @@ __docformat__ = 'restructuredtext' -import sys, os - -from optparse import OptionParser -from os import environ, path +import os +import sys +from os import path if __name__ == "__main__": usage = """Usage: %s [options] ... 
""" % sys.argv[0] - # default options convert2kcache = True displaykcachegrinder = True @@ -35,8 +33,8 @@ if __name__ == "__main__": removed = sys.argv.pop(0) if not len(sys.argv): - print usage - sys.exit(1) + print(usage) + sys.exit(1) while sys.argv[0].startswith('-'): if sys.argv[0] in ["-l", "--level"]: @@ -60,7 +58,7 @@ if __name__ == "__main__": convert2kcache = False displaykcachegrinder = False else: - print usage + print(usage) sys.exit(1) sys.argv.pop(0) @@ -80,11 +78,10 @@ if __name__ == "__main__": pfilename = cmdname + ".prof" if run: - exec "import %s as runnable" % root + exec(f"import {root} as runnable") if not 'main' in runnable.__dict__: - print "OOPS: file/module %s has no function main defined" \ - % cmdname + print(f"OOPS: file/module {cmdname} has no function main defined") sys.exit(1) prof = hotshot.Profile(pfilename, lineevents=profilelines) @@ -96,13 +93,12 @@ if __name__ == "__main__": except SystemExit: pass - print "Saving profile data into %s" % pfilename + print(f"Saving profile data into {pfilename}") prof.close() - if printstats or pstatsfilename: import hotshot.stats - print "Loading profile file to print statistics" + print("Loading profile file to print statistics") stats = hotshot.stats.load(pfilename) if printstats: stats.strip_dirs() @@ -115,13 +111,13 @@ if __name__ == "__main__": if convert2kcache: cmd = "hotshot2calltree -o %s %s" % (kfilename, pfilename) if os.system(cmd): - print "!!! Make sure to install kcachegrind-converters ;-)" + print("!!! Make sure to install kcachegrind-converters ;-)") sys.exit(1) if displaykcachegrinder: if os.system('kcachegrind %s' % kfilename): - print "!!! Make sure to install kcachegrind ;-)" + print("!!! Make sure to install kcachegrind ;-)") sys.exit(1) else: - print "Go away -- nothing to look here for as a module" + print("Go away -- nothing to look here for as a module") From 29ad0251c135b8ec50fbe6bda00903ad35118b96 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 27 May 2020 10:35:09 -0400 Subject: [PATCH 2/2] Update nibabel/tests/test_quaternions.py --- nibabel/tests/test_quaternions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/tests/test_quaternions.py b/nibabel/tests/test_quaternions.py index ac76023584..de2c78dd77 100644 --- a/nibabel/tests/test_quaternions.py +++ b/nibabel/tests/test_quaternions.py @@ -18,7 +18,7 @@ from .. import quaternions as nq from .. import eulerangles as nea -# Example rotations """ +# Example rotations eg_rots = [] params = (-pi, pi, pi / 2) zs = np.arange(*params)