diff --git a/.zenodo.json b/.zenodo.json
index db64d56b49..881af6d4c4 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -330,6 +330,11 @@
       "name": "Liem, Franz",
       "orcid": "0000-0003-0646-4810"
     },
+    {
+      "affiliation": "Stanford University",
+      "name": "Ciric, Rastko",
+      "orcid": "0000-0001-6347-7939"
+    },
     {
       "affiliation": "The Centre for Addiction and Mental Health",
       "name": "Joseph, Michael",
diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py
index 4274e2f9fc..a987c98e55 100644
--- a/nipype/algorithms/confounds.py
+++ b/nipype/algorithms/confounds.py
@@ -10,6 +10,8 @@
 import os
 import os.path as op
+from collections import OrderedDict
+from itertools import chain
 
 import nibabel as nb
 import numpy as np
@@ -19,7 +21,8 @@
 from ..external.due import BibTeX
 from ..interfaces.base import (traits, TraitedSpec, BaseInterface,
                                BaseInterfaceInputSpec, File, isdefined,
-                               InputMultiPath, OutputMultiPath)
+                               InputMultiPath, OutputMultiPath,
+                               SimpleInterface)
 from ..utils import NUMPY_MMAP
 from ..utils.misc import normalize_mc_params
@@ -386,11 +389,32 @@ class CompCorInputSpec(BaseInterfaceInputSpec):
         requires=['mask_files'],
         desc=('Position of mask in `mask_files` to use - '
               'first is the default.'))
+    mask_names = traits.List(
+        traits.Str,
+        desc='Names for provided masks (for printing into metadata). '
+             'If provided, it must be as long as the final mask list '
+             '(after any merge and indexing operations).')
     components_file = traits.Str(
         'components_file.txt',
         usedefault=True,
         desc='Filename to store physiological components')
-    num_components = traits.Int(6, usedefault=True)  # 6 for BOLD, 4 for ASL
+    num_components = traits.Either(
+        'all', traits.Range(low=1), xor=['variance_threshold'],
+        desc='Number of components to return from the decomposition. If '
+             '`num_components` is `all`, then all components will be '
+             'retained.')
+    # 6 for BOLD, 4 for ASL
+    # automatically instantiated to 6 in CompCor below if neither
+    # `num_components` nor `variance_threshold` is defined (for
+    # backward compatibility)
+    variance_threshold = traits.Range(
+        low=0.0, high=1.0, exclude_low=True, exclude_high=True,
+        xor=['num_components'],
+        desc='Select the number of components to be returned automatically '
+             'based on their ability to explain variance in the dataset. '
+             '`variance_threshold` is a fractional value between 0 and 1; '
+             'the number of components retained will be equal to the minimum '
+             'number of components necessary to explain the provided '
+             'fraction of variance in the masked time series.')
     pre_filter = traits.Enum(
         'polynomial',
         'cosine',
@@ -417,7 +441,11 @@ class CompCorInputSpec(BaseInterfaceInputSpec):
         desc='Repetition time (TR) of series - derived from image header if '
              'unspecified')
     save_pre_filter = traits.Either(
-        traits.Bool, File, desc='Save pre-filter basis as text file')
+        traits.Bool, File, default=False, usedefault=True,
+        desc='Save pre-filter basis as text file')
+    save_metadata = traits.Either(
+        traits.Bool, File, default=False, usedefault=True,
+        desc='Save component metadata as text file')
     ignore_initial_volumes = traits.Range(
         low=0,
         usedefault=True,
@@ -433,9 +461,10 @@ class CompCorOutputSpec(TraitedSpec):
     components_file = File(
         exists=True, desc='text file containing the noise components')
     pre_filter_file = File(desc='text file containing high-pass filter basis')
+    metadata_file = File(desc='text file containing component metadata')
 
 
-class CompCor(BaseInterface):
+class CompCor(SimpleInterface):
     """
     Interface with core CompCor computation, used in aCompCor and tCompCor
 
@@ -473,20 +502,20 @@ class CompCor(BaseInterface):
     input_spec = CompCorInputSpec
     output_spec = CompCorOutputSpec
     references_ = [{
+        'tags': ['method', 'implementation'],
         'entry':
-        BibTeX(
-            "@article{compcor_2007,"
-            "title = {A component based noise correction method (CompCor) for BOLD and perfusion based},"
-            "volume = {37},"
-            "number = {1},"
-            "doi = {10.1016/j.neuroimage.2007.04.042},"
-            "urldate = {2016-08-13},"
-            "journal = {NeuroImage},"
-            "author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.},"
-            "year = {2007},"
-            "pages = {90-101},}"),
-        'tags': ['method', 'implementation']
-    }]
+        BibTeX("""\
+@article{compcor_2007,
+    title = {A component based noise correction method (CompCor) for BOLD and perfusion based},
+    volume = {37},
+    number = {1},
+    doi = {10.1016/j.neuroimage.2007.04.042},
+    urldate = {2016-08-13},
+    journal = {NeuroImage},
+    author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.},
+    year = {2007},
+    pages = {90-101}
+}""")}]
 
     def __init__(self, *args, **kwargs):
         ''' exactly the same as compcor except the header '''
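A minimal usage sketch of the revised input spec, for orientation while reviewing. The file names are hypothetical placeholders; every keyword argument below is a trait defined in the hunks above.

    from nipype.algorithms.confounds import ACompCor

    # Keep as many components as are needed to explain 50% of the variance
    # in the masked series, and write per-component metadata as well.
    acc = ACompCor(
        realigned_file='func_realigned.nii.gz',  # hypothetical input file
        mask_files=['csf_mask.nii.gz'],          # hypothetical mask
        mask_names=['CSF'],
        variance_threshold=0.5,  # mutually exclusive with num_components
        save_metadata=True)
    result = acc.run()
    # components_file and metadata_file are now populated
    print(result.outputs.metadata_file)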
@@ -548,9 +577,22 @@ def _run_interface(self, runtime):
                 '{} cannot detect repetition time from image - '
                 'Set the repetition_time input'.format(self._header))
 
-        components, filter_basis = compute_noise_components(
-            imgseries.get_data(), mask_images, self.inputs.num_components,
-            self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR)
+        if isdefined(self.inputs.variance_threshold):
+            components_criterion = self.inputs.variance_threshold
+        elif isdefined(self.inputs.num_components):
+            components_criterion = self.inputs.num_components
+        else:
+            components_criterion = 6
+            IFLOGGER.warning('`num_components` and `variance_threshold` are '
+                             'not defined. Setting number of components to 6 '
+                             'for backward compatibility. Please set either '
+                             '`num_components` or `variance_threshold`, as '
+                             'this feature may be deprecated in the future.')
+
+        components, filter_basis, metadata = compute_noise_components(
+            imgseries.get_data(), mask_images, components_criterion,
+            self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR,
+            self.inputs.failure_mode, self.inputs.mask_names)
 
         if skip_vols:
             old_comp = components
@@ -561,16 +603,27 @@ def _run_interface(self, runtime):
 
         components_file = os.path.join(os.getcwd(),
                                        self.inputs.components_file)
+        components_header = self._make_headers(components.shape[1])
         np.savetxt(
             components_file,
             components,
             fmt=b"%.10f",
             delimiter='\t',
-            header=self._make_headers(components.shape[1]),
+            header='\t'.join(components_header),
             comments='')
+        self._results['components_file'] = os.path.join(
+            runtime.cwd, self.inputs.components_file)
+
+        save_pre_filter = False
+        if self.inputs.pre_filter in ['polynomial', 'cosine']:
+            save_pre_filter = self.inputs.save_pre_filter
+
+        if save_pre_filter:
+            self._results['pre_filter_file'] = save_pre_filter
+            if save_pre_filter is True:
+                self._results['pre_filter_file'] = os.path.join(
+                    runtime.cwd, 'pre_filter.tsv')
 
-        if self.inputs.pre_filter and self.inputs.save_pre_filter:
-            pre_filter_file = self._list_outputs()['pre_filter_file']
             ftype = {
                 'polynomial': 'Legendre',
                 'cosine': 'Cosine'
@@ -590,36 +643,42 @@ def _run_interface(self, runtime):
                 for i in range(skip_vols)
             ])
             np.savetxt(
-                pre_filter_file,
+                self._results['pre_filter_file'],
                 filter_basis,
                 fmt=b'%.10f',
                 delimiter='\t',
                 header='\t'.join(header),
                 comments='')
 
+        metadata_file = self.inputs.save_metadata
+        if metadata_file:
+            self._results['metadata_file'] = metadata_file
+            if metadata_file is True:
+                self._results['metadata_file'] = (
+                    os.path.join(runtime.cwd, 'component_metadata.tsv'))
+            components_names = np.empty(len(metadata['mask']),
+                                        dtype='object_')
+            retained = np.where(metadata['retained'])
+            not_retained = np.where(np.logical_not(metadata['retained']))
+            components_names[retained] = components_header
+            components_names[not_retained] = ([
+                'dropped{}'.format(i) for i in range(len(not_retained[0]))])
+            with open(self._results['metadata_file'], 'w') as f:
+                f.write('\t'.join(['component'] + list(metadata.keys())) + '\n')
+                for i in zip(components_names, *metadata.values()):
+                    f.write('{0[0]}\t{0[1]}\t{0[2]:.10f}\t'
+                            '{0[3]:.10f}\t{0[4]:.10f}\t{0[5]}\n'.format(i))
+
         return runtime
 
     def _process_masks(self, mask_images, timeseries=None):
         return mask_images
 
-    def _list_outputs(self):
-        outputs = self._outputs().get()
-        outputs['components_file'] = os.path.abspath(
-            self.inputs.components_file)
-
-        save_pre_filter = self.inputs.save_pre_filter
-        if save_pre_filter:
-            if isinstance(save_pre_filter, bool):
-                save_pre_filter = os.path.abspath('pre_filter.tsv')
-            outputs['pre_filter_file'] = save_pre_filter
-
-        return outputs
-
     def _make_headers(self, num_col):
         header = self.inputs.header_prefix if \
             isdefined(self.inputs.header_prefix) else self._header
         headers = ['{}{:02d}'.format(header, i) for i in range(num_col)]
-        return '\t'.join(headers)
+        return headers
 
 
 class ACompCor(CompCor):
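The metadata writer above emits a tab-separated table whose header row is `component` followed by the metadata keys assembled in `compute_noise_components` further down. A sketch of reading it back, assuming a completed run that used the default `component_metadata.tsv` name:

    import csv

    with open('component_metadata.tsv') as tsv:  # default name assumed above
        rows = list(csv.DictReader(tsv, delimiter='\t'))

    # Columns: component, mask, singular_value, variance_explained,
    # cumulative_variance_explained, retained
    kept = [row['component'] for row in rows if row['retained'] == 'True']
    print(kept)  # e.g. ['CompCor00', 'CompCor01']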
@@ -1024,9 +1083,12 @@ def is_outlier(points, thresh=3.5):
     return timepoints_to_discard
 
 
-def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1):
+def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1,
+                  failure_mode='error'):
     datashape = data.shape
     timepoints = datashape[axis]
+    if datashape[0] == 0 and failure_mode != 'error':
+        return data, np.array([])
 
     data = data.reshape((-1, timepoints))
@@ -1045,7 +1107,8 @@ def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1):
     return residuals.reshape(datashape), non_constant_regressors
 
 
-def regress_poly(degree, data, remove_mean=True, axis=-1):
+def regress_poly(degree, data, remove_mean=True, axis=-1,
+                 failure_mode='error'):
     """
     Returns data with degree polynomial regressed out.
 
@@ -1058,6 +1121,8 @@
     datashape = data.shape
     timepoints = datashape[axis]
 
+    if datashape[0] == 0 and failure_mode != 'error':
+        return data, np.array([])
 
     # Rearrange all voxel-wise time-series in rows
     data = data.reshape((-1, timepoints))
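For orientation, `regress_poly` removes a Legendre polynomial fit from every voxel time series before the decomposition. A self-contained sketch of that idea (illustrative only; nipype's own implementation additionally handles mean removal and the failure-mode guard introduced above):

    import numpy as np
    from numpy.polynomial import legendre

    def detrend_legendre(data, degree):
        """Regress a Legendre basis of order `degree` out of each row."""
        timepoints = data.shape[-1]
        x = np.linspace(-1, 1, timepoints)
        # One regressor per polynomial order, including the constant term
        basis = np.vstack([legendre.Legendre.basis(d)(x)
                           for d in range(degree + 1)]).T
        betas, _, _, _ = np.linalg.lstsq(basis, data.T, rcond=None)
        return data - (basis @ betas).T

    residuals = detrend_legendre(np.random.rand(10, 50), degree=2)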
@@ -1140,35 +1205,78 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None):
     return [img]
 
 
-def compute_noise_components(imgseries, mask_images, num_components,
-                             filter_type, degree, period_cut, repetition_time):
+def compute_noise_components(imgseries, mask_images, components_criterion=0.5,
+                             filter_type=False, degree=0, period_cut=128,
+                             repetition_time=None, failure_mode='error',
+                             mask_names=None):
     """Compute the noise components from the imgseries for each mask
 
-    imgseries: a nibabel img
-    mask_images: a list of nibabel images
-    num_components: number of noise components to return
-    filter_type: type off filter to apply to time series before computing
-                 noise components.
+    Parameters
+    ----------
+    imgseries: nibabel image
+        Time series data to be decomposed.
+    mask_images: list
+        List of nibabel images. Time series data from `img_series` is subset
+        according to the spatial extent of each mask, and the subset data is
+        then decomposed using principal component analysis. Masks should be
+        coextensive with either anatomical or spatial noise ROIs.
+    components_criterion: float
+        Number of noise components to return. If this is a decimal value
+        between 0 and 1, then `compute_noise_components` will instead return
+        the smallest number of components necessary to explain the indicated
+        fraction of variance. If `components_criterion` is `all`, then all
+        components will be returned.
+    filter_type: str
+        Type of filter to apply to time series before computing
+        noise components.
         'polynomial' - Legendre polynomial basis
         'cosine' - Discrete cosine (DCT) basis
         False - None (mean-removal only)
+    failure_mode: str
+        Action to be taken in the event that any decomposition fails to
+        identify any components. `error` indicates that the routine should
+        raise an exception and exit, while any other value indicates that the
+        routine should return a matrix of NaN values equal in size to the
+        requested decomposition matrix.
+    mask_names: list or None
+        List of names for each image in `mask_images`. This should be equal in
+        length to `mask_images`, with the ith element of `mask_names` naming
+        the ith element of `mask_images`.
 
     Filter options:
 
-    degree: order of polynomial used to remove trends from the timeseries
-    period_cut: minimum period (in sec) for DCT high-pass filter
-    repetition_time: time (in sec) between volume acquisitions
-
-    returns:
-
-    components: a numpy array
-    basis: a numpy array containing the (non-constant) filter regressors
+    degree: int
+        Order of polynomial used to remove trends from the timeseries
+    period_cut: float
+        Minimum period (in sec) for DCT high-pass filter
+    repetition_time: float
+        Time (in sec) between volume acquisitions. This must be defined if
+        the `filter_type` is `cosine`.
 
+    Returns
+    -------
+    components: numpy array
+        Numpy array containing the requested set of noise components
+    basis: numpy array
+        Numpy array containing the (non-constant) filter regressors
+    metadata: OrderedDict{str: numpy array}
+        Dictionary of eigenvalues, fractional explained variances, and
+        cumulative explained variances.
     """
-    components = None
     basis = np.array([])
 
+    if components_criterion == 'all':
+        components_criterion = -1
+    mask_names = mask_names or range(len(mask_images))
+
+    components = []
+    md_mask = []
+    md_sv = []
+    md_var = []
+    md_cumvar = []
+    md_retained = []
+
-    for img in mask_images:
-        mask = img.get_data().astype(np.bool)
+    for name, img in zip(mask_names, mask_images):
+        mask = nb.squeeze_image(img).get_data().astype(np.bool)
         if imgseries.shape[:3] != mask.shape:
             raise ValueError(
                 'Inputs for CompCor, timeseries and mask, do not have '
@@ -1183,13 +1291,18 @@ def compute_noise_components(imgseries, mask_images, num_components,
         # Currently support Legendre-polynomial or cosine or detrending
         # With no filter, the mean is nonetheless removed (poly w/ degree 0)
         if filter_type == 'cosine':
+            if repetition_time is None:
+                raise ValueError(
+                    'Repetition time must be provided for cosine filter')
             voxel_timecourses, basis = cosine_filter(
-                voxel_timecourses, repetition_time, period_cut)
+                voxel_timecourses, repetition_time, period_cut,
+                failure_mode=failure_mode)
         elif filter_type in ('polynomial', False):
             # from paper:
             # "The constant and linear trends of the columns in the matrix M were
             # removed [prior to ...]"
-            voxel_timecourses, basis = regress_poly(degree, voxel_timecourses)
+            voxel_timecourses, basis = regress_poly(degree, voxel_timecourses,
+                                                    failure_mode=failure_mode)
 
         # "Voxel time series from the noise ROI (either anatomical or tSTD) were
         # placed in a matrix M of size Nxm, with time along the row dimension
@@ -1202,20 +1315,55 @@ def compute_noise_components(imgseries, mask_images, num_components,
         # "The covariance matrix C = MMT was constructed and decomposed into its
         # principal components using a singular value decomposition."
         try:
-            u, _, _ = fallback_svd(M, full_matrices=False)
-        except np.linalg.LinAlgError:
-            if self.inputs.failure_mode == 'error':
+            u, s, _ = fallback_svd(M, full_matrices=False)
+        except (np.linalg.LinAlgError, ValueError):
+            if failure_mode == 'error':
                 raise
-            u = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan
-        if components is None:
-            components = u[:, :num_components]
-        else:
-            components = np.hstack((components, u[:, :num_components]))
-    if components is None and num_components > 0:
-        if self.inputs.failure_mode == 'error':
+            s = np.full(M.shape[0], np.nan, dtype=np.float32)
+            if components_criterion >= 1:
+                u = np.full((M.shape[0], components_criterion),
+                            np.nan, dtype=np.float32)
+            else:
+                u = np.full((M.shape[0], 1), np.nan, dtype=np.float32)
+
+        variance_explained = (s ** 2) / np.sum(s ** 2)
+        cumulative_variance_explained = np.cumsum(variance_explained)
+
+        num_components = int(components_criterion)
+        if 0 < components_criterion < 1:
+            num_components = np.searchsorted(cumulative_variance_explained,
+                                             components_criterion) + 1
+        elif components_criterion == -1:
+            num_components = len(s)
+
+        num_components = int(num_components)
+        if num_components == 0:
+            break
+
+        components.append(u[:, :num_components])
+        md_mask.append([name] * len(s))
+        md_sv.append(s)
+        md_var.append(variance_explained)
+        md_cumvar.append(cumulative_variance_explained)
+        md_retained.append([i < num_components for i in range(len(s))])
+
+    if len(components) > 0:
+        components = np.hstack(components)
+    else:
+        if failure_mode == 'error':
             raise ValueError('No components found')
-        components = np.ones((M.shape[0], num_components), dtype=np.float32) * np.nan
-    return components, basis
+        components = np.full((M.shape[0], num_components),
+                             np.nan, dtype=np.float32)
+
+    metadata = OrderedDict([
+        ('mask', list(chain(*md_mask))),
+        ('singular_value', np.hstack(md_sv)),
+        ('variance_explained', np.hstack(md_var)),
+        ('cumulative_variance_explained', np.hstack(md_cumvar)),
+        ('retained', list(chain(*md_retained)))
+    ])
+
+    return components, basis, metadata
 
 
 def _compute_tSTD(M, x, axis=0):
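To make the fractional criterion concrete: the retained count is the smallest k whose cumulative explained variance reaches the threshold, which is exactly the `np.searchsorted(...) + 1` expression in the hunk above. A toy check of that rule (singular values invented for illustration):

    import numpy as np

    s = np.array([4.0, 2.0, 1.0, 0.5])            # invented singular values
    variance_explained = s ** 2 / np.sum(s ** 2)  # [0.753, 0.188, 0.047, 0.012]
    cumulative = np.cumsum(variance_explained)    # [0.753, 0.941, 0.988, 1.000]
    k = np.searchsorted(cumulative, 0.9) + 1      # threshold of 0.9 -> k == 2
    assert k == 2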
diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py
index 6fdae49f18..3aa535dc19 100644
--- a/nipype/algorithms/tests/test_CompCor.py
+++ b/nipype/algorithms/tests/test_CompCor.py
@@ -48,20 +48,48 @@ def test_compcor(self):
         self.run_cc(
             CompCor(
+                num_components=6,
                 realigned_file=self.realigned_file,
                 mask_files=self.mask_files,
                 mask_index=0),
             expected_components)
 
         self.run_cc(
             ACompCor(
+                num_components=6,
                 realigned_file=self.realigned_file,
                 mask_files=self.mask_files,
                 mask_index=0,
                 components_file='acc_components_file'),
             expected_components, 'aCompCor')
 
+    def test_compcor_variance_threshold_and_metadata(self):
+        expected_components = [['-0.2027150345', '-0.4954813834'],
+                               ['0.2565929051', '0.7866217875'],
+                               ['-0.3550986008', '-0.0089784905'],
+                               ['0.7512786244', '-0.3599828482'],
+                               ['-0.4500578942', '0.0778209345']]
+        expected_metadata = {
+            'component': 'CompCor00',
+            'mask': 'mask',
+            'singular_value': '4.0720553036',
+            'variance_explained': '0.5527211465',
+            'cumulative_variance_explained': '0.5527211465',
+            'retained': 'True',
+        }
+        ccinterface = CompCor(
+            variance_threshold=0.7,
+            realigned_file=self.realigned_file,
+            mask_files=self.mask_files,
+            mask_names=['mask'],
+            mask_index=1,
+            save_metadata=True)
+        self.run_cc(ccinterface=ccinterface,
+                    expected_components=expected_components,
+                    expected_n_components=2,
+                    expected_metadata=expected_metadata)
+
     def test_tcompcor(self):
-        ccinterface = TCompCor(
+        ccinterface = TCompCor(num_components=6,
             realigned_file=self.realigned_file, percentile_threshold=0.75)
         self.run_cc(ccinterface, [['-0.1114536190', '-0.4632908609'], [
             '0.4566907310', '0.6983205193'
         ],
@@ -70,7 +98,8 @@
         ], ['-0.1342351356', '0.1407855119']], 'tCompCor')
 
     def test_tcompcor_no_percentile(self):
-        ccinterface = TCompCor(realigned_file=self.realigned_file)
+        ccinterface = TCompCor(num_components=6,
+                               realigned_file=self.realigned_file)
         ccinterface.run()
 
         mask = nb.load('mask_000.nii.gz').get_data()
@@ -80,6 +109,7 @@ def test_compcor_no_regress_poly(self):
         self.run_cc(
             CompCor(
+                num_components=6,
                 realigned_file=self.realigned_file,
                 mask_files=self.mask_files,
                 mask_index=0,
@@ -151,7 +181,9 @@ def test_tcompcor_multi_mask_no_index(self):
     def run_cc(self,
                ccinterface,
                expected_components,
-               expected_header='CompCor'):
+               expected_header='CompCor',
+               expected_n_components=None,
+               expected_metadata=None):
         # run
         ccresult = ccinterface.run()
 
@@ -160,13 +192,14 @@ def run_cc(self,
         assert ccresult.outputs.components_file == expected_file
         assert os.path.exists(expected_file)
         assert os.path.getsize(expected_file) > 0
-        assert ccinterface.inputs.num_components == 6
 
         with open(ccresult.outputs.components_file, 'r') as components_file:
-            expected_n_components = min(ccinterface.inputs.num_components,
-                                        self.fake_data.shape[3])
+            if expected_n_components is None:
+                expected_n_components = min(ccinterface.inputs.num_components,
+                                            self.fake_data.shape[3])
 
-            components_data = [line.split('\t') for line in components_file]
+            components_data = [line.rstrip().split('\t')
+                               for line in components_file]
 
             # the first item will be '#', we can throw it out
             header = components_data.pop(0)
@@ -180,9 +213,24 @@ def run_cc(self,
             num_got_timepoints = len(components_data)
             assert num_got_timepoints == self.fake_data.shape[3]
             for index, timepoint in enumerate(components_data):
-                assert (len(timepoint) == ccinterface.inputs.num_components
-                        or len(timepoint) == self.fake_data.shape[3])
+                assert (len(timepoint) == expected_n_components)
                 assert timepoint[:2] == expected_components[index]
+
+        if ccinterface.inputs.save_metadata:
+            expected_metadata_file = (
+                ccinterface._list_outputs()['metadata_file'])
+            assert ccresult.outputs.metadata_file == expected_metadata_file
+            assert os.path.exists(expected_metadata_file)
+            assert os.path.getsize(expected_metadata_file) > 0
+
+            with open(ccresult.outputs.metadata_file, 'r') as metadata_file:
+                components_metadata = [line.rstrip().split('\t')
+                                       for line in metadata_file]
+                components_metadata = {i: j for i, j in
+                                       zip(components_metadata[0],
+                                           components_metadata[1])}
+                assert components_metadata == expected_metadata
+
         return ccresult
 
     @staticmethod
diff --git a/nipype/algorithms/tests/test_auto_ACompCor.py b/nipype/algorithms/tests/test_auto_ACompCor.py
index 235d15da9e..95a9f51a88 100644
--- a/nipype/algorithms/tests/test_auto_ACompCor.py
+++ b/nipype/algorithms/tests/test_auto_ACompCor.py
@@ -15,20 +15,23 @@ def test_ACompCor_inputs():
             requires=['mask_files'],
             xor=['merge_method'],
         ),
+        mask_names=dict(),
         merge_method=dict(
             requires=['mask_files'],
            xor=['mask_index'],
        ),
-        num_components=dict(usedefault=True, ),
+        num_components=dict(xor=['variance_threshold'], ),
        pre_filter=dict(usedefault=True, ),
        realigned_file=dict(mandatory=True, ),
        regress_poly_degree=dict(usedefault=True, ),
        repetition_time=dict(),
+        save_metadata=dict(),
        save_pre_filter=dict(),
        use_regress_poly=dict(
            deprecated='0.15.0',
            new_name='pre_filter',
        ),
+        variance_threshold=dict(xor=['num_components'], ),
    )
    inputs = ACompCor.input_spec()

@@ -38,6 +41,7 @@ def test_ACompCor_inputs():

 def test_ACompCor_outputs():
     output_map = dict(
         components_file=dict(),
+        metadata_file=dict(),
         pre_filter_file=dict(),
     )
     outputs = ACompCor.output_spec()
diff --git a/nipype/algorithms/tests/test_auto_TCompCor.py b/nipype/algorithms/tests/test_auto_TCompCor.py
index 59a5b84f76..1e94ef4241 100644
--- a/nipype/algorithms/tests/test_auto_TCompCor.py
+++ b/nipype/algorithms/tests/test_auto_TCompCor.py
@@ -15,21 +15,24 @@ def test_TCompCor_inputs():
             requires=['mask_files'],
             xor=['merge_method'],
         ),
+        mask_names=dict(),
         merge_method=dict(
             requires=['mask_files'],
             xor=['mask_index'],
         ),
-        num_components=dict(usedefault=True, ),
+        num_components=dict(xor=['variance_threshold'], ),
         percentile_threshold=dict(usedefault=True, ),
         pre_filter=dict(usedefault=True, ),
         realigned_file=dict(mandatory=True, ),
         regress_poly_degree=dict(usedefault=True, ),
         repetition_time=dict(),
+        save_metadata=dict(),
         save_pre_filter=dict(),
         use_regress_poly=dict(
             deprecated='0.15.0',
             new_name='pre_filter',
         ),
+        variance_threshold=dict(xor=['num_components'], ),
     )
     inputs = TCompCor.input_spec()
 
@@ -40,6 +43,7 @@ def test_TCompCor_outputs():
     output_map = dict(
         components_file=dict(),
         high_variance_masks=dict(),
+        metadata_file=dict(),
         pre_filter_file=dict(),
     )
     outputs = TCompCor.output_spec()
diff --git a/nipype/info.py b/nipype/info.py
index a3a0dfeec0..f9361031bb 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -141,7 +141,7 @@ def get_nipype_gitversion():
     'numpy>=%s ; python_version >= "3.7"' % NUMPY_MIN_VERSION_37,
     'python-dateutil>=%s' % DATEUTIL_MIN_VERSION,
     'scipy>=%s' % SCIPY_MIN_VERSION,
-    'traits>=%s' % TRAITS_MIN_VERSION,
+    'traits>=%s,!=5.0' % TRAITS_MIN_VERSION,
     'future>=%s' % FUTURE_MIN_VERSION,
     'simplejson>=%s' % SIMPLEJSON_MIN_VERSION,
     'prov>=%s' % PROV_VERSION,
diff --git a/nipype/workflows/rsfmri/fsl/resting.py b/nipype/workflows/rsfmri/fsl/resting.py
index 12d44a83cf..176a0ed6f7 100644
--- a/nipype/workflows/rsfmri/fsl/resting.py
+++ b/nipype/workflows/rsfmri/fsl/resting.py
@@ -3,7 +3,6 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 from __future__ import (print_function, division, unicode_literals,
                         absolute_import)
-from builtins import str
 
 from ....interfaces import fsl as fsl  # fsl
 from ....interfaces import utility as util  # utility
diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py
index 799041df37..eba73a75b1 100644
--- a/nipype/workflows/rsfmri/fsl/tests/test_resting.py
+++ b/nipype/workflows/rsfmri/fsl/tests/test_resting.py
@@ -89,16 +89,17 @@ def test_create_resting_preproc(self, mock_node, mock_realign_wf):
         # assert
         expected_file = os.path.abspath(self.out_filenames['components_file'])
         with open(expected_file, 'r') as components_file:
-            components_data = [line.split() for line in components_file]
-            num_got_components = len(components_data)
-            assert (num_got_components == self.num_noise_components
-                    or num_got_components == self.fake_data.shape[3])
-            first_two = [row[:2] for row in components_data[1:]]
-            assert first_two == [['-0.5172356654', '-0.6973053243'], [
-                '0.2574722644', '0.1645270737'
-            ], ['-0.0806469590',
-                '0.5156853779'], ['0.7187176051', '-0.3235820287'],
-                                 ['-0.3783072450', '0.3406749013']]
+            components_data = [line.rstrip().split()
+                               for line in components_file]
+            num_got_components = len(components_data)
+            assert (num_got_components == self.num_noise_components or
+                    num_got_components == self.fake_data.shape[3])
+            first_two = [row[:2] for row in components_data[1:]]
+            assert first_two == [['-0.5172356654', '-0.6973053243'],
+                                 ['0.2574722644', '0.1645270737'],
+                                 ['-0.0806469590', '0.5156853779'],
+                                 ['0.7187176051', '-0.3235820287'],
+                                 ['-0.3783072450', '0.3406749013']]
 
 
 fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]],
                        [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]],