-
Notifications
You must be signed in to change notification settings - Fork 533
ENH signal extraction interface #1647
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
d45dc87
b736086
e46bd00
d51ab43
02faccc
5709f05
0a59333
e0faaf5
1609946
fffd9b5
700ecce
4f79c18
b11c5f5
fb0d6b9
c1b3a12
bc48a74
3883fe9
397ce99
77f68cf
7286093
18c57cc
34fb134
ab1e5f1
97dd3aa
74b804e
5acb6dc
9df8d8a
9dad02e
850ed41
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,141 @@ | ||
# -*- coding: utf-8 -*- | ||
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- | ||
# vi: set ft=python sts=4 ts=4 sw=4 et: | ||
''' | ||
Algorithms to compute statistics on :abbr:`fMRI (functional MRI)` | ||
|
||
Change directory to provide relative paths for doctests | ||
>>> import os | ||
>>> filepath = os.path.dirname(os.path.realpath(__file__)) | ||
>>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) | ||
>>> os.chdir(datadir) | ||
|
||
''' | ||
from __future__ import (print_function, division, unicode_literals, | ||
absolute_import) | ||
|
||
import numpy as np | ||
import nibabel as nb | ||
|
||
from .. import logging | ||
from ..interfaces.base import (traits, TraitedSpec, BaseInterface, | ||
BaseInterfaceInputSpec, File, InputMultiPath) | ||
IFLOG = logging.getLogger('interface') | ||
|
||
class SignalExtractionInputSpec(BaseInterfaceInputSpec):
    '''Input specification for :class:`SignalExtraction`.'''
    in_file = File(exists=True, mandatory=True, desc='4-D fMRI nii file')
    label_files = InputMultiPath(
        File(exists=True), mandatory=True,
        desc='a 3-D label image, with 0 denoting background, or a list of '
             '3-D probability maps (one per label) or the equivalent 4D '
             'file.')
    class_labels = traits.List(
        mandatory=True,
        desc='Human-readable labels for each segment in the label file, '
             'in order. The length of class_labels must be equal to the '
             'number of segments (background excluded). This list '
             'corresponds to the class labels in label_file in ascending '
             'order')
    out_file = File(
        'signals.tsv', usedefault=True, exists=False, mandatory=False,
        desc='The name of the file to output to. signals.tsv by default')
    incl_shared_variance = traits.Bool(
        True, usedefault=True, mandatory=False,
        desc='By default (True), returns simple time series calculated '
             'from each region independently (e.g., for noise '
             'regression). If False, returns unique signals for each '
             'region, discarding shared variance (e.g., for connectivity. '
             'Only has effect with 4D probability maps.')
    include_global = traits.Bool(
        False, usedefault=True, mandatory=False,
        desc='If True, include an extra column labeled "global", with '
             'values calculated from the entire brain '
             '(instead of just regions).')
    detrend = traits.Bool(
        False, usedefault=True, mandatory=False,
        desc='If True, perform detrending using nilearn.')
|
||
class SignalExtractionOutputSpec(TraitedSpec):
    '''Output specification for :class:`SignalExtraction`.'''
    out_file = File(
        exists=True,
        desc='tsv file containing the computed signals, with as many '
             'columns as there are labels and as many rows as there are '
             'timepoints in in_file, plus a header row with values from '
             'class_labels')
|
||
class SignalExtraction(BaseInterface):
    '''
    Extracts signals over tissue classes or brain regions

    >>> seinterface = SignalExtraction()
    >>> seinterface.inputs.in_file = 'functional.nii'
    >>> seinterface.inputs.label_files = 'segmentation0.nii.gz'
    >>> seinterface.inputs.out_file = 'means.tsv'
    >>> segments = ['CSF', 'gray', 'white']
    >>> seinterface.inputs.class_labels = segments
    >>> seinterface.inputs.detrend = True
    >>> seinterface.inputs.include_global = True
    '''
    input_spec = SignalExtractionInputSpec
    output_spec = SignalExtractionOutputSpec

    def _run_interface(self, runtime):
        ''' Extract one time series per label from in_file and write the
        result, with a header row of labels, to out_file as tsv. '''
        maskers, labels = self._process_inputs()

        signals = [masker.fit_transform(self.inputs.in_file)
                   for masker in maskers]
        region_signals = np.hstack(signals)

        # first row is the header of human-readable labels; every
        # following row is one timepoint
        output = np.vstack((labels, region_signals.astype(str)))

        # save output; fmt must be a str — numpy rejects a bytes format
        # string (the previous b'%s') under Python 3
        np.savetxt(self.inputs.out_file, output, fmt='%s', delimiter='\t')
        return runtime

    def _process_inputs(self):
        ''' Validate and process inputs into useful form.

        Returns a tuple (maskers, labels): a list of nilearn maskers (one
        per independent computation) and the list of corresponding column
        labels, including the optional leading 'global' column.

        Note: the class_labels input trait is copied, not mutated.
        Inserting 'global' directly into self.inputs.class_labels (as the
        original implementation did) corrupts the user-supplied input and
        accumulates duplicate 'global' entries on repeated runs.

        Raises ValueError if len(class_labels) does not match the number
        of regions found in label_files. '''
        import nilearn.input_data as nl
        import nilearn.image as nli

        label_data = nli.concat_imgs(self.inputs.label_files)
        maskers = []

        # determine form of label files, choose appropriate nilearn masker
        if np.amax(label_data.get_data()) > 1:  # 3d label file
            n_labels = int(np.amax(label_data.get_data()))
            maskers.append(nl.NiftiLabelsMasker(label_data))
        else:  # 4d labels
            n_labels = label_data.get_data().shape[3]
            if self.inputs.incl_shared_variance:  # 4d labels, independent computation
                for img in nli.iter_img(label_data):
                    maskers.append(nl.NiftiMapsMasker(
                        self._4d(img.get_data(), img.affine)))
            else:  # 4d labels, one computation fitting all
                maskers.append(nl.NiftiMapsMasker(label_data))

        # check label list size
        if len(self.inputs.class_labels) != n_labels:
            raise ValueError('The length of class_labels {} does not '
                             'match the number of regions {} found in '
                             'label_files {}'.format(self.inputs.class_labels,
                                                     n_labels,
                                                     self.inputs.label_files))

        # work on a copy so the user-supplied input trait is not mutated
        labels = list(self.inputs.class_labels)
        if self.inputs.include_global:
            # sum probability maps across regions, then binarize, to get
            # a single whole-brain mask
            global_label_data = label_data.get_data().sum(axis=3)
            global_label_data = np.rint(global_label_data).astype(int).clip(0, 1)
            global_label_data = self._4d(global_label_data, label_data.affine)
            maskers.insert(0, nl.NiftiLabelsMasker(global_label_data))
            labels.insert(0, 'global')

        # detrend is applied uniformly here, so it need not be passed to
        # the individual masker constructors above
        for masker in maskers:
            masker.set_params(detrend=self.inputs.detrend)

        return maskers, labels

    def _4d(self, array, affine):
        ''' takes a 3-dimensional numpy array and an affine,
        returns the equivalent 4th dimensional nifti file
        (a trailing singleton axis is appended) '''
        return nb.Nifti1Image(array[:, :, :, np.newaxis], affine)

    def _list_outputs(self):
        ''' Report the tsv file written by _run_interface as out_file. '''
        outputs = self._outputs().get()
        outputs['out_file'] = self.inputs.out_file
        return outputs
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,199 @@ | ||
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- | ||
# vi: set ft=python sts=4 ts=4 sw=4 et: | ||
import unittest | ||
import os | ||
import tempfile | ||
import shutil | ||
|
||
import numpy as np | ||
|
||
from ...testing import (assert_equal, utils, assert_almost_equal, raises, | ||
skipif) | ||
from .. import nilearn as iface | ||
|
||
# Probe for the optional nilearn dependency; tests below are skipped
# when it is not installed.
no_nilearn = True
try:
    __import__('nilearn')
except ImportError:
    pass
else:
    no_nilearn = False
|
||
class TestSignalExtraction(unittest.TestCase):
    # Tests for nipype.algorithms.nilearn.SignalExtraction. Expected
    # values below are hand-computed from the toy data at the bottom of
    # the class.

    # Filenames for the toy images written into the scratch directory.
    filenames = {
        'in_file': 'fmri.nii',
        'label_files': 'labels.nii',
        '4d_label_file': '4dlabels.nii',
        'out_file': 'signals.tsv'
    }
    # Segment names matching the three non-background labels in
    # fake_label_data (values 1, 2, 3 in ascending order).
    labels = ['csf', 'gray', 'white']
    # Expected header when include_global=True prepends a 'global' column.
    global_labels = ['global'] + labels

    def setUp(self):
        # Each test runs in its own temporary directory so the
        # interface's relative-path output lands somewhere disposable.
        self.orig_dir = os.getcwd()
        self.temp_dir = tempfile.mkdtemp()
        os.chdir(self.temp_dir)

        utils.save_toy_nii(self.fake_fmri_data, self.filenames['in_file'])
        utils.save_toy_nii(self.fake_label_data, self.filenames['label_files'])

    @skipif(no_nilearn)
    def test_signal_extract_no_shared(self):
        # run: 3-D integer label file, per-region means
        iface.SignalExtraction(in_file=self.filenames['in_file'],
                               label_files=self.filenames['label_files'],
                               class_labels=self.labels,
                               incl_shared_variance=False).run()
        # assert
        self.assert_expected_output(self.labels, self.base_wanted)

    @skipif(no_nilearn)
    @raises(ValueError)
    def test_signal_extr_bad_label_list(self):
        # run: one label for a three-segment file must raise ValueError
        iface.SignalExtraction(in_file=self.filenames['in_file'],
                               label_files=self.filenames['label_files'],
                               class_labels=['bad'],
                               incl_shared_variance=False).run()

    @skipif(no_nilearn)
    def test_signal_extr_equiv_4d_no_shared(self):
        # a binary 4-D probability-map file equivalent to the 3-D label
        # file must produce the same signals as the 3-D case
        self._test_4d_label(self.base_wanted, self.fake_equiv_4d_label_data,
                            incl_shared_variance=False)

    @skipif(no_nilearn)
    def test_signal_extr_4d_no_shared(self):
        # set up & run & assert: non-binary 4-D probability maps
        self._test_4d_label(self.fourd_wanted, self.fake_4d_label_data, incl_shared_variance=False)

    @skipif(no_nilearn)
    def test_signal_extr_global_no_shared(self):
        # set up: expected 'global' column (whole-brain mean) prepended
        # to the per-region expectations
        wanted_global = [[-4./6], [-1./6], [3./6], [-1./6], [-7./6]]
        for i, vals in enumerate(self.base_wanted):
            wanted_global[i].extend(vals)

        # run
        iface.SignalExtraction(in_file=self.filenames['in_file'],
                               label_files=self.filenames['label_files'],
                               class_labels=self.labels,
                               include_global=True,
                               incl_shared_variance=False).run()

        # assert
        self.assert_expected_output(self.global_labels, wanted_global)

    @skipif(no_nilearn)
    def test_signal_extr_4d_global_no_shared(self):
        # set up: 'global' column computed over the binarized sum of the
        # 4-D probability maps
        wanted_global = [[3./8], [-3./8], [1./8], [-7./8], [-9./8]]
        for i, vals in enumerate(self.fourd_wanted):
            wanted_global[i].extend(vals)

        # run & assert
        self._test_4d_label(wanted_global, self.fake_4d_label_data,
                            include_global=True, incl_shared_variance=False)

    @skipif(no_nilearn)
    def test_signal_extr_shared(self):
        # set up: with shared variance included, each region's signal is
        # the probability-weighted projection of each volume onto that
        # region's map
        wanted = []
        for vol in range(self.fake_fmri_data.shape[3]):
            volume = self.fake_fmri_data[:, :, :, vol].flatten()
            wanted_row = []
            for reg in range(self.fake_4d_label_data.shape[3]):
                region = self.fake_4d_label_data[:, :, :, reg].flatten()
                wanted_row.append((volume*region).sum()/(region*region).sum())

            wanted.append(wanted_row)
        # run & assert
        self._test_4d_label(wanted, self.fake_4d_label_data)

    def _test_4d_label(self, wanted, fake_labels, include_global=False, incl_shared_variance=True):
        # Helper: save the given 4-D label data, run SignalExtraction on
        # it with the given options, and check the output against wanted.

        # set up
        utils.save_toy_nii(fake_labels, self.filenames['4d_label_file'])

        # run
        iface.SignalExtraction(in_file=self.filenames['in_file'],
                               label_files=self.filenames['4d_label_file'],
                               class_labels=self.labels,
                               incl_shared_variance=incl_shared_variance,
                               include_global=include_global).run()

        wanted_labels = self.global_labels if include_global else self.labels

        # assert
        self.assert_expected_output(wanted_labels, wanted)

    def assert_expected_output(self, labels, wanted):
        # Parse the tsv written by the interface and compare the header
        # row and every signal value (to 1 decimal) against wanted.
        with open(self.filenames['out_file'], 'r') as output:
            got = [line.split() for line in output]
            labels_got = got.pop(0)  # remove header
            assert_equal(labels_got, labels)
            assert_equal(len(got), self.fake_fmri_data.shape[3],
                         'num rows and num volumes')
            # convert from string to float
            got = [[float(num) for num in row] for row in got]
            for i, time in enumerate(got):
                assert_equal(len(labels), len(time))
                for j, segment in enumerate(time):
                    assert_almost_equal(segment, wanted[i][j], decimal=1)

    def tearDown(self):
        # restore cwd before removing the scratch directory
        os.chdir(self.orig_dir)
        shutil.rmtree(self.temp_dir)

    # Toy 4-D fMRI data: shape (2, 2, 2, 5) — eight voxels, five volumes.
    fake_fmri_data = np.array([[[[2, -1, 4, -2, 3],
                                 [4, -2, -5, -1, 0]],

                                [[-2, 0, 1, 4, 4],
                                 [-5, 3, -3, 1, -5]]],


                               [[[2, -2, -1, -2, -5],
                                 [3, 0, 3, -5, -2]],

                                [[-4, -2, -2, 1, -2],
                                 [3, 1, 4, -3, -2]]]])

    # 3-D integer label volume: 0 = background, 1-3 = segments.
    fake_label_data = np.array([[[1, 0],
                                 [3, 1]],

                                [[2, 0],
                                 [1, 3]]])

    # Binary 4-D probability maps equivalent to fake_label_data
    # (one map per segment).
    fake_equiv_4d_label_data = np.array([[[[1., 0., 0.],
                                           [0., 0., 0.]],
                                          [[0., 0., 1.],
                                           [1., 0., 0.]]],
                                         [[[0., 1., 0.],
                                           [0., 0., 0.]],
                                          [[1., 0., 0.],
                                           [0., 0., 1.]]]])

    # Expected per-region means for the 3-D label case, one row per volume.
    base_wanted = [[-2.33333, 2, .5],
                   [0, -2, .5],
                   [-.3333333, -1, 2.5],
                   [0, -2, .5],
                   [-1.3333333, -5, 1]]

    # Non-binary 4-D probability maps (soft segmentation).
    fake_4d_label_data = np.array([[[[0.2, 0.3, 0.5],
                                     [0.1, 0.1, 0.8]],

                                    [[0.1, 0.3, 0.6],
                                     [0.3, 0.4, 0.3]]],

                                   [[[0.2, 0.2, 0.6],
                                     [0., 0.3, 0.7]],

                                    [[0.3, 0.3, 0.4],
                                     [0.3, 0.4, 0.3]]]])


    # Expected signals for fake_4d_label_data with incl_shared_variance=False.
    fourd_wanted = [[-5.0652173913, -5.44565217391, 5.50543478261],
                    [-7.02173913043, 11.1847826087, -4.33152173913],
                    [-19.0869565217, 21.2391304348, -4.57608695652],
                    [5.19565217391, -3.66304347826, -1.51630434783],
                    [-12.0, 3., 0.5]]
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
why is this check here and not in the nilearn interfaces file? see the nipy or dipy interfaces.
the check should be used inside run interface to indicate to the user that they need to install nilearn. at this point if they use this interface, it will result in an error deep in the code.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I was using the recent dvars testing file as a model (https://github.com/nipy/nipype/blob/master/nipype/algorithms/tests/test_confounds.py), which used nitime. I'm guessing that needs to be fixed too? Since this and the dvars interfaces have already been merged, I opened a separate issue for this: #1661