Skip to content

Replace deprecated HasTraits.set with trait_set #2048

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits into from
May 24, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions circle.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
machine:
pre:
- curl -sSL https://s3.amazonaws.com/circle-downloads/install-circleci-docker.sh | bash -s -- 1.10.0
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This would break the cache (if it worked with 1.9, we didn't get it to work, right @satra?). Caching does not work with 1.10.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

sth weird happened with docker. i thought i had removed 1.9 and caching worked, but not after it got merged. we still have to revisit the cache situation. from a time perspective restoring and saving the cache takes almost as much time as building it.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes, we should probably roll back to the docker pull solution, and manually update nipype/base

Copy link
Contributor

@oesteban oesteban May 24, 2017

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(I don't have hopes for getting the cache to work. Definitely not with 1.10 but 1.9 neither)

environment:
OSF_NIPYPE_URL: "https://files.osf.io/v1/resources/nefdp/providers/osfstorage"
DATA_NIPYPE_TUTORIAL_URL: "${OSF_NIPYPE_URL}/57f4739cb83f6901ed94bf21"
Expand Down
18 changes: 9 additions & 9 deletions nipype/algorithms/confounds.py
Original file line number Diff line number Diff line change
Expand Up @@ -398,7 +398,7 @@ def _run_interface(self, runtime):
if len(mask_images) == 0:
img = nb.Nifti1Image(np.ones(imgseries.shape[:3], dtype=np.bool),
affine=imgseries.affine,
header=imgseries.get_header())
header=imgseries.header)
mask_images = [img]

mask_images = self._process_masks(mask_images, imgseries.get_data())
Expand Down Expand Up @@ -501,7 +501,7 @@ def _process_masks(self, mask_images, timeseries=None):
mask_data = np.zeros_like(mask)
mask_data[mask != 0] = tSTD >= threshold_std
out_image = nb.Nifti1Image(mask_data, affine=img.affine,
header=img.get_header())
header=img.header)

# save mask
mask_file = os.path.abspath('mask_{:03d}.nii.gz'.format(i))
Expand Down Expand Up @@ -561,7 +561,7 @@ def _run_interface(self, runtime):
header = img.header.copy()
vollist = [nb.load(filename, mmap=NUMPY_MMAP) for filename in self.inputs.in_file]
data = np.concatenate([vol.get_data().reshape(
vol.get_shape()[:3] + (-1,)) for vol in vollist], axis=3)
vol.shape[:3] + (-1,)) for vol in vollist], axis=3)
data = np.nan_to_num(data)

if data.dtype.kind == 'i':
Expand All @@ -570,18 +570,18 @@ def _run_interface(self, runtime):

if isdefined(self.inputs.regress_poly):
data = regress_poly(self.inputs.regress_poly, data, remove_mean=False)
img = nb.Nifti1Image(data, img.get_affine(), header)
img = nb.Nifti1Image(data, img.affine, header)
nb.save(img, op.abspath(self.inputs.detrended_file))

meanimg = np.mean(data, axis=3)
stddevimg = np.std(data, axis=3)
tsnr = np.zeros_like(meanimg)
tsnr[stddevimg > 1.e-3] = meanimg[stddevimg > 1.e-3] / stddevimg[stddevimg > 1.e-3]
img = nb.Nifti1Image(tsnr, img.get_affine(), header)
img = nb.Nifti1Image(tsnr, img.affine, header)
nb.save(img, op.abspath(self.inputs.tsnr_file))
img = nb.Nifti1Image(meanimg, img.get_affine(), header)
img = nb.Nifti1Image(meanimg, img.affine, header)
nb.save(img, op.abspath(self.inputs.mean_file))
img = nb.Nifti1Image(stddevimg, img.get_affine(), header)
img = nb.Nifti1Image(stddevimg, img.affine, header)
nb.save(img, op.abspath(self.inputs.stddev_file))
return runtime

Expand Down Expand Up @@ -872,7 +872,7 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None):
if mask is None:
mask = img.get_data() > 0
np.logical_or(mask, img.get_data() > 0, mask)
img = nb.Nifti1Image(mask, img.affine, header=img.get_header())
img = nb.Nifti1Image(mask, img.affine, header=img.header)
return [img]

if mask_method == 'intersect':
Expand All @@ -882,7 +882,7 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None):
if mask is None:
mask = img.get_data() > 0
np.logical_and(mask, img.get_data() > 0, mask)
img = nb.Nifti1Image(mask, img.affine, header=img.get_header())
img = nb.Nifti1Image(mask, img.affine, header=img.header)
return [img]


Expand Down
2 changes: 1 addition & 1 deletion nipype/algorithms/tests/test_tsnr.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def assert_default_outputs(self, outputs):

def assert_unchanged(self, expected_ranges):
for key, (min_, max_) in expected_ranges.items():
data = np.asarray(nb.load(self.out_filenames[key])._data)
data = np.asarray(nb.load(self.out_filenames[key]).dataobj)
npt.assert_almost_equal(np.amin(data), min_, decimal=1)
npt.assert_almost_equal(np.amax(data), max_, decimal=1)

Expand Down
2 changes: 1 addition & 1 deletion nipype/caching/memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def __call__(self, **kwargs):
kwargs = modify_paths(kwargs, relative=False)
interface = self.interface()
# Set the inputs early to get some argument checking
interface.inputs.set(**kwargs)
interface.inputs.trait_set(**kwargs)
# Make a name for our node
inputs = interface.inputs.get_hashval()
hasher = hashlib.new('md5')
Expand Down
5 changes: 1 addition & 4 deletions nipype/interfaces/afni/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,13 +171,10 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None):
m = re.search(pattern, line)
if m:
d = m.groupdict()
for k in list(d.keys()):
d[k] = int(d[k])
outputs.set(**d)
outputs.trait_set(**{k: int(d[k]) for k in d.keys()})
return outputs



class BrickStatInputSpec(CommandLineInputSpec):
in_file = File(
desc='input file to 3dmaskave',
Expand Down
6 changes: 3 additions & 3 deletions nipype/interfaces/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -367,7 +367,7 @@ def __init__(self, **kwargs):
undefined_traits[trait] = Undefined
self.trait_set(trait_change_notify=False, **undefined_traits)
self._generate_handlers()
self.set(**kwargs)
self.trait_set(**kwargs)

def items(self):
""" Name, trait generator for user modifiable traits
Expand Down Expand Up @@ -650,7 +650,7 @@ def __deepcopy__(self, memo):
pass
# clone twice
dup = self.clone_traits(memo=memo)
dup.set(**dup_dict)
dup.trait_set(**dup_dict)
return dup


Expand Down Expand Up @@ -1060,7 +1060,7 @@ def run(self, **inputs):
results : an InterfaceResult object containing a copy of the instance
that was executed, provenance information and, if successful, results
"""
self.inputs.set(**inputs)
self.inputs.trait_set(**inputs)
self._check_mandatory_inputs()
self._check_version_requirements(self.inputs)
interface = self.__class__
Expand Down
16 changes: 8 additions & 8 deletions nipype/interfaces/dipy/reconstruction.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,8 @@ def _run_interface(self, runtime):
import gc

img = nb.load(self.inputs.in_file)
hdr = img.get_header().copy()
affine = img.get_affine()
hdr = img.header.copy()
affine = img.affine
data = img.get_data()
gtab = self._get_gradient_table()

Expand Down Expand Up @@ -210,14 +210,14 @@ def _run_interface(self, runtime):

img = nb.load(self.inputs.in_file)
imref = nb.four_to_three(img)[0]
affine = img.get_affine()
affine = img.affine

if isdefined(self.inputs.in_mask):
msk = nb.load(self.inputs.in_mask).get_data()
msk[msk > 0] = 1
msk[msk < 0] = 0
else:
msk = np.ones(imref.get_shape())
msk = np.ones(imref.shape)

data = img.get_data().astype(np.float32)
gtab = self._get_gradient_table()
Expand Down Expand Up @@ -326,15 +326,15 @@ def _run_interface(self, runtime):

img = nb.load(self.inputs.in_file)
imref = nb.four_to_three(img)[0]
affine = img.get_affine()
affine = img.affine

if isdefined(self.inputs.in_mask):
msk = nb.load(self.inputs.in_mask).get_data()
else:
msk = np.ones(imref.get_shape())
msk = np.ones(imref.shape)

data = img.get_data().astype(np.float32)
hdr = imref.get_header().copy()
hdr = imref.header.copy()

gtab = self._get_gradient_table()
resp_file = np.loadtxt(self.inputs.response)
Expand All @@ -359,7 +359,7 @@ def _run_interface(self, runtime):
if self.inputs.save_fods:
sphere = get_sphere('symmetric724')
fods = csd_fit.odf(sphere)
nb.Nifti1Image(fods.astype(np.float32), img.get_affine(),
nb.Nifti1Image(fods.astype(np.float32), img.affine,
None).to_filename(self._gen_filename('fods'))

return runtime
Expand Down
2 changes: 1 addition & 1 deletion nipype/interfaces/dipy/tensors.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ def _run_interface(self, runtime):
# Load the 4D image files
img = nb.load(self.inputs.in_file)
data = img.get_data()
affine = img.get_affine()
affine = img.affine

# Load the gradient strengths and directions
gtab = self._get_gradient_table()
Expand Down
6 changes: 3 additions & 3 deletions nipype/interfaces/dipy/tracks.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,10 +177,10 @@ def _run_interface(self, runtime):

img = nb.load(self.inputs.in_file)
imref = nb.four_to_three(img)[0]
affine = img.get_affine()
affine = img.affine

data = img.get_data().astype(np.float32)
hdr = imref.get_header().copy()
hdr = imref.header.copy()
hdr.set_data_dtype(np.float32)
hdr['data_type'] = 16

Expand Down Expand Up @@ -222,7 +222,7 @@ def _run_interface(self, runtime):
msk[msk > 0] = 1
msk[msk < 0] = 0
else:
msk = np.ones(imref.get_shape())
msk = np.ones(imref.shape)

gfa = peaks.gfa * msk
seeds = self.inputs.num_seeds
Expand Down
8 changes: 4 additions & 4 deletions nipype/interfaces/utility/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def __init__(self, fields=None, mandatory_inputs=True, **inputs):
# Adding any traits wipes out all input values set in superclass initialization,
# even it the trait is not in the add_traits argument. The work-around is to reset
# the values after adding the traits.
self.inputs.set(**inputs)
self.inputs.trait_set(**inputs)

def _add_output_traits(self, base):
return add_traits(base, self._fields)
Expand Down Expand Up @@ -319,7 +319,7 @@ class Split(IOBase):

>>> from nipype.interfaces.utility import Split
>>> sp = Split()
>>> _ = sp.inputs.set(inlist=[1, 2, 3], splits=[2, 1])
>>> _ = sp.inputs.trait_set(inlist=[1, 2, 3], splits=[2, 1])
>>> out = sp.run()
>>> out.outputs.out1
[1, 2]
Expand Down Expand Up @@ -373,12 +373,12 @@ class Select(IOBase):

>>> from nipype.interfaces.utility import Select
>>> sl = Select()
>>> _ = sl.inputs.set(inlist=[1, 2, 3, 4, 5], index=[3])
>>> _ = sl.inputs.trait_set(inlist=[1, 2, 3, 4, 5], index=[3])
>>> out = sl.run()
>>> out.outputs.out
4

>>> _ = sl.inputs.set(inlist=[1, 2, 3, 4, 5], index=[3, 4])
>>> _ = sl.inputs.trait_set(inlist=[1, 2, 3, 4, 5], index=[3, 4])
>>> out = sl.run()
>>> out.outputs.out
[4, 5]
Expand Down
6 changes: 3 additions & 3 deletions nipype/pipeline/engine/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -561,7 +561,7 @@ def _load_results(self, cwd):
logger.debug('aggregating results')
if attribute_error:
old_inputs = loadpkl(op.join(cwd, '_inputs.pklz'))
self.inputs.set(**old_inputs)
self.inputs.trait_set(**old_inputs)
if not isinstance(self, MapNode):
self._copyfiles_to_wd(cwd, True, linksonly=True)
aggouts = self._interface.aggregate_outputs(
Expand Down Expand Up @@ -1121,7 +1121,7 @@ def _make_nodes(self, cwd=None):
base_dir=op.join(cwd, 'mapflow'),
name=nodename)
node.plugin_args = self.plugin_args
node._interface.inputs.set(
node._interface.inputs.trait_set(
**deepcopy(self._interface.inputs.get()))
for field in self.iterfield:
if self.nested:
Expand Down Expand Up @@ -1246,7 +1246,7 @@ def _get_inputs(self):
old_inputs = self._inputs.get()
self._inputs = self._create_dynamic_traits(self._interface.inputs,
fields=self.iterfield)
self._inputs.set(**old_inputs)
self._inputs.trait_set(**old_inputs)
super(MapNode, self)._get_inputs()

def _check_iterfield(self):
Expand Down
11 changes: 7 additions & 4 deletions nipype/workflows/dmri/fsl/epi.py
Original file line number Diff line number Diff line change
Expand Up @@ -743,13 +743,15 @@ def _dilate_mask(in_file, iterations=4):
import os
from nipype.utils import NUMPY_MMAP
img = nb.load(in_file, mmap=NUMPY_MMAP)
img._data = ndimage.binary_dilation(img.get_data(), iterations=iterations)
dilated_img = img.__class__(ndimage.binary_dilation(img.get_data(),
iterations=iterations),
img.affine, img.header)

name, fext = os.path.splitext(os.path.basename(in_file))
if fext == '.gz':
name, _ = os.path.splitext(name)
out_file = os.path.abspath('./%s_dil.nii.gz' % name)
nb.save(img, out_file)
nb.save(dilated_img, out_file)
return out_file


Expand Down Expand Up @@ -781,12 +783,13 @@ def _vsm_remove_mean(in_file, mask_file, in_unwarped):
img_data[msk == 0] = 0
vsmmag_masked = ma.masked_values(img_data.reshape(-1), 0.0)
vsmmag_masked = vsmmag_masked - vsmmag_masked.mean()
img._data = vsmmag_masked.reshape(img.shape)
masked_img = img.__class__(vsmmag_masked.reshape(img.shape),
img.affine, img.header)
name, fext = os.path.splitext(os.path.basename(in_file))
if fext == '.gz':
name, _ = os.path.splitext(name)
out_file = os.path.abspath('./%s_demeaned.nii.gz' % name)
nb.save(img, out_file)
nb.save(masked_img, out_file)
return out_file


Expand Down