ENH: added Apply VDM functionality to FieldMap SPM interface #3394

Merged: 13 commits, Oct 21, 2021
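For context, this PR exposes SPM's "Apply VDM" (voxel displacement map) step through a nipype interface alongside the existing FieldMap SPM interface. A minimal usage sketch follows; the class name `ApplyVDM` and the trait names `in_files` and `vdmfile` are illustrative assumptions and are not shown in the diff excerpt below.

```python
# Hypothetical usage sketch -- interface and trait names (ApplyVDM, in_files,
# vdmfile) are assumptions for illustration, not taken from the diff below.
from nipype.interfaces import spm

apply_vdm = spm.ApplyVDM()                      # interface added by this PR (name assumed)
apply_vdm.inputs.in_files = "epi.nii"           # EPI volume(s) to unwarp
apply_vdm.inputs.vdmfile = "vdm5_fieldmap.nii"  # voxel displacement map produced by FieldMap
result = apply_vdm.run()                        # runs SPM's Apply VDM batch job via MATLAB
```
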
5 changes: 5 additions & 0 deletions .zenodo.json
@@ -313,6 +313,11 @@
       "name": "Geisler, Daniel",
       "orcid": "0000-0003-2076-5329"
     },
+    {
+      "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universit\u00e4t Dresden, Dresden, Germany",
+      "name": "Bernardoni, Fabio",
+      "orcid": "0000-0002-5112-405X"
+    },
     {
       "name": "Salvatore, John"
     },

2 changes: 1 addition & 1 deletion Makefile
@@ -2,7 +2,7 @@
 # Files are then pushed to sourceforge using rsync with a command like this:
 # rsync -e ssh nipype-0.1-py2.5.egg cburns,nipy@frs.sourceforge.net:/home/frs/project/n/ni/nipy/nipype/nipype-0.1/

-PYTHON ?= python
+PYTHON ?= python3

 .PHONY: zipdoc sdist egg upload_to_pypi trailing-spaces clean-pyc clean-so clean-build clean-ctags clean in inplace test-code test-coverage test html specs check-before-commit check gen-base-dockerfile gen-main-dockerfile gen-dockerfiles

72 changes: 17 additions & 55 deletions nipype/algorithms/tests/test_auto_ACompCor.py
@@ -4,56 +4,24 @@

 def test_ACompCor_inputs():
     input_map = dict(
-        components_file=dict(
-            usedefault=True,
-        ),
-        failure_mode=dict(
-            usedefault=True,
-        ),
+        components_file=dict(usedefault=True),
+        failure_mode=dict(usedefault=True),
         header_prefix=dict(),
-        high_pass_cutoff=dict(
-            usedefault=True,
-        ),
-        ignore_initial_volumes=dict(
-            usedefault=True,
-        ),
+        high_pass_cutoff=dict(usedefault=True),
+        ignore_initial_volumes=dict(usedefault=True),
         mask_files=dict(),
-        mask_index=dict(
-            requires=["mask_files"],
-            xor=["merge_method"],
-        ),
+        mask_index=dict(requires=["mask_files"], xor=["merge_method"]),
         mask_names=dict(),
-        merge_method=dict(
-            requires=["mask_files"],
-            xor=["mask_index"],
-        ),
-        num_components=dict(
-            xor=["variance_threshold"],
-        ),
-        pre_filter=dict(
-            usedefault=True,
-        ),
-        realigned_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        regress_poly_degree=dict(
-            usedefault=True,
-        ),
+        merge_method=dict(requires=["mask_files"], xor=["mask_index"]),
+        num_components=dict(xor=["variance_threshold"]),
+        pre_filter=dict(usedefault=True),
+        realigned_file=dict(extensions=None, mandatory=True),
+        regress_poly_degree=dict(usedefault=True),
         repetition_time=dict(),
-        save_metadata=dict(
-            usedefault=True,
-        ),
-        save_pre_filter=dict(
-            usedefault=True,
-        ),
-        use_regress_poly=dict(
-            deprecated="0.15.0",
-            new_name="pre_filter",
-        ),
-        variance_threshold=dict(
-            xor=["num_components"],
-        ),
+        save_metadata=dict(usedefault=True),
+        save_pre_filter=dict(usedefault=True),
+        use_regress_poly=dict(deprecated="0.15.0", new_name="pre_filter"),
+        variance_threshold=dict(xor=["num_components"]),
     )
     inputs = ACompCor.input_spec()

@@ -64,15 +32,9 @@ def test_ACompCor_inputs():

 def test_ACompCor_outputs():
     output_map = dict(
-        components_file=dict(
-            extensions=None,
-        ),
-        metadata_file=dict(
-            extensions=None,
-        ),
-        pre_filter_file=dict(
-            extensions=None,
-        ),
+        components_file=dict(extensions=None),
+        metadata_file=dict(extensions=None),
+        pre_filter_file=dict(extensions=None),
     )
     outputs = ACompCor.output_spec()

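The hunk above, like the similar hunks in the remaining test files, only reformats nipype's auto-generated interface spec tests (collapsing each trait's metadata dict onto one line); the assertion loops are truncated by the diff context. For reference, the full shape of such a test is roughly the sketch below, reconstructed from the visible loop headers; the assert bodies are an assumption, not shown in this excerpt.

```python
# Sketch of the auto-generated spec-test pattern used by these files.
# Only the loop headers are visible in the diff; the assert bodies are assumed.
from nipype.algorithms.confounds import ACompCor


def test_ACompCor_inputs():
    input_map = dict(
        components_file=dict(usedefault=True),
        # ... remaining input traits elided ...
    )
    inputs = ACompCor.input_spec()

    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            # every declared piece of trait metadata must match what the
            # interface's input spec actually defines
            assert getattr(inputs.traits()[key], metakey) == value
```
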
21 changes: 4 additions & 17 deletions nipype/algorithms/tests/test_auto_ActivationCount.py
@@ -3,14 +3,7 @@


 def test_ActivationCount_inputs():
-    input_map = dict(
-        in_files=dict(
-            mandatory=True,
-        ),
-        threshold=dict(
-            mandatory=True,
-        ),
-    )
+    input_map = dict(in_files=dict(mandatory=True), threshold=dict(mandatory=True))
     inputs = ActivationCount.input_spec()

     for key, metadata in list(input_map.items()):
@@ -20,15 +13,9 @@ def test_ActivationCount_inputs():

 def test_ActivationCount_outputs():
     output_map = dict(
-        acm_neg=dict(
-            extensions=None,
-        ),
-        acm_pos=dict(
-            extensions=None,
-        ),
-        out_file=dict(
-            extensions=None,
-        ),
+        acm_neg=dict(extensions=None),
+        acm_pos=dict(extensions=None),
+        out_file=dict(extensions=None),
     )
     outputs = ActivationCount.output_spec()

16 changes: 3 additions & 13 deletions nipype/algorithms/tests/test_auto_AddCSVColumn.py
@@ -6,14 +6,8 @@ def test_AddCSVColumn_inputs():
     input_map = dict(
         extra_column_heading=dict(),
         extra_field=dict(),
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
+        in_file=dict(extensions=None, mandatory=True),
+        out_file=dict(extensions=None, usedefault=True),
     )
     inputs = AddCSVColumn.input_spec()

@@ -23,11 +17,7 @@ def test_AddCSVColumn_inputs():


 def test_AddCSVColumn_outputs():
-    output_map = dict(
-        csv_file=dict(
-            extensions=None,
-        ),
-    )
+    output_map = dict(csv_file=dict(extensions=None))
     outputs = AddCSVColumn.output_spec()

     for key, metadata in list(output_map.items()):

14 changes: 2 additions & 12 deletions nipype/algorithms/tests/test_auto_AddCSVRow.py
@@ -4,13 +4,7 @@

 def test_AddCSVRow_inputs():
     input_map = dict(
-        _outputs=dict(
-            usedefault=True,
-        ),
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        _outputs=dict(usedefault=True), in_file=dict(extensions=None, mandatory=True)
     )
     inputs = AddCSVRow.input_spec()

@@ -20,11 +14,7 @@ def test_AddCSVRow_inputs():


 def test_AddCSVRow_outputs():
-    output_map = dict(
-        csv_file=dict(
-            extensions=None,
-        ),
-    )
+    output_map = dict(csv_file=dict(extensions=None))
     outputs = AddCSVRow.output_spec()

     for key, metadata in list(output_map.items()):

33 changes: 7 additions & 26 deletions nipype/algorithms/tests/test_auto_AddNoise.py
@@ -4,27 +4,12 @@

 def test_AddNoise_inputs():
     input_map = dict(
-        bg_dist=dict(
-            mandatory=True,
-            usedefault=True,
-        ),
-        dist=dict(
-            mandatory=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        in_mask=dict(
-            extensions=None,
-        ),
-        out_file=dict(
-            extensions=None,
-        ),
-        snr=dict(
-            usedefault=True,
-        ),
+        bg_dist=dict(mandatory=True, usedefault=True),
+        dist=dict(mandatory=True, usedefault=True),
+        in_file=dict(extensions=None, mandatory=True),
+        in_mask=dict(extensions=None),
+        out_file=dict(extensions=None),
+        snr=dict(usedefault=True),
     )
     inputs = AddNoise.input_spec()

@@ -34,11 +19,7 @@ def test_AddNoise_inputs():


 def test_AddNoise_outputs():
-    output_map = dict(
-        out_file=dict(
-            extensions=None,
-        ),
-    )
+    output_map = dict(out_file=dict(extensions=None))
     outputs = AddNoise.output_spec()

     for key, metadata in list(output_map.items()):

70 changes: 17 additions & 53 deletions nipype/algorithms/tests/test_auto_ArtifactDetect.py
@@ -4,61 +4,25 @@

 def test_ArtifactDetect_inputs():
     input_map = dict(
-        bound_by_brainmask=dict(
-            usedefault=True,
-        ),
-        global_threshold=dict(
-            usedefault=True,
-        ),
-        intersect_mask=dict(
-            usedefault=True,
-        ),
-        mask_file=dict(
-            extensions=None,
-        ),
+        bound_by_brainmask=dict(usedefault=True),
+        global_threshold=dict(usedefault=True),
+        intersect_mask=dict(usedefault=True),
+        mask_file=dict(extensions=None),
         mask_threshold=dict(),
-        mask_type=dict(
-            mandatory=True,
-        ),
+        mask_type=dict(mandatory=True),
         norm_threshold=dict(
-            mandatory=True,
-            xor=["rotation_threshold", "translation_threshold"],
-        ),
-        parameter_source=dict(
-            mandatory=True,
-        ),
-        plot_type=dict(
-            usedefault=True,
-        ),
-        realigned_files=dict(
-            mandatory=True,
-        ),
-        realignment_parameters=dict(
-            mandatory=True,
-        ),
-        rotation_threshold=dict(
-            mandatory=True,
-            xor=["norm_threshold"],
-        ),
-        save_plot=dict(
-            usedefault=True,
-        ),
-        translation_threshold=dict(
-            mandatory=True,
-            xor=["norm_threshold"],
-        ),
-        use_differences=dict(
-            maxlen=2,
-            minlen=2,
-            usedefault=True,
-        ),
-        use_norm=dict(
-            requires=["norm_threshold"],
-            usedefault=True,
-        ),
-        zintensity_threshold=dict(
-            mandatory=True,
-        ),
+            mandatory=True, xor=["rotation_threshold", "translation_threshold"]
+        ),
+        parameter_source=dict(mandatory=True),
+        plot_type=dict(usedefault=True),
+        realigned_files=dict(mandatory=True),
+        realignment_parameters=dict(mandatory=True),
+        rotation_threshold=dict(mandatory=True, xor=["norm_threshold"]),
+        save_plot=dict(usedefault=True),
+        translation_threshold=dict(mandatory=True, xor=["norm_threshold"]),
+        use_differences=dict(maxlen=2, minlen=2, usedefault=True),
+        use_norm=dict(requires=["norm_threshold"], usedefault=True),
+        zintensity_threshold=dict(mandatory=True),
     )
     inputs = ArtifactDetect.input_spec()

10 changes: 2 additions & 8 deletions nipype/algorithms/tests/test_auto_CalculateMedian.py
@@ -4,11 +4,7 @@

 def test_CalculateMedian_inputs():
     input_map = dict(
-        in_files=dict(),
-        median_file=dict(),
-        median_per_file=dict(
-            usedefault=True,
-        ),
+        in_files=dict(), median_file=dict(), median_per_file=dict(usedefault=True)
     )
     inputs = CalculateMedian.input_spec()

@@ -18,9 +14,7 @@ def test_CalculateMedian_inputs():


 def test_CalculateMedian_outputs():
-    output_map = dict(
-        median_files=dict(),
-    )
+    output_map = dict(median_files=dict())
     outputs = CalculateMedian.output_spec()

     for key, metadata in list(output_map.items()):

13 changes: 3 additions & 10 deletions nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py
@@ -4,13 +4,8 @@

 def test_CalculateNormalizedMoments_inputs():
     input_map = dict(
-        moment=dict(
-            mandatory=True,
-        ),
-        timeseries_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        moment=dict(mandatory=True),
+        timeseries_file=dict(extensions=None, mandatory=True),
     )
     inputs = CalculateNormalizedMoments.input_spec()

@@ -20,9 +15,7 @@ def test_CalculateNormalizedMoments_inputs():


 def test_CalculateNormalizedMoments_outputs():
-    output_map = dict(
-        moments=dict(),
-    )
+    output_map = dict(moments=dict())
     outputs = CalculateNormalizedMoments.output_spec()

     for key, metadata in list(output_map.items()):
