
Commit a8389aa

fix: autotests
1 parent 98c6d11 commit a8389aa


782 files changed (+33820 additions, -34168 deletions)


nipype/algorithms/tests/test_auto_ACompCor.py

Lines changed: 32 additions & 35 deletions
@@ -4,48 +4,45 @@
 
 
 def test_ACompCor_inputs():
-    input_map = dict(components_file=dict(usedefault=True,
-    ),
-    header_prefix=dict(),
-    high_pass_cutoff=dict(usedefault=True,
-    ),
-    ignore_exception=dict(deprecated='1.0.0',
-    nohash=True,
-    usedefault=True,
-    ),
-    ignore_initial_volumes=dict(usedefault=True,
-    ),
-    mask_files=dict(),
-    mask_index=dict(requires=['mask_files'],
-    xor=['merge_method'],
-    ),
-    merge_method=dict(requires=['mask_files'],
-    xor=['mask_index'],
-    ),
-    num_components=dict(usedefault=True,
-    ),
-    pre_filter=dict(usedefault=True,
-    ),
-    realigned_file=dict(mandatory=True,
-    ),
-    regress_poly_degree=dict(usedefault=True,
-    ),
-    repetition_time=dict(),
-    save_pre_filter=dict(),
-    use_regress_poly=dict(deprecated='0.15.0',
-    new_name='pre_filter',
-    ),
+    input_map = dict(
+        components_file=dict(usedefault=True, ),
+        header_prefix=dict(),
+        high_pass_cutoff=dict(usedefault=True, ),
+        ignore_exception=dict(
+            deprecated='1.0.0',
+            nohash=True,
+            usedefault=True,
+        ),
+        ignore_initial_volumes=dict(usedefault=True, ),
+        mask_files=dict(),
+        mask_index=dict(
+            requires=['mask_files'],
+            xor=['merge_method'],
+        ),
+        merge_method=dict(
+            requires=['mask_files'],
+            xor=['mask_index'],
+        ),
+        num_components=dict(usedefault=True, ),
+        pre_filter=dict(usedefault=True, ),
+        realigned_file=dict(mandatory=True, ),
+        regress_poly_degree=dict(usedefault=True, ),
+        repetition_time=dict(),
+        save_pre_filter=dict(),
+        use_regress_poly=dict(
+            deprecated='0.15.0',
+            new_name='pre_filter',
+        ),
     )
     inputs = ACompCor.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
-
-
 def test_ACompCor_outputs():
-    output_map = dict(components_file=dict(),
-    pre_filter_file=dict(),
+    output_map = dict(
+        components_file=dict(),
+        pre_filter_file=dict(),
     )
     outputs = ACompCor.output_spec()

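All of the regenerated files share the same structure: the auto-test builds an expected metadata map and compares it against the trait metadata exposed by the interface's input_spec() or output_spec(). The sketch below illustrates only that comparison loop and runs without nipype; the Trait and Spec classes are hypothetical stand-ins for the traited interface specs that the real tests obtain from calls such as ACompCor.input_spec().

# Minimal sketch of the comparison loop shared by the test_auto_* files.
# "Trait" and "Spec" are hypothetical stand-ins for nipype's traited specs.

class Trait(object):
    def __init__(self, **metadata):
        # Expose each metadata entry as an attribute, like a traits object.
        for name, value in metadata.items():
            setattr(self, name, value)


class Spec(object):
    def __init__(self, **traits):
        self._traits = {name: Trait(**md) for name, md in traits.items()}

    def traits(self):
        # The real specs return a mapping of trait name -> trait object.
        return self._traits


def test_example_inputs():
    # Expected metadata, in the same shape as the input_map dicts above.
    input_map = dict(
        realigned_file=dict(mandatory=True, ),
        num_components=dict(usedefault=True, ),
    )
    inputs = Spec(
        realigned_file=dict(mandatory=True),
        num_components=dict(usedefault=True),
    )

    # Every expected metadata key must be present on the trait with the same value.
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(inputs.traits()[key], metakey) == value


if __name__ == '__main__':
    test_example_inputs()
    print('ok')
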
nipype/algorithms/tests/test_auto_AddCSVColumn.py

Lines changed: 6 additions & 10 deletions
@@ -4,23 +4,19 @@
 
 
 def test_AddCSVColumn_inputs():
-    input_map = dict(extra_column_heading=dict(),
-    extra_field=dict(),
-    in_file=dict(mandatory=True,
-    ),
-    out_file=dict(usedefault=True,
-    ),
+    input_map = dict(
+        extra_column_heading=dict(),
+        extra_field=dict(),
+        in_file=dict(mandatory=True, ),
+        out_file=dict(usedefault=True, ),
     )
     inputs = AddCSVColumn.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
-
-
 def test_AddCSVColumn_outputs():
-    output_map = dict(csv_file=dict(),
-    )
+    output_map = dict(csv_file=dict(), )
     outputs = AddCSVColumn.output_spec()
 
     for key, metadata in list(output_map.items()):

nipype/algorithms/tests/test_auto_AddCSVRow.py

Lines changed: 9 additions & 12 deletions
@@ -4,25 +4,22 @@
 
 
 def test_AddCSVRow_inputs():
-    input_map = dict(_outputs=dict(usedefault=True,
-    ),
-    ignore_exception=dict(deprecated='1.0.0',
-    nohash=True,
-    usedefault=True,
-    ),
-    in_file=dict(mandatory=True,
-    ),
+    input_map = dict(
+        _outputs=dict(usedefault=True, ),
+        ignore_exception=dict(
+            deprecated='1.0.0',
+            nohash=True,
+            usedefault=True,
+        ),
+        in_file=dict(mandatory=True, ),
     )
     inputs = AddCSVRow.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
-
-
 def test_AddCSVRow_outputs():
-    output_map = dict(csv_file=dict(),
-    )
+    output_map = dict(csv_file=dict(), )
     outputs = AddCSVRow.output_spec()
 
     for key, metadata in list(output_map.items()):

nipype/algorithms/tests/test_auto_AddNoise.py

Lines changed: 14 additions & 16 deletions
@@ -4,29 +4,27 @@
 
 
 def test_AddNoise_inputs():
-    input_map = dict(bg_dist=dict(mandatory=True,
-    usedefault=True,
-    ),
-    dist=dict(mandatory=True,
-    usedefault=True,
-    ),
-    in_file=dict(mandatory=True,
-    ),
-    in_mask=dict(),
-    out_file=dict(),
-    snr=dict(usedefault=True,
-    ),
+    input_map = dict(
+        bg_dist=dict(
+            mandatory=True,
+            usedefault=True,
+        ),
+        dist=dict(
+            mandatory=True,
+            usedefault=True,
+        ),
+        in_file=dict(mandatory=True, ),
+        in_mask=dict(),
+        out_file=dict(),
+        snr=dict(usedefault=True, ),
     )
     inputs = AddNoise.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
-
-
 def test_AddNoise_outputs():
-    output_map = dict(out_file=dict(),
-    )
+    output_map = dict(out_file=dict(), )
     outputs = AddNoise.output_spec()
 
     for key, metadata in list(output_map.items()):

nipype/algorithms/tests/test_auto_ArtifactDetect.py

Lines changed: 47 additions & 50 deletions
@@ -4,63 +4,60 @@
 
 
 def test_ArtifactDetect_inputs():
-    input_map = dict(bound_by_brainmask=dict(usedefault=True,
-    ),
-    global_threshold=dict(usedefault=True,
-    ),
-    ignore_exception=dict(deprecated='1.0.0',
-    nohash=True,
-    usedefault=True,
-    ),
-    intersect_mask=dict(),
-    mask_file=dict(),
-    mask_threshold=dict(),
-    mask_type=dict(mandatory=True,
-    ),
-    norm_threshold=dict(mandatory=True,
-    xor=['rotation_threshold', 'translation_threshold'],
-    ),
-    parameter_source=dict(mandatory=True,
-    ),
-    plot_type=dict(usedefault=True,
-    ),
-    realigned_files=dict(mandatory=True,
-    ),
-    realignment_parameters=dict(mandatory=True,
-    ),
-    rotation_threshold=dict(mandatory=True,
-    xor=['norm_threshold'],
-    ),
-    save_plot=dict(usedefault=True,
-    ),
-    translation_threshold=dict(mandatory=True,
-    xor=['norm_threshold'],
-    ),
-    use_differences=dict(maxlen=2,
-    minlen=2,
-    usedefault=True,
-    ),
-    use_norm=dict(requires=['norm_threshold'],
-    usedefault=True,
-    ),
-    zintensity_threshold=dict(mandatory=True,
-    ),
+    input_map = dict(
+        bound_by_brainmask=dict(usedefault=True, ),
+        global_threshold=dict(usedefault=True, ),
+        ignore_exception=dict(
+            deprecated='1.0.0',
+            nohash=True,
+            usedefault=True,
+        ),
+        intersect_mask=dict(),
+        mask_file=dict(),
+        mask_threshold=dict(),
+        mask_type=dict(mandatory=True, ),
+        norm_threshold=dict(
+            mandatory=True,
+            xor=['rotation_threshold', 'translation_threshold'],
+        ),
+        parameter_source=dict(mandatory=True, ),
+        plot_type=dict(usedefault=True, ),
+        realigned_files=dict(mandatory=True, ),
+        realignment_parameters=dict(mandatory=True, ),
+        rotation_threshold=dict(
+            mandatory=True,
+            xor=['norm_threshold'],
+        ),
+        save_plot=dict(usedefault=True, ),
+        translation_threshold=dict(
+            mandatory=True,
+            xor=['norm_threshold'],
+        ),
+        use_differences=dict(
+            maxlen=2,
+            minlen=2,
+            usedefault=True,
+        ),
+        use_norm=dict(
+            requires=['norm_threshold'],
+            usedefault=True,
+        ),
+        zintensity_threshold=dict(mandatory=True, ),
     )
     inputs = ArtifactDetect.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
-
-
 def test_ArtifactDetect_outputs():
-    output_map = dict(displacement_files=dict(),
-    intensity_files=dict(),
-    mask_files=dict(),
-    norm_files=dict(),
-    outlier_files=dict(),
-    plot_files=dict(),
-    statistic_files=dict(),
+    output_map = dict(
+        displacement_files=dict(),
+        intensity_files=dict(),
+        mask_files=dict(),
+        norm_files=dict(),
+        outlier_files=dict(),
+        plot_files=dict(),
+        statistic_files=dict(),
     )
     outputs = ArtifactDetect.output_spec()

nipype/algorithms/tests/test_auto_CalculateMedian.py

Lines changed: 10 additions & 12 deletions
@@ -4,25 +4,23 @@
 
 
 def test_CalculateMedian_inputs():
-    input_map = dict(ignore_exception=dict(deprecated='1.0.0',
-    nohash=True,
-    usedefault=True,
-    ),
-    in_files=dict(),
-    median_file=dict(),
-    median_per_file=dict(usedefault=True,
-    ),
+    input_map = dict(
+        ignore_exception=dict(
+            deprecated='1.0.0',
+            nohash=True,
+            usedefault=True,
+        ),
+        in_files=dict(),
+        median_file=dict(),
+        median_per_file=dict(usedefault=True, ),
     )
     inputs = CalculateMedian.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
-
-
 def test_CalculateMedian_outputs():
-    output_map = dict(median_files=dict(),
-    )
+    output_map = dict(median_files=dict(), )
     outputs = CalculateMedian.output_spec()
 
     for key, metadata in list(output_map.items()):

nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py

Lines changed: 4 additions & 8 deletions
@@ -4,21 +4,17 @@
 
 
 def test_CalculateNormalizedMoments_inputs():
-    input_map = dict(moment=dict(mandatory=True,
-    ),
-    timeseries_file=dict(mandatory=True,
-    ),
+    input_map = dict(
+        moment=dict(mandatory=True, ),
+        timeseries_file=dict(mandatory=True, ),
     )
     inputs = CalculateNormalizedMoments.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
-
-
 def test_CalculateNormalizedMoments_outputs():
-    output_map = dict(moments=dict(),
-    )
+    output_map = dict(moments=dict(), )
     outputs = CalculateNormalizedMoments.output_spec()
 
     for key, metadata in list(output_map.items()):
