
Commit 2c89c58

Merge remote-tracking branch 'upstream/master' into fix/VirtualDisplay
2 parents: 7644acf + 6e95b3c


53 files changed: +1735 -1885 lines

.circle/tests.sh

Lines changed: 4 additions & 4 deletions
@@ -17,8 +17,8 @@ fi
 # They may need to be rebalanced in the future.
 case ${CIRCLE_NODE_INDEX} in
 0)
-docker run --rm=false -it -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_pytests.sh && \
-docker run --rm=false -it -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_pytests.sh && \
+docker run --rm=false -it -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_pytests.sh && \
+docker run --rm=false -it -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_pytests.sh && \
 docker run --rm=false -it -v $WORKDIR:/work -w /src/nipype/doc --entrypoint=/usr/bin/run_builddocs.sh nipype/nipype:py36 /usr/bin/run_builddocs.sh && \
 docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d && \
 docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d
@@ -30,8 +30,8 @@ case ${CIRCLE_NODE_INDEX} in
 exitcode=$?
 ;;
 2)
-docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
-docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline
+docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
+docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -e NIPYPE_RESOURCE_MONITOR=1 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline
 exitcode=$?
 ;;
 3)

.travis.yml

Lines changed: 4 additions & 4 deletions
@@ -8,10 +8,10 @@ python:
 - 3.5
 - 3.6
 env:
-- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler"
-- INSTALL_DEB_DEPENDECIES=false NIPYPE_EXTRAS="doc,tests,fmri,profiler"
-- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler,duecredit"
-- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler" PIP_FLAGS="--pre"
+- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler" CI_SKIP_TEST=1
+- INSTALL_DEB_DEPENDECIES=false NIPYPE_EXTRAS="doc,tests,fmri,profiler" CI_SKIP_TEST=1
+- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler,duecredit" CI_SKIP_TEST=1
+- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler" PIP_FLAGS="--pre" CI_SKIP_TEST=1
 before_install:
 - function apt_inst {
 if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi &&

doc/users/config_file.rst

Lines changed: 70 additions & 57 deletions
@@ -14,59 +14,63 @@ Logging
 ~~~~~~~

 *workflow_level*
-How detailed the logs regarding workflow should be (possible values:
-``INFO`` and ``DEBUG``; default value: ``INFO``)
-*filemanip_level*
-How detailed the logs regarding file operations (for example overwriting
-warning) should be (possible values: ``INFO`` and ``DEBUG``; default value:
-``INFO``)
+How detailed the logs regarding workflow should be (possible values:
+``INFO`` and ``DEBUG``; default value: ``INFO``)
+*utils_level*
+How detailed the logs regarding nipype utils, like file operations
+(for example overwriting warning) or the resource profiler, should be
+(possible values: ``INFO`` and ``DEBUG``; default value:
+``INFO``)
 *interface_level*
-How detailed the logs regarding interface execution should be (possible
-values: ``INFO`` and ``DEBUG``; default value: ``INFO``)
+How detailed the logs regarding interface execution should be (possible
+values: ``INFO`` and ``DEBUG``; default value: ``INFO``)
+*filemanip_level* (deprecated as of 1.0)
+How detailed the logs regarding file operations (for example overwriting
+warning) should be (possible values: ``INFO`` and ``DEBUG``)
 *log_to_file*
 Indicates whether logging should also send the output to a file (possible
 values: ``true`` and ``false``; default value: ``false``)
 *log_directory*
-Where to store logs. (string, default value: home directory)
+Where to store logs. (string, default value: home directory)
 *log_size*
-Size of a single log file. (integer, default value: 254000)
+Size of a single log file. (integer, default value: 254000)
 *log_rotate*
-How many rotation should the log file make. (integer, default value: 4)
+How many rotation should the log file make. (integer, default value: 4)

 Execution
 ~~~~~~~~~

 *plugin*
-This defines which execution plugin to use. (possible values: ``Linear``,
-``MultiProc``, ``SGE``, ``IPython``; default value: ``Linear``)
+This defines which execution plugin to use. (possible values: ``Linear``,
+``MultiProc``, ``SGE``, ``IPython``; default value: ``Linear``)

 *stop_on_first_crash*
-Should the workflow stop upon first node crashing or try to execute as many
-nodes as possible? (possible values: ``true`` and ``false``; default value:
-``false``)
+Should the workflow stop upon first node crashing or try to execute as many
+nodes as possible? (possible values: ``true`` and ``false``; default value:
+``false``)

 *stop_on_first_rerun*
-Should the workflow stop upon first node trying to recompute (by that we
-mean rerunning a node that has been run before - this can happen due changed
-inputs and/or hash_method since the last run). (possible values: ``true``
-and ``false``; default value: ``false``)
+Should the workflow stop upon first node trying to recompute (by that we
+mean rerunning a node that has been run before - this can happen due changed
+inputs and/or hash_method since the last run). (possible values: ``true``
+and ``false``; default value: ``false``)

 *hash_method*
-Should the input files be checked for changes using their content (slow, but
-100% accurate) or just their size and modification date (fast, but
-potentially prone to errors)? (possible values: ``content`` and
-``timestamp``; default value: ``timestamp``)
+Should the input files be checked for changes using their content (slow, but
+100% accurate) or just their size and modification date (fast, but
+potentially prone to errors)? (possible values: ``content`` and
+``timestamp``; default value: ``timestamp``)

 *keep_inputs*
 Ensures that all inputs that are created in the nodes working directory are
 kept after node execution (possible values: ``true`` and ``false``; default
 value: ``false``)

 *single_thread_matlab*
-Should all of the Matlab interfaces (including SPM) use only one thread?
-This is useful if you are parallelizing your workflow using MultiProc or
-IPython on a single multicore machine. (possible values: ``true`` and
-``false``; default value: ``true``)
+Should all of the Matlab interfaces (including SPM) use only one thread?
+This is useful if you are parallelizing your workflow using MultiProc or
+IPython on a single multicore machine. (possible values: ``true`` and
+``false``; default value: ``true``)

 *display_variable*
 What ``$DISPLAY`` environment variable should utilize those interfaces
@@ -83,29 +87,29 @@ Execution
 set)

 *remove_unnecessary_outputs*
-This will remove any interface outputs not needed by the workflow. If the
-required outputs from a node changes, rerunning the workflow will rerun the
-node. Outputs of leaf nodes (nodes whose outputs are not connected to any
-other nodes) will never be deleted independent of this parameter. (possible
-values: ``true`` and ``false``; default value: ``true``)
+This will remove any interface outputs not needed by the workflow. If the
+required outputs from a node changes, rerunning the workflow will rerun the
+node. Outputs of leaf nodes (nodes whose outputs are not connected to any
+other nodes) will never be deleted independent of this parameter. (possible
+values: ``true`` and ``false``; default value: ``true``)

 *try_hard_link_datasink*
-When the DataSink is used to produce an orginized output file outside
-of nipypes internal cache structure, a file system hard link will be
-attempted first. A hard link allow multiple file paths to point to the
-same physical storage location on disk if the conditions allow. By
-refering to the same physical file on disk (instead of copying files
-byte-by-byte) we can avoid unnecessary data duplication. If hard links
-are not supported for the source or destination paths specified, then
-a standard byte-by-byte copy is used. (possible values: ``true`` and
-``false``; default value: ``true``)
+When the DataSink is used to produce an orginized output file outside
+of nipypes internal cache structure, a file system hard link will be
+attempted first. A hard link allow multiple file paths to point to the
+same physical storage location on disk if the conditions allow. By
+refering to the same physical file on disk (instead of copying files
+byte-by-byte) we can avoid unnecessary data duplication. If hard links
+are not supported for the source or destination paths specified, then
+a standard byte-by-byte copy is used. (possible values: ``true`` and
+``false``; default value: ``true``)

 *use_relative_paths*
-Should the paths stored in results (and used to look for inputs)
-be relative or absolute. Relative paths allow moving the whole
-working directory around but may cause problems with
-symlinks. (possible values: ``true`` and ``false``; default
-value: ``false``)
+Should the paths stored in results (and used to look for inputs)
+be relative or absolute. Relative paths allow moving the whole
+working directory around but may cause problems with
+symlinks. (possible values: ``true`` and ``false``; default
+value: ``false``)

 *local_hash_check*
 Perform the hash check on the job submission machine. This option minimizes
@@ -120,10 +124,10 @@ Execution
 done after a job finish is detected. (float in seconds; default value: 5)

 *remove_node_directories (EXPERIMENTAL)*
-Removes directories whose outputs have already been used
-up. Doesn't work with IdentiInterface or any node that patches
-data through (without copying) (possible values: ``true`` and
-``false``; default value: ``false``)
+Removes directories whose outputs have already been used
+up. Doesn't work with IdentiInterface or any node that patches
+data through (without copying) (possible values: ``true`` and
+``false``; default value: ``false``)

 *stop_on_unknown_version*
 If this is set to True, an underlying interface will raise an error, when no
@@ -151,18 +155,27 @@ Execution
 crashfiles allow portability across machines and shorter load time.
 (possible values: ``pklz`` and ``txt``; default value: ``pklz``)

+*resource_monitor*
+Enables monitoring the resources occupation (possible values: ``true`` and
+``false``; default value: ``false``)
+
+*resource_monitor_frequency*
+Sampling period (in seconds) between measurements of resources (memory, cpus)
+being used by an interface. Requires ``resource_monitor`` to be ``true``.
+(default value: ``1``)
+
 Example
 ~~~~~~~

 ::

-[logging]
-workflow_level = DEBUG
+[logging]
+workflow_level = DEBUG

-[execution]
-stop_on_first_crash = true
-hash_method = timestamp
-display_variable = :1
+[execution]
+stop_on_first_crash = true
+hash_method = timestamp
+display_variable = :1

 Workflow.config property has a form of a nested dictionary reflecting the
 structure of the .cfg file.
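
As a quick, illustrative sketch (not part of this commit): the two new options documented above can be set through that nested ``Workflow.config`` dictionary instead of a ``.cfg`` file. The option names come from the diff; the workflow object below is a hypothetical placeholder::

    # Hedged sketch -- option names are taken from doc/users/config_file.rst above;
    # the workflow itself is a made-up example.
    import nipype.pipeline.engine as pe

    wf = pe.Workflow(name='demo_wf')  # placeholder workflow
    wf.config['execution'] = {
        'resource_monitor': 'true',          # default is 'false'
        'resource_monitor_frequency': '3',   # sample every 3 seconds (default: 1)
    }
    wf.config['logging'] = {'utils_level': 'DEBUG'}  # replaces the deprecated filemanip_level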

doc/users/plugins.rst

Lines changed: 8 additions & 0 deletions
@@ -74,6 +74,14 @@ Optional arguments::
 n_procs : Number of processes to launch in parallel, if not set number of
 processors/threads will be automatically detected

+memory_gb : Total memory available to be shared by all simultaneous tasks
+currently running, if not set it will be automatically set to 90\% of
+system RAM.
+
+raise_insufficient : Raise exception when the estimated resources of a node
+exceed the total amount of resources available (memory and threads), when
+``False`` (default), only a warning will be issued.
+
 To distribute processing on a multicore machine, simply call::

 workflow.run(plugin='MultiProc')
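
For illustration only (not part of the diff): the new ``memory_gb`` and ``raise_insufficient`` options described above are passed through ``plugin_args`` alongside the existing ``n_procs``; ``workflow`` is assumed to be an already-built nipype workflow::

    # Hedged sketch based on the optional arguments documented above.
    workflow.run(plugin='MultiProc',
                 plugin_args={'n_procs': 4,                 # parallel processes
                              'memory_gb': 8,               # memory shared by all running tasks
                              'raise_insufficient': True})  # raise instead of only warning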

doc/users/resource_sched_profiler.rst

Lines changed: 2 additions & 2 deletions
@@ -82,7 +82,7 @@ by setting the ``status_callback`` parameter to point to this function in the

 ::

-from nipype.pipeline.plugins.callback_log import log_nodes_cb
+from nipype.utils.profiler import log_nodes_cb
 args_dict = {'n_procs' : 8, 'memory_gb' : 10, 'status_callback' : log_nodes_cb}

 To set the filepath for the callback log the ``'callback'`` logger must be
@@ -141,7 +141,7 @@ The pandas_ Python package is required to use this feature.

 ::

-from nipype.pipeline.plugins.callback_log import log_nodes_cb
+from nipype.utils.profiler import log_nodes_cb
 args_dict = {'n_procs' : 8, 'memory_gb' : 10, 'status_callback' : log_nodes_cb}
 workflow.run(plugin='MultiProc', plugin_args=args_dict)

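As a sketch (not part of the diff), the relocated import shown above can be wired to a standard ``logging`` file handler so that the ``'callback'`` logger named in the surrounding text writes to disk; the log file name and the commented-out run call are assumptions::

    # Hedged sketch -- the import path and args_dict come from the diff above;
    # the file name is a hypothetical example.
    import logging
    from nipype.utils.profiler import log_nodes_cb

    callback_log_path = 'run_stats.log'      # hypothetical output path
    logger = logging.getLogger('callback')   # the 'callback' logger mentioned above
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.FileHandler(callback_log_path))

    args_dict = {'n_procs': 8, 'memory_gb': 10, 'status_callback': log_nodes_cb}
    # workflow.run(plugin='MultiProc', plugin_args=args_dict)
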
docker/files/run_examples.sh

Lines changed: 13 additions & 3 deletions
@@ -12,10 +12,18 @@ mkdir -p ${HOME}/.nipype ${WORKDIR}/logs/example_${example_id} ${WORKDIR}/tests
 echo "[logging]" > ${HOME}/.nipype/nipype.cfg
 echo "workflow_level = DEBUG" >> ${HOME}/.nipype/nipype.cfg
 echo "interface_level = DEBUG" >> ${HOME}/.nipype/nipype.cfg
-echo "filemanip_level = DEBUG" >> ${HOME}/.nipype/nipype.cfg
+echo "utils_level = DEBUG" >> ${HOME}/.nipype/nipype.cfg
 echo "log_to_file = true" >> ${HOME}/.nipype/nipype.cfg
 echo "log_directory = ${WORKDIR}/logs/example_${example_id}" >> ${HOME}/.nipype/nipype.cfg

+echo '[execution]' >> ${HOME}/.nipype/nipype.cfg
+echo 'crashfile_format = txt' >> ${HOME}/.nipype/nipype.cfg
+
+if [[ "${NIPYPE_RESOURCE_MONITOR:-0}" == "1" ]]; then
+echo 'resource_monitor = true' >> ${HOME}/.nipype/nipype.cfg
+echo 'resource_monitor_frequency = 3' >> ${HOME}/.nipype/nipype.cfg
+fi
+
 # Set up coverage
 export COVERAGE_FILE=${WORKDIR}/tests/.coverage.${example_id}
 if [ "$2" == "MultiProc" ]; then
@@ -25,8 +33,10 @@ fi
 coverage run /src/nipype/tools/run_examples.py $@
 exit_code=$?

+if [[ "${NIPYPE_RESOURCE_MONITOR:-0}" == "1" ]]; then
+cp resource_monitor.json 2>/dev/null ${WORKDIR}/logs/example_${example_id}/ || :
+fi
 # Collect crashfiles and generate xml report
 coverage xml -o ${WORKDIR}/tests/smoketest_${example_id}.xml
-find /work -name "crash-*" -maxdepth 1 -exec mv {} ${WORKDIR}/crashfiles/ \;
+find /work -maxdepth 1 -name "crash-*" -exec mv {} ${WORKDIR}/crashfiles/ \;
 exit $exit_code
-
docker/files/run_pytests.sh

Lines changed: 6 additions & 14 deletions
@@ -17,28 +17,20 @@ echo '[logging]' > ${HOME}/.nipype/nipype.cfg
 echo 'log_to_file = true' >> ${HOME}/.nipype/nipype.cfg
 echo "log_directory = ${WORKDIR}/logs/py${PYTHON_VERSION}" >> ${HOME}/.nipype/nipype.cfg

-# Enable profile_runtime tests only for python 2.7
-if [[ "${PYTHON_VERSION}" -lt "30" ]]; then
-echo '[execution]' >> ${HOME}/.nipype/nipype.cfg
-echo 'profile_runtime = true' >> ${HOME}/.nipype/nipype.cfg
+echo '[execution]' >> ${HOME}/.nipype/nipype.cfg
+echo 'crashfile_format = txt' >> ${HOME}/.nipype/nipype.cfg
+
+if [[ "${NIPYPE_RESOURCE_MONITOR:-0}" == "1" ]]; then
+echo 'resource_monitor = true' >> ${HOME}/.nipype/nipype.cfg
 fi

 # Run tests using pytest
 export COVERAGE_FILE=${WORKDIR}/tests/.coverage.py${PYTHON_VERSION}
 py.test -v --junitxml=${WORKDIR}/tests/pytests_py${PYTHON_VERSION}.xml --cov nipype --cov-config /src/nipype/.coveragerc --cov-report xml:${WORKDIR}/tests/coverage_py${PYTHON_VERSION}.xml ${TESTPATH}
 exit_code=$?

-# Workaround: run here the profiler tests in python 3
-if [[ "${PYTHON_VERSION}" -ge "30" ]]; then
-echo '[execution]' >> ${HOME}/.nipype/nipype.cfg
-echo 'profile_runtime = true' >> ${HOME}/.nipype/nipype.cfg
-export COVERAGE_FILE=${WORKDIR}/tests/.coverage.py${PYTHON_VERSION}_extra
-py.test -v --junitxml=${WORKDIR}/tests/pytests_py${PYTHON_VERSION}_extra.xml --cov nipype --cov-report xml:${WORKDIR}/tests/coverage_py${PYTHON_VERSION}_extra.xml /src/nipype/nipype/interfaces/tests/test_runtime_profiler.py /src/nipype/nipype/pipeline/plugins/tests/test_multiproc*.py
-exit_code=$(( $exit_code + $? ))
-fi
-
 # Collect crashfiles
-find ${WORKDIR} -name "crash-*" -maxdepth 1 -exec mv {} ${WORKDIR}/crashfiles/ \;
+find ${WORKDIR} -maxdepth 1 -name "crash-*" -exec mv {} ${WORKDIR}/crashfiles/ \;

 echo "Unit tests finished with exit code ${exit_code}"
 exit ${exit_code}