Description
Hi,
I've adapted the smri_fsreconall.py tutorial to work with my own data, but it seems I've run into a bug.
Here is my code:
import glob
import os
import re
import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
from nipype.workflows.smri.freesurfer import create_reconall_workflow
from nipype.interfaces.freesurfer.utils import MakeAverageSubject
from nipype.interfaces.utility import IdentityInterface
work_dir = '/Users/Raquel/Desktop/DATA'
print 'The working directory is: ' + work_dir + '\n'
dcm_folder = work_dir + '/ADNI_DATA_nii'
print 'The raw data directory is: ' + dcm_folder + '\n'
recon_all_folder = work_dir + '/recon_all'
print 'After applying RECON-ALL the data will be in: ' + recon_all_folder + '\n'
base_folder = work_dir + '/workdir'
print 'The base directory is: ' + base_folder + '\n'
# Assign the workflow directory
workflow_dir = os.path.abspath('workflow_dir')
if not os.path.isdir(workflow_dir):
    os.mkdir(workflow_dir)
# Define the workflow directories
subject_list = [os.path.basename(x) for x in glob.glob(dcm_folder + '/*')]  # generate the subject list and drop the path
print subject_list
data_dir = os.path.abspath(dcm_folder)
print data_dir
subjects_dir = os.path.abspath('recon_all')
print subjects_dir
if not os.path.exists(subjects_dir):
    os.mkdir(subjects_dir)
wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.join(workflow_dir, 'workdir')
# Create inputspec and set up iteration over subjects
inputspec = pe.Node(interface=IdentityInterface(['subject_id']),
                    name="inputspec")
inputspec.iterables = ("subject_id", subject_list)
# Grab data
datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                               outfields=['struct']),
                     name='datasource')
datasource.inputs.base_directory = data_dir
datasource.inputs.template = ''
datasource.inputs.field_template = {'struct': '%s/MP/2_/S_/*.nii.gz'}
datasource.inputs.template_args = {'struct': [['subject_id']]}
datasource.inputs.sort_filelist = True
wf.connect(inputspec, 'subject_id', datasource, 'subject_id')
# Run recon-all
recon_all = create_reconall_workflow()
recon_all.inputs.inputspec.subjects_dir = subjects_dir
wf.connect(datasource, 'struct', recon_all, 'inputspec.T1_files')
wf.connect(inputspec, 'subject_id', recon_all, 'inputspec.subject_id')
# Make average subject
average = pe.JoinNode(interface=MakeAverageSubject(),
                      joinsource="inputspec",
                      joinfield="subjects_ids",
                      name="average")
average.inputs.subjects_dir = subjects_dir
wf.connect(recon_all, 'postdatasink_outputspec.subject_id', average, 'subjects_ids')
wf.run("MultiProc", plugin_args={'n_procs': 4})
Here is the log file:
File: /Users/Raquel/Python/crash-20160512-004117-Raquel-Robust_Template.a2.pklz
Node: l1workflow.ReconAll.AutoRecon1.Robust_Template.a2
Working directory: /Users/Raquel/Python/workflow_dir/workdir/l1workflow/ReconAll/AutoRecon1/_subject_id_002_S_0413/Robust_Template
Node inputs:
function_str = def createTemplate(in_files, out_file):
    import os
    import shutil
    if len(in_files) == 1:
        print("WARNING: only one run found. This is OK, but motion correction " +
              "cannot be performed on one run, so I'll copy the run to rawavg " +
              "and continue.")
        shutil.copyfile(in_files[0], out_file)
        intensity_scales = None
        transforms = None
    else:
        from nipype.interfaces.freesurfer import RobustTemplate
        # if multiple T1 scans are given
        intensity_scales = [os.path.basename(f.replace('.mgz', '-iscale.txt')) for f in in_files]
        transforms = [os.path.basename(f.replace('.mgz', '.lta')) for f in in_files]
        robtemp = RobustTemplate()
        robtemp.inputs.in_files = in_files
        robtemp.inputs.average_metric = 'median'
        robtemp.inputs.out_file = out_file
        robtemp.inputs.no_iteration = True
        robtemp.inputs.fixed_timepoint = True
        robtemp.inputs.auto_detect_sensitivity = True
        robtemp.inputs.initial_timepoint = 1
        robtemp.inputs.scaled_intensity_outputs = intensity_scales
        robtemp.inputs.transform_outputs = transforms
        robtemp.inputs.subsample_threshold = 200
        robtemp.inputs.intensity_scaling = True
        robtemp.run()
        intensity_scales = [os.path.abspath(f) for f in robtemp.outputs.scaled_intensity_outputs]
        transforms = [os.path.abspath(f) for f in robtemp.outputs.transform_outputs]
        out_file = robtemp.outputs.out_file
    out_file = os.path.abspath(out_file)
    return out_file, intensity_scales, transforms
ignore_exception = False
in_files = ['/Users/Raquel/Python/workflow_dir/workdir/l1workflow/ReconAll/AutoRecon1/_subject_id_002_S_0413/T1_prep/mapflow/_T1_prep0/001.mgz', '/Users/Raquel/Python/workflow_dir/workdir/l1workflow/ReconAll/AutoRecon1/_subject_id_002_S_0413/T1_prep/mapflow/_T1_prep1/002.mgz', '/Users/Raquel/Python/workflow_dir/workdir/l1workflow/ReconAll/AutoRecon1/_subject_id_002_S_0413/T1_prep/mapflow/_T1_prep2/003.mgz', '/Users/Raquel/Python/workflow_dir/workdir/l1workflow/ReconAll/AutoRecon1/_subject_id_002_S_0413/T1_prep/mapflow/_T1_prep3/004.mgz']
out_file = rawavg.mgz
Traceback:
Traceback (most recent call last):
File "/Users/Raquel/anaconda/lib/python2.7/site-packages/nipype/pipeline/plugins/multiproc.py", line 19, in run_node
result['result'] = node.run(updatehash=updatehash)
File "/Users/Raquel/anaconda/lib/python2.7/site-packages/nipype/pipeline/engine/nodes.py", line 392, in run
self._run_interface()
File "/Users/Raquel/anaconda/lib/python2.7/site-packages/nipype/pipeline/engine/nodes.py", line 502, in _run_interface
self._result = self._run_command(execute)
File "/Users/Raquel/anaconda/lib/python2.7/site-packages/nipype/pipeline/engine/nodes.py", line 628, in _run_command
result = self._interface.run()
File "/Users/Raquel/anaconda/lib/python2.7/site-packages/nipype/interfaces/base.py", line 1032, in run
runtime = self._run_wrapper(runtime)
File "/Users/Raquel/anaconda/lib/python2.7/site-packages/nipype/interfaces/base.py", line 989, in _run_wrapper
runtime = self._run_interface(runtime)
File "/Users/Raquel/anaconda/lib/python2.7/site-packages/nipype/interfaces/utility.py", line 447, in _run_interface
out = function_handle(**args)
File "", line 29, in createTemplate
AttributeError: 'RobustTemplate' object has no attribute 'outputs'
Interface Function failed to run.
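The crash points at line 29 of createTemplate, i.e. the first robtemp.outputs access after robtemp.run(). As far as I can tell, a nipype interface instance has no outputs attribute; the outputs live on the InterfaceResult object that run() returns. A minimal sketch of the change I would expect to fix it (my assumption, not a confirmed patch):

# Sketch: capture the InterfaceResult from run() and read outputs from it,
# instead of from the RobustTemplate instance itself.
result = robtemp.run()
intensity_scales = [os.path.abspath(f)
                    for f in result.outputs.scaled_intensity_outputs]
transforms = [os.path.abspath(f) for f in result.outputs.transform_outputs]
out_file = result.outputs.out_file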