
[REF] Fix use of logging module in utils, algorithms and interfaces. #2276


Merged
merged 5 commits into from
Nov 13, 2017
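The change applied throughout is the same: instead of building log messages eagerly with `+` or `str.format()`, the message template and its arguments are passed directly to the logging call, so `%`-interpolation only happens if a handler actually emits the record. A minimal sketch of the before/after pattern (the shape value is illustrative):

```python
import logging

logger = logging.getLogger('interface')

shape = (64, 64, 32, 240)  # stand-in for data.shape

# Before: the message string is built even when DEBUG records are discarded.
logger.debug('Performing polynomial regression on data of shape ' + str(shape))

# After: interpolation is deferred until the record is actually emitted.
logger.debug('Performing polynomial regression on data of shape %s', shape)
```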
13 changes: 7 additions & 6 deletions nipype/algorithms/confounds.py
@@ -30,7 +30,7 @@
from ..utils import NUMPY_MMAP
from ..utils.misc import normalize_mc_params

IFLOG = logging.getLogger('interface')
IFLOGGER = logging.getLogger('interface')


class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
@@ -286,7 +286,7 @@ def _run_interface(self, runtime):
tr = self.inputs.series_tr

if self.inputs.normalize and tr is None:
IFLOG.warn('FD plot cannot be normalized if TR is not set')
IFLOGGER.warn('FD plot cannot be normalized if TR is not set')

self._results['out_figure'] = op.abspath(self.inputs.out_figure)
fig = plot_confound(fd_res, self.inputs.figsize, 'FD', units='mm',
@@ -601,8 +601,8 @@ def _process_masks(self, mask_images, timeseries=None):
# save mask
mask_file = os.path.abspath('mask_{:03d}.nii.gz'.format(i))
out_image.to_filename(mask_file)
IFLOG.debug('tCompcor computed and saved mask of shape {} to '
'mask_file {}'.format(mask.shape, mask_file))
IFLOGGER.debug('tCompcor computed and saved mask of shape %s to '
'mask_file %s', str(mask.shape), mask_file)
self._mask_files.append(mask_file)
out_images.append(out_image)
return out_images
@@ -919,7 +919,7 @@ def regress_poly(degree, data, remove_mean=True, axis=-1):
:param int axis: numpy array axes along which regression is performed

"""
IFLOG.debug('Performing polynomial regression on data of shape ' + str(data.shape))
IFLOGGER.debug('Performing polynomial regression on data of shape %s',
str(data.shape))

datashape = data.shape
timepoints = datashape[axis]
@@ -1147,7 +1148,7 @@ def _full_rank(X, cmax=1e15):
c = smax / smin
if c < cmax:
return X, c
IFLOG.warn('Matrix is singular at working precision, regularizing...')
IFLOGGER.warn('Matrix is singular at working precision, regularizing...')
lda = (smax - cmax * smin) / (cmax - 1)
s = s + lda
X = np.dot(U, np.dot(np.diag(s), V))
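One thing the renamed calls above keep is `.warn()`, which the standard library documents only as a deprecated alias of `.warning()`; the equivalent call with the canonical name would be, for example:

```python
import logging

IFLOGGER = logging.getLogger('interface')

# Logger.warn is a deprecated alias; Logger.warning is the documented method.
IFLOGGER.warning('Matrix is singular at working precision, regularizing...')
```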
42 changes: 18 additions & 24 deletions nipype/algorithms/misc.py
@@ -362,26 +362,23 @@ def _run_interface(self, runtime):
if isinstance(in_dict[key][0], np.ndarray):
saved_variables.append(key)
else:
iflogger.info('One of the keys in the input file, {k}, is not a Numpy array'.format(k=key))
iflogger.info('One of the keys in the input file, %s, is '
'not a Numpy array', key)

if len(saved_variables) > 1:
iflogger.info(
'{N} variables found:'.format(N=len(saved_variables)))
iflogger.info('%i variables found:', len(saved_variables))
iflogger.info(saved_variables)
for variable in saved_variables:
iflogger.info(
'...Converting {var} - type {ty} - to\
CSV'.format(var=variable, ty=type(in_dict[variable]))
)
matlab2csv(
in_dict[variable], variable, self.inputs.reshape_matrix)
iflogger.info('...Converting %s - type %s - to CSV',
variable, type(in_dict[variable]))
matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix)
elif len(saved_variables) == 1:
_, name, _ = split_filename(self.inputs.in_file)
variable = saved_variables[0]
iflogger.info('Single variable found {var}, type {ty}:'.format(
var=variable, ty=type(in_dict[variable])))
iflogger.info('...Converting {var} to CSV from {f}'.format(
var=variable, f=self.inputs.in_file))
iflogger.info('Single variable found %s, type %s:', variable,
type(in_dict[variable]))
iflogger.info('...Converting %s to CSV from %s', variable,
self.inputs.in_file)
matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix)
else:
iflogger.error('No values in the MATLAB file?!')
@@ -396,8 +393,8 @@ def _list_outputs(self):
if isinstance(in_dict[key][0], np.ndarray):
saved_variables.append(key)
else:
iflogger.error('One of the keys in the input file, {k}, is\
not a Numpy array'.format(k=key))
iflogger.error('One of the keys in the input file, %s, is '
'not a Numpy array', key)

if len(saved_variables) > 1:
outputs['csv_files'] = replaceext(saved_variables, '.csv')
@@ -555,28 +552,25 @@ def _run_interface(self, runtime):
iflogger.info('Column headings have been provided:')
headings = self.inputs.column_headings
else:
iflogger.info(
'Column headings not provided! Pulled from input filenames:')
iflogger.info('Column headings not provided! Pulled from input filenames:')
headings = remove_identical_paths(self.inputs.in_files)

if isdefined(self.inputs.extra_field):
if isdefined(self.inputs.extra_column_heading):
extraheading = self.inputs.extra_column_heading
iflogger.info('Extra column heading provided: {col}'.format(
col=extraheading))
iflogger.info('Extra column heading provided: %s', extraheading)
else:
extraheading = 'type'
iflogger.info(
'Extra column heading was not defined. Using "type"')
iflogger.info('Extra column heading was not defined. Using "type"')
headings.append(extraheading)
extraheadingBool = True

if len(self.inputs.in_files) == 1:
iflogger.warn('Only one file input!')

if isdefined(self.inputs.row_headings):
iflogger.info('Row headings have been provided. Adding "labels"\
column header.')
iflogger.info('Row headings have been provided. Adding "labels"'
'column header.')
prefix = '"{p}","'.format(p=self.inputs.row_heading_title)
csv_headings = prefix + '","'.join(itertools.chain(
headings)) + '"\n'
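A caveat when a message is split across adjacent string literals, as in the 'Row headings' call above: Python joins the literals with no separator, so a space has to be written explicitly at the break. A small illustration:

```python
# Adjacent string literals concatenate with no separator:
msg = ('Row headings have been provided. Adding "labels"'
       'column header.')
# -> 'Row headings have been provided. Adding "labels"column header.'

# Keeping an explicit space at the break gives the intended message:
msg = ('Row headings have been provided. Adding "labels" '
       'column header.')
```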
@@ -1310,7 +1304,7 @@ def merge_rois(in_files, in_idxs, in_ref,
# to avoid memory errors
if op.splitext(in_ref)[1] == '.gz':
try:
iflogger.info('uncompress %i' % in_ref)
iflogger.info('uncompress %s', in_ref)
sp.check_call(['gunzip', in_ref], stdout=sp.PIPE, shell=True)
in_ref = op.splitext(in_ref)[0]
except:
14 changes: 7 additions & 7 deletions nipype/algorithms/modelgen.py
@@ -374,9 +374,9 @@ def _generate_standard_design(self, infolist, functional_runs=None,
for f in filename_to_list(sessinfo[i]['scans']):
shape = load(f, mmap=NUMPY_MMAP).shape
if len(shape) == 3 or shape[3] == 1:
iflogger.warning(('You are using 3D instead of 4D '
'files. Are you sure this was '
'intended?'))
iflogger.warning('You are using 3D instead of 4D '
'files. Are you sure this was '
'intended?')
numscans += 1
else:
numscans += shape[3]
@@ -686,7 +686,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):

if dt < 1:
raise Exception('Time multiple less than 1 ms')
iflogger.info('Setting dt = %d ms\n' % dt)
iflogger.info('Setting dt = %d ms\n', dt)
npts = int(np.ceil(total_time / dt))
times = np.arange(0, total_time, dt) * 1e-3
timeline = np.zeros((npts))
@@ -705,9 +705,9 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
response = np.convolve(boxcar, hrf)
reg_scale = 1.0 / response.max()
iflogger.info('response sum: %.4f max: %.4f' % (response.sum(),
response.max()))
iflogger.info('reg_scale: %.4f' % reg_scale)
iflogger.info('response sum: %.4f max: %.4f', response.sum(),
response.max())
iflogger.info('reg_scale: %.4f', reg_scale)

for i, t in enumerate(onsets):
idx = int(np.round(t / dt))
2 changes: 1 addition & 1 deletion nipype/interfaces/ants/base.py
@@ -11,7 +11,7 @@
from ... import logging, LooseVersion
from ..base import (CommandLine, CommandLineInputSpec, traits, isdefined,
PackageInfo)
logger = logging.getLogger('interface')
iflogger = logging.getLogger('interface')

# -Using -1 gives primary responsibilty to ITKv4 to do the correct
# thread limitings.
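The rename from `logger` to `iflogger` is purely a naming change: `logging.getLogger` returns the same `Logger` object every time it is called with a given name, so every module asking for 'interface' shares one logger no matter what the local variable is called. A quick check:

```python
import logging

logger = logging.getLogger('interface')
iflogger = logging.getLogger('interface')

# Same name -> same Logger instance; only the module-level variable name changed.
assert logger is iflogger
```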
2 changes: 1 addition & 1 deletion nipype/interfaces/ants/registration.py
@@ -672,7 +672,7 @@ class Registration(ANTSCommand):

One can use multiple similarity metrics in a single registration stage. The Node below first
performs a linear registration using only the Mutual Information ('Mattes')-metric.
In a second stage, it performs a non-linear registration ('Syn') using both a
In a second stage, it performs a non-linear registration ('Syn') using both a
Mutual Information and a local cross-correlation ('CC')-metric. Both metrics are weighted
equally ('metric_weight' is .5 for both). The Mutual Information- metric uses 32 bins.
The local cross-correlations (correlations between every voxel's neighborhoods) is computed
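For context, a hedged sketch of the two-stage setup this docstring describes, using the list-of-lists convention of the Registration interface; apart from 'metric_weight' and the 32 bins mentioned in the text, the trait names and values here are assumptions, and the image paths are placeholders:

```python
from nipype.interfaces.ants import Registration

reg = Registration()
reg.inputs.fixed_image = 'fixed.nii.gz'     # placeholder path
reg.inputs.moving_image = 'moving.nii.gz'   # placeholder path
reg.inputs.transforms = ['Affine', 'SyN']
reg.inputs.transform_parameters = [(2.0,), (0.25, 3.0, 0.0)]
reg.inputs.number_of_iterations = [[1500, 200], [100, 50, 30]]
# Stage 1: Mutual Information only; stage 2: MI and CC weighted equally.
reg.inputs.metric = [['Mattes'], ['Mattes', 'CC']]
reg.inputs.metric_weight = [[1.0], [0.5, 0.5]]
# 32 histogram bins for 'Mattes'; a neighbourhood radius of 4 voxels for 'CC'.
reg.inputs.radius_or_number_of_bins = [[32], [32, 4]]
```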