From 3a437c022f03a569318971c64583f37c9e0dd8a5 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 11 Nov 2017 22:18:35 -0500 Subject: [PATCH 1/3] Fix logging formatting in several interfaces. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fix logging formatting in ants, cmtk, elastic, freesurfer, fsl, mrtrix, mrtrix3, nilearn, and spm interfaces. Okay, it was mostly in cmtk, io, and mrtrix. - In nipype/interfaces/io.py logging is imported multiple times, and iflogger is redefined a couple of times. Not sure if that’s the right way to go. - Is this okay: - `iflogger.debug('saving inputs {}', inputs)` (nipype/interfaces/base.py#L1210) - Also caught a couple of things in nipype/interfaces/cmtk/nx.py. Namely a couple of unused variables and at one point redefining `file`. --- nipype/interfaces/ants/base.py | 2 +- nipype/interfaces/ants/registration.py | 2 +- nipype/interfaces/cmtk/cmtk.py | 76 ++++++++++++---------- nipype/interfaces/cmtk/nbs.py | 7 +- nipype/interfaces/cmtk/nx.py | 42 ++++++------ nipype/interfaces/cmtk/parcellation.py | 35 +++++----- nipype/interfaces/elastix/base.py | 2 +- nipype/interfaces/elastix/registration.py | 2 +- nipype/interfaces/elastix/utils.py | 2 +- nipype/interfaces/freesurfer/preprocess.py | 2 +- nipype/interfaces/freesurfer/utils.py | 9 +-- nipype/interfaces/fsl/base.py | 6 +- nipype/interfaces/io.py | 35 +++++----- nipype/interfaces/mrtrix/convert.py | 28 ++++---- nipype/interfaces/mrtrix3/base.py | 4 +- nipype/interfaces/nilearn.py | 2 +- nipype/interfaces/spm/model.py | 2 +- 17 files changed, 135 insertions(+), 123 deletions(-) diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py index 3ab50a24f5..193e80a0fc 100644 --- a/nipype/interfaces/ants/base.py +++ b/nipype/interfaces/ants/base.py @@ -11,7 +11,7 @@ # Local imports from ... import logging, LooseVersion from ..base import CommandLine, CommandLineInputSpec, traits, isdefined -logger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') # -Using -1 gives primary responsibilty to ITKv4 to do the correct # thread limitings. diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index c166bec792..6d82a2e9f1 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -672,7 +672,7 @@ class Registration(ANTSCommand): One can use multiple similarity metrics in a single registration stage.The Node below first performs a linear registation using only the Mutual Information ('Mattes')-metric. - In a second stage, it performs a non-linear registration ('Syn') using both a + In a second stage, it performs a non-linear registration ('Syn') using both a Mutual Information and a local cross-correlation ('CC')-metric. Both metrics are weighted equally ('metric_weight' is .5 for both). The Mutual Information- metric uses 32 bins. The local cross-correlations (correlations between every voxel's neighborhoods) is computed diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 4eeec3e370..2f29bbb2e2 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -120,8 +120,11 @@ def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): connectivity_matrix = get_connectivity_matrix(n_rois, list_of_roi_crossed_lists) dis = n_fib - len(final_fiber_ids) - iflogger.info("Found %i (%f percent out of %i fibers) fibers that start or terminate in a voxel which is not labeled. 
(orphans)" % (dis, dis * 100.0 / n_fib, n_fib)) - iflogger.info("Valid fibers: %i (%f percent)" % (n_fib - dis, 100 - dis * 100.0 / n_fib)) + iflogger.info('Found %i (%f percent out of %i fibers) fibers that start or ' + 'terminate in a voxel which is not labeled. (orphans)', + dis, dis * 100.0 / n_fib, n_fib) + iflogger.info('Valid fibers: %i (%f percent)', n_fib - dis, + 100 - dis * 100.0 / n_fib) iflogger.info('Returning the intersecting point connectivity matrix') return connectivity_matrix, final_fiber_ids @@ -181,7 +184,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ en_fname = op.abspath(endpoint_name + '_endpoints.npy') en_fnamemm = op.abspath(endpoint_name + '_endpointsmm.npy') - iflogger.info('Reading Trackvis file {trk}'.format(trk=track_file)) + iflogger.info('Reading Trackvis file %s', track_file) fib, hdr = nb.trackvis.read(track_file, False) stats['orig_n_fib'] = len(fib) @@ -191,13 +194,13 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ (endpoints, endpointsmm) = create_endpoints_array(fib, roiVoxelSize) # Output endpoint arrays - iflogger.info('Saving endpoint array: {array}'.format(array=en_fname)) + iflogger.info('Saving endpoint array: %s', en_fname) np.save(en_fname, endpoints) - iflogger.info('Saving endpoint array in mm: {array}'.format(array=en_fnamemm)) + iflogger.info('Saving endpoint array in mm: %s', en_fnamemm) np.save(en_fnamemm, endpointsmm) n = len(fib) - iflogger.info('Number of fibers {num}'.format(num=n)) + iflogger.info('Number of fibers: %i', n) # Create empty fiber label array fiberlabels = np.zeros((n, 2)) @@ -244,7 +247,8 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ startROI = int(roiData[endpoints[i, 0, 0], endpoints[i, 0, 1], endpoints[i, 0, 2]]) endROI = int(roiData[endpoints[i, 1, 0], endpoints[i, 1, 1], endpoints[i, 1, 2]]) except IndexError: - iflogger.error(("AN INDEXERROR EXCEPTION OCCURED FOR FIBER %s. PLEASE CHECK ENDPOINT GENERATION" % i)) + iflogger.error('AN INDEXERROR EXCEPTION OCCURED FOR FIBER %s. ' + 'PLEASE CHECK ENDPOINT GENERATION', i) break # Filter @@ -256,7 +260,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ if startROI > nROIs or endROI > nROIs: iflogger.error("Start or endpoint of fiber terminate in a voxel which is labeled higher") iflogger.error("than is expected by the parcellation node information.") - iflogger.error("Start ROI: %i, End ROI: %i" % (startROI, endROI)) + iflogger.error("Start ROI: %i, End ROI: %i", startROI, endROI) iflogger.error("This needs bugfixing!") continue @@ -296,8 +300,10 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ # make final fiber labels as array final_fiberlabels_array = np.array(final_fiberlabels, dtype=int) - iflogger.info("Found %i (%f percent out of %i fibers) fibers that start or terminate in a voxel which is not labeled. (orphans)" % (dis, dis * 100.0 / n, n)) - iflogger.info("Valid fibers: %i (%f percent)" % (n - dis, 100 - dis * 100.0 / n)) + iflogger.info('Found %i (%f percent out of %i fibers) fibers that start or ' + 'terminate in a voxel which is not labeled. 
(orphans)', + dis, dis * 100.0 / n, n) + iflogger.info('Valid fibers: %i (%f%%)', n - dis, 100 - dis * 100.0 / n) numfib = nx.Graph() numfib.add_nodes_from(G) @@ -326,7 +332,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ fibmedian.add_edge(u, v, weight=di['fiber_length_median']) fibdev.add_edge(u, v, weight=di['fiber_length_std']) - iflogger.info('Writing network as {ntwk}'.format(ntwk=matrix_name)) + iflogger.info('Writing network as %s', matrix_name) nx.write_gpickle(G, op.abspath(matrix_name)) numfib_mlab = nx.to_numpy_matrix(numfib, dtype=int) @@ -341,7 +347,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ if intersections: path, name, ext = split_filename(matrix_name) intersection_matrix_name = op.abspath(name + '_intersections') + ext - iflogger.info('Writing intersection network as {ntwk}'.format(ntwk=intersection_matrix_name)) + iflogger.info('Writing intersection network as %s', intersection_matrix_name) nx.write_gpickle(I, intersection_matrix_name) path, name, ext = split_filename(matrix_mat_name) @@ -349,37 +355,41 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ ext = '.mat' matrix_mat_name = matrix_mat_name + ext - iflogger.info('Writing matlab matrix as {mat}'.format(mat=matrix_mat_name)) + iflogger.info('Writing matlab matrix as %s', matrix_mat_name) sio.savemat(matrix_mat_name, numfib_dict) if intersections: intersect_dict = {'intersections': intersection_matrix} intersection_matrix_mat_name = op.abspath(name + '_intersections') + ext - iflogger.info('Writing intersection matrix as {mat}'.format(mat=intersection_matrix_mat_name)) + iflogger.info('Writing intersection matrix as %s', intersection_matrix_mat_name) sio.savemat(intersection_matrix_mat_name, intersect_dict) mean_fiber_length_matrix_name = op.abspath(name + '_mean_fiber_length') + ext - iflogger.info('Writing matlab mean fiber length matrix as {mat}'.format(mat=mean_fiber_length_matrix_name)) + iflogger.info('Writing matlab mean fiber length matrix as %s', + mean_fiber_length_matrix_name) sio.savemat(mean_fiber_length_matrix_name, fibmean_dict) median_fiber_length_matrix_name = op.abspath(name + '_median_fiber_length') + ext - iflogger.info('Writing matlab median fiber length matrix as {mat}'.format(mat=median_fiber_length_matrix_name)) + iflogger.info('Writing matlab median fiber length matrix as %s', + median_fiber_length_matrix_name) sio.savemat(median_fiber_length_matrix_name, fibmedian_dict) fiber_length_std_matrix_name = op.abspath(name + '_fiber_length_std') + ext - iflogger.info('Writing matlab fiber length deviation matrix as {mat}'.format(mat=fiber_length_std_matrix_name)) + iflogger.info('Writing matlab fiber length deviation matrix as %s', + fiber_length_std_matrix_name) sio.savemat(fiber_length_std_matrix_name, fibdev_dict) fiberlengths_fname = op.abspath(endpoint_name + '_final_fiberslength.npy') - iflogger.info("Storing final fiber length array as %s" % fiberlengths_fname) + iflogger.info('Storing final fiber length array as %s', fiberlengths_fname) np.save(fiberlengths_fname, final_fiberlength_array) fiberlabels_fname = op.abspath(endpoint_name + '_filtered_fiberslabel.npy') - iflogger.info("Storing all fiber labels (with orphans) as %s" % fiberlabels_fname) + iflogger.info('Storing all fiber labels (with orphans) as %s', fiberlabels_fname) np.save(fiberlabels_fname, np.array(fiberlabels, dtype=np.int32),) fiberlabels_noorphans_fname = op.abspath(endpoint_name + '_final_fiberslabels.npy') 
- iflogger.info("Storing final fiber labels (no orphans) as %s" % fiberlabels_noorphans_fname) + iflogger.info('Storing final fiber labels (no orphans) as %s', + fiberlabels_noorphans_fname) np.save(fiberlabels_noorphans_fname, final_fiberlabels_array) iflogger.info("Filtering tractography - keeping only no orphan fibers") @@ -389,7 +399,7 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ stats['intersections_percent'] = float(stats['intersections_n_fib']) / float(stats['orig_n_fib']) * 100 out_stats_file = op.abspath(endpoint_name + '_statistics.mat') - iflogger.info("Saving matrix creation statistics as %s" % out_stats_file) + iflogger.info('Saving matrix creation statistics as %s', out_stats_file) sio.savemat(out_stats_file, stats) @@ -401,7 +411,7 @@ def save_fibers(oldhdr, oldfib, fname, indices): outstreams.append(oldfib[i]) n_fib_out = len(outstreams) hdrnew['n_count'] = n_fib_out - iflogger.info("Writing final non-orphan fibers as %s" % fname) + iflogger.info('Writing final non-orphan fibers as %s', fname) nb.trackvis.write(fname, outstreams, hdrnew) return n_fib_out @@ -620,22 +630,22 @@ class ROIGen(BaseInterface): def _run_interface(self, runtime): aparc_aseg_file = self.inputs.aparc_aseg_file aparcpath, aparcname, aparcext = split_filename(aparc_aseg_file) - iflogger.info('Using Aparc+Aseg file: {name}'.format(name=aparcname + aparcext)) + iflogger.info('Using Aparc+Aseg file: %s', aparcname + aparcext) niiAPARCimg = nb.load(aparc_aseg_file, mmap=NUMPY_MMAP) niiAPARCdata = niiAPARCimg.get_data() niiDataLabels = np.unique(niiAPARCdata) numDataLabels = np.size(niiDataLabels) - iflogger.info('Number of labels in image: {n}'.format(n=numDataLabels)) + iflogger.info('Number of labels in image: %s', numDataLabels) write_dict = True if self.inputs.use_freesurfer_LUT: self.LUT_file = self.inputs.freesurfer_dir + '/FreeSurferColorLUT.txt' - iflogger.info('Using Freesurfer LUT: {name}'.format(name=self.LUT_file)) + iflogger.info('Using Freesurfer LUT: %s', self.LUT_file) prefix = 'fsLUT' elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): self.LUT_file = op.abspath(self.inputs.LUT_file) lutpath, lutname, lutext = split_filename(self.LUT_file) - iflogger.info('Using Custom LUT file: {name}'.format(name=lutname + lutext)) + iflogger.info('Using Custom LUT file: %s', lutname + lutext) prefix = lutname else: prefix = 'hardcoded' @@ -652,14 +662,14 @@ def _run_interface(self, runtime): dict_file = op.abspath(prefix + '_' + aparcname + '.pck') if write_dict: - iflogger.info('Lookup table: {name}'.format(name=op.abspath(self.LUT_file))) + iflogger.info('Lookup table: %s', op.abspath(self.LUT_file)) LUTlabelsRGBA = np.loadtxt(self.LUT_file, skiprows=4, usecols=[0, 1, 2, 3, 4, 5], comments='#', dtype={'names': ('index', 'label', 'R', 'G', 'B', 'A'), 'formats': ('int', '|S30', 'int', 'int', 'int', 'int')}) numLUTLabels = np.size(LUTlabelsRGBA) if numLUTLabels < numDataLabels: iflogger.error('LUT file provided does not contain all of the regions in the image') iflogger.error('Removing unmapped regions') - iflogger.info('Number of labels in LUT: {n}'.format(n=numLUTLabels)) + iflogger.info('Number of labels in LUT: %s', numLUTLabels) LUTlabelDict = {} """ Create dictionary for input LUT table""" @@ -687,7 +697,7 @@ def _run_interface(self, runtime): iflogger.info('Grey matter mask created') greyMaskLabels = np.unique(niiGM) numGMLabels = np.size(greyMaskLabels) - iflogger.info('Number of grey matter labels: {num}'.format(num=numGMLabels)) + 
iflogger.info('Number of grey matter labels: %s', numGMLabels) labelDict = {} GMlabelDict = {} @@ -697,7 +707,7 @@ def _run_interface(self, runtime): if write_dict: GMlabelDict['originalID'] = mapDict[label] except: - iflogger.info('Label {lbl} not in provided mapping'.format(lbl=label)) + iflogger.info('Label %s not in provided mapping', label) if write_dict: del GMlabelDict GMlabelDict = {} @@ -708,11 +718,11 @@ def _run_interface(self, runtime): roi_image = nb.Nifti1Image(niiGM, niiAPARCimg.affine, niiAPARCimg.header) - iflogger.info('Saving ROI File to {path}'.format(path=roi_file)) + iflogger.info('Saving ROI File to %s', roi_file) nb.save(roi_image, roi_file) if write_dict: - iflogger.info('Saving Dictionary File to {path} in Pickle format'.format(path=dict_file)) + iflogger.info('Saving Dictionary File to %s in Pickle format', dict_file) with open(dict_file, 'w') as f: pickle.dump(labelDict, f) return runtime @@ -785,7 +795,7 @@ class CreateNodes(BaseInterface): def _run_interface(self, runtime): iflogger.info('Creating nodes...') create_nodes(self.inputs.roi_file, self.inputs.resolution_network_file, self.inputs.out_filename) - iflogger.info('Saving node network to {path}'.format(path=op.abspath(self.inputs.out_filename))) + iflogger.info('Saving node network to %s', op.abspath(self.inputs.out_filename)) return runtime def _list_outputs(self): diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index fde691f3c5..7410227565 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -118,7 +118,8 @@ def _run_interface(self, runtime): node_ntwk_name = self.inputs.in_group1[0] node_network = nx.read_gpickle(node_ntwk_name) - iflogger.info('Populating node dictionaries with attributes from {node}'.format(node=node_ntwk_name)) + iflogger.info('Populating node dictionaries with attributes from %s', + node_ntwk_name) for nid, ndata in node_network.nodes(data=True): nbsgraph.nodes[nid] = ndata @@ -127,12 +128,12 @@ def _run_interface(self, runtime): path = op.abspath('NBS_Result_' + details) iflogger.info(path) nx.write_gpickle(nbsgraph, path) - iflogger.info('Saving output NBS edge network as {out}'.format(out=path)) + iflogger.info('Saving output NBS edge network as %s', path) pval_path = op.abspath('NBS_P_vals_' + details) iflogger.info(pval_path) nx.write_gpickle(nbs_pval_graph, pval_path) - iflogger.info('Saving output p-value network as {out}'.format(out=pval_path)) + iflogger.info('Saving output p-value network as %s', pval_path) return runtime def _list_outputs(self): diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index c2f6d7c361..ec3c01336c 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -38,7 +38,7 @@ def read_unknown_ntwk(ntwk): if not isinstance(ntwk, nx.classes.graph.Graph): - path, name, ext = split_filename(ntwk) + _, _, ext = split_filename(ntwk) if ext == '.pck': ntwk = nx.read_gpickle(ntwk) elif ext == '.graphml': @@ -104,27 +104,24 @@ def average_networks(in_files, ntwk_res_file, group_id): """ import networkx as nx import os.path as op - iflogger.info(("Creating average network for group: " - "{grp}").format(grp=group_id)) + iflogger.info('Creating average network for group: %s', group_id) matlab_network_list = [] if len(in_files) == 1: avg_ntwk = read_unknown_ntwk(in_files[0]) else: count_to_keep_edge = np.round(len(in_files) / 2.0) - iflogger.info(("Number of networks: {L}, an edge must occur in at " - "least {c} to remain in the " - "average 
network").format(L=len(in_files), - c=count_to_keep_edge)) + iflogger.info('Number of networks: %i, an edge must occur in at ' + 'least %i to remain in the average network', + len(in_files), count_to_keep_edge) ntwk_res_file = read_unknown_ntwk(ntwk_res_file) - iflogger.info(("{n} Nodes found in network resolution " - "file").format(n=ntwk_res_file.number_of_nodes())) + iflogger.info('%i nodes found in network resolution file', + ntwk_res_file.number_of_nodes()) ntwk = remove_all_edges(ntwk_res_file) counting_ntwk = ntwk.copy() # Sums all the relevant variables for index, subject in enumerate(in_files): tmp = nx.read_gpickle(subject) - iflogger.info(('File {s} has {n} ' - 'edges').format(s=subject, n=tmp.number_of_edges())) + iflogger.info('File %s has %i edges', subject, tmp.number_of_edges()) edges = list(tmp.edges()) for edge in edges: data = {} @@ -146,8 +143,7 @@ def average_networks(in_files, ntwk_res_file, group_id): # Divides each value by the number of files nodes = list(ntwk.nodes()) edges = list(ntwk.edges()) - iflogger.info(('Total network has {n} ' - 'edges').format(n=ntwk.number_of_edges())) + iflogger.info('Total network has %i edges', ntwk.number_of_edges()) avg_ntwk = nx.Graph() newdata = {} for node in nodes: @@ -171,7 +167,8 @@ def average_networks(in_files, ntwk_res_file, group_id): avg_ntwk.add_edge(edge[0], edge[1], **data) edge_dict['count'][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]]['count'] - iflogger.info('After thresholding, the average network has has {n} edges'.format(n=avg_ntwk.number_of_edges())) + iflogger.info('After thresholding, the average network has %i edges', + avg_ntwk.number_of_edges()) avg_edges = avg_ntwk.edges() for edge in avg_edges: @@ -187,16 +184,17 @@ def average_networks(in_files, ntwk_res_file, group_id): matlab_network_list.append(op.abspath(network_name)) tmp[key] = edge_dict[key] sio.savemat(op.abspath(network_name), tmp) - iflogger.info('Saving average network for key: {k} as {out}'.format(k=key, out=op.abspath(network_name))) + iflogger.info('Saving average network for key: %s as %s', key, + op.abspath(network_name)) # Writes the networks and returns the name network_name = group_id + '_average.pck' nx.write_gpickle(avg_ntwk, op.abspath(network_name)) - iflogger.info('Saving average network as {out}'.format(out=op.abspath(network_name))) + iflogger.info('Saving average network as %s', op.abspath(network_name)) avg_ntwk = fix_keys_for_gexf(avg_ntwk) network_name = group_id + '_average.gexf' nx.write_gexf(avg_ntwk, op.abspath(network_name)) - iflogger.info('Saving average network as {out}'.format(out=op.abspath(network_name))) + iflogger.info('Saving average network as %s', op.abspath(network_name)) return network_name, matlab_network_list @@ -453,12 +451,12 @@ def _run_interface(self, runtime): out_pickled_extra_measures = op.abspath(self._gen_outfilename(self.inputs.out_pickled_extra_measures, 'pck')) dict_measures = compute_dict_measures(ntwk) - iflogger.info('Saving extra measure file to {path} in Pickle format'.format(path=op.abspath(out_pickled_extra_measures))) - file = open(out_pickled_extra_measures, 'w') - pickle.dump(dict_measures, file) - file.close() + iflogger.info('Saving extra measure file to %s in Pickle format', + op.abspath(out_pickled_extra_measures)) + with open(out_pickled_extra_measures, 'w') as fo: + pickle.dump(dict_measures, fo) - iflogger.info('Saving MATLAB measures as {m}'.format(m=matlab)) + iflogger.info('Saving MATLAB measures as %s', matlab) # Loops through the measures which return a 
dictionary, # converts the keys and values to a Numpy array, diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index 7a2340cb4d..22214c0036 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -223,22 +223,22 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): hemi = 'rh' if brv['dn_region'] == 'subcortical': iflogger.info(brv) - iflogger.info("---------------------") - iflogger.info("Work on brain region: %s" % (brv['dn_region'])) - iflogger.info("Freesurfer Name: %s" % brv['dn_fsname']) - iflogger.info("Region %s of %s " % (count, pg.number_of_nodes())) - iflogger.info("---------------------") + iflogger.info('---------------------') + iflogger.info('Work on brain region: %s', brv['dn_region']) + iflogger.info('Freesurfer Name: %s', brv['dn_fsname']) + iflogger.info('Region %s of %s', count, pg.number_of_nodes()) + iflogger.info('---------------------') # if it is subcortical, retrieve roi from aseg idx = np.where(asegd == int(brv['dn_fs_aseg_val'])) rois[idx] = int(brv['dn_correspondence_id']) elif brv['dn_region'] == 'cortical': iflogger.info(brv) - iflogger.info("---------------------") - iflogger.info("Work on brain region: %s" % (brv['dn_region'])) - iflogger.info("Freesurfer Name: %s" % brv['dn_fsname']) - iflogger.info("Region %s of %s " % (count, pg.number_of_nodes())) - iflogger.info("---------------------") + iflogger.info('---------------------') + iflogger.info('Work on brain region: %s', brv['dn_region']) + iflogger.info('Freesurfer Name: %s', brv['dn_fsname']) + iflogger.info('Region %s of %s', count, pg.number_of_nodes()) + iflogger.info('---------------------') labelpath = op.join( output_dir, parval['fs_label_subdir_name'] % hemi) @@ -294,7 +294,7 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): # store volume eg in ROIv_scale33.nii.gz out_roi = op.abspath('ROIv_%s.nii.gz' % parcellation_name) - iflogger.info("Save output image to %s" % out_roi) + iflogger.info('Save output image to %s', out_roi) img = nb.Nifti1Image(rois, aseg.affine, hdr2) nb.save(img, out_roi) @@ -424,22 +424,23 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): wmmask[idx] = 1 # check if we should subtract the cortical rois from this parcellation - iflogger.info("Loading %s to subtract cortical ROIs from white matter mask" % ('ROI_%s.nii.gz' % parcellation_name)) + iflogger.info('Loading ROI_%s.nii.gz to subtract cortical ROIs from white ' + 'matter mask', parcellation_name) roi = nb.load(op.join(op.curdir, 'ROI_%s.nii.gz' % parcellation_name)) roid = roi.get_data() assert roid.shape[0] == wmmask.shape[0] pg = nx.read_graphml(pgpath) for brk, brv in pg.nodes(data=True): if brv['dn_region'] == 'cortical': - iflogger.info("Subtracting region %s with intensity value %s" % - (brv['dn_region'], brv['dn_correspondence_id'])) + iflogger.info('Subtracting region %s with intensity value %s', + brv['dn_region'], brv['dn_correspondence_id']) idx = np.where(roid == int(brv['dn_correspondence_id'])) wmmask[idx] = 0 # output white matter mask. 
crop and move it afterwards wm_out = op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz') img = nb.Nifti1Image(wmmask, fsmask.affine, fsmask.header) - iflogger.info("Save white matter mask: %s" % wm_out) + iflogger.info('Save white matter mask: %s', wm_out) nb.save(img, wm_out) @@ -450,7 +451,7 @@ def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, log = cmp_config.get_logger() output_dir = op.abspath(op.curdir) - iflogger.info("Cropping and moving datasets to %s" % output_dir) + iflogger.info('Cropping and moving datasets to %s', output_dir) ds = [ (op.join(fs_dir, 'mri', 'aseg.nii.gz'), op.abspath('aseg.nii.gz')), @@ -469,7 +470,7 @@ def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, op.abspath('ROIv_HR_th.nii.gz'))) orig = op.join(fs_dir, 'mri', 'orig', '001.mgz') for d in ds: - iflogger.info("Processing %s:" % d[0]) + iflogger.info('Processing %s:', d[0]) if not op.exists(d[0]): raise Exception('File %s does not exist.' % d[0]) # reslice to original volume because the roi creation with freesurfer diff --git a/nipype/interfaces/elastix/base.py b/nipype/interfaces/elastix/base.py index afdb0a1ff4..746e571f3f 100644 --- a/nipype/interfaces/elastix/base.py +++ b/nipype/interfaces/elastix/base.py @@ -14,7 +14,7 @@ from ... import logging from ..base import CommandLineInputSpec, Directory, traits -logger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') class ElastixBaseInputSpec(CommandLineInputSpec): diff --git a/nipype/interfaces/elastix/registration.py b/nipype/interfaces/elastix/registration.py index 77b868c76c..5038447465 100644 --- a/nipype/interfaces/elastix/registration.py +++ b/nipype/interfaces/elastix/registration.py @@ -18,7 +18,7 @@ from .base import ElastixBaseInputSpec from ..base import CommandLine, TraitedSpec, File, traits, InputMultiPath -logger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') class RegistrationInputSpec(ElastixBaseInputSpec): diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index 42fab68377..718f5310fd 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -16,7 +16,7 @@ from ... 
import logging from ..base import (BaseInterface, BaseInterfaceInputSpec, isdefined, TraitedSpec, File, traits) -logger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') class EditTransformInputSpec(BaseInterfaceInputSpec): diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 6b408304d3..2f8b432bb3 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -1079,7 +1079,7 @@ def cmdline(self): return "echo recon-all: nothing to do" cmd += ' ' + ' '.join(flags) - iflogger.info('resume recon-all : %s' % cmd) + iflogger.info('resume recon-all : %s', cmd) return cmd def _prep_expert_file(self): diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index b5cd404b30..a5568ebbcb 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -233,8 +233,9 @@ def _format_arg(self, name, spec, value): "Cannot create {} file with extension " "{}".format(value, ext)) else: - logger.warn("Creating {} file with extension {}: " - "{}{}".format(value, ext, base, ext)) + logger.warn('Creating %s file with extension %s: %s%s', + value, ext, base, ext) + if value in implicit_filetypes: return "" return super(SampleToSurface, self)._format_arg(name, spec, value) @@ -423,8 +424,8 @@ def _format_arg(self, name, spec, value): "Cannot create {} file with extension " "{}".format(value, ext)) else: - logger.warn("Creating {} file with extension {}: " - "{}{}".format(value, ext, base, ext)) + logger.warn('Creating %s file with extension %s: %s%s', + value, ext, base, ext) if value in implicit_filetypes: return "" return super(SurfaceTransform, self)._format_arg(name, spec, value) diff --git a/nipype/interfaces/fsl/base.py b/nipype/interfaces/fsl/base.py index f5353f2b06..6d16817e09 100644 --- a/nipype/interfaces/fsl/base.py +++ b/nipype/interfaces/fsl/base.py @@ -36,7 +36,7 @@ from ..base import traits, isdefined, CommandLine, CommandLineInputSpec from ...external.due import BibTeX -LOGGER = logging.getLogger('interface') +IFLOGGER = logging.getLogger('interface') class Info(object): @@ -113,8 +113,8 @@ def output_type(cls): try: return os.environ['FSLOUTPUTTYPE'] except KeyError: - LOGGER.warn('FSLOUTPUTTYPE environment variable is not set. ' - 'Setting FSLOUTPUTTYPE=NIFTI') + IFLOGGER.warn('FSLOUTPUTTYPE environment variable is not set. 
' + 'Setting FSLOUTPUTTYPE=NIFTI') return 'NIFTI' @staticmethod diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index f02f655cf1..0793b955bd 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -351,17 +351,17 @@ def _substitute(self, pathstr): oldpathstr = pathstr pathstr = pathstr.replace(key, val) if pathstr != oldpathstr: - iflogger.debug('sub.str: %s -> %s using %r -> %r' - % (oldpathstr, pathstr, key, val)) + iflogger.debug('sub.str: %s -> %s using %r -> %r', + oldpathstr, pathstr, key, val) if isdefined(self.inputs.regexp_substitutions): for key, val in self.inputs.regexp_substitutions: oldpathstr = pathstr pathstr, _ = re.subn(key, val, pathstr) if pathstr != oldpathstr: - iflogger.debug('sub.regexp: %s -> %s using %r -> %r' - % (oldpathstr, pathstr, key, val)) + iflogger.debug('sub.regexp: %s -> %s using %r -> %r', + oldpathstr, pathstr, key, val) if pathstr_ != pathstr: - iflogger.info('sub: %s -> %s' % (pathstr_, pathstr)) + iflogger.info('sub: %s -> %s', pathstr_, pathstr) return pathstr # Check for s3 in base directory @@ -514,8 +514,8 @@ def _fetch_bucket(self, bucket_name): # Try and get AWS credentials if a creds_path is specified if aws_access_key_id and aws_secret_access_key: # Init connection - iflogger.info('Connecting to S3 bucket: %s with credentials...'\ - % bucket_name) + iflogger.info('Connecting to S3 bucket: %s with credentials...', + bucket_name) # Use individual session for each instance of DataSink # Better when datasinks are being used in multi-threading, see: # http://boto3.readthedocs.org/en/latest/guide/resources.html#multithreading @@ -525,8 +525,7 @@ def _fetch_bucket(self, bucket_name): # Otherwise, connect anonymously else: - iflogger.info('Connecting to AWS: %s anonymously...'\ - % bucket_name) + iflogger.info('Connecting to AWS: %s anonymously...', bucket_name) session = boto3.session.Session() s3_resource = session.resource('s3', use_ssl=True) s3_resource.meta.client.meta.events.register('choose-signer.s3.*', @@ -611,7 +610,7 @@ def _upload_to_s3(self, bucket, src, dst): src_md5 = hashlib.md5(src_read).hexdigest() # Move to next loop iteration if dst_md5 == src_md5: - iflogger.info('File %s already exists on S3, skipping...' % dst_f) + iflogger.info('File %s already exists on S3, skipping...', dst_f) continue else: iflogger.info('Overwriting previous S3 file...') @@ -620,8 +619,8 @@ def _upload_to_s3(self, bucket, src, dst): iflogger.info('New file to S3') # Copy file up to S3 (either encrypted or not) - iflogger.info('Uploading %s to S3 bucket, %s, as %s...'\ - % (src_f, bucket.name, dst_f)) + iflogger.info('Uploading %s to S3 bucket, %s, as %s...', src_f, + bucket.name, dst_f) if self.inputs.encrypt_bucket_keys: extra_args = {'ServerSideEncryption' : 'AES256'} else: @@ -671,7 +670,7 @@ def _list_outputs(self): outdir = local_out_exception # Log local copying directory iflogger.info('Access to S3 failed! 
Storing outputs locally at: '\ - '%s\nError: %s' %(outdir, exc)) + '%s\nError: %s', outdir, exc) else: s3dir = '' @@ -697,7 +696,7 @@ def _list_outputs(self): for key, files in list(self.inputs._outputs.items()): if not isdefined(files): continue - iflogger.debug("key: %s files: %s" % (key, str(files))) + iflogger.debug("key: %s files: %s", key, str(files)) files = filename_to_list(files) tempoutdir = outdir if s3_flag: @@ -745,16 +744,16 @@ def _list_outputs(self): raise(inst) # If src is a file, copy it to dst if os.path.isfile(src): - iflogger.debug('copyfile: %s %s' % (src, dst)) + iflogger.debug('copyfile: %s %s', src, dst) copyfile(src, dst, copy=True, hashmethod='content', use_hardlink=use_hardlink) out_files.append(dst) # If src is a directory, copy entire contents to dst dir elif os.path.isdir(src): if os.path.exists(dst) and self.inputs.remove_dest_dir: - iflogger.debug('removing: %s' % dst) + iflogger.debug('removing: %s', dst) shutil.rmtree(dst) - iflogger.debug('copydir: %s %s' % (src, dst)) + iflogger.debug('copydir: %s %s', src, dst) copytree(src, dst) out_files.append(dst) @@ -2430,7 +2429,7 @@ def _list_outputs(self): try: sftp.get(os.path.join(filledtemplate_dir, f), f) except IOError: - iflogger.info('remote file %s not found' % f) + iflogger.info('remote file %s not found', f) if any([val is None for val in outputs[key]]): outputs[key] = [] if len(outputs[key]) == 0: diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py index 00e87ec0dd..eb34de974e 100644 --- a/nipype/interfaces/mrtrix/convert.py +++ b/nipype/interfaces/mrtrix/convert.py @@ -68,7 +68,7 @@ def read_mrtrix_header(in_file): key = line.split(': ')[0] value = line.split(': ')[1] header[key] = value - iflogger.info('...adding "{v}" to header for key "{k}"'.format(v=value, k=key)) + iflogger.info('...adding "%s" to header for key "%s"', value, key) fileobj.close() header['count'] = int(header['count'].replace('\n', '')) header['offset'] = int(header['file'].replace('.', '')) @@ -118,8 +118,8 @@ def track_gen(track_points): raise HeaderError( 'Expecting %s points, found only %s' % ( stream_count, n_streams)) - iflogger.error('Expecting %s points, found only %s' % ( - stream_count, n_streams)) + iflogger.error('Expecting %s points, found only %s', + stream_count, n_streams) break pts = np.ndarray( shape=(n_pts, pt_cols), @@ -136,16 +136,15 @@ def track_gen(track_points): yield xyz n_streams += 1 if n_streams == stream_count: - iflogger.info('100% : {n} tracks read'.format(n=n_streams)) + iflogger.info('100%% : %i tracks read', n_streams) raise StopIteration try: if n_streams % int(stream_count / 100) == 0: percent = int(float(n_streams) / float(stream_count) * 100) - iflogger.info('{p}% : {n} tracks read'.format(p=percent, - n=n_streams)) + iflogger.info('%i%% : %i tracks read', percent, n_streams) except ZeroDivisionError: - iflogger.info('{} stream read out of {}'.format(n_streams, - stream_count)) + iflogger.info('%i stream read out of %i', n_streams, + stream_count) track_points, nonfinite_list = points_per_track(offset) fileobj.seek(offset) streamlines = track_gen(track_points) @@ -200,14 +199,16 @@ def _run_interface(self, runtime): trk_header['n_count'] = header['count'] if isdefined(self.inputs.matrix_file) and isdefined(self.inputs.registration_image_file): - iflogger.info('Applying transformation from matrix file {m}'.format(m=self.inputs.matrix_file)) + iflogger.info('Applying transformation from matrix file %s', + self.inputs.matrix_file) xfm = 
np.genfromtxt(self.inputs.matrix_file) iflogger.info(xfm) registration_image_file = nb.load(self.inputs.registration_image_file) reg_affine = registration_image_file.affine r_dx, r_dy, r_dz = get_data_dims(self.inputs.registration_image_file) r_vx, r_vy, r_vz = get_vox_dims(self.inputs.registration_image_file) - iflogger.info('Using affine from registration image file {r}'.format(r=self.inputs.registration_image_file)) + iflogger.info('Using affine from registration image file %s', + self.inputs.registration_image_file) iflogger.info(reg_affine) trk_header['vox_to_ras'] = reg_affine trk_header['dim'] = [r_dx, r_dy, r_dz] @@ -225,18 +226,19 @@ def _run_interface(self, runtime): final_streamlines = move_streamlines(transformed_streamlines, aff) trk_tracks = ((ii, None, None) for ii in final_streamlines) trk.write(out_filename, trk_tracks, trk_header) - iflogger.info('Saving transformed Trackvis file as {out}'.format(out=out_filename)) + iflogger.info('Saving transformed Trackvis file as %s', out_filename) iflogger.info('New TrackVis Header:') iflogger.info(trk_header) else: - iflogger.info('Applying transformation from scanner coordinates to {img}'.format(img=self.inputs.image_file)) + iflogger.info('Applying transformation from scanner coordinates to %s', + self.inputs.image_file) axcode = aff2axcodes(affine) trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2] trk_header['vox_to_ras'] = affine transformed_streamlines = transform_to_affine(streamlines, trk_header, affine) trk_tracks = ((ii, None, None) for ii in transformed_streamlines) trk.write(out_filename, trk_tracks, trk_header) - iflogger.info('Saving Trackvis file as {out}'.format(out=out_filename)) + iflogger.info('Saving Trackvis file as %s', out_filename) iflogger.info('TrackVis Header:') iflogger.info(trk_header) return runtime diff --git a/nipype/interfaces/mrtrix3/base.py b/nipype/interfaces/mrtrix3/base.py index ab982b816a..a9890d9653 100644 --- a/nipype/interfaces/mrtrix3/base.py +++ b/nipype/interfaces/mrtrix3/base.py @@ -16,7 +16,7 @@ from ... import logging from ..traits_extension import isdefined from ..base import (CommandLineInputSpec, CommandLine, traits, File) -logger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') class MRTrix3BaseInputSpec(CommandLineInputSpec): @@ -52,7 +52,7 @@ def _format_arg(self, name, trait_spec, value): from multiprocessing import cpu_count value = cpu_count() except: - logger.warn('Number of threads could not be computed') + iflogger.warn('Number of threads could not be computed') pass return trait_spec.argstr % value diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index e7984c654a..db47b57e8b 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -21,7 +21,7 @@ from .. 
import logging from ..interfaces.base import (traits, TraitedSpec, BaseInterface, BaseInterfaceInputSpec, File, InputMultiPath) -IFLOG = logging.getLogger('interface') +IFLOGGER = logging.getLogger('interface') class SignalExtractionInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc='4-D fMRI nii file') diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index ddf35ef449..8ddc06a9b0 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -32,7 +32,7 @@ scans_for_fnames, ImageFileSPM) __docformat__ = 'restructuredtext' -logger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') class Level1DesignInputSpec(SPMCommandInputSpec): From 01de3a406c255ad077cd9d7edfc9cf0f41a9e541 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 11 Nov 2017 22:29:26 -0500 Subject: [PATCH 2/3] Fix logging in dipy interface. --- nipype/interfaces/dipy/anisotropic_power.py | 2 +- nipype/interfaces/dipy/preprocess.py | 6 +++--- nipype/interfaces/dipy/reconstruction.py | 22 ++++++++++----------- nipype/interfaces/dipy/simulate.py | 6 +++--- nipype/interfaces/dipy/tensors.py | 6 +++--- nipype/interfaces/dipy/tracks.py | 20 +++++++++---------- nipype/interfaces/utility/wrappers.py | 2 +- 7 files changed, 30 insertions(+), 34 deletions(-) diff --git a/nipype/interfaces/dipy/anisotropic_power.py b/nipype/interfaces/dipy/anisotropic_power.py index f1d41ab118..2a678dfd1f 100644 --- a/nipype/interfaces/dipy/anisotropic_power.py +++ b/nipype/interfaces/dipy/anisotropic_power.py @@ -67,7 +67,7 @@ def _run_interface(self, runtime): apm = shm.anisotropic_power(peaks.shm_coeff) out_file = self._gen_filename('apm') nb.Nifti1Image(apm.astype("float32"), affine).to_filename(out_file) - IFLOGGER.info('APM qball image saved as {i}'.format(i=out_file)) + IFLOGGER.info('APM qball image saved as %s', out_file) return runtime diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 19b76b800b..bfe197cae2 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -68,7 +68,7 @@ def _run_interface(self, runtime): resample_proxy(self.inputs.in_file, order=order, new_zooms=vox_size, out_file=out_file) - IFLOGGER.info('Resliced image saved as {i}'.format(i=out_file)) + IFLOGGER.info('Resliced image saved as %s', out_file) return runtime def _list_outputs(self): @@ -159,8 +159,8 @@ def _run_interface(self, runtime): smask=signal_mask, nmask=noise_mask, out_file=out_file) - IFLOGGER.info(('Denoised image saved as {i}, estimated ' - 'SNR={s}').format(i=out_file, s=str(s))) + IFLOGGER.info('Denoised image saved as %s, estimated SNR=%s', + out_file, str(s)) return runtime def _list_outputs(self): diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index ee3fffce9a..d10e51dede 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -123,13 +123,12 @@ def _run_interface(self, runtime): sigma = mean_std * (1 + bias) if sigma == 0: - IFLOGGER.warn( - ('Noise std is 0.0, looks like data was masked and noise' - ' cannot be estimated correctly. Using default tensor ' - 'model instead of RESTORE.')) + IFLOGGER.warn('Noise std is 0.0, looks like data was masked and noise ' + 'cannot be estimated correctly. 
Using default tensor ' + 'model instead of RESTORE.') dti = TensorModel(gtab) else: - IFLOGGER.info(('Performing RESTORE with noise std=%.4f.') % sigma) + IFLOGGER.info('Performing RESTORE with noise std=%.4f.', sigma) dti = TensorModel(gtab, fit_method='RESTORE', sigma=sigma) try: @@ -252,14 +251,13 @@ def _run_interface(self, runtime): ratio = abs(response[1] / response[0]) if ratio > 0.25: - IFLOGGER.warn(('Estimated response is not prolate enough. ' - 'Ratio=%0.3f.') % ratio) + IFLOGGER.warn('Estimated response is not prolate enough. ' + 'Ratio=%0.3f.', ratio) elif ratio < 1.e-5 or np.any(np.isnan(response)): response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0]) - IFLOGGER.warn( - ('Estimated response is not valid, using a default one')) + IFLOGGER.warn('Estimated response is not valid, using a default one') else: - IFLOGGER.info(('Estimated response: %s') % str(response[:3])) + IFLOGGER.info('Estimated response: %s', str(response[:3])) np.savetxt(op.abspath(self.inputs.response), response) @@ -343,8 +341,8 @@ def _run_interface(self, runtime): ratio = response[0][1] / response[0][0] if abs(ratio - 0.2) > 0.1: - IFLOGGER.warn(('Estimated response is not prolate enough. ' - 'Ratio=%0.3f.') % ratio) + IFLOGGER.warn('Estimated response is not prolate enough. ' + 'Ratio=%0.3f.', ratio) csd_model = ConstrainedSphericalDeconvModel( gtab, response, sh_order=self.inputs.sh_order) diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py index 0331171811..f008948c97 100644 --- a/nipype/interfaces/dipy/simulate.py +++ b/nipype/interfaces/dipy/simulate.py @@ -10,6 +10,7 @@ import os.path as op from builtins import range +import numpy as np import nibabel as nb from ... import logging @@ -227,8 +228,8 @@ def _run_interface(self, runtime): pool = Pool(processes=n_proc) # Simulate sticks using dipy - IFLOGGER.info(('Starting simulation of %d voxels, %d diffusion' - ' directions.') % (len(args), ndirs)) + IFLOGGER.info('Starting simulation of %d voxels, %d diffusion directions.', + len(args), ndirs) result = np.array(pool.map(_compute_voxel, args)) if np.shape(result)[1] != ndirs: raise RuntimeError(('Computed directions do not match number' @@ -288,7 +289,6 @@ def _compute_voxel(args): angles=args['sticks'], fractions=ffs, snr=snr) except Exception as e: pass - # IFLOGGER.warn('Exception simulating dwi signal: %s' % e) return signal.tolist() diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 7d9ab3867f..e5518f4ea0 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -65,14 +65,14 @@ def _run_interface(self, runtime): img = nifti1_symmat(lower_triangular, affine) out_file = self._gen_filename('dti') nb.save(img, out_file) - IFLOGGER.info('DTI parameters image saved as {i}'.format(i=out_file)) + IFLOGGER.info('DTI parameters image saved as %s', out_file) #FA MD RD and AD for metric in ["fa", "md", "rd", "ad"]: data = getattr(ten_fit,metric).astype("float32") out_name = self._gen_filename(metric) nb.Nifti1Image(data, affine).to_filename(out_name) - IFLOGGER.info('DTI {metric} image saved as {i}'.format(i=out_name, metric=metric)) + IFLOGGER.info('DTI %s image saved as %s', metric, out_name) return runtime @@ -147,7 +147,7 @@ def _run_interface(self, runtime): img = nb.Nifti1Image(mode_data, affine) out_file = self._gen_filename('mode') nb.save(img, out_file) - IFLOGGER.info('Tensor mode image saved as {i}'.format(i=out_file)) + IFLOGGER.info('Tensor mode image saved as %s', out_file) return runtime def 
_list_outputs(self): diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 4a74b36b53..bd52fe937e 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -71,9 +71,8 @@ def _run_interface(self, runtime): data_dims = refnii.shape[:3] kwargs = dict(affine=affine) else: - IFLOGGER.warn( - 'voxel_dims and data_dims are deprecated as of dipy 0.7.1. Please use reference ' - 'input instead') + IFLOGGER.warn('voxel_dims and data_dims are deprecated as of dipy ' + '0.7.1. Please use reference input instead') if not isdefined(self.inputs.data_dims): data_dims = header['dim'] @@ -93,9 +92,8 @@ def _run_interface(self, runtime): out_file = op.abspath(self.inputs.out_filename) nb.save(img, out_file) - IFLOGGER.info( - 'Track density map saved as %s, size=%s, dimensions=%s', - out_file, img.shape, img.header.get_zooms()) + IFLOGGER.info('Track density map saved as %s, size=%s, dimensions=%s', + out_file, img.shape, img.header.get_zooms()) return runtime @@ -238,12 +236,12 @@ def _run_interface(self, runtime): seedps = np.array(np.where(seedmsk == 1), dtype=np.float32).T vseeds = seedps.shape[0] nsperv = (seeds // vseeds) + 1 - IFLOGGER.info(('Seed mask is provided (%d voxels inside ' - 'mask), computing seeds (%d seeds/voxel).') % - (vseeds, nsperv)) + IFLOGGER.info('Seed mask is provided (%d voxels inside ' + 'mask), computing seeds (%d seeds/voxel).', + vseeds, nsperv) if nsperv > 1: - IFLOGGER.info(('Needed %d seeds per selected voxel ' - '(total %d).') % (nsperv, vseeds)) + IFLOGGER.info('Needed %d seeds per selected voxel (total %d).', + nsperv, vseeds) seedps = np.vstack(np.array([seedps] * nsperv)) voxcoord = seedps + np.random.uniform(-1, 1, size=seedps.shape) nseeds = voxcoord.shape[0] diff --git a/nipype/interfaces/utility/wrappers.py b/nipype/interfaces/utility/wrappers.py index 4684acba42..9999c4af6a 100644 --- a/nipype/interfaces/utility/wrappers.py +++ b/nipype/interfaces/utility/wrappers.py @@ -21,7 +21,7 @@ from ...utils.filemanip import filename_to_list from ...utils.functions import getsource, create_function_from_source -logger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') class FunctionInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): function_str = traits.Str(mandatory=True, desc='code for function') From 05bf324c5ffc84f817c9ff0260287b9943598c3f Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 11 Nov 2017 22:44:27 -0500 Subject: [PATCH 3/3] Fix logging in utils and algorithms. 
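
Rationale for the series as a whole: the standard library `logging` module
interpolates `msg % args` only when a record is actually handled, so passing
values as logger arguments avoids building strings for suppressed levels. It
also means `{}`-style placeholders are never substituted; a call like
`iflogger.debug('saving inputs {}', inputs)` (the one flagged in patch 1)
leaves the braces verbatim and makes logging report a formatting error at
emit time, so it needs `%s` instead. A minimal sketch (`inputs` here is a
stand-in value for illustration):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    iflogger = logging.getLogger('interface')
    inputs = {'in_file': 'dwi.nii.gz'}  # stand-in value

    # Deferred interpolation: msg % args runs only when the record is
    # actually emitted by a handler.
    iflogger.debug('saving inputs %s', inputs)

    # Eager interpolation: the string is built even if DEBUG is disabled.
    iflogger.debug('saving inputs {}'.format(inputs))

    # Broken: logging only understands %-style, so '{}' is never
    # substituted and the stray argument triggers a logging error when
    # the record is emitted.
    iflogger.debug('saving inputs {}', inputs)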
--- nipype/algorithms/confounds.py | 13 ++++++----- nipype/algorithms/misc.py | 42 +++++++++++++++------------------- nipype/algorithms/modelgen.py | 14 ++++++------ nipype/utils/filemanip.py | 14 ++++++------ 4 files changed, 39 insertions(+), 44 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 1d31f2ab6c..39cafebe8c 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -30,7 +30,7 @@ from ..utils import NUMPY_MMAP from ..utils.misc import normalize_mc_params -IFLOG = logging.getLogger('interface') +IFLOGGER = logging.getLogger('interface') class ComputeDVARSInputSpec(BaseInterfaceInputSpec): @@ -286,7 +286,7 @@ def _run_interface(self, runtime): tr = self.inputs.series_tr if self.inputs.normalize and tr is None: - IFLOG.warn('FD plot cannot be normalized if TR is not set') + IFLOGGER.warn('FD plot cannot be normalized if TR is not set') self._results['out_figure'] = op.abspath(self.inputs.out_figure) fig = plot_confound(fd_res, self.inputs.figsize, 'FD', units='mm', @@ -601,8 +601,8 @@ def _process_masks(self, mask_images, timeseries=None): # save mask mask_file = os.path.abspath('mask_{:03d}.nii.gz'.format(i)) out_image.to_filename(mask_file) - IFLOG.debug('tCompcor computed and saved mask of shape {} to ' - 'mask_file {}'.format(mask.shape, mask_file)) + IFLOGGER.debug('tCompcor computed and saved mask of shape %s to ' + 'mask_file %s', str(mask.shape), mask_file) self._mask_files.append(mask_file) out_images.append(out_image) return out_images @@ -919,7 +919,8 @@ def regress_poly(degree, data, remove_mean=True, axis=-1): :param int axis: numpy array axes along which regression is performed """ - IFLOG.debug('Performing polynomial regression on data of shape ' + str(data.shape)) + IFLOGGER.debug('Performing polynomial regression on data of shape %s', + str(data.shape)) datashape = data.shape timepoints = datashape[axis] @@ -1147,7 +1148,7 @@ def _full_rank(X, cmax=1e15): c = smax / smin if c < cmax: return X, c - IFLOG.warn('Matrix is singular at working precision, regularizing...') + IFLOGGER.warn('Matrix is singular at working precision, regularizing...') lda = (smax - cmax * smin) / (cmax - 1) s = s + lda X = np.dot(U, np.dot(np.diag(s), V)) diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index f1cd8179fa..a16507bf36 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -362,26 +362,23 @@ def _run_interface(self, runtime): if isinstance(in_dict[key][0], np.ndarray): saved_variables.append(key) else: - iflogger.info('One of the keys in the input file, {k}, is not a Numpy array'.format(k=key)) + iflogger.info('One of the keys in the input file, %s, is ' + 'not a Numpy array', key) if len(saved_variables) > 1: - iflogger.info( - '{N} variables found:'.format(N=len(saved_variables))) + iflogger.info('%i variables found:', len(saved_variables)) iflogger.info(saved_variables) for variable in saved_variables: - iflogger.info( - '...Converting {var} - type {ty} - to\ - CSV'.format(var=variable, ty=type(in_dict[variable])) - ) - matlab2csv( - in_dict[variable], variable, self.inputs.reshape_matrix) + iflogger.info('...Converting %s - type %s - to CSV', + variable, type(in_dict[variable])) + matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix) elif len(saved_variables) == 1: _, name, _ = split_filename(self.inputs.in_file) variable = saved_variables[0] - iflogger.info('Single variable found {var}, type {ty}:'.format( - var=variable, ty=type(in_dict[variable]))) - 
iflogger.info('...Converting {var} to CSV from {f}'.format( - var=variable, f=self.inputs.in_file)) + iflogger.info('Single variable found %s, type %s:', variable, + type(in_dict[variable])) + iflogger.info('...Converting %s to CSV from %s', variable, + self.inputs.in_file) matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix) else: iflogger.error('No values in the MATLAB file?!') @@ -396,8 +393,8 @@ def _list_outputs(self): if isinstance(in_dict[key][0], np.ndarray): saved_variables.append(key) else: - iflogger.error('One of the keys in the input file, {k}, is\ - not a Numpy array'.format(k=key)) + iflogger.error('One of the keys in the input file, %s, is ' + 'not a Numpy array', key) if len(saved_variables) > 1: outputs['csv_files'] = replaceext(saved_variables, '.csv') @@ -555,19 +552,16 @@ def _run_interface(self, runtime): iflogger.info('Column headings have been provided:') headings = self.inputs.column_headings else: - iflogger.info( - 'Column headings not provided! Pulled from input filenames:') + iflogger.info('Column headings not provided! Pulled from input filenames:') headings = remove_identical_paths(self.inputs.in_files) if isdefined(self.inputs.extra_field): if isdefined(self.inputs.extra_column_heading): extraheading = self.inputs.extra_column_heading - iflogger.info('Extra column heading provided: {col}'.format( - col=extraheading)) + iflogger.info('Extra column heading provided: %s', extraheading) else: extraheading = 'type' - iflogger.info( - 'Extra column heading was not defined. Using "type"') + iflogger.info('Extra column heading was not defined. Using "type"') headings.append(extraheading) extraheadingBool = True @@ -575,8 +569,8 @@ def _run_interface(self, runtime): iflogger.warn('Only one file input!') if isdefined(self.inputs.row_headings): - iflogger.info('Row headings have been provided. Adding "labels"\ - column header.') + iflogger.info('Row headings have been provided. Adding "labels"' + 'column header.') prefix = '"{p}","'.format(p=self.inputs.row_heading_title) csv_headings = prefix + '","'.join(itertools.chain( headings)) + '"\n' @@ -1310,7 +1304,7 @@ def merge_rois(in_files, in_idxs, in_ref, # to avoid memory errors if op.splitext(in_ref)[1] == '.gz': try: - iflogger.info('uncompress %i' % in_ref) + iflogger.info('uncompress %i', in_ref) sp.check_call(['gunzip', in_ref], stdout=sp.PIPE, shell=True) in_ref = op.splitext(in_ref)[0] except: diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index 87367f7955..2c994bf20d 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -374,9 +374,9 @@ def _generate_standard_design(self, infolist, functional_runs=None, for f in filename_to_list(sessinfo[i]['scans']): shape = load(f, mmap=NUMPY_MMAP).shape if len(shape) == 3 or shape[3] == 1: - iflogger.warning(('You are using 3D instead of 4D ' - 'files. Are you sure this was ' - 'intended?')) + iflogger.warning('You are using 3D instead of 4D ' + 'files. 
Are you sure this was ' + 'intended?') numscans += 1 else: numscans += shape[3] @@ -686,7 +686,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): if dt < 1: raise Exception('Time multiple less than 1 ms') - iflogger.info('Setting dt = %d ms\n' % dt) + iflogger.info('Setting dt = %d ms\n', dt) npts = int(np.ceil(total_time / dt)) times = np.arange(0, total_time, dt) * 1e-3 timeline = np.zeros((npts)) @@ -705,9 +705,9 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: response = np.convolve(boxcar, hrf) reg_scale = 1.0 / response.max() - iflogger.info('response sum: %.4f max: %.4f' % (response.sum(), - response.max())) - iflogger.info('reg_scale: %.4f' % reg_scale) + iflogger.info('response sum: %.4f max: %.4f', response.sum(), + response.max()) + iflogger.info('reg_scale: %.4f', reg_scale) for i, t in enumerate(onsets): idx = int(np.round(t / dt)) diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index be71424a5a..16eabbb69c 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -368,13 +368,13 @@ def copyfile(originalfile, newfile, copy=False, create_new=False, elif hashmethod == 'content': hashfn = hash_infile newhash = hashfn(newfile) - fmlogger.debug("File: %s already exists,%s, copy:%d" % - (newfile, newhash, copy)) + fmlogger.debug('File: %s already exists,%s, copy:%d', newfile, + newhash, copy) orighash = hashfn(originalfile) keep = newhash == orighash if keep: - fmlogger.debug("File: %s already exists, not overwriting, copy:%d" - % (newfile, copy)) + fmlogger.debug('File: %s already exists, not overwriting, copy:%d', + newfile, copy) else: os.unlink(newfile) @@ -385,7 +385,7 @@ def copyfile(originalfile, newfile, copy=False, create_new=False, # ~hardlink & ~symlink => copy if not keep and use_hardlink: try: - fmlogger.debug("Linking File: %s->%s" % (newfile, originalfile)) + fmlogger.debug('Linking File: %s->%s', newfile, originalfile) # Use realpath to avoid hardlinking symlinks os.link(os.path.realpath(originalfile), newfile) except OSError: @@ -395,7 +395,7 @@ def copyfile(originalfile, newfile, copy=False, create_new=False, if not keep and not copy and os.name == 'posix': try: - fmlogger.debug("Symlinking File: %s->%s" % (newfile, originalfile)) + fmlogger.debug('Symlinking File: %s->%s', newfile, originalfile) os.symlink(originalfile, newfile) except OSError: copy = True # Disable symlink for associated files @@ -404,7 +404,7 @@ def copyfile(originalfile, newfile, copy=False, create_new=False, if not keep: try: - fmlogger.debug("Copying File: %s->%s" % (newfile, originalfile)) + fmlogger.debug('Copying File: %s->%s', newfile, originalfile) shutil.copyfile(originalfile, newfile) except shutil.Error as e: fmlogger.warn(e.message)
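
A closing note on why the deferred form matters beyond string hygiene: many
of the converted calls pass objects (headers, dicts, arrays) that can be
expensive to render, and with %-style arguments the rendering is skipped
entirely when the level is disabled. A small sketch, with a made-up
`Expensive` class standing in for any costly `__repr__`:

    import logging

    iflogger = logging.getLogger('interface')
    iflogger.setLevel(logging.INFO)  # DEBUG records are discarded

    class Expensive(object):
        def __repr__(self):
            raise RuntimeError('never rendered')  # stands in for costly work

    # Safe: the DEBUG record is dropped before msg % args runs, so the
    # argument is never rendered.
    iflogger.debug('value: %s', Expensive())

    # Raises here: str.format() renders the argument before the logging
    # call can check the level.
    iflogger.debug('value: {}'.format(Expensive()))

A possible follow-up: these patches keep the `warn` method (`IFLOGGER.warn`,
`fmlogger.warn`, ...), which the standard library treats as a deprecated
alias of `warning`; switching to `logger.warning(...)` would be another
mechanical change on top of this series.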