Skip to content

WIP/REF/FIX: Don't throw away exception stack traces #1490

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 10 commits into from
Jun 8, 2016
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions nipype/algorithms/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from __future__ import division
from builtins import zip
from builtins import range
from future.utils import raise_from

import os
import os.path as op
Expand Down Expand Up @@ -857,9 +858,9 @@ def __init__(self, infields=None, force_run=True, **kwargs):
def _run_interface(self, runtime):
try:
import pandas as pd
except ImportError:
raise ImportError(('This interface requires pandas '
'(http://pandas.pydata.org/) to run.'))
except ImportError as e:
raise_from(ImportError('This interface requires pandas '
'(http://pandas.pydata.org/) to run.'), e)

try:
import lockfile as pl
Expand Down
5 changes: 3 additions & 2 deletions nipype/external/six.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
# SOFTWARE.

from __future__ import absolute_import
from future.utils import raise_from

import functools
import itertools
Expand Down Expand Up @@ -186,8 +187,8 @@ def find_module(self, fullname, path=None):
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
except KeyError as e:
raise_from(ImportError("This loader does not know module " + fullname), e)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Since this is an external package, we shouldn't make this change. It may also be time to update the six.py included in nipype.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see. Are all the files in external this way? If possible, we should avoid this.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should update external files before a release, but any changes should really be made upstream. six and future are competing packages, and at present we are using both; it will require a focused sprint to settle on one or the other.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Got it — that makes sense.


def load_module(self, fullname):
try:
Expand Down
5 changes: 3 additions & 2 deletions nipype/interfaces/afni/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import os
from sys import platform
from builtins import object
from future.utils import raise_from

from ... import logging
from ...utils.filemanip import split_filename
Expand Down Expand Up @@ -82,9 +83,9 @@ def outputtype_to_ext(cls, outputtype):

try:
return cls.ftypes[outputtype]
except KeyError:
except KeyError as e:
msg = 'Invalid AFNIOUTPUTTYPE: ', outputtype
raise KeyError(msg)
raise_from(KeyError(msg), e)

@classmethod
def outputtype(cls):
Expand Down
16 changes: 9 additions & 7 deletions nipype/pipeline/plugins/ipython.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from future import standard_library
standard_library.install_aliases()
from future.utils import raise_from

from pickle import dumps

Expand Down Expand Up @@ -63,19 +64,20 @@ def run(self, graph, config, updatehash=False):
name = 'ipyparallel'
__import__(name)
self.iparallel = sys.modules[name]
except ImportError:
raise ImportError("Ipython kernel not found. Parallel execution "
"will be unavailable")
except ImportError as e:
raise_from(ImportError("Ipython kernel not found. Parallel execution "
"will be unavailable"), e)
try:
self.taskclient = self.iparallel.Client()
except Exception as e:
if isinstance(e, TimeoutError):
raise Exception("No IPython clients found.")
raise_from(Exception("No IPython clients found."), e)
if isinstance(e, IOError):
raise Exception("ipcluster/ipcontroller has not been started")
raise_from(Exception("ipcluster/ipcontroller has not been started"), e)
if isinstance(e, ValueError):
raise Exception("Ipython kernel not installed")
raise e
raise_from(Exception("Ipython kernel not installed"), e)
else:
raise e
return super(IPythonPlugin, self).run(graph, config, updatehash=updatehash)

def _get_result(self, taskid):
Expand Down
11 changes: 6 additions & 5 deletions nipype/pipeline/plugins/ipythonx.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
"""

import sys
from future.utils import raise_from

IPython_not_loaded = False
try:
Expand Down Expand Up @@ -36,16 +37,16 @@ def run(self, graph, config, updatehash=False):
name = 'IPython.kernel.client'
__import__(name)
self.ipyclient = sys.modules[name]
except ImportError:
raise ImportError("Ipython kernel not found. Parallel execution "
"will be unavailable")
except ImportError as e:
raise_from(ImportError("Ipython kernel not found. Parallel execution "
"will be unavailable"), e)
try:
self.taskclient = self.ipyclient.TaskClient()
except Exception as e:
if isinstance(e, ConnectionRefusedError):
raise Exception("No IPython clients found.")
raise_from(Exception("No IPython clients found."), e)
if isinstance(e, ValueError):
raise Exception("Ipython kernel not installed")
raise_from(Exception("Ipython kernel not installed"), e)
return super(IPythonXPlugin, self).run(graph, config, updatehash=updatehash)

def _get_result(self, taskid):
Expand Down
10 changes: 5 additions & 5 deletions nipype/testing/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from tempfile import mkdtemp
from ..utils.misc import package_check
from nose import SkipTest

from future.utils import raise_from

def skip_if_no_package(*args, **kwargs):
"""Raise SkipTest if package_check fails
Expand Down Expand Up @@ -62,15 +62,15 @@ def __init__(self, size_in_mbytes=8, delay=0.5):
try:
subprocess.check_call(args=mkfs_args, stdout=self.dev_null,
stderr=self.dev_null)
except subprocess.CalledProcessError:
raise IOError("mkfs.vfat failed")
except subprocess.CalledProcessError as e:
raise_from(IOError("mkfs.vfat failed"), e)

try:
self.fusefat = subprocess.Popen(args=mount_args,
stdout=self.dev_null,
stderr=self.dev_null)
except OSError:
raise IOError("fusefat is not installed")
except OSError as e:
raise_from(IOError("fusefat is not installed"), e)

time.sleep(self.delay)

Expand Down
15 changes: 8 additions & 7 deletions nipype/utils/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from future import standard_library
standard_library.install_aliases()
from future.utils import raise_from
from builtins import next
from pickle import dumps, loads
import inspect
Expand Down Expand Up @@ -90,14 +91,14 @@ def create_function_from_source(function_source, imports=None):
import_keys = list(ns.keys())
exec(function_source, ns)

except Exception as msg:
msg = str(msg) + '\nError executing function:\n %s\n' % function_source
except Exception as e:
msg = '\nError executing function:\n %s\n' % function_source
msg += '\n'.join(["Functions in connection strings have to be standalone.",
"They cannot be declared either interactively or inside",
"another function or inline in the connect string. Any",
"imports should be done inside the function"
])
raise RuntimeError(msg)
raise_from(RuntimeError(msg), e)
ns_funcs = list(set(ns) - set(import_keys + ['__builtins__']))
assert len(ns_funcs) == 1, "Function or inputs are ill-defined"
funcname = ns_funcs[0]
Expand Down Expand Up @@ -199,14 +200,14 @@ def package_check(pkg_name, version=None, app=None, checker=LooseVersion,
msg += ' with version >= %s' % (version,)
try:
mod = __import__(pkg_name)
except ImportError:
raise exc_failed_import(msg)
except ImportError as e:
raise_from(exc_failed_import(msg), e)
if not version:
return
try:
have_version = mod.__version__
except AttributeError:
raise exc_failed_check('Cannot find version for %s' % pkg_name)
except AttributeError as e:
raise_from(exc_failed_check('Cannot find version for %s' % pkg_name), e)
if checker(have_version) < checker(version):
raise exc_failed_check(msg)

Expand Down
5 changes: 3 additions & 2 deletions nipype/utils/spm_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import os

from future.utils import raise_from
from nipype.interfaces import matlab


Expand Down Expand Up @@ -55,5 +56,5 @@ def _strip_header(doc):
except ValueError:
index = len(doc)
return doc[:index]
except KeyError:
raise IOError('This docstring was not generated by Nipype!\n')
except KeyError as e:
raise_from(IOError('This docstring was not generated by Nipype!\n'), e)
6 changes: 3 additions & 3 deletions nipype/workflows/dmri/connectivity/group_connectivity.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from __future__ import print_function
from future.utils import raise_from

import os.path as op

Expand Down Expand Up @@ -459,9 +460,8 @@ def create_average_networks_by_group_workflow(group_list, data_dir, subjects_dir
try:
l4infosource.inputs.group_id1 = list(group_list.keys())[0]
l4infosource.inputs.group_id2 = list(group_list.keys())[1]
except IndexError:
print('The create_average_networks_by_group_workflow requires 2 groups')
raise Exception
except IndexError as e:
raise_from(Exception('The create_average_networks_by_group_workflow requires 2 groups'), e)

l4info = dict(networks=[['group_id', '']], CMatrices=[['group_id', '']], fibmean=[['group_id', 'mean_fiber_length']],
fibdev=[['group_id', 'fiber_length_std']])
Expand Down