CLN: Replace bare exceptions with more descriptive ones #3924

Closed (wants to merge 15 commits)
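Across the diff, bare Exception is replaced along a consistent pattern: TypeError for misuse of an API (wrong argument type or combination, an unsupported operation, or mutating an immutable object), ValueError for arguments of the right type but with an invalid value (shape or length mismatches, duplicate keys, unrecognised options), IndexError for out-of-bounds access, and NotImplementedError for recognised but unsupported features. As a minimal caller-side sketch (illustrative only, not part of this diff), the narrower types let user code separate failure modes instead of a blanket except Exception that also hides unrelated bugs:

def describe_failure(func, *args, **kwargs):
    # Illustrative helper, not from the PR: call a pandas API and report
    # which category of failure the raised exception falls into.
    try:
        return func(*args, **kwargs)
    except TypeError as err:     # wrong type / unsupported operation
        return 'usage error: %s' % err
    except ValueError as err:    # valid type, invalid value (shapes, duplicates, ...)
        return 'value error: %s' % err
    except IndexError as err:    # out-of-bounds access
        return 'bounds error: %s' % err
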
6 changes: 3 additions & 3 deletions pandas/core/common.py
@@ -580,7 +580,7 @@ def take_2d_multi(arr, indexer, out=None, fill_value=np.nan,
mask_info = (row_mask, col_mask), (row_needs, col_needs)
if row_needs or col_needs:
if out is not None and out.dtype != dtype:
- raise Exception('Incompatible type for fill_value')
+ raise TypeError('Incompatible type for fill_value')
else:
# if not, then depromote, set fill_value to dummy
# (it won't be used but we don't want the cython code
@@ -814,7 +814,7 @@ def changeit():
# if we are trying to do something unsafe
# like put a bigger dtype in a smaller one, use the smaller one
if change.dtype.itemsize < r.dtype.itemsize:
raise Exception("cannot change dtype of input to smaller size")
raise TypeError("cannot change dtype of input to smaller size")
change.dtype = r.dtype
change[:] = r

@@ -1259,7 +1259,7 @@ def ensure_float(arr):

def _mut_exclusive(arg1, arg2):
if arg1 is not None and arg2 is not None:
- raise Exception('mutually exclusive arguments')
+ raise TypeError('mutually exclusive arguments')
elif arg1 is not None:
return arg1
else:
2 changes: 1 addition & 1 deletion pandas/core/format.py
@@ -825,7 +825,7 @@ def __init__(self, obj, path_or_buf, sep=",", na_rep='', float_format=None,
# validate mi options
if self.has_mi_columns:
if cols is not None:
raise Exception("cannot specify cols with a multi_index on the columns")
raise TypeError("cannot specify cols with a multi_index on the columns")

if cols is not None:
if isinstance(cols,Index):
10 changes: 5 additions & 5 deletions pandas/core/frame.py
@@ -933,7 +933,7 @@ def dot(self, other):
lvals = self.values
rvals = np.asarray(other)
if lvals.shape[1] != rvals.shape[0]:
- raise Exception('Dot product shape mismatch, %s vs %s' %
+ raise ValueError('Dot product shape mismatch, %s vs %s' %
(lvals.shape, rvals.shape))

if isinstance(other, DataFrame):
@@ -2844,7 +2844,7 @@ def set_index(self, keys, drop=True, append=False, inplace=False,

if verify_integrity and not index.is_unique:
duplicates = index.get_duplicates()
- raise Exception('Index has duplicate keys: %s' % duplicates)
+ raise ValueError('Index has duplicate keys: %s' % duplicates)

for c in to_remove:
del frame[c]
@@ -3315,7 +3315,7 @@ def sortlevel(self, level=0, axis=0, ascending=True, inplace=False):
axis = self._get_axis_number(axis)
the_axis = self._get_axis(axis)
if not isinstance(the_axis, MultiIndex):
- raise Exception('can only sort by level with a hierarchical index')
+ raise TypeError('can only sort by level with a hierarchical index')

new_axis, indexer = the_axis.sortlevel(level, ascending=ascending)

@@ -3377,7 +3377,7 @@ def reorder_levels(self, order, axis=0):
axis = self._get_axis_number(axis)
if not isinstance(self._get_axis(axis),
MultiIndex): # pragma: no cover
- raise Exception('Can only reorder levels on a hierarchical axis.')
+ raise TypeError('Can only reorder levels on a hierarchical axis.')

result = self.copy()

@@ -3789,7 +3789,7 @@ def rename(self, index=None, columns=None, copy=True, inplace=False):
from pandas.core.series import _get_rename_function

if index is None and columns is None:
- raise Exception('must pass either index or columns')
+ raise TypeError('must pass either index or columns')

index_f = _get_rename_function(index)
columns_f = _get_rename_function(columns)
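With the frame.py changes above, for example, a duplicate-key failure in set_index now surfaces as ValueError. A minimal sketch (illustrative, not part of this diff):

import pandas as pd

df = pd.DataFrame({'a': [1, 1], 'b': [2, 3]})
try:
    # 'a' contains duplicates, so verify_integrity rejects the new index
    df.set_index('a', verify_integrity=True)
except ValueError as err:
    print('duplicate index keys:', err)
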
16 changes: 8 additions & 8 deletions pandas/core/index.py
@@ -361,7 +361,7 @@ def __hash__(self):
return hash(self.view(np.ndarray))

def __setitem__(self, key, value):
- raise Exception(str(self.__class__) + ' object is immutable')
+ raise TypeError(str(self.__class__) + ' does not support item assignment')

def __getitem__(self, key):
"""Override numpy.ndarray's __getitem__ method to work as desired"""
@@ -547,7 +547,7 @@ def order(self, return_indexer=False, ascending=True):
return sorted_index

def sort(self, *args, **kwargs):
- raise Exception('Cannot sort an Index object')
+ raise TypeError('Cannot sort an %r object' % self.__class__.__name__)

def shift(self, periods=1, freq=None):
"""
@@ -606,7 +606,7 @@ def union(self, other):
union : Index
"""
if not hasattr(other, '__iter__'):
- raise Exception('Input must be iterable!')
+ raise TypeError('Input must be iterable!')

if len(other) == 0 or self.equals(other):
return self
@@ -671,7 +671,7 @@ def intersection(self, other):
intersection : Index
"""
if not hasattr(other, '__iter__'):
- raise Exception('Input must be iterable!')
+ raise TypeError('Input must be iterable!')

self._assert_can_do_setop(other)

@@ -713,7 +713,7 @@ def diff(self, other):
"""

if not hasattr(other, '__iter__'):
- raise Exception('Input must be iterable!')
+ raise TypeError('Input must be iterable!')

if self.equals(other):
return Index([], name=self.name)
@@ -1080,7 +1080,7 @@ def _join_level(self, other, level, how='left', return_indexers=False):
the MultiIndex will not be changed (currently)
"""
if isinstance(self, MultiIndex) and isinstance(other, MultiIndex):
- raise Exception('Join on level between two MultiIndex objects '
+ raise TypeError('Join on level between two MultiIndex objects '
'is ambiguous')

left, right = self, other
@@ -2298,7 +2298,7 @@ def _partial_tup_index(self, tup, side='left'):

if lab not in lev:
if not lev.is_type_compatible(lib.infer_dtype([lab])):
- raise Exception('Level type mismatch: %s' % lab)
+ raise TypeError('Level type mismatch: %s' % lab)

# short circuit
loc = lev.searchsorted(lab, side=side)
@@ -2738,7 +2738,7 @@ def _ensure_index(index_like):

def _validate_join_method(method):
if method not in ['left', 'right', 'inner', 'outer']:
- raise Exception('do not recognize join method %s' % method)
+ raise ValueError('do not recognize join method %s' % method)


# TODO: handle index names!
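Likewise for index.py: attempting to mutate an Index, or sort one in place, is now reported as TypeError rather than a bare Exception. A minimal sketch (illustrative, not part of this diff):

import pandas as pd

idx = pd.Index([1, 2, 3])
try:
    idx[0] = 10   # Index objects are immutable
except TypeError as err:
    print('immutable:', err)
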
8 changes: 4 additions & 4 deletions pandas/core/internals.py
@@ -1571,7 +1571,7 @@ def xs(self, key, axis=1, copy=True):
new_blocks = []
if len(self.blocks) > 1:
if not copy:
- raise Exception('cannot get view of mixed-type or '
+ raise TypeError('cannot get view of mixed-type or '
'non-consolidated DataFrame')
for blk in self.blocks:
newb = make_block(blk.values[slicer],
@@ -1604,7 +1604,7 @@ def fast_2d_xs(self, loc, copy=False):
return result

if not copy:
- raise Exception('cannot get view of mixed-type or '
+ raise TypeError('cannot get view of mixed-type or '
'non-consolidated DataFrame')

dtype = _interleaved_dtype(self.blocks)
@@ -2093,7 +2093,7 @@ def _maybe_rename_join(self, other, lsuffix, rsuffix, copydata=True):
to_rename = self.items.intersection(other.items)
if len(to_rename) > 0:
if not lsuffix and not rsuffix:
- raise Exception('columns overlap: %s' % to_rename)
+ raise ValueError('columns overlap: %s' % to_rename)

def lrenamer(x):
if x in to_rename:
@@ -2377,7 +2377,7 @@ def _shape_compat(x):
else:
items = _ensure_index([ n for n in names if n in ref_items ])
if len(items) != len(stacked):
raise Exception("invalid names passed _stack_arrays")
raise ValueError("invalid names passed _stack_arrays")

return items, stacked, placement

2 changes: 1 addition & 1 deletion pandas/core/panel.py
@@ -137,7 +137,7 @@ def f(self, other):
if isinstance(other, self._constructor):
return self._compare_constructor(other, func)
elif isinstance(other, (self._constructor_sliced, DataFrame, Series)):
raise Exception("input needs alignment for this object [%s]" %
raise ValueError("input needs alignment for this object [%s]" %
self._constructor)
else:
return self._combine_const(other, na_op)
2 changes: 1 addition & 1 deletion pandas/core/panelnd.py
@@ -58,7 +58,7 @@ def create_nd_panel_factory(klass_name, axis_orders, axis_slices, slicer, axis_a
#### define the methods ####
def __init__(self, *args, **kwargs):
if not (kwargs.get('data') or len(args)):
- raise Exception(
+ raise TypeError(
"must supply at least a data argument to [%s]" % klass_name)
if 'copy' not in kwargs:
kwargs['copy'] = False
15 changes: 8 additions & 7 deletions pandas/core/series.py
@@ -1962,7 +1962,8 @@ def clip(self, lower=None, upper=None, out=None):
clipped : Series
"""
if out is not None: # pragma: no cover
- raise Exception('out argument is not supported yet')
+ # TODO: Support out argument?
+ raise NotImplementedError('out argument is not supported yet')

result = self
if lower is not None:
@@ -2028,7 +2029,7 @@ def dot(self, other):
lvals = self.values
rvals = np.asarray(other)
if lvals.shape[0] != rvals.shape[0]:
- raise Exception('Dot product shape mismatch, %s vs %s' %
+ raise ValueError('Dot product shape mismatch, %s vs %s' %
(lvals.shape, rvals.shape))

if isinstance(other, DataFrame):
@@ -2379,7 +2380,7 @@ def sortlevel(self, level=0, ascending=True):
sorted : Series
"""
if not isinstance(self.index, MultiIndex):
- raise Exception('can only sort by level with a hierarchical index')
+ raise TypeError('can only sort by level with a hierarchical index')

new_index, indexer = self.index.sortlevel(level, ascending=ascending)
new_values = self.values.take(indexer)
@@ -2417,7 +2418,7 @@ def reorder_levels(self, order):
type of caller (new object)
"""
if not isinstance(self.index, MultiIndex): # pragma: no cover
- raise Exception('Can only reorder levels on a hierarchical axis.')
+ raise TypeError('Can only reorder levels on a hierarchical axis.')

result = self.copy()
result.index = result.index.reorder_levels(order)
@@ -3156,7 +3157,7 @@ def interpolate(self, method='linear'):
"""
if method == 'time':
if not isinstance(self, TimeSeries):
- raise Exception('time-weighted interpolation only works'
+ raise TypeError('time-weighted interpolation only works'
'on TimeSeries')
method = 'values'
# inds = pa.array([d.toordinal() for d in self.index])
@@ -3279,7 +3280,7 @@ def tz_localize(self, tz, copy=True):

if not isinstance(self.index, DatetimeIndex):
if len(self.index) > 0:
- raise Exception('Cannot tz-localize non-time series')
+ raise TypeError('Cannot tz-localize non-time series')

new_index = DatetimeIndex([], tz=tz)
else:
@@ -3413,7 +3414,7 @@ def _try_cast(arr, take_fast_path):

elif subarr.ndim > 1:
if isinstance(data, pa.Array):
- raise Exception('Data must be 1-dimensional')
+ raise ValueError('Data must be 1-dimensional')
else:
subarr = _asarray_tuplesafe(data, dtype=dtype)

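The series.py hunks follow the same idea; for instance, tz-localizing a Series whose index is not datetime-like now raises TypeError, which callers can target directly. A minimal sketch (illustrative, not part of this diff):

import pandas as pd

s = pd.Series([1, 2, 3])   # plain integer index, not a DatetimeIndex
try:
    s.tz_localize('UTC')
except TypeError as err:
    print('cannot localize:', err)
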
2 changes: 1 addition & 1 deletion pandas/io/excel.py
@@ -368,7 +368,7 @@ def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
if sheet_name is None:
sheet_name = self.cur_sheet
if sheet_name is None: # pragma: no cover
- raise Exception('Must pass explicit sheet_name or set '
+ raise TypeError('Must pass explicit sheet_name or set '
'cur_sheet property')
if self.use_xlsx:
self._writecells_xlsx(cells, sheet_name, startrow, startcol)
2 changes: 1 addition & 1 deletion pandas/rpy/common.py
@@ -195,7 +195,7 @@ def convert_robj(obj, use_pandas=True):
if isinstance(obj, rpy_type):
return converter(obj)

- raise Exception('Do not know what to do with %s object' % type(obj))
+ raise TypeError('Do not know what to do with %s object' % type(obj))


def convert_to_r_posixct(obj):
10 changes: 5 additions & 5 deletions pandas/sparse/array.py
@@ -261,7 +261,7 @@ def _get_val_at(self, loc):
loc += n

if loc >= len(self) or loc < 0:
- raise Exception('Out of bounds access')
+ raise IndexError('Out of bounds access')

sp_loc = self.sp_index.lookup(loc)
if sp_loc == -1:
@@ -283,7 +283,7 @@ def take(self, indices, axis=0):

n = len(self)
if (indices < 0).any() or (indices >= n).any():
- raise Exception('out of bounds access')
+ raise IndexError('out of bounds access')

if self.sp_index.npoints > 0:
locs = np.array([self.sp_index.lookup(loc) for loc in indices])
@@ -296,10 +296,10 @@ def take(self, indices, axis=0):
return result

def __setitem__(self, key, value):
- raise Exception('SparseArray objects are immutable')
+ raise TypeError('%r object does not support item assignment' % self.__class__.__name__)

def __setslice__(self, i, j, value):
- raise Exception('SparseArray objects are immutable')
+ raise TypeError('%r object does not support item assignment' % self.__class__.__name__)

def to_dense(self):
"""
@@ -313,7 +313,7 @@ def astype(self, dtype=None):
"""
dtype = np.dtype(dtype)
if dtype is not None and dtype not in (np.float_, float):
- raise Exception('Can only support floating point data for now')
+ raise TypeError('Can only support floating point data for now')
return self.copy()

def copy(self, deep=True):
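The sparse/array.py hunks apply the same split: out-of-bounds access becomes IndexError, while unsupported mutation and unsupported dtypes become TypeError. A generic sketch of that split in plain Python (illustrative only, not pandas code):

class ImmutableValues(object):
    """Toy container mirroring the error contract adopted above."""

    def __init__(self, values):
        self._values = list(values)

    def __getitem__(self, loc):
        # bounds problems are IndexError, matching _get_val_at / take above
        if loc >= len(self._values) or loc < -len(self._values):
            raise IndexError('Out of bounds access')
        return self._values[loc]

    def __setitem__(self, key, value):
        # mutation is unsupported, so TypeError, matching __setitem__ above
        raise TypeError('%r object does not support item assignment'
                        % type(self).__name__)
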
12 changes: 6 additions & 6 deletions pandas/sparse/frame.py
@@ -195,10 +195,10 @@ def _init_matrix(self, data, index, columns, dtype=None):
columns = _default_index(K)

if len(columns) != K:
- raise Exception('Column length mismatch: %d vs. %d' %
+ raise ValueError('Column length mismatch: %d vs. %d' %
(len(columns), K))
if len(index) != N:
- raise Exception('Index length mismatch: %d vs. %d' %
+ raise ValueError('Index length mismatch: %d vs. %d' %
(len(index), N))

data = dict([(idx, data[:, i]) for i, idx in enumerate(columns)])
@@ -585,7 +585,7 @@ def _combine_const(self, other, func):
def _reindex_index(self, index, method, copy, level, fill_value=np.nan,
limit=None):
if level is not None:
- raise Exception('Reindex by level not supported for sparse')
+ raise TypeError('Reindex by level not supported for sparse')

if self.index.equals(index):
if copy:
@@ -616,7 +616,7 @@ def _reindex_columns(self, columns, copy, level, fill_value, limit=None):

def _reindex_columns(self, columns, copy, level, fill_value, limit=None):
if level is not None:
- raise Exception('Reindex by level not supported for sparse')
+ raise TypeError('Reindex by level not supported for sparse')

if com.notnull(fill_value):
raise NotImplementedError
@@ -891,7 +891,7 @@ def stack_sparse_frame(frame):
vals_to_concat = []
for _, series in frame.iteritems():
if not np.isnan(series.fill_value):
- raise Exception('This routine assumes NaN fill value')
+ raise TypeError('This routine assumes NaN fill value')

int_index = series.sp_index.to_int_index()
inds_to_concat.append(int_index.indices)
@@ -931,7 +931,7 @@ def homogenize(series_dict):

for _, series in series_dict.iteritems():
if not np.isnan(series.fill_value):
- raise Exception('this method is only valid with NaN fill values')
+ raise TypeError('this method is only valid with NaN fill values')

if index is None:
index = series.sp_index