
Parse raised exceptions and their error messages sans interpolation/format spec #3519

Closed · wants to merge 1 commit
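The change applied throughout the diff below is mechanical: bare `raise AssertionError()` statements are replaced by assertions whose messages are built with `str.format`, so a raised exception reports what failed and, as the title suggests, the message template can be checked without depending on the interpolated values. A minimal standalone sketch of the pattern; the `AXIS_LEN` constant and `get_value` function are illustrative only and merely mirror the shape of the real changes in `pandas/core/panel.py`:

```python
# Illustrative sketch: names are hypothetical, not taken verbatim
# from the pandas code in this diff.

AXIS_LEN = 3  # hypothetical: number of axes an argument list must cover


def get_value(*args):
    # Before this PR the check would read:
    #     if not ((len(args) == AXIS_LEN)):
    #         raise AssertionError()
    # After, the same condition raises a message built with str.format:
    if len(args) != AXIS_LEN:
        raise AssertionError('There must be an argument for each axis, '
                             'you gave {0} args, but {1} are '
                             'required'.format(len(args), AXIS_LEN))
    return args


try:
    get_value(1, 2)  # one argument short
except AssertionError as exc:
    print(exc)  # names both the given and the required argument count
```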
5 changes: 3 additions & 2 deletions pandas/core/frame.py
@@ -710,8 +710,9 @@ def __unicode__(self):
self.info(buf=buf, verbose=verbose)

value = buf.getvalue()
if not type(value) == unicode:
raise AssertionError()
if not isinstance(value, unicode):
raise AssertionError("'{0}' is not of type 'unicode', it has "
"type '{0}'".format(type(value)))

return value

34 changes: 23 additions & 11 deletions pandas/core/panel.py
@@ -631,8 +631,10 @@ def get_value(self, *args):
value : scalar value
"""
# require an arg for each axis
if not ((len(args) == self._AXIS_LEN)):
raise AssertionError()
if len(args) != self._AXIS_LEN:
raise AssertionError('There must be an argument for each axis, '
'you gave {0} args, but {1} are '
'required'.format(len(args), self._AXIS_LEN))

# hm, two layers to the onion
frame = self._get_item_cache(args[0])
@@ -656,8 +658,12 @@ def set_value(self, *args):
otherwise a new object
"""
# require an arg for each axis and the value
if not ((len(args) == self._AXIS_LEN + 1)):
raise AssertionError()
if len(args) != self._AXIS_LEN + 1:
raise AssertionError('There must be an argument for each axis plus'
' the value provided, you gave {0} args, '
'but {1} are required'.format(len(args),
self._AXIS_LEN
+ 1))

try:
frame = self._get_item_cache(args[0])
@@ -667,7 +673,7 @@ def set_value(self, *args):
axes = self._expand_axes(args)
d = self._construct_axes_dict_from(self, axes, copy=False)
result = self.reindex(**d)
args = list(args)
args = list(args)
likely_dtype, args[-1] = _infer_dtype_from_scalar(args[-1])
made_bigger = not np.array_equal(
axes[0], getattr(self, self._info_axis))
@@ -702,8 +708,10 @@ def __setitem__(self, key, value):
**self._construct_axes_dict_for_slice(self._AXIS_ORDERS[1:]))
mat = value.values
elif isinstance(value, np.ndarray):
if not ((value.shape == shape[1:])):
raise AssertionError()
if value.shape != shape[1:]:
raise AssertionError('shape of value must be {0}, shape of '
'given object was '
'{1}'.format(shape[1:], value.shape))
mat = np.asarray(value)
elif np.isscalar(value):
dtype, value = _infer_dtype_from_scalar(value)
@@ -1513,8 +1521,9 @@ def _extract_axes(self, data, axes, **kwargs):
@staticmethod
def _extract_axes_for_slice(self, axes):
""" return the slice dictionary for these axes """
return dict([(self._AXIS_SLICEMAP[i], a) for i, a
in zip(self._AXIS_ORDERS[self._AXIS_LEN - len(axes):], axes)])
return dict([(self._AXIS_SLICEMAP[i], a)
for i, a in zip(self._AXIS_ORDERS[self._AXIS_LEN -
len(axes):], axes)])

@staticmethod
def _prep_ndarray(self, values, copy=True):
@@ -1526,8 +1535,11 @@ def _prep_ndarray(self, values, copy=True):
else:
if copy:
values = values.copy()
if not ((values.ndim == self._AXIS_LEN)):
raise AssertionError()
if values.ndim != self._AXIS_LEN:
raise AssertionError("The number of dimensions required is {0}, "
"but the number of dimensions of the "
"ndarray given was {1}".format(self._AXIS_LEN,
values.ndim))
return values

@staticmethod
23 changes: 13 additions & 10 deletions pandas/core/series.py
@@ -1146,8 +1146,9 @@ def __unicode__(self):
else:
result = u'Series([], dtype: %s)' % self.dtype

if not ( type(result) == unicode):
raise AssertionError()
if not isinstance(result, unicode):
raise AssertionError("result must be of type unicode, type"
" of result is '{0}'".format(type(result)))
return result

def __repr__(self):
@@ -1216,9 +1217,9 @@ def to_string(self, buf=None, na_rep='NaN', float_format=None,
length=length, dtype=dtype, name=name)

# catch contract violations
if not type(the_repr) == unicode:
raise AssertionError("expected unicode string")

if not isinstance(the_repr, unicode):
raise AssertionError("result must be of type unicode, type"
" of result is '{0}'".format(type(the_repr)))
if buf is None:
return the_repr
else:
@@ -1228,19 +1229,21 @@ def to_string(self, buf=None, na_rep='NaN', float_format=None,
with open(buf, 'w') as f:
f.write(the_repr)

def _get_repr(self, name=False, print_header=False, length=True, dtype=True,
na_rep='NaN', float_format=None):
def _get_repr(self, name=False, print_header=False, length=True,
dtype=True, na_rep='NaN', float_format=None):
"""

Internal function, should always return unicode string
"""

formatter = fmt.SeriesFormatter(self, name=name, header=print_header,
length=length, dtype=dtype, na_rep=na_rep,
length=length, dtype=dtype,
na_rep=na_rep,
float_format=float_format)
result = formatter.to_string()
if not ( type(result) == unicode):
raise AssertionError()
if not isinstance(result, unicode):
raise AssertionError("result must be of type unicode, type"
" of result is '{0}'".format(type(result)))
return result

def __iter__(self):
10 changes: 6 additions & 4 deletions pandas/io/date_converters.py
@@ -46,12 +46,14 @@ def _maybe_cast(arr):


def _check_columns(cols):
if not ((len(cols) > 0)):
raise AssertionError()
if not len(cols):
raise AssertionError("There must be at least 1 column")

N = len(cols[0])
for c in cols[1:]:
if not ((len(c) == N)):
raise AssertionError()
if len(c) != N:
raise AssertionError('All columns must have the same length: '
'{0}, at least one column has '
'length {1}'.format(N, len(c)))

return N
27 changes: 16 additions & 11 deletions pandas/io/parsers.py
@@ -630,8 +630,10 @@ def _clean_options(self, options, engine):

# type conversion-related
if converters is not None:
if not (isinstance(converters, dict)):
raise AssertionError()
if not isinstance(converters, dict):
raise AssertionError('Type converters must be a dict or'
' subclass, input was '
'a {0}'.format(type(converters)))
else:
converters = {}

@@ -1649,8 +1651,8 @@ def _rows_to_cols(self, content):
if self._implicit_index:
col_len += len(self.index_col)

if not ((self.skip_footer >= 0)):
raise AssertionError()
if self.skip_footer < 0:
raise AssertionError('skip footer cannot be negative')

if col_len != zip_len and self.index_col is not False:
row_num = -1
@@ -1946,15 +1948,18 @@ def __init__(self, f, colspecs, filler, thousands=None):
self.filler = filler # Empty characters between fields.
self.thousands = thousands

if not ( isinstance(colspecs, (tuple, list))):
raise AssertionError()
if not isinstance(colspecs, (tuple, list)):
raise AssertionError("column specifications must be a list or"
" tuple, input was "
"a {0}".format(type(colspecs)))

for colspec in colspecs:
if not ( isinstance(colspec, (tuple, list)) and
len(colspec) == 2 and
isinstance(colspec[0], int) and
isinstance(colspec[1], int) ):
raise AssertionError()
if not (isinstance(colspec, (tuple, list)) and
len(colspec) == 2 and
isinstance(colspec[0], int) and
isinstance(colspec[1], int)):
raise AssertionError('Each column specification must be a '
'2 element tuple or list of integers')

def next(self):
line = next(self.f)
23 changes: 12 additions & 11 deletions pandas/sparse/array.py
@@ -14,7 +14,6 @@

from pandas._sparse import BlockIndex, IntIndex
import pandas._sparse as splib
import pandas.lib as lib
import pandas.index as _index


@@ -25,8 +24,8 @@ def _sparse_op_wrap(op, name):
"""
def wrapper(self, other):
if isinstance(other, np.ndarray):
if not ((len(self) == len(other))):
raise AssertionError()
if len(self) != len(other):
raise AssertionError("Operands must be of the same size")
if not isinstance(other, SparseArray):
other = SparseArray(other, fill_value=self.fill_value)
return _sparse_array_op(self, other, op, name)
@@ -130,8 +129,10 @@ def __new__(cls, data, sparse_index=None, kind='integer', fill_value=None,
fill_value=fill_value)
else:
values = data
if not ((len(values) == sparse_index.npoints)):
raise AssertionError()
if len(values) != sparse_index.npoints:
raise AssertionError("Non array-like type {0} must have"
" the same length as the"
" index".format(type(values)))

# Create array, do *not* copy data by default
if copy:
@@ -277,13 +278,13 @@ def take(self, indices, axis=0):
-------
taken : ndarray
"""
if not ((axis == 0)):
raise AssertionError()
if axis:
raise AssertionError("axis must be 0, input was {0}".format(axis))
indices = np.asarray(indices, dtype=int)

n = len(self)
if (indices < 0).any() or (indices >= n).any():
raise Exception('out of bounds access')
raise IndexError('out of bounds access')

if self.sp_index.npoints > 0:
locs = np.array([self.sp_index.lookup(loc) for loc in indices])
@@ -296,10 +297,10 @@ def __setitem__(self, key, value):
return result

def __setitem__(self, key, value):
raise Exception('SparseArray objects are immutable')
raise TypeError('SparseArray objects are immutable')

def __setslice__(self, i, j, value):
raise Exception('SparseArray objects are immutable')
raise TypeError('SparseArray objects are immutable')

def to_dense(self):
"""
@@ -313,7 +314,7 @@ def astype(self, dtype=None):
"""
dtype = np.dtype(dtype)
if dtype is not None and dtype not in (np.float_, float):
raise Exception('Can only support floating point data for now')
raise TypeError('Can only support floating point data for now')
return self.copy()

def copy(self, deep=True):
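Besides the added messages, `pandas/sparse/array.py` above also swaps generic `Exception` for more specific built-in types: `IndexError` for an out-of-bounds `take`, and `TypeError` for mutating an immutable object or requesting an unsupported dtype. A minimal sketch of that choice, assuming a hypothetical `ImmutableFloatArray` class rather than the real `SparseArray`:

```python
import numpy as np


class ImmutableFloatArray(object):
    """Illustrative stand-in for an immutable, float-only array."""

    def __init__(self, values):
        self._values = np.asarray(values, dtype=np.float64)

    def take(self, indices):
        indices = np.asarray(indices, dtype=int)
        # Out-of-bounds access is an indexing problem, so IndexError is a
        # better fit than a bare Exception.
        if (indices < 0).any() or (indices >= len(self._values)).any():
            raise IndexError('out of bounds access')
        return self._values.take(indices)

    def __setitem__(self, key, value):
        # Assigning into an immutable container is a misuse of the type.
        raise TypeError('ImmutableFloatArray objects are immutable')
```

Callers can then catch `IndexError` or `TypeError` specifically instead of a blanket `Exception`.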
8 changes: 3 additions & 5 deletions pandas/sparse/frame.py
@@ -8,19 +8,17 @@
from numpy import nan
import numpy as np

from pandas.core.common import _pickle_array, _unpickle_array, _try_sort
from pandas.core.common import _unpickle_array, _try_sort
from pandas.core.index import Index, MultiIndex, _ensure_index
from pandas.core.indexing import _check_slice_bounds, _maybe_convert_indices
from pandas.core.series import Series
from pandas.core.frame import (DataFrame, extract_index, _prep_ndarray,
_default_index)
from pandas.util.decorators import cache_readonly
import pandas.core.common as com
import pandas.core.datetools as datetools

from pandas.sparse.series import SparseSeries
from pandas.util.decorators import Appender
import pandas.lib as lib


class _SparseMockBlockManager(object):
@@ -713,8 +711,8 @@ def _join_compat(self, other, on=None, how='left', lsuffix='', rsuffix='',

def _join_index(self, other, how, lsuffix, rsuffix):
if isinstance(other, Series):
if not (other.name is not None):
raise AssertionError()
if other.name is None:
raise AssertionError('Cannot join series with no name')

other = SparseDataFrame({other.name: other},
default_fill_value=self.default_fill_value)
3 changes: 2 additions & 1 deletion pandas/sparse/panel.py
@@ -72,7 +72,8 @@ def __init__(self, frames, items=None, major_axis=None, minor_axis=None,
frames = new_frames

if not (isinstance(frames, dict)):
raise AssertionError()
raise AssertionError('input must be a dict, a {0} was'
' passed'.format(type(frames)))

self.default_fill_value = fill_value = default_fill_value
self.default_kind = kind = default_kind
19 changes: 14 additions & 5 deletions pandas/sparse/series.py
@@ -110,8 +110,11 @@ def __new__(cls, data, index=None, sparse_index=None, kind='block',
if isinstance(data, SparseSeries) and index is None:
index = data.index
elif index is not None:
if not (len(index) == len(data)):
raise AssertionError()
if len(index) != len(data):
raise AssertionError('Passed index and data must have the '
'same length, len(data) == {0}, '
'len(index) == '
'{1}'.format(len(data), len(index)))

sparse_index = data.sp_index
values = np.asarray(data)
@@ -129,8 +132,14 @@ def __new__(cls, data, index=None, sparse_index=None, kind='block',
fill_value=fill_value)
else:
values = data
if not (len(values) == sparse_index.npoints):
raise AssertionError()
if len(values) != sparse_index.npoints:
raise AssertionError('length of input must be the same as the '
'length of the given index, '
'len(values) == {0}, '
'sparse_index.npoints == '
'{1}'.format(len(values),
sparse_index.npoints))
else:
if index is None:
raise Exception('must pass index!')
@@ -449,7 +458,7 @@ def sparse_reindex(self, new_index):
reindexed : SparseSeries
"""
if not (isinstance(new_index, splib.SparseIndex)):
raise AssertionError()
raise AssertionError('new index must be a SparseIndex')

new_values = self.sp_index.to_int_index().reindex(self.sp_values,
self.fill_value,
13 changes: 7 additions & 6 deletions pandas/stats/ols.py
@@ -634,8 +634,8 @@ def _set_window(self, window_type, window, min_periods):
self._window_type = scom._get_window_type(window_type)

if self._is_rolling:
if not ((window is not None)):
raise AssertionError()
if window is None:
raise AssertionError("'window' cannot be None")
if min_periods is None:
min_periods = window
else:
@@ -1212,8 +1212,9 @@ def _nobs_raw(self):
return result.astype(int)

def _beta_matrix(self, lag=0):
if not ((lag >= 0)):
raise AssertionError()
if lag < 0:
raise AssertionError("'lag' must be greater than or equal to 0, "
"input was {0}".format(lag))

betas = self._beta_raw

@@ -1276,8 +1277,8 @@ def _filter_data(lhs, rhs, weights=None):
Cleaned lhs and rhs
"""
if not isinstance(lhs, Series):
if not ((len(lhs) == len(rhs))):
raise AssertionError()
if len(lhs) != len(rhs):
raise AssertionError("length of lhs must equal length of rhs")
lhs = Series(lhs, index=rhs.index)

rhs = _combine_rhs(rhs)