
Commit 24827e5

Fix exception causes all over the code (pandas-dev#32322)
1 parent: fc60870


52 files changed (+243 -217 lines)
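Every hunk in this commit applies the same two-line change: bind the caught exception (`except SomeError as err:`) and re-raise with explicit chaining (`raise NewError(...) from err`), so the original exception is preserved as `__cause__` rather than appearing only as implicit context. A minimal, self-contained sketch of the before/after pattern; the `_OPTIONS`, `OptionError`, and `get_opt_*` names below are hypothetical illustrations, not pandas APIs:

# Hypothetical sketch of the pattern applied throughout this commit.
_OPTIONS = {"display.width": 80}


class OptionError(KeyError):
    """Domain-specific error raised for unknown options (illustrative only)."""


def get_opt_before(key):
    try:
        return _OPTIONS[key]
    except KeyError:
        # Before: bare raise inside an except block. The KeyError is still
        # shown, but only as implicit context ("During handling of the above
        # exception, another exception occurred").
        raise OptionError(f"No such option: {key}")


def get_opt_after(key):
    try:
        return _OPTIONS[key]
    except KeyError as err:
        # After: explicit chaining. The KeyError becomes __cause__ and the
        # traceback reads "The above exception was the direct cause of the
        # following exception".
        raise OptionError(f"No such option: {key}") from err


try:
    get_opt_after("no.such.option")
except OptionError as exc:
    assert isinstance(exc.__cause__, KeyError)  # original cause is preserved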

doc/make.py

+2 -2

@@ -83,8 +83,8 @@ def _process_single_doc(self, single_doc):
                 obj = pandas  # noqa: F821
                 for name in single_doc.split("."):
                     obj = getattr(obj, name)
-            except AttributeError:
-                raise ImportError(f"Could not import {single_doc}")
+            except AttributeError as err:
+                raise ImportError(f"Could not import {single_doc}") from err
             else:
                 return single_doc[len("pandas.") :]
         else:

pandas/__init__.py

+7 -5

@@ -37,7 +37,7 @@
         f"C extension: {module} not built. If you want to import "
         "pandas from the source directory, you may need to run "
         "'python setup.py build_ext --inplace --force' to build the C extensions first."
-    )
+    ) from e

 from pandas._config import (
     get_option,
@@ -290,8 +290,8 @@ def __getattr__(self, item):

             try:
                 return getattr(self.np, item)
-            except AttributeError:
-                raise AttributeError(f"module numpy has no attribute {item}")
+            except AttributeError as err:
+                raise AttributeError(f"module numpy has no attribute {item}") from err

     np = __numpy()

@@ -306,8 +306,10 @@ def __getattr__(cls, item):

             try:
                 return getattr(cls.datetime, item)
-            except AttributeError:
-                raise AttributeError(f"module datetime has no attribute {item}")
+            except AttributeError as err:
+                raise AttributeError(
+                    f"module datetime has no attribute {item}"
+                ) from err

         def __instancecheck__(cls, other):
             return isinstance(other, cls.datetime)

pandas/_config/config.py

+2 -2

@@ -213,8 +213,8 @@ def __getattr__(self, key: str):
         prefix += key
         try:
             v = object.__getattribute__(self, "d")[key]
-        except KeyError:
-            raise OptionError("No such option")
+        except KeyError as err:
+            raise OptionError("No such option") from err
         if isinstance(v, dict):
             return DictWrapper(v, prefix)
         else:

pandas/core/algorithms.py

+2 -2

@@ -686,8 +686,8 @@ def value_counts(
         values = Series(values)
         try:
             ii = cut(values, bins, include_lowest=True)
-        except TypeError:
-            raise TypeError("bins argument only works with numeric data.")
+        except TypeError as err:
+            raise TypeError("bins argument only works with numeric data.") from err

         # count, remove nulls (from the index), and but the bins
         result = ii.value_counts(dropna=dropna)

pandas/core/arrays/_ranges.py

+2 -2

@@ -121,8 +121,8 @@ def _generate_range_overflow_safe(
         #  we cannot salvage the operation by recursing, so raise
         try:
             addend = np.uint64(periods) * np.uint64(np.abs(stride))
-        except FloatingPointError:
-            raise OutOfBoundsDatetime(msg)
+        except FloatingPointError as err:
+            raise OutOfBoundsDatetime(msg) from err

     if np.abs(addend) <= i64max:
         # relatively easy case without casting concerns

pandas/core/arrays/categorical.py

+4 -4

@@ -350,7 +350,7 @@ def __init__(
         if dtype.categories is None:
             try:
                 codes, categories = factorize(values, sort=True)
-            except TypeError:
+            except TypeError as err:
                 codes, categories = factorize(values, sort=False)
                 if dtype.ordered:
                     # raise, as we don't have a sortable data structure and so
@@ -359,13 +359,13 @@ def __init__(
                         "'values' is not ordered, please "
                         "explicitly specify the categories order "
                         "by passing in a categories argument."
-                    )
-            except ValueError:
+                    ) from err
+            except ValueError as err:

                 # FIXME
                 raise NotImplementedError(
                     "> 1 ndim Categorical are not supported at this time"
-                )
+                ) from err

             # we're inferring from values
             dtype = CategoricalDtype(categories, dtype.ordered)

pandas/core/arrays/datetimes.py

+2 -2

@@ -2080,11 +2080,11 @@ def _infer_tz_from_endpoints(start, end, tz):
     """
     try:
         inferred_tz = timezones.infer_tzinfo(start, end)
-    except AssertionError:
+    except AssertionError as err:
         # infer_tzinfo raises AssertionError if passed mismatched timezones
         raise TypeError(
             "Start and end cannot both be tz-aware with different timezones"
-        )
+        ) from err

     inferred_tz = timezones.maybe_get_tz(inferred_tz)
     tz = timezones.maybe_get_tz(tz)

pandas/core/arrays/integer.py

+4 -4

@@ -154,15 +154,15 @@ def safe_cast(values, dtype, copy: bool):
     """
     try:
         return values.astype(dtype, casting="safe", copy=copy)
-    except TypeError:
+    except TypeError as err:

         casted = values.astype(dtype, copy=copy)
         if (casted == values).all():
             return casted

         raise TypeError(
             f"cannot safely cast non-equivalent {values.dtype} to {np.dtype(dtype)}"
-        )
+        ) from err


 def coerce_to_array(
@@ -199,8 +199,8 @@ def coerce_to_array(
         if not issubclass(type(dtype), _IntegerDtype):
             try:
                 dtype = _dtypes[str(np.dtype(dtype))]
-            except KeyError:
-                raise ValueError(f"invalid dtype specified {dtype}")
+            except KeyError as err:
+                raise ValueError(f"invalid dtype specified {dtype}") from err

     if isinstance(values, IntegerArray):
         values, mask = values._data, values._mask

pandas/core/arrays/interval.py

+14 -14

@@ -448,12 +448,12 @@ def from_tuples(cls, data, closed="right", copy=False, dtype=None):
                 try:
                     # need list of length 2 tuples, e.g. [(0, 1), (1, 2), ...]
                     lhs, rhs = d
-                except ValueError:
+                except ValueError as err:
                     msg = f"{name}.from_tuples requires tuples of length 2, got {d}"
-                    raise ValueError(msg)
-                except TypeError:
+                    raise ValueError(msg) from err
+                except TypeError as err:
                     msg = f"{name}.from_tuples received an invalid item, {d}"
-                    raise TypeError(msg)
+                    raise TypeError(msg) from err
             left.append(lhs)
             right.append(rhs)

@@ -538,10 +538,10 @@ def __setitem__(self, key, value):
         try:
             array = IntervalArray(value)
             value_left, value_right = array.left, array.right
-        except TypeError:
+        except TypeError as err:
             # wrong type: not interval or NA
             msg = f"'value' should be an interval type, got {type(value)} instead."
-            raise TypeError(msg)
+            raise TypeError(msg) from err

         key = check_array_indexer(self, key)
         # Need to ensure that left and right are updated atomically, so we're
@@ -688,20 +688,20 @@ def astype(self, dtype, copy=True):
             try:
                 new_left = self.left.astype(dtype.subtype)
                 new_right = self.right.astype(dtype.subtype)
-            except TypeError:
+            except TypeError as err:
                 msg = (
                     f"Cannot convert {self.dtype} to {dtype}; subtypes are incompatible"
                 )
-                raise TypeError(msg)
+                raise TypeError(msg) from err
             return self._shallow_copy(new_left, new_right)
         elif is_categorical_dtype(dtype):
             return Categorical(np.asarray(self))
         # TODO: This try/except will be repeated.
         try:
             return np.asarray(self).astype(dtype, copy=copy)
-        except (TypeError, ValueError):
+        except (TypeError, ValueError) as err:
             msg = f"Cannot cast {type(self).__name__} to dtype {dtype}"
-            raise TypeError(msg)
+            raise TypeError(msg) from err

     @classmethod
     def _concat_same_type(cls, to_concat):
@@ -1020,13 +1020,13 @@ def length(self):
         """
         try:
             return self.right - self.left
-        except TypeError:
+        except TypeError as err:
             # length not defined for some types, e.g. string
             msg = (
                 "IntervalArray contains Intervals without defined length, "
                 "e.g. Intervals with string endpoints"
             )
-            raise TypeError(msg)
+            raise TypeError(msg) from err

     @property
     def mid(self):
@@ -1100,11 +1100,11 @@ def __arrow_array__(self, type=None):

         try:
             subtype = pyarrow.from_numpy_dtype(self.dtype.subtype)
-        except TypeError:
+        except TypeError as err:
             raise TypeError(
                 f"Conversion to arrow with subtype '{self.dtype.subtype}' "
                 "is not supported"
-            )
+            ) from err
         interval_type = ArrowIntervalType(subtype, self.closed)
         storage_array = pyarrow.StructArray.from_arrays(
             [

pandas/core/arrays/sparse/dtype.py

+2 -2

@@ -217,8 +217,8 @@ def construct_from_string(cls, string: str) -> "SparseDtype":
         if string.startswith("Sparse"):
             try:
                 sub_type, has_fill_value = cls._parse_subtype(string)
-            except ValueError:
-                raise TypeError(msg)
+            except ValueError as err:
+                raise TypeError(msg) from err
             else:
                 result = SparseDtype(sub_type)
                 msg = (

pandas/core/arrays/sparse/scipy_sparse.py

+4 -2

@@ -134,8 +134,10 @@ def _coo_to_sparse_series(A, dense_index: bool = False):

     try:
         s = Series(A.data, MultiIndex.from_arrays((A.row, A.col)))
-    except AttributeError:
-        raise TypeError(f"Expected coo_matrix. Got {type(A).__name__} instead.")
+    except AttributeError as err:
+        raise TypeError(
+            f"Expected coo_matrix. Got {type(A).__name__} instead."
+        ) from err
     s = s.sort_index()
     s = s.astype(SparseDtype(s.dtype))
     if dense_index:

pandas/core/arrays/timedeltas.py

+2 -2

@@ -451,10 +451,10 @@ def _addsub_object_array(self, other, op):
             # subclasses. Incompatible classes will raise AttributeError,
             # which we re-raise as TypeError
             return super()._addsub_object_array(other, op)
-        except AttributeError:
+        except AttributeError as err:
             raise TypeError(
                 f"Cannot add/subtract non-tick DateOffset to {type(self).__name__}"
-            )
+            ) from err

     def __mul__(self, other):
         other = lib.item_from_zerodim(other)

pandas/core/base.py

+6 -4

@@ -458,7 +458,7 @@ def is_any_frame() -> bool:
                 # return a MI Series
                 try:
                     result = concat(result)
-                except TypeError:
+                except TypeError as err:
                     # we want to give a nice error here if
                     # we have non-same sized objects, so
                     # we don't automatically broadcast
@@ -467,7 +467,7 @@ def is_any_frame() -> bool:
                         "cannot perform both aggregation "
                         "and transformation operations "
                         "simultaneously"
-                    )
+                    ) from err

                 return result, True

@@ -553,7 +553,7 @@ def _aggregate_multiple_funcs(self, arg, _axis):

         try:
             return concat(results, keys=keys, axis=1, sort=False)
-        except TypeError:
+        except TypeError as err:

             # we are concatting non-NDFrame objects,
             # e.g. a list of scalars
@@ -562,7 +562,9 @@ def _aggregate_multiple_funcs(self, arg, _axis):

             result = Series(results, index=keys, name=self.name)
             if is_nested_object(result):
-                raise ValueError("cannot combine transform and aggregation operations")
+                raise ValueError(
+                    "cannot combine transform and aggregation operations"
+                ) from err
             return result

     def _get_cython_func(self, arg: str) -> Optional[str]:

pandas/core/computation/eval.py

+4 -4

@@ -362,8 +362,8 @@ def eval(
             if not inplace and first_expr:
                 try:
                     target = env.target.copy()
-                except AttributeError:
-                    raise ValueError("Cannot return a copy of the target")
+                except AttributeError as err:
+                    raise ValueError("Cannot return a copy of the target") from err
             else:
                 target = env.target

@@ -375,8 +375,8 @@ def eval(
                 with warnings.catch_warnings(record=True):
                     # TODO: Filter the warnings we actually care about here.
                     target[assigner] = ret
-            except (TypeError, IndexError):
-                raise ValueError("Cannot assign expression output to target")
+            except (TypeError, IndexError) as err:
+                raise ValueError("Cannot assign expression output to target") from err

             if not resolvers:
                 resolvers = ({assigner: ret},)

pandas/core/computation/ops.py

+4 -4

@@ -372,12 +372,12 @@ def __init__(self, op: str, lhs, rhs):

         try:
             self.func = _binary_ops_dict[op]
-        except KeyError:
+        except KeyError as err:
             # has to be made a list for python3
             keys = list(_binary_ops_dict.keys())
             raise ValueError(
                 f"Invalid binary operator {repr(op)}, valid operators are {keys}"
-            )
+            ) from err

     def __call__(self, env):
         """
@@ -550,11 +550,11 @@ def __init__(self, op: str, operand):

         try:
             self.func = _unary_ops_dict[op]
-        except KeyError:
+        except KeyError as err:
             raise ValueError(
                 f"Invalid unary operator {repr(op)}, "
                 f"valid operators are {_unary_ops_syms}"
-            )
+            ) from err

     def __call__(self, env):
         operand = self.operand(env)

pandas/core/computation/parsing.py

+2 -2

@@ -185,7 +185,7 @@ def tokenize_string(source: str) -> Iterator[Tuple[int, str]]:
                 yield tokenize_backtick_quoted_string(
                     token_generator, source, string_start=start[1] + 1
                 )
-            except Exception:
-                raise SyntaxError(f"Failed to parse backticks in '{source}'.")
+            except Exception as err:
+                raise SyntaxError(f"Failed to parse backticks in '{source}'.") from err
             else:
                 yield toknum, tokval
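The change is visible only in the traceback and in the `__cause__` attribute of the re-raised exception; behaviour is otherwise unchanged. A short, hypothetical demonstration of that effect (not pandas code; `parse_int` is an illustrative name):

import traceback


def parse_int(value):
    try:
        return int(value)
    except ValueError as err:
        # Same pattern as the hunks above: re-raise with an explicit cause.
        raise TypeError(f"could not parse {value!r} as an integer") from err


try:
    parse_int("not-a-number")
except TypeError:
    # The formatted traceback shows both exceptions, separated by
    # "The above exception was the direct cause of the following exception:".
    print(traceback.format_exc())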
