TST: fix pyarrow arithmetic xfails #50877

Merged
merged 2 commits on Feb 2, 2023

76 changes: 64 additions & 12 deletions pandas/tests/extension/test_arrow.py
@@ -1010,14 +1010,29 @@ def _patch_combine(self, obj, other, op):
else:
expected_data = expected
original_dtype = obj.dtype
pa_array = pa.array(expected_data._values).cast(original_dtype.pyarrow_dtype)
pd_array = type(expected_data._values)(pa_array)

pa_expected = pa.array(expected_data._values)

if pa.types.is_duration(pa_expected.type):
# pyarrow sees sequence of datetime/timedelta objects and defaults
# to "us" but the non-pointwise op retains unit
unit = original_dtype.pyarrow_dtype.unit
if type(other) in [datetime, timedelta] and unit in ["s", "ms"]:
# pydatetime/pytimedelta objects have microsecond reso, so we
# take the higher reso of the original and microsecond. Note
# this matches what we would do with DatetimeArray/TimedeltaArray
unit = "us"
pa_expected = pa_expected.cast(f"duration[{unit}]")
else:
pa_expected = pa_expected.cast(original_dtype.pyarrow_dtype)

pd_expected = type(expected_data._values)(pa_expected)
if was_frame:
expected = pd.DataFrame(
pd_array, index=expected.index, columns=expected.columns
pd_expected, index=expected.index, columns=expected.columns
)
else:
expected = pd.Series(pd_array)
expected = pd.Series(pd_expected)
return expected

def _is_temporal_supported(self, opname, pa_dtype):
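
The duration branch added to `_patch_combine` above compensates for pyarrow's unit inference. A minimal standalone sketch of that behavior (illustrative only, not part of the diff; the sample values are made up):

```python
from datetime import timedelta

import pyarrow as pa

# pyarrow infers microsecond ("us") resolution when building an array from
# Python timedelta objects, regardless of the unit the original column used.
inferred = pa.array([timedelta(seconds=1), timedelta(seconds=2)])
assert inferred.type == pa.duration("us")

# The patched expected values are therefore cast back to the original unit
# (or kept at "us" when a pydatetime/pytimedelta operand forces that resolution).
assert inferred.cast(pa.duration("s")).type == pa.duration("s")
```
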
@@ -1097,7 +1112,14 @@ def test_arith_series_with_scalar(
if mark is not None:
request.node.add_marker(mark)

if all_arithmetic_operators == "__floordiv__" and pa.types.is_integer(pa_dtype):
if (
(
all_arithmetic_operators == "__floordiv__"
and pa.types.is_integer(pa_dtype)
)
or pa.types.is_duration(pa_dtype)
or pa.types.is_timestamp(pa_dtype)
):
# BaseOpsUtil._combine always returns int64, while ArrowExtensionArray does
# not upcast
monkeypatch.setattr(TestBaseArithmeticOps, "_combine", self._patch_combine)
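
The same `_patch_combine` monkeypatch appears in the frame and series-with-array variants below for the reason stated in the comment: the base class computes its expected values through NumPy/object arithmetic, which lands on int64, while the Arrow-backed result keeps its original integer width. A rough illustration, assuming a pandas build with pyarrow installed (not part of the diff):

```python
import pandas as pd

# Arrow-backed floordiv keeps the 32-bit type (no upcast) ...
ser = pd.Series([1, 2, 3], dtype="int32[pyarrow]")
print((ser // 1).dtype)  # int32[pyarrow]

# ... whereas the unpatched expected values built by BaseOpsUtil._combine come
# back as 64-bit, hence the dtype mismatch the monkeypatch works around.
```
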
@@ -1121,7 +1143,14 @@ def test_arith_frame_with_scalar(
if mark is not None:
request.node.add_marker(mark)

if all_arithmetic_operators == "__floordiv__" and pa.types.is_integer(pa_dtype):
if (
(
all_arithmetic_operators == "__floordiv__"
and pa.types.is_integer(pa_dtype)
)
or pa.types.is_duration(pa_dtype)
or pa.types.is_timestamp(pa_dtype)
):
# BaseOpsUtil._combine always returns int64, while ArrowExtensionArray does
# not upcast
monkeypatch.setattr(TestBaseArithmeticOps, "_combine", self._patch_combine)
@@ -1165,18 +1194,41 @@ def test_arith_series_with_array(
# since ser.iloc[0] is a python scalar
other = pd.Series(pd.array([ser.iloc[0]] * len(ser), dtype=data.dtype))

if pa.types.is_floating(pa_dtype) or (
pa.types.is_integer(pa_dtype) and all_arithmetic_operators != "__truediv__"
if (
pa.types.is_floating(pa_dtype)
or (
pa.types.is_integer(pa_dtype)
and all_arithmetic_operators != "__truediv__"
)
or pa.types.is_duration(pa_dtype)
or pa.types.is_timestamp(pa_dtype)
):
monkeypatch.setattr(TestBaseArithmeticOps, "_combine", self._patch_combine)
self.check_opname(ser, op_name, other, exc=self.series_array_exc)

def test_add_series_with_extension_array(self, data, request):
pa_dtype = data.dtype.pyarrow_dtype
if not (
pa.types.is_integer(pa_dtype)
or pa.types.is_floating(pa_dtype)
or (not pa_version_under8p0 and pa.types.is_duration(pa_dtype))

if pa.types.is_temporal(pa_dtype) and not pa.types.is_duration(pa_dtype):
# i.e. timestamp, date, time, but not timedelta; these *should*
# raise when trying to add
ser = pd.Series(data)
if pa_version_under7p0:
msg = "Function add_checked has no kernel matching input types"
else:
msg = "Function 'add_checked' has no kernel matching input types"
with pytest.raises(NotImplementedError, match=msg):
# TODO: this is a pa.lib.ArrowNotImplementedError, might
# be better to reraise a TypeError; more consistent with
# non-pyarrow cases
ser + data

return

if (pa_version_under8p0 and pa.types.is_duration(pa_dtype)) or (
pa.types.is_binary(pa_dtype)
or pa.types.is_string(pa_dtype)
or pa.types.is_boolean(pa_dtype)
):
request.node.add_marker(
pytest.mark.xfail(
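
For context on the non-duration temporal branch in `test_add_series_with_extension_array`: there is no pyarrow kernel for adding two timestamp arrays, which is what the `pytest.raises(NotImplementedError, ...)` block asserts (the message gained quotes around the function name in pyarrow 7, hence the version check). A standalone sketch of the underlying pyarrow behaviour (illustrative only; sample values made up):

```python
import pyarrow as pa
import pyarrow.compute as pc

ts = pa.array([0, 1], type=pa.timestamp("ns"))

try:
    # No add kernel exists for timestamp + timestamp; pandas surfaces this as
    # the NotImplementedError matched in the test above.
    pc.add_checked(ts, ts)
except pa.lib.ArrowNotImplementedError as err:
    print(err)  # "Function 'add_checked' has no kernel matching input types ..."
```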