Pull request #40221: STYLE: Inconsistent namespace - arrays (#39992)

Merged: 1 commit, Mar 4, 2021
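
The commit below replaces `pd.`-prefixed references (`pd.isna`, `pd.NaT`, `pd.date_range`, `pd.DatetimeIndex`) with the bare names inside `pandas/tests/arrays`, so each test module uses the form it already imports consistently. For that to work, each touched module must import those names directly; a minimal, illustrative import block is sketched below (the actual imports vary per file and are not part of this diff):

```python
# Illustrative only: the kind of import block a touched test module is assumed
# to carry, so the test bodies can refer to isna, NaT, date_range, etc. directly.
import numpy as np
import pytest

import pandas as pd
from pandas import (
    DataFrame,
    DatetimeIndex,
    NaT,
    Series,
    date_range,
    isna,
)
import pandas._testing as tm
```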
6 changes: 3 additions & 3 deletions pandas/tests/arrays/categorical/test_missing.py
@@ -154,14 +154,14 @@ def test_use_inf_as_na_outside_context(self, values, expected):
cat = Categorical(values)

with pd.option_context("mode.use_inf_as_na", True):
- result = pd.isna(cat)
+ result = isna(cat)
tm.assert_numpy_array_equal(result, expected)

- result = pd.isna(Series(cat))
+ result = isna(Series(cat))
expected = Series(expected)
tm.assert_series_equal(result, expected)

- result = pd.isna(DataFrame(cat))
+ result = isna(DataFrame(cat))
expected = DataFrame(expected)
tm.assert_frame_equal(result, expected)

22 changes: 11 additions & 11 deletions pandas/tests/arrays/datetimes/test_reductions.py
@@ -40,33 +40,33 @@ def test_min_max(self, arr1d):
assert result == expected

result = arr.min(skipna=False)
- assert result is pd.NaT
+ assert result is NaT

result = arr.max(skipna=False)
- assert result is pd.NaT
+ assert result is NaT

@pytest.mark.parametrize("tz", [None, "US/Central"])
@pytest.mark.parametrize("skipna", [True, False])
def test_min_max_empty(self, skipna, tz):
dtype = DatetimeTZDtype(tz=tz) if tz is not None else np.dtype("M8[ns]")
arr = DatetimeArray._from_sequence([], dtype=dtype)
result = arr.min(skipna=skipna)
- assert result is pd.NaT
+ assert result is NaT

result = arr.max(skipna=skipna)
- assert result is pd.NaT
+ assert result is NaT

@pytest.mark.parametrize("tz", [None, "US/Central"])
@pytest.mark.parametrize("skipna", [True, False])
def test_median_empty(self, skipna, tz):
dtype = DatetimeTZDtype(tz=tz) if tz is not None else np.dtype("M8[ns]")
arr = DatetimeArray._from_sequence([], dtype=dtype)
result = arr.median(skipna=skipna)
- assert result is pd.NaT
+ assert result is NaT

arr = arr.reshape(0, 3)
result = arr.median(axis=0, skipna=skipna)
- expected = type(arr)._from_sequence([pd.NaT, pd.NaT, pd.NaT], dtype=arr.dtype)
+ expected = type(arr)._from_sequence([NaT, NaT, NaT], dtype=arr.dtype)
tm.assert_equal(result, expected)

result = arr.median(axis=1, skipna=skipna)
@@ -79,7 +79,7 @@ def test_median(self, arr1d):
result = arr.median()
assert result == arr[0]
result = arr.median(skipna=False)
- assert result is pd.NaT
+ assert result is NaT

result = arr.dropna().median(skipna=False)
assert result == arr[0]
@@ -90,7 +90,7 @@ def test_median(self, arr1d):
def test_median_axis(self, arr1d):
arr = arr1d
assert arr.median(axis=0) == arr.median()
- assert arr.median(axis=0, skipna=False) is pd.NaT
+ assert arr.median(axis=0, skipna=False) is NaT

msg = r"abs\(axis\) must be less than ndim"
with pytest.raises(ValueError, match=msg):
@@ -102,7 +102,7 @@ def test_median_2d(self, arr1d):

# axis = None
assert arr.median() == arr1d.median()
- assert arr.median(skipna=False) is pd.NaT
+ assert arr.median(skipna=False) is NaT

# axis = 0
result = arr.median(axis=0)
@@ -120,7 +120,7 @@ def test_median_2d(self, arr1d):
tm.assert_equal(result, expected)

result = arr.median(axis=1, skipna=False)
- expected = type(arr)._from_sequence([pd.NaT], dtype=arr.dtype)
+ expected = type(arr)._from_sequence([NaT], dtype=arr.dtype)
tm.assert_equal(result, expected)

def test_mean(self, arr1d):
@@ -132,7 +132,7 @@ def test_mean(self, arr1d):
result = arr.mean()
assert result == expected
result = arr.mean(skipna=False)
- assert result is pd.NaT
+ assert result is NaT

result = arr.dropna().mean(skipna=False)
assert result == expected
6 changes: 3 additions & 3 deletions pandas/tests/arrays/interval/test_interval.py
@@ -90,7 +90,7 @@ def test_shift(self):
tm.assert_interval_array_equal(result, expected)

def test_shift_datetime(self):
- a = IntervalArray.from_breaks(pd.date_range("2000", periods=4))
+ a = IntervalArray.from_breaks(date_range("2000", periods=4))
result = a.shift(2)
expected = a.take([-1, -1, 0], allow_fill=True)
tm.assert_interval_array_equal(result, expected)
@@ -248,7 +248,7 @@ def test_arrow_array_missing():
@pyarrow_skip
@pytest.mark.parametrize(
"breaks",
- [[0.0, 1.0, 2.0, 3.0], pd.date_range("2017", periods=4, freq="D")],
+ [[0.0, 1.0, 2.0, 3.0], date_range("2017", periods=4, freq="D")],
ids=["float", "datetime64[ns]"],
)
def test_arrow_table_roundtrip(breaks):
@@ -275,7 +275,7 @@ def test_arrow_table_roundtrip(breaks):
@pyarrow_skip
@pytest.mark.parametrize(
"breaks",
- [[0.0, 1.0, 2.0, 3.0], pd.date_range("2017", periods=4, freq="D")],
+ [[0.0, 1.0, 2.0, 3.0], date_range("2017", periods=4, freq="D")],
ids=["float", "datetime64[ns]"],
)
def test_arrow_table_roundtrip_without_metadata(breaks):
4 changes: 2 additions & 2 deletions pandas/tests/arrays/sparse/test_array.py
@@ -185,8 +185,8 @@ def test_constructor_spindex_dtype_scalar_broadcasts(self):
def test_constructor_inferred_fill_value(self, data, fill_value):
result = SparseArray(data).fill_value

- if pd.isna(fill_value):
- assert pd.isna(result)
+ if isna(fill_value):
+ assert isna(result)
else:
assert result == fill_value

52 changes: 26 additions & 26 deletions pandas/tests/arrays/test_datetimelike.py
@@ -184,13 +184,13 @@ def test_take_fill(self):
arr = self.array_cls(data, freq="D")

result = arr.take([-1, 1], allow_fill=True, fill_value=None)
- assert result[0] is pd.NaT
+ assert result[0] is NaT

result = arr.take([-1, 1], allow_fill=True, fill_value=np.nan)
- assert result[0] is pd.NaT
+ assert result[0] is NaT

- result = arr.take([-1, 1], allow_fill=True, fill_value=pd.NaT)
- assert result[0] is pd.NaT
+ result = arr.take([-1, 1], allow_fill=True, fill_value=NaT)
+ assert result[0] is NaT

def test_take_fill_str(self, arr1d):
# Cast str fill_value matching other fill_value-taking methods
@@ -205,7 +205,7 @@ def test_take_fill_str(self, arr1d):
def test_concat_same_type(self, arr1d):
arr = arr1d
idx = self.index_cls(arr)
- idx = idx.insert(0, pd.NaT)
+ idx = idx.insert(0, NaT)
arr = self.array_cls(idx)

result = arr._concat_same_type([arr[:-1], arr[1:], arr])
@@ -221,7 +221,7 @@ def test_unbox_scalar(self):
expected = arr._data.dtype.type
assert isinstance(result, expected)

- result = arr._unbox_scalar(pd.NaT)
+ result = arr._unbox_scalar(NaT)
assert isinstance(result, expected)

msg = f"'value' should be a {self.dtype.__name__}."
@@ -234,7 +234,7 @@ def test_check_compatible_with(self):

arr._check_compatible_with(arr[0])
arr._check_compatible_with(arr[:1])
- arr._check_compatible_with(pd.NaT)
+ arr._check_compatible_with(NaT)

def test_scalar_from_string(self):
data = np.arange(10, dtype="i8") * 24 * 3600 * 10 ** 9
@@ -254,15 +254,15 @@ def test_reduce_invalid(self):
def test_fillna_method_doesnt_change_orig(self, method):
data = np.arange(10, dtype="i8") * 24 * 3600 * 10 ** 9
arr = self.array_cls(data, freq="D")
- arr[4] = pd.NaT
+ arr[4] = NaT

fill_value = arr[3] if method == "pad" else arr[5]

result = arr.fillna(method=method)
assert result[4] == fill_value

# check that the original was not changed
- assert arr[4] is pd.NaT
+ assert arr[4] is NaT

def test_searchsorted(self):
data = np.arange(10, dtype="i8") * 24 * 3600 * 10 ** 9
@@ -286,7 +286,7 @@ def test_searchsorted(self):

# GH#29884 match numpy convention on whether NaT goes
# at the end or the beginning
- result = arr.searchsorted(pd.NaT)
+ result = arr.searchsorted(NaT)
if np_version_under1p18:
# Following numpy convention, NaT goes at the beginning
# (unlike NaN which goes at the end)
@@ -616,7 +616,7 @@ def test_median(self, arr1d):


class TestDatetimeArray(SharedTests):
- index_cls = pd.DatetimeIndex
+ index_cls = DatetimeIndex
array_cls = DatetimeArray
dtype = Timestamp

@@ -749,7 +749,7 @@ def test_from_dti(self, arr1d):

# Check that Index.__new__ knows what to do with DatetimeArray
dti2 = pd.Index(arr)
- assert isinstance(dti2, pd.DatetimeIndex)
+ assert isinstance(dti2, DatetimeIndex)
assert list(dti2) == list(arr)

def test_astype_object(self, arr1d):
@@ -800,7 +800,7 @@ def test_to_period_2d(self, arr1d):
expected = arr1d.to_period("D").reshape(1, -1)
tm.assert_period_array_equal(result, expected)

- @pytest.mark.parametrize("propname", pd.DatetimeIndex._bool_ops)
+ @pytest.mark.parametrize("propname", DatetimeIndex._bool_ops)
def test_bool_properties(self, arr1d, propname):
# in this case _bool_ops is just `is_leap_year`
dti = self.index_cls(arr1d)
@@ -812,7 +812,7 @@ def test_bool_properties(self, arr1d, propname):

tm.assert_numpy_array_equal(result, expected)

- @pytest.mark.parametrize("propname", pd.DatetimeIndex._field_ops)
+ @pytest.mark.parametrize("propname", DatetimeIndex._field_ops)
def test_int_properties(self, arr1d, propname):
if propname in ["week", "weekofyear"]:
# GH#33595 Deprecate week and weekofyear
@@ -849,7 +849,7 @@ def test_take_fill_valid(self, arr1d):
# Timestamp with mismatched tz-awareness
arr.take([-1, 1], allow_fill=True, fill_value=now)

- value = pd.NaT.value
+ value = NaT.value
msg = f"value should be a '{arr1d._scalar_type.__name__}' or 'NaT'. Got"
with pytest.raises(TypeError, match=msg):
# require NaT, not iNaT, as it could be confused with an integer
@@ -908,7 +908,7 @@ def test_strftime(self, arr1d):

def test_strftime_nat(self):
# GH 29578
- arr = DatetimeArray(DatetimeIndex(["2019-01-01", pd.NaT]))
+ arr = DatetimeArray(DatetimeIndex(["2019-01-01", NaT]))

result = arr.strftime("%Y-%m-%d")
expected = np.array(["2019-01-01", np.nan], dtype=object)
@@ -1064,7 +1064,7 @@ def test_astype_object(self, arr1d):
def test_take_fill_valid(self, arr1d):
arr = arr1d

- value = pd.NaT.value
+ value = NaT.value
msg = f"value should be a '{arr1d._scalar_type.__name__}' or 'NaT'. Got"
with pytest.raises(TypeError, match=msg):
# require NaT, not iNaT, as it could be confused with an integer
@@ -1152,7 +1152,7 @@ def test_strftime(self, arr1d):

def test_strftime_nat(self):
# GH 29578
- arr = PeriodArray(PeriodIndex(["2019-01-01", pd.NaT], dtype="period[D]"))
+ arr = PeriodArray(PeriodIndex(["2019-01-01", NaT], dtype="period[D]"))

result = arr.strftime("%Y-%m-%d")
expected = np.array(["2019-01-01", np.nan], dtype=object)
@@ -1164,18 +1164,18 @@ def test_strftime_nat(self):
[
(
TimedeltaIndex(["1 Day", "3 Hours", "NaT"])._data,
- (pd.NaT, np.timedelta64("NaT", "ns")),
+ (NaT, np.timedelta64("NaT", "ns")),
),
(
pd.date_range("2000-01-01", periods=3, freq="D")._data,
- (pd.NaT, np.datetime64("NaT", "ns")),
+ (NaT, np.datetime64("NaT", "ns")),
),
- (pd.period_range("2000-01-01", periods=3, freq="D")._data, (pd.NaT,)),
+ (pd.period_range("2000-01-01", periods=3, freq="D")._data, (NaT,)),
],
ids=lambda x: type(x).__name__,
)
def test_casting_nat_setitem_array(array, casting_nats):
- expected = type(array)._from_sequence([pd.NaT, array[1], array[2]])
+ expected = type(array)._from_sequence([NaT, array[1], array[2]])

for nat in casting_nats:
arr = array.copy()
@@ -1188,15 +1188,15 @@ def test_casting_nat_setitem_array(array, casting_nats):
[
(
TimedeltaIndex(["1 Day", "3 Hours", "NaT"])._data,
- (np.datetime64("NaT", "ns"), pd.NaT.value),
+ (np.datetime64("NaT", "ns"), NaT.value),
),
(
pd.date_range("2000-01-01", periods=3, freq="D")._data,
- (np.timedelta64("NaT", "ns"), pd.NaT.value),
+ (np.timedelta64("NaT", "ns"), NaT.value),
),
(
pd.period_range("2000-01-01", periods=3, freq="D")._data,
- (np.datetime64("NaT", "ns"), np.timedelta64("NaT", "ns"), pd.NaT.value),
+ (np.datetime64("NaT", "ns"), np.timedelta64("NaT", "ns"), NaT.value),
),
],
ids=lambda x: type(x).__name__,
@@ -1226,7 +1226,7 @@ def test_to_numpy_extra(array):
else:
isnan = np.isnan

- array[0] = pd.NaT
+ array[0] = NaT
original = array.copy()

result = array.to_numpy()
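
As a closing note, the kind of inconsistency this PR fixes (importing a name from pandas directly but still calling it through `pd.`) can be detected mechanically. The sketch below is a hypothetical, simplified checker written for illustration only; it is not the script pandas itself runs in its code checks, and the file and function names are made up:

```python
# namespace_check.py -- hypothetical, simplified consistency checker (illustration only).
# Flags files that import a name from pandas directly but still reference it as pd.<name>.
# Only handles single-line "from pandas import ..." statements; a real check would do more.
import re
import sys

NAMES = ("isna", "NaT", "date_range", "DatetimeIndex")


def inconsistent_uses(source: str, names=NAMES):
    """Return the names that are both imported bare and still used via the pd. prefix."""
    offenders = []
    for name in names:
        imported_bare = re.search(rf"from pandas import[^\n]*\b{name}\b", source)
        used_via_pd = re.search(rf"\bpd\.{name}\b", source)
        if imported_bare and used_via_pd:
            offenders.append(name)
    return offenders


if __name__ == "__main__":
    exit_code = 0
    for path in sys.argv[1:]:
        with open(path) as fh:
            bad = inconsistent_uses(fh.read())
        if bad:
            exit_code = 1
            print(f"{path}: inconsistent namespace for {', '.join(bad)}")
    sys.exit(exit_code)
```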