for issue #49656 STYLE enable pylint's redefined-outer-name #49708

Merged
merged 12 commits on Nov 15, 2022
46 changes: 27 additions & 19 deletions pandas/core/generic.py
@@ -2,9 +2,8 @@
from __future__ import annotations

import collections
from datetime import timedelta
import datetime as dt
Member

now you'll need to replace timedelta (not the function argument) with dt.timedelta

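For illustration, a minimal sketch (with a hypothetical helper name, not the actual pandas call sites) of what the module-alias form looks like once this import lands:

import datetime as dt

def _is_timedelta(value):
    # the class is reached through the module alias, so a parameter or local
    # that happens to be called "timedelta" can no longer shadow it
    return isinstance(value, dt.timedelta)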
import gc
import json
import operator
import pickle
import re
@@ -32,6 +31,7 @@
from pandas._config import config

from pandas._libs import lib
import json
Member

the imports are in the wrong order; if you run

pre-commit run black --files pandas/core/generic.py
pre-commit run isort --files pandas/core/generic.py
pre-commit run flake8 --files pandas/core/generic.py

before committing, it should fix up most of the reported issues

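For context, isort (run via pre-commit) groups standard-library imports ahead of the pandas imports and sorts each group alphabetically, so the top of the module should come out roughly like the following sketch, based only on the imports visible in this diff:

from __future__ import annotations

import collections
import datetime as dt
import gc
import json
import operator
import pickle
import re

from pandas._config import config

from pandas._libs import lib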
Contributor Author

Done sir, and ran these tests; all of them passed

from pandas._libs.tslibs import (
Period,
Tick,
@@ -130,10 +130,10 @@
notna,
)

from pandas.core import common # noqa: PDF018
Member

remove this; the next lines should be

from pandas.core import (
    algorithms as algos,
    arraylike,
    common,  # noqa: PDF018

from pandas.core import (
algorithms as algos,
arraylike,
common as com,
indexing,
missing,
nanops,
@@ -142,6 +142,14 @@
from pandas.core.array_algos.replace import should_use_regex
from pandas.core.arrays import ExtensionArray
from pandas.core.base import PandasObject
from pandas.core.common import ( # noqa: PDF018
apply_if_callable,
count_not_none,
get_rename_function,
index_labels_to_array,
maybe_make_list,
pipe,
)
Member

can remove these now

from pandas.core.construction import extract_array
from pandas.core.describe import describe_ndframe
from pandas.core.flags import Flags
@@ -1009,7 +1017,7 @@ def _rename(
continue

ax = self._get_axis(axis_no)
f = com.get_rename_function(replacements)
f = get_rename_function(replacements)
Member

this will have to be common.get_rename_function (similarly for the others)

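Put differently, once the plain from pandas.core import common import is restored, the helpers are reached as attributes of the module object; a small sketch of the resulting call-site pattern (the wrapper name here is illustrative, not pandas code):

from pandas.core import common  # noqa: PDF018

def _rename_labels(replacements):
    # no bare get_rename_function is bound at module level, so there is
    # nothing for pylint's redefined-outer-name to flag at call sites
    return common.get_rename_function(replacements)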

if level is not None:
level = ax._get_level_number(level)
@@ -1240,7 +1248,7 @@ class name
if non_mapper:
newnames = v
else:
f = com.get_rename_function(v)
f = get_rename_function(v)
curnames = self._get_axis(axis).names
newnames = [f(name) for name in curnames]
result._set_axis_name(newnames, axis=axis, inplace=True)
@@ -1826,7 +1834,7 @@ def _drop_labels_or_levels(self, keys, axis: AxisInt = 0):
axis = self._get_axis_number(axis)

# Validate keys
keys = com.maybe_make_list(keys)
keys = maybe_make_list(keys)
invalid_keys = [
k for k in keys if not self._is_label_or_level_reference(k, axis=axis)
]
@@ -4445,7 +4453,7 @@ def _drop_axis(
# Case for non-unique axis
else:
is_tuple_labels = is_nested_list_like(labels) or isinstance(labels, tuple)
labels = ensure_object(com.index_labels_to_array(labels))
labels = ensure_object(index_labels_to_array(labels))
if level is not None:
if not isinstance(axis, MultiIndex):
raise AssertionError("axis must be a MultiIndex")
@@ -5236,7 +5244,7 @@ def _reindex_axes(
def _needs_reindex_multi(self, axes, method, level) -> bool_t:
"""Check if we do need a multi reindex."""
return (
(com.count_not_none(*axes.values()) == self._AXIS_LEN)
(count_not_none(*axes.values()) == self._AXIS_LEN)
and method is None
and level is None
and not self._is_mixed_type
@@ -5359,7 +5367,7 @@ def filter(
one two three
rabbit 4 5 6
"""
nkw = com.count_not_none(items, like, regex)
nkw = count_not_none(items, like, regex)
if nkw > 1:
raise TypeError(
"Keyword arguments `items`, `like`, or `regex` "
@@ -5684,7 +5692,7 @@ def sample(
obj_len = self.shape[axis]

# Process random_state argument
rs = com.random_state(random_state)
rs = random_state(random_state)
Member

now the random_state argument from this function shadows the random_state import

Might have to change the pandas.core.common import back to from pandas.core import common, but add the noqa comment to that line to silence the flake8 warning about the missing as com alias

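To make the shadowing concrete, here is a toy version of the situation (not the real sample signature): a module-level from ... import random_state collides with the parameter of the same name, while importing the module keeps only one binding at module scope:

# flagged by pylint's redefined-outer-name:
#   from pandas.core.common import random_state
#   def sample(random_state=None):
#       rs = random_state(random_state)  # parameter shadows the import

# shadow-free form suggested above:
from pandas.core import common  # noqa: PDF018

def _sample_sketch(random_state=None):
    # the parameter is the only "random_state" name in scope here;
    # the helper is reached through the module object instead
    return common.random_state(random_state)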
Contributor Author

Done sir


size = sample.process_sampling_size(n, frac, replace)
if size is None:
@@ -5760,7 +5768,7 @@ def pipe(
... .pipe((func, 'arg2'), arg1=a, arg3=c)
... ) # doctest: +SKIP
"""
return com.pipe(self, func, *args, **kwargs)
return pipe(self, func, *args, **kwargs)

# ----------------------------------------------------------------------
# Attribute access
@@ -9445,7 +9453,7 @@ def _where(
axis = self._get_axis_number(axis)

# align the cond to same shape as myself
cond = com.apply_if_callable(cond, self)
cond = apply_if_callable(cond, self)
if isinstance(cond, NDFrame):
cond, _ = cond.align(self, join="right", broadcast_axis=1, copy=False)
else:
@@ -9467,9 +9475,9 @@ def _where(
if not is_bool_dtype(cond):
raise ValueError(msg.format(dtype=cond.dtype))
else:
for dt in cond.dtypes:
if not is_bool_dtype(dt):
raise ValueError(msg.format(dtype=dt))
for _dt in cond.dtypes:
if not is_bool_dtype(_dt):
raise ValueError(msg.format(dtype=_dt))
else:
# GH#21947 we have an empty DataFrame/Series, could be object-dtype
cond = cond.astype(bool)
@@ -9747,7 +9755,7 @@ def where(
3 True True
4 True True
"""
other = com.apply_if_callable(other, self)
other = apply_if_callable(other, self)
return self._where(cond, other, inplace, axis, level)

@overload
Expand Down Expand Up @@ -9805,7 +9813,7 @@ def mask(
) -> NDFrameT | None:

inplace = validate_bool_kwarg(inplace, "inplace")
cond = com.apply_if_callable(cond, self)
cond = apply_if_callable(cond, self)

# see gh-21891
if not hasattr(cond, "__invert__"):
@@ -10317,7 +10325,7 @@ def tz_localize(
"""
nonexistent_options = ("raise", "NaT", "shift_forward", "shift_backward")
if nonexistent not in nonexistent_options and not isinstance(
nonexistent, timedelta
nonexistent, dt.timedelta
):
raise ValueError(
"The nonexistent argument must be one of 'raise', "
@@ -11470,7 +11478,7 @@ def min(
@doc(Rolling)
def rolling(
self,
window: int | timedelta | str | BaseOffset | BaseIndexer,
window: int | dt.timedelta | str | BaseOffset | BaseIndexer,
min_periods: int | None = None,
center: bool_t = False,
win_type: str | None = None,