
STY: de-privatize names imported across modules #36178


Merged
6 commits merged on Sep 8, 2020
Changes from all commits
6 changes: 3 additions & 3 deletions pandas/__init__.py
@@ -20,9 +20,9 @@

# numpy compat
from pandas.compat.numpy import (
_np_version_under1p17,
_np_version_under1p18,
_is_numpy_dev,
np_version_under1p17 as _np_version_under1p17,
np_version_under1p18 as _np_version_under1p18,
is_numpy_dev as _is_numpy_dev,
)

try:
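
A quick way to see what the aliasing in this hunk preserves: the new public compat name and the old top-level private alias are the same object. A minimal check, assuming a pandas build from around this change (the 1.1/1.2 development line):

import pandas as pd
from pandas.compat.numpy import np_version_under1p17

# Both names are bound to the same module-level bool, so downstream code that
# still imports pd._np_version_under1p17 keeps working after the rename.
assert pd._np_version_under1p17 is np_version_under1p17
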
2 changes: 1 addition & 1 deletion pandas/_testing.py
@@ -2713,7 +2713,7 @@ def use_numexpr(use, min_elements=None):
if min_elements is None:
min_elements = expr._MIN_ELEMENTS

olduse = expr._USE_NUMEXPR
olduse = expr.USE_NUMEXPR
oldmin = expr._MIN_ELEMENTS
expr.set_use_numexpr(use)
expr._MIN_ELEMENTS = min_elements
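
use_numexpr in _testing saves the now-public expr.USE_NUMEXPR flag, switches it via set_use_numexpr, and restores it afterwards. A standalone sketch of that save/toggle/restore pattern (illustrative only, not a pandas helper):

from contextlib import contextmanager

import pandas.core.computation.expressions as expr


@contextmanager
def numexpr_disabled():
    # Remember the current setting, force plain-Python evaluation, and restore
    # the original flag on exit -- mirroring what _testing.use_numexpr does.
    saved = expr.USE_NUMEXPR
    expr.set_use_numexpr(False)
    try:
        yield
    finally:
        expr.set_use_numexpr(saved)
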
10 changes: 5 additions & 5 deletions pandas/compat/numpy/__init__.py
@@ -8,11 +8,11 @@
# numpy versioning
_np_version = np.__version__
_nlv = LooseVersion(_np_version)
_np_version_under1p17 = _nlv < LooseVersion("1.17")
_np_version_under1p18 = _nlv < LooseVersion("1.18")
np_version_under1p17 = _nlv < LooseVersion("1.17")
np_version_under1p18 = _nlv < LooseVersion("1.18")
_np_version_under1p19 = _nlv < LooseVersion("1.19")
_np_version_under1p20 = _nlv < LooseVersion("1.20")
_is_numpy_dev = ".dev" in str(_nlv)
is_numpy_dev = ".dev" in str(_nlv)
_min_numpy_ver = "1.16.5"


@@ -65,6 +65,6 @@ def np_array_datetime64_compat(arr, *args, **kwargs):
__all__ = [
"np",
"_np_version",
"_np_version_under1p17",
"_is_numpy_dev",
"np_version_under1p17",
"is_numpy_dev",
]
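
The flags above are computed once at import time from LooseVersion comparisons and then used as plain booleans. A short illustration of how those comparisons behave (distutils.version.LooseVersion, as used here, is deprecated on newer Pythons but still functional):

from distutils.version import LooseVersion

# Release segments compare numerically, and dev builds keep a ".dev" marker in
# their string form -- which is all is_numpy_dev needs to check.
assert LooseVersion("1.17.3") < LooseVersion("1.18")
assert LooseVersion("1.19.0.dev0") > LooseVersion("1.18")
assert ".dev" in str(LooseVersion("1.19.0.dev0"))
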
4 changes: 2 additions & 2 deletions pandas/core/array_algos/masked_reductions.py
@@ -8,7 +8,7 @@
import numpy as np

from pandas._libs import missing as libmissing
from pandas.compat.numpy import _np_version_under1p17
from pandas.compat.numpy import np_version_under1p17

from pandas.core.nanops import check_below_min_count

@@ -46,7 +46,7 @@ def _sumprod(
if check_below_min_count(values.shape, mask, min_count):
return libmissing.NA

if _np_version_under1p17:
if np_version_under1p17:
return func(values[~mask])
else:
return func(values, where=~mask)
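
The np_version_under1p17 branch above exists because the where= keyword of numpy's reductions only appeared in numpy 1.17. Both paths give the same answer; the newer one avoids materialising values[~mask]:

import numpy as np

values = np.array([1.0, 2.0, 3.0, 4.0])
mask = np.array([False, True, False, False])  # True marks missing entries

newer = np.sum(values, where=~mask)  # numpy >= 1.17: skip masked slots during the reduction
older = np.sum(values[~mask])        # fallback: boolean indexing makes a copy first
assert newer == older == 8.0
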
8 changes: 4 additions & 4 deletions pandas/core/arrays/sparse/accessor.py
@@ -88,9 +88,9 @@ def from_coo(cls, A, dense_index=False):
dtype: Sparse[float64, nan]
"""
from pandas import Series
from pandas.core.arrays.sparse.scipy_sparse import _coo_to_sparse_series
from pandas.core.arrays.sparse.scipy_sparse import coo_to_sparse_series

result = _coo_to_sparse_series(A, dense_index=dense_index)
result = coo_to_sparse_series(A, dense_index=dense_index)
result = Series(result.array, index=result.index, copy=False)

return result
@@ -168,9 +168,9 @@ def to_coo(self, row_levels=(0,), column_levels=(1,), sort_labels=False):
>>> columns
[('a', 0), ('a', 1), ('b', 0), ('b', 1)]
"""
from pandas.core.arrays.sparse.scipy_sparse import _sparse_series_to_coo
from pandas.core.arrays.sparse.scipy_sparse import sparse_series_to_coo

A, rows, columns = _sparse_series_to_coo(
A, rows, columns = sparse_series_to_coo(
self._parent, row_levels, column_levels, sort_labels=sort_labels
)
return A, rows, columns
4 changes: 2 additions & 2 deletions pandas/core/arrays/sparse/scipy_sparse.py
Expand Up @@ -85,7 +85,7 @@ def _get_index_subset_to_coord_dict(index, subset, sort_labels=False):
return values, i_coord, j_coord, i_labels, j_labels


def _sparse_series_to_coo(ss, row_levels=(0,), column_levels=(1,), sort_labels=False):
def sparse_series_to_coo(ss, row_levels=(0,), column_levels=(1,), sort_labels=False):
"""
Convert a sparse Series to a scipy.sparse.coo_matrix using index
levels row_levels, column_levels as the row and column
@@ -113,7 +113,7 @@ def _sparse_series_to_coo(ss, row_levels=(0,), column_levels=(1,), sort_labels=False):
return sparse_matrix, rows, columns


def _coo_to_sparse_series(A, dense_index: bool = False):
def coo_to_sparse_series(A, dense_index: bool = False):
"""
Convert a scipy.sparse.coo_matrix to a SparseSeries.

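
The accessor methods in the previous hunk delegate to the two helpers renamed here. A usage sketch of the public round trip (requires scipy; the matrix is the one from the from_coo docstring above):

import pandas as pd
from scipy import sparse

A = sparse.coo_matrix(([3.0, 1.0, 2.0], ([1, 0, 0], [0, 2, 3])), shape=(3, 4))

ser = pd.Series.sparse.from_coo(A)    # sparse Series indexed by (row, col) pairs
print(ser)

A2, rows, cols = ser.sparse.to_coo()  # back to a coo_matrix plus the row/column labels
print(A2.toarray(), rows, cols)
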
4 changes: 2 additions & 2 deletions pandas/core/common.py
@@ -16,7 +16,7 @@

from pandas._libs import lib, tslibs
from pandas._typing import AnyArrayLike, Scalar, T
from pandas.compat.numpy import _np_version_under1p18
from pandas.compat.numpy import np_version_under1p18

from pandas.core.dtypes.cast import construct_1d_object_array_from_listlike
from pandas.core.dtypes.common import (
@@ -425,7 +425,7 @@ def random_state(state=None):
if (
is_integer(state)
or is_array_like(state)
or (not _np_version_under1p18 and isinstance(state, np.random.BitGenerator))
or (not np_version_under1p18 and isinstance(state, np.random.BitGenerator))
):
return np.random.RandomState(state)
elif isinstance(state, np.random.RandomState):
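
The np_version_under1p18 check above is what lets a numpy BitGenerator be passed anywhere pandas accepts a random_state. A small sketch, assuming numpy >= 1.18 is installed:

import numpy as np
import pandas as pd

ser = pd.Series(range(10))
# random_state() wraps the BitGenerator in np.random.RandomState internally.
print(ser.sample(n=3, random_state=np.random.MT19937(42)))
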
4 changes: 2 additions & 2 deletions pandas/core/computation/engines.py
@@ -6,11 +6,11 @@
from typing import Dict, Type

from pandas.core.computation.align import align_terms, reconstruct_object
from pandas.core.computation.ops import _mathops, _reductions
from pandas.core.computation.ops import MATHOPS, REDUCTIONS

import pandas.io.formats.printing as printing

_ne_builtins = frozenset(_mathops + _reductions)
_ne_builtins = frozenset(MATHOPS + REDUCTIONS)


class NumExprClobberingError(NameError):
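
MATHOPS and REDUCTIONS feed _ne_builtins so the numexpr engine can refuse names that would shadow its built-in functions. A sketch of that guard in action (needs numexpr installed; NumExprClobberingError is the class defined in this file):

import pandas as pd
from pandas.core.computation.engines import NumExprClobberingError

df = pd.DataFrame({"sin": [1.0, 2.0], "a": [3.0, 4.0]})
try:
    # "sin" is a numexpr builtin, so using it as a column name in a
    # numexpr-backed query is rejected up front.
    df.query("sin > 1", engine="numexpr")
except NumExprClobberingError as err:
    print("refused:", err)
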
24 changes: 12 additions & 12 deletions pandas/core/computation/expr.py
@@ -12,7 +12,13 @@

import pandas.core.common as com
from pandas.core.computation.ops import (
_LOCAL_TAG,
ARITH_OPS_SYMS,
BOOL_OPS_SYMS,
CMP_OPS_SYMS,
LOCAL_TAG,
MATHOPS,
REDUCTIONS,
UNARY_OPS_SYMS,
BinOp,
Constant,
Div,
@@ -21,12 +27,6 @@
Term,
UnaryOp,
UndefinedVariableError,
_arith_ops_syms,
_bool_ops_syms,
_cmp_ops_syms,
_mathops,
_reductions,
_unary_ops_syms,
is_term,
)
from pandas.core.computation.parsing import clean_backtick_quoted_toks, tokenize_string
@@ -101,7 +101,7 @@ def _replace_locals(tok: Tuple[int, str]) -> Tuple[int, str]:
"""
toknum, tokval = tok
if toknum == tokenize.OP and tokval == "@":
return tokenize.OP, _LOCAL_TAG
return tokenize.OP, LOCAL_TAG
return toknum, tokval


@@ -338,7 +338,7 @@ class BaseExprVisitor(ast.NodeVisitor):
const_type: Type[Term] = Constant
term_type = Term

binary_ops = _cmp_ops_syms + _bool_ops_syms + _arith_ops_syms
binary_ops = CMP_OPS_SYMS + BOOL_OPS_SYMS + ARITH_OPS_SYMS
binary_op_nodes = (
"Gt",
"Lt",
@@ -362,7 +362,7 @@ class BaseExprVisitor(ast.NodeVisitor):
)
binary_op_nodes_map = dict(zip(binary_ops, binary_op_nodes))

unary_ops = _unary_ops_syms
unary_ops = UNARY_OPS_SYMS
unary_op_nodes = "UAdd", "USub", "Invert", "Not"
unary_op_nodes_map = {k: v for k, v in zip(unary_ops, unary_op_nodes)}

@@ -494,7 +494,7 @@ def _maybe_evaluate_binop(

if self.engine != "pytables":
if (
res.op in _cmp_ops_syms
res.op in CMP_OPS_SYMS
and getattr(lhs, "is_datetime", False)
or getattr(rhs, "is_datetime", False)
):
@@ -726,7 +726,7 @@ def visitor(x, y):


_python_not_supported = frozenset(["Dict", "BoolOp", "In", "NotIn"])
_numexpr_supported_calls = frozenset(_reductions + _mathops)
_numexpr_supported_calls = frozenset(REDUCTIONS + MATHOPS)


@disallow(
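
LOCAL_TAG is the prefix the tokenizer substitutes for the @ marker, which is how query/eval expressions refer to Python variables in the calling scope. For example:

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
threshold = 2
# "@threshold" is rewritten to a __pd_eval_local_-prefixed term and resolved
# from the caller's namespace; engine="python" avoids the numexpr dependency.
print(df.query("a > @threshold", engine="python"))
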
12 changes: 6 additions & 6 deletions pandas/core/computation/expressions.py
@@ -23,7 +23,7 @@

_TEST_MODE = None
_TEST_RESULT: List[bool] = list()
_USE_NUMEXPR = NUMEXPR_INSTALLED
USE_NUMEXPR = NUMEXPR_INSTALLED
_evaluate = None
_where = None

@@ -39,21 +39,21 @@

def set_use_numexpr(v=True):
# set/unset to use numexpr
global _USE_NUMEXPR
global USE_NUMEXPR
if NUMEXPR_INSTALLED:
_USE_NUMEXPR = v
USE_NUMEXPR = v

# choose what we are going to do
global _evaluate, _where

_evaluate = _evaluate_numexpr if _USE_NUMEXPR else _evaluate_standard
_where = _where_numexpr if _USE_NUMEXPR else _where_standard
_evaluate = _evaluate_numexpr if USE_NUMEXPR else _evaluate_standard
_where = _where_numexpr if USE_NUMEXPR else _where_standard


def set_numexpr_threads(n=None):
# if we are using numexpr, set the threads to n
# otherwise reset
if NUMEXPR_INSTALLED and _USE_NUMEXPR:
if NUMEXPR_INSTALLED and USE_NUMEXPR:
if n is None:
n = ne.detect_number_of_cores()
ne.set_num_threads(n)
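
USE_NUMEXPR is normally driven through the option machinery rather than set by hand; set_use_numexpr is the callback behind the compute.use_numexpr option. A quick check (assuming that option wiring, which predates this PR):

import pandas as pd
import pandas.core.computation.expressions as expr

pd.set_option("compute.use_numexpr", False)
print(expr.USE_NUMEXPR)  # False
pd.set_option("compute.use_numexpr", True)
print(expr.USE_NUMEXPR)  # True only if numexpr is actually installed
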
38 changes: 19 additions & 19 deletions pandas/core/computation/ops.py
@@ -16,11 +16,11 @@

import pandas.core.common as com
from pandas.core.computation.common import ensure_decoded, result_type_many
from pandas.core.computation.scope import _DEFAULT_GLOBALS
from pandas.core.computation.scope import DEFAULT_GLOBALS

from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded

_reductions = ("sum", "prod")
REDUCTIONS = ("sum", "prod")

_unary_math_ops = (
"sin",
@@ -46,10 +46,10 @@
)
_binary_math_ops = ("arctan2",)

_mathops = _unary_math_ops + _binary_math_ops
MATHOPS = _unary_math_ops + _binary_math_ops


_LOCAL_TAG = "__pd_eval_local_"
LOCAL_TAG = "__pd_eval_local_"


class UndefinedVariableError(NameError):
@@ -80,13 +80,13 @@ def __init__(self, name, env, side=None, encoding=None):
self.env = env
self.side = side
tname = str(name)
self.is_local = tname.startswith(_LOCAL_TAG) or tname in _DEFAULT_GLOBALS
self.is_local = tname.startswith(LOCAL_TAG) or tname in DEFAULT_GLOBALS
self._value = self._resolve_name()
self.encoding = encoding

@property
def local_name(self) -> str:
return self.name.replace(_LOCAL_TAG, "")
return self.name.replace(LOCAL_TAG, "")

def __repr__(self) -> str:
return pprint_thing(self.name)
@@ -220,7 +220,7 @@ def __repr__(self) -> str:
@property
def return_type(self):
# clobber types to bool if the op is a boolean operator
if self.op in (_cmp_ops_syms + _bool_ops_syms):
if self.op in (CMP_OPS_SYMS + BOOL_OPS_SYMS):
return np.bool_
return result_type_many(*(term.type for term in com.flatten(self)))

@@ -280,7 +280,7 @@ def _not_in(x, y):
return x not in y


_cmp_ops_syms = (">", "<", ">=", "<=", "==", "!=", "in", "not in")
CMP_OPS_SYMS = (">", "<", ">=", "<=", "==", "!=", "in", "not in")
_cmp_ops_funcs = (
operator.gt,
operator.lt,
Expand All @@ -291,13 +291,13 @@ def _not_in(x, y):
_in,
_not_in,
)
_cmp_ops_dict = dict(zip(_cmp_ops_syms, _cmp_ops_funcs))
_cmp_ops_dict = dict(zip(CMP_OPS_SYMS, _cmp_ops_funcs))

_bool_ops_syms = ("&", "|", "and", "or")
BOOL_OPS_SYMS = ("&", "|", "and", "or")
_bool_ops_funcs = (operator.and_, operator.or_, operator.and_, operator.or_)
_bool_ops_dict = dict(zip(_bool_ops_syms, _bool_ops_funcs))
_bool_ops_dict = dict(zip(BOOL_OPS_SYMS, _bool_ops_funcs))

_arith_ops_syms = ("+", "-", "*", "/", "**", "//", "%")
ARITH_OPS_SYMS = ("+", "-", "*", "/", "**", "//", "%")
_arith_ops_funcs = (
operator.add,
operator.sub,
Expand All @@ -307,12 +307,12 @@ def _not_in(x, y):
operator.floordiv,
operator.mod,
)
_arith_ops_dict = dict(zip(_arith_ops_syms, _arith_ops_funcs))
_arith_ops_dict = dict(zip(ARITH_OPS_SYMS, _arith_ops_funcs))

_special_case_arith_ops_syms = ("**", "//", "%")
SPECIAL_CASE_ARITH_OPS_SYMS = ("**", "//", "%")
_special_case_arith_ops_funcs = (operator.pow, operator.floordiv, operator.mod)
_special_case_arith_ops_dict = dict(
zip(_special_case_arith_ops_syms, _special_case_arith_ops_funcs)
zip(SPECIAL_CASE_ARITH_OPS_SYMS, _special_case_arith_ops_funcs)
)

_binary_ops_dict = {}
@@ -530,9 +530,9 @@ def __init__(self, lhs, rhs):
_cast_inplace(com.flatten(self), acceptable_dtypes, np.float_)


_unary_ops_syms = ("+", "-", "~", "not")
UNARY_OPS_SYMS = ("+", "-", "~", "not")
_unary_ops_funcs = (operator.pos, operator.neg, operator.invert, operator.invert)
_unary_ops_dict = dict(zip(_unary_ops_syms, _unary_ops_funcs))
_unary_ops_dict = dict(zip(UNARY_OPS_SYMS, _unary_ops_funcs))


class UnaryOp(Op):
@@ -561,7 +561,7 @@ def __init__(self, op: str, operand):
except KeyError as err:
raise ValueError(
f"Invalid unary operator {repr(op)}, "
f"valid operators are {_unary_ops_syms}"
f"valid operators are {UNARY_OPS_SYMS}"
) from err

def __call__(self, env):
@@ -602,7 +602,7 @@ class FuncNode:
def __init__(self, name: str):
from pandas.core.computation.check import NUMEXPR_INSTALLED, NUMEXPR_VERSION

if name not in _mathops or (
if name not in MATHOPS or (
NUMEXPR_INSTALLED
and NUMEXPR_VERSION < LooseVersion("2.6.9")
and name in ("floor", "ceil")
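
Throughout ops.py the renamed tuples of operator symbols are zipped with matching functions to build lookup tables for the expression visitor. The pattern in isolation (a trimmed copy of the comparison table, without the in/not in operators):

import operator

CMP_OPS_SYMS = (">", "<", ">=", "<=", "==", "!=")
_cmp_ops_funcs = (operator.gt, operator.lt, operator.ge, operator.le, operator.eq, operator.ne)

# Symbol -> function table, the same dict(zip(...)) construction used above.
_cmp_ops_dict = dict(zip(CMP_OPS_SYMS, _cmp_ops_funcs))
assert _cmp_ops_dict[">="](3, 2) is True
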
4 changes: 2 additions & 2 deletions pandas/core/computation/scope.py
@@ -53,7 +53,7 @@ def _raw_hex_id(obj) -> str:
return "".join(_replacer(x) for x in packed)


_DEFAULT_GLOBALS = {
DEFAULT_GLOBALS = {
"Timestamp": Timestamp,
"datetime": datetime.datetime,
"True": True,
@@ -114,7 +114,7 @@ def __init__(

# shallow copy because we don't want to keep filling this up with what
# was there before if there are multiple calls to Scope/_ensure_scope
self.scope = DeepChainMap(_DEFAULT_GLOBALS.copy())
self.scope = DeepChainMap(DEFAULT_GLOBALS.copy())
self.target = target

if isinstance(local_dict, Scope):
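
DEFAULT_GLOBALS is the base namespace every eval/query Scope is seeded with, which is why bare names like Timestamp resolve inside expressions without an import. A minimal peek, assuming a pandas build that includes this rename:

from pandas.core.computation.scope import DEFAULT_GLOBALS

print(sorted(DEFAULT_GLOBALS))                      # includes 'Timestamp', 'datetime', 'True', ...
print(DEFAULT_GLOBALS["Timestamp"]("2020-09-08"))   # constructs a regular pandas Timestamp
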