
CLN: grab bag of flake8 fixes #12115


Closed
wants to merge 1 commit into from
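The changes fall into two patterns: modules whose deliberate star imports get a file-level `# flake8: noqa` exemption, and individual lines that are wrapped to the 79-character limit or marked with a trailing `# noqa`. A minimal sketch of the two suppression styles, using illustrative imports rather than anything from this diff:

```python
# flake8: noqa
# File-level form: a bare "flake8: noqa" comment makes flake8 skip every check
# in this module -- used below for files such as pandas/__init__.py whose star
# imports are intentional.
from os.path import *

# Line-level form: a trailing "# noqa" silences flake8 for this one line only,
# as done for the handful of lines the diff marks rather than restyles.
p = join("a", "b")  # noqa
```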
1 change: 1 addition & 0 deletions pandas/__init__.py
@@ -1,5 +1,6 @@
# pylint: disable-msg=W0614,W0401,W0611,W0622

# flake8: noqa

__docformat__ = 'restructuredtext'

2 changes: 2 additions & 0 deletions pandas/_version.py
@@ -8,6 +8,8 @@
# This file is released into the public domain. Generated by
# versioneer-0.15 (https://github.com/warner/python-versioneer)

# flake8: noqa

import errno
import os
import re
2 changes: 2 additions & 0 deletions pandas/compat/__init__.py
@@ -25,6 +25,8 @@
* platform checker
"""
# pylint disable=W0611
# flake8: noqa

import functools
import itertools
from distutils.version import LooseVersion
30 changes: 22 additions & 8 deletions pandas/compat/chainmap_impl.py
@@ -58,16 +58,19 @@ def __missing__(self, key):
def __getitem__(self, key):
for mapping in self.maps:
try:
return mapping[key] # can't use 'key in mapping' with defaultdict
# can't use 'key in mapping' with defaultdict
return mapping[key]
except KeyError:
pass
return self.__missing__(key) # support subclasses that define __missing__
# support subclasses that define __missing__
return self.__missing__(key)

def get(self, key, default=None):
return self[key] if key in self else default

def __len__(self):
return len(set().union(*self.maps)) # reuses stored hash values if possible
# reuses stored hash values if possible
return len(set().union(*self.maps))

def __iter__(self):
return iter(set().union(*self.maps))
@@ -89,7 +92,10 @@ def fromkeys(cls, iterable, *args):
return cls(dict.fromkeys(iterable, *args))

def copy(self):
'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
"""
New ChainMap or subclass with a new copy of maps[0] and refs to
maps[1:]
"""
return self.__class__(self.maps[0].copy(), *self.maps[1:])

__copy__ = copy
@@ -115,21 +121,29 @@ def __delitem__(self, key):
try:
del self.maps[0][key]
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
raise KeyError('Key not found in the first mapping: {!r}'
.format(key))

def popitem(self):
'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
"""
Remove and return an item pair from maps[0]. Raise KeyError is maps[0]
is empty.
"""
try:
return self.maps[0].popitem()
except KeyError:
raise KeyError('No keys found in the first mapping.')

def pop(self, key, *args):
'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
"""
Remove *key* from maps[0] and return its value. Raise KeyError if
*key* not in maps[0].
"""
try:
return self.maps[0].pop(key, *args)
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
raise KeyError('Key not found in the first mapping: {!r}'
.format(key))

def clear(self):
'Clear maps[0], leaving maps[1:] intact.'
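The docstrings above all describe the same ChainMap contract: lookups fall through the whole chain, while mutating methods (`__delitem__`, `popitem`, `pop`, `clear`) touch only `maps[0]`. A small illustration of that contract with the stdlib `collections.ChainMap`, which this compat module mirrors for older Pythons:

```python
from collections import ChainMap

defaults = {'sep': ',', 'encoding': 'utf-8'}
overrides = {'sep': '\t'}
cm = ChainMap(overrides, defaults)

print(cm['sep'])             # '\t' -- lookups search the maps left to right
cm['encoding'] = 'latin-1'   # writes always land in maps[0] (overrides)
print(defaults['encoding'])  # 'utf-8' -- maps[1:] are never mutated
cm.pop('sep')                # pop()/popitem()/clear() also act on maps[0] only
print(cm['sep'])             # ',' -- the value from defaults shows through again
```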
2 changes: 1 addition & 1 deletion pandas/compat/openpyxl_compat.py
@@ -32,4 +32,4 @@ def is_compat(major_ver=1):
return LooseVersion(stop_ver) <= ver
else:
raise ValueError('cannot test for openpyxl compatibility with ver {0}'
.format(major_ver))
.format(major_ver))
2 changes: 2 additions & 0 deletions pandas/compat/pickle_compat.py
@@ -1,5 +1,7 @@
""" support pre 0.12 series pickle compatibility """

# flake8: noqa

import sys
import numpy as np
import pandas
4 changes: 2 additions & 2 deletions pandas/computation/align.py
@@ -173,8 +173,8 @@ def _reconstruct_object(typ, obj, axes, dtype):
ret_value = res_t.type(obj)
else:
ret_value = typ(obj).astype(res_t)
# The condition is to distinguish 0-dim array (returned in case of scalar)
# and 1 element array
# The condition is to distinguish 0-dim array (returned in case of
# scalar) and 1 element array
# e.g. np.array(0) and np.array([0])
if len(obj.shape) == 1 and len(obj) == 1:
if not isinstance(ret_value, np.ndarray):
2 changes: 2 additions & 0 deletions pandas/computation/api.py
@@ -1,2 +1,4 @@
# flake8: noqa

from pandas.computation.eval import eval
from pandas.computation.expr import Expr
9 changes: 6 additions & 3 deletions pandas/computation/engines.py
@@ -1,13 +1,16 @@
"""Engine classes for :func:`~pandas.eval`
"""

# flake8: noqa

import abc

from pandas import compat
from pandas.compat import DeepChainMap, map
from pandas.core import common as com
from pandas.computation.align import _align, _reconstruct_object
from pandas.computation.ops import UndefinedVariableError, _mathops, _reductions
from pandas.computation.ops import (UndefinedVariableError,
_mathops, _reductions)


_ne_builtins = frozenset(_mathops + _reductions)
@@ -30,8 +33,8 @@ def _check_ne_builtin_clash(expr):

if overlap:
s = ', '.join(map(repr, overlap))
raise NumExprClobberingError('Variables in expression "%s" overlap with '
'numexpr builtins: (%s)' % (expr, s))
raise NumExprClobberingError('Variables in expression "%s" '
'overlap with builtins: (%s)' % (expr, s))


class AbstractEngine(object):
27 changes: 13 additions & 14 deletions pandas/computation/expr.py
@@ -2,11 +2,7 @@
"""

import ast
import operator
import sys
import inspect
import tokenize
import datetime

from functools import partial

@@ -21,7 +17,7 @@
from pandas.computation.ops import _reductions, _mathops, _LOCAL_TAG
from pandas.computation.ops import Op, BinOp, UnaryOp, Term, Constant, Div
from pandas.computation.ops import UndefinedVariableError, FuncNode
from pandas.computation.scope import Scope, _ensure_scope
from pandas.computation.scope import Scope


def tokenize_string(source):
@@ -381,9 +377,9 @@ def _possibly_evaluate_binop(self, op, op_class, lhs, rhs,
rhs.type))

if self.engine != 'pytables':
if (res.op in _cmp_ops_syms
and getattr(lhs, 'is_datetime', False)
or getattr(rhs, 'is_datetime', False)):
if (res.op in _cmp_ops_syms and
getattr(lhs, 'is_datetime', False) or
getattr(rhs, 'is_datetime', False)):
# all date ops must be done in python bc numexpr doesn't work
# well with NaT
return self._possibly_eval(res, self.binary_ops)
@@ -392,8 +388,8 @@ def _possibly_evaluate_binop(self, op, op_class, lhs, rhs,
# "in"/"not in" ops are always evaluated in python
return self._possibly_eval(res, eval_in_python)
elif self.engine != 'pytables':
if (getattr(lhs, 'return_type', None) == object
or getattr(rhs, 'return_type', None) == object):
if (getattr(lhs, 'return_type', None) == object or
getattr(rhs, 'return_type', None) == object):
# evaluate "==" and "!=" in python if either of our operands
# has an object return type
return self._possibly_eval(res, eval_in_python +
@@ -517,7 +513,7 @@ def visit_Attribute(self, node, **kwargs):
raise ValueError("Invalid Attribute context {0}".format(ctx.__name__))

def visit_Call_35(self, node, side=None, **kwargs):
""" in 3.5 the starargs attribute was changed to be more flexible, #11097 """
""" in 3.5 the starargs attribute was changed to be more flexible,
#11097 """

if isinstance(node.func, ast.Attribute):
res = self.visit_Attribute(node.func)
@@ -541,7 +538,7 @@ def visit_Call_35(self, node, side=None, **kwargs):

if isinstance(res, FuncNode):

new_args = [ self.visit(arg) for arg in node.args ]
new_args = [self.visit(arg) for arg in node.args]

if node.keywords:
raise TypeError("Function \"{0}\" does not support keyword "
@@ -551,15 +548,17 @@ def visit_Call_35(self, node, side=None, **kwargs):

else:

new_args = [ self.visit(arg).value for arg in node.args ]
new_args = [self.visit(arg).value for arg in node.args]

for key in node.keywords:
if not isinstance(key, ast.keyword):
raise ValueError("keyword error in function call "
"'{0}'".format(node.func.id))

if key.arg:
kwargs.append(ast.keyword(keyword.arg, self.visit(keyword.value)))
# TODO: bug?
kwargs.append(ast.keyword(
keyword.arg, self.visit(keyword.value))) # noqa

return self.const_type(res(*new_args, **kwargs), self.env)

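The `# TODO: bug?` marker added above flags what looks like a genuine pre-existing problem: the loop binds each keyword node to `key`, yet the appended expression references an undefined name `keyword`, so reaching that branch would raise NameError (presumably `key.arg` and `self.visit(key.value)` were intended). A small, self-contained sketch showing that `key` is the name actually bound when walking `ast.Call.keywords`:

```python
import ast

call = ast.parse("f(x=1)", mode="eval").body  # the ast.Call node
for key in call.keywords:
    # The loop variable here is 'key'; referring to 'keyword' instead, as the
    # code above does, would raise NameError as soon as a keyword argument
    # reaches visit_Call_35.
    print(key.arg, ast.dump(key.value))
```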
13 changes: 7 additions & 6 deletions pandas/computation/expressions.py
@@ -16,9 +16,10 @@
ver = ne.__version__
_NUMEXPR_INSTALLED = ver >= LooseVersion('2.1')
if not _NUMEXPR_INSTALLED:
warnings.warn("The installed version of numexpr {ver} is not supported "
"in pandas and will be not be used\nThe minimum supported "
"version is 2.1\n".format(ver=ver), UserWarning)
warnings.warn(
"The installed version of numexpr {ver} is not supported "
"in pandas and will be not be used\nThe minimum supported "
"version is 2.1\n".format(ver=ver), UserWarning)

except ImportError: # pragma: no cover
_NUMEXPR_INSTALLED = False
@@ -96,8 +97,8 @@ def _can_use_numexpr(op, op_str, a, b, dtype_check):
return False


def _evaluate_numexpr(op, op_str, a, b, raise_on_error=False, truediv=True, reversed=False,
**eval_kwargs):
def _evaluate_numexpr(op, op_str, a, b, raise_on_error=False, truediv=True,
reversed=False, **eval_kwargs):
result = None

if _can_use_numexpr(op, op_str, a, b, 'evaluate'):
@@ -106,7 +107,7 @@ def _evaluate_numexpr(op, op_str, a, b, raise_on_error=False, truediv=True, reve
# we were originally called by a reversed op
# method
if reversed:
a,b = b,a
a, b = b, a

a_value = getattr(a, "values", a)
b_value = getattr(b, "values", b)
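For the `reversed=True` swap above: when Python falls back to a reflected method such as `__rsub__`, the receiving object is the right-hand operand, so the engine has to flip `a` and `b` back before evaluating. A tiny, generic illustration of that calling convention (not pandas code):

```python
class Box(object):
    def __init__(self, value):
        self.value = value

    def __rsub__(self, other):
        # Invoked for `other - self`; the operands arrive "reversed", so we
        # must compute other - value, not value - other.
        return other - self.value

print(5 - Box(2))  # 3
```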
7 changes: 5 additions & 2 deletions pandas/computation/ops.py
@@ -498,12 +498,13 @@ def return_type(self):
if operand.return_type == np.dtype('bool'):
return np.dtype('bool')
if (isinstance(operand, Op) and
(operand.op in _cmp_ops_dict or operand.op in _bool_ops_dict)):
(operand.op in _cmp_ops_dict or operand.op in _bool_ops_dict)):
return np.dtype('bool')
return np.dtype('int')


class MathCall(Op):

def __init__(self, func, args):
super(MathCall, self).__init__(func.name, args)
self.func = func
@@ -518,9 +519,11 @@ def __unicode__(self):


class FuncNode(object):

def __init__(self, name):
if name not in _mathops:
raise ValueError("\"{0}\" is not a supported function".format(name))
raise ValueError(
"\"{0}\" is not a supported function".format(name))
self.name = name
self.func = getattr(np, name)

21 changes: 11 additions & 10 deletions pandas/computation/pytables.py
@@ -7,12 +7,11 @@
from datetime import datetime, timedelta
import numpy as np
import pandas as pd
from pandas.compat import u, string_types, PY3, DeepChainMap
from pandas.compat import u, string_types, DeepChainMap
from pandas.core.base import StringMixin
import pandas.core.common as com
from pandas.computation import expr, ops
from pandas.computation.ops import is_term, UndefinedVariableError
from pandas.computation.scope import _ensure_scope
from pandas.computation.expr import BaseExprVisitor
from pandas.computation.common import _ensure_decoded
from pandas.tseries.timedeltas import _coerce_scalar_to_timedelta_type
@@ -147,17 +146,17 @@ def is_in_table(self):
@property
def kind(self):
""" the kind of my field """
return getattr(self.queryables.get(self.lhs),'kind',None)
return getattr(self.queryables.get(self.lhs), 'kind', None)

@property
def meta(self):
""" the meta of my field """
return getattr(self.queryables.get(self.lhs),'meta',None)
return getattr(self.queryables.get(self.lhs), 'meta', None)

@property
def metadata(self):
""" the metadata of my field """
return getattr(self.queryables.get(self.lhs),'metadata',None)
return getattr(self.queryables.get(self.lhs), 'metadata', None)

def generate(self, v):
""" create and return the op string for this TermValue """
@@ -195,7 +194,7 @@ def stringify(value):
return TermValue(int(v), v, kind)
elif meta == u('category'):
metadata = com._values_from_object(self.metadata)
result = metadata.searchsorted(v,side='left')
result = metadata.searchsorted(v, side='left')
return TermValue(result, result, u('integer'))
elif kind == u('integer'):
v = int(float(v))
@@ -504,7 +503,7 @@ def __init__(self, where, op=None, value=None, queryables=None,
else:
w = self.parse_back_compat(w)
where[idx] = w
where = ' & ' .join(["(%s)" % w for w in where])
where = ' & ' .join(["(%s)" % w for w in where]) # noqa

self.expr = where
self.env = Scope(scope_level + 1, local_dict=local_dict)
@@ -551,12 +550,14 @@ def parse_back_compat(self, w, op=None, value=None):

# stringify with quotes these values
def convert(v):
if isinstance(v, (datetime,np.datetime64,timedelta,np.timedelta64)) or hasattr(v, 'timetuple'):
if (isinstance(v, (datetime, np.datetime64,
timedelta, np.timedelta64)) or
hasattr(v, 'timetuple')):
return "'{0}'".format(v)
return v

if isinstance(value, (list,tuple)):
value = [ convert(v) for v in value ]
if isinstance(value, (list, tuple)):
value = [convert(v) for v in value]
else:
value = convert(value)
