PYTHON-3120 Set up flake8 linting #868

Merged: 11 commits, merged on Feb 17, 2022
Changes from 7 commits
29 changes: 29 additions & 0 deletions .flake8
@@ -0,0 +1,29 @@
[flake8]
max-line-length = 100
enable-extensions = G
extend-ignore =
G200, G202,
# black adds spaces around ':'
E203,
# E501 line too long (let black handle line length)
E501
per-file-ignores =
# E402 module level import not at top of file
pymongo/__init__.py: E402

# G004 Logging statement uses f-string
pymongo/event_loggers.py: G004

# E402 module level import not at top of file
# B011 Do not call assert False since python -O removes these calls
# F405 'Foo' may be undefined, or defined from star imports
# E741 ambiguous variable name
# B007 Loop control variable 'foo' not used within the loop body
# F403 'from foo import *' used; unable to detect undefined names
# B001 Do not use bare `except:`
# E722 do not use bare 'except'
# E731 do not assign a lambda expression, use a def
# B305 `.next()` is not a thing on Python 3
# F811 redefinition of unused 'foo' from line XXX
# F841 local variable 'foo' is assigned to but never used
test/*: E402, B011, F405, E741, B007, F403, B001, E722, E731, B305, F811, F841
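
For context on the per-file ignores above, here is a minimal sketch (hypothetical module, not part of this diff) of the two package-level patterns being permitted: E402 allows imports that intentionally follow module-level setup code, and G004 allows f-strings inside logging calls.

import logging

logging.basicConfig()  # executable statement before the remaining imports

import sys  # would normally be flagged as E402: module level import not at top of file

log = logging.getLogger(__name__)
log.info(f"running on {sys.platform}")  # would normally be flagged as G004: f-string in a logging call
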
4 changes: 4 additions & 0 deletions .github/workflows/test-python.yml
@@ -31,6 +31,8 @@ jobs:
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: 'setup.py'
- name: Start MongoDB
uses: supercharge/[email protected]
with:
@@ -53,6 +55,8 @@
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: 'setup.py'
- name: Install dependencies
run: |
python -m pip install -U pip mypy
19 changes: 19 additions & 0 deletions .pre-commit-config.yaml
@@ -30,6 +30,16 @@ repos:
files: \.py$
args: [--profile=black]

- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.4
hooks:
- id: flake8
additional_dependencies: [
'flake8-bugbear==20.1.4',
'flake8-logging-format==0.6.0',
'flake8-implicit-str-concat==0.2.0',
]

# We use the Python version instead of the original version which seems to require Docker
# https://github.com/koalaman/shellcheck-precommit
- repo: https://github.com/shellcheck-py/shellcheck-py
@@ -38,3 +48,12 @@ repos:
- id: shellcheck
name: shellcheck
args: ["--severity=warning"]

- repo: https://github.com/sirosen/check-jsonschema
rev: 0.10.2
hooks:
- id: check-jsonschema
name: "Check GitHub Workflows"
files: ^\.github/workflows/
types: [yaml]
args: ["--schemafile", "https://json.schemastore.org/github-workflow"]
20 changes: 10 additions & 10 deletions bson/__init__.py
@@ -85,7 +85,7 @@
cast,
)

from bson.binary import (
from bson.binary import ( # noqa: F401
ALL_UUID_SUBTYPES,
CSHARP_LEGACY,
JAVA_LEGACY,
@@ -514,7 +514,7 @@ def _bson_to_dict(data: Any, opts: Any) -> Any:


if _USE_C:
_bson_to_dict = _cbson._bson_to_dict
_bson_to_dict = _cbson._bson_to_dict # noqa: F811


_PACK_FLOAT = struct.Struct("<d").pack
@@ -544,15 +544,15 @@ def _make_c_string_check(string: Union[str, bytes]) -> bytes:
"""Make a 'C' string, checking for embedded NUL characters."""
if isinstance(string, bytes):
if b"\x00" in string:
raise InvalidDocument("BSON keys / regex patterns must not " "contain a NUL character")
raise InvalidDocument("BSON keys / regex patterns must not contain a NUL character")
try:
_utf_8_decode(string, None, True)
return string + b"\x00"
except UnicodeError:
raise InvalidStringData("strings in documents must be valid " "UTF-8: %r" % string)
raise InvalidStringData("strings in documents must be valid UTF-8: %r" % string)
else:
if "\x00" in string:
raise InvalidDocument("BSON keys / regex patterns must not " "contain a NUL character")
raise InvalidDocument("BSON keys / regex patterns must not contain a NUL character")
return cast(bytes, _utf_8_encode(string)[0]) + b"\x00"


@@ -563,7 +563,7 @@ def _make_c_string(string: Union[str, bytes]) -> bytes:
_utf_8_decode(string, None, True)
return string + b"\x00"
except UnicodeError:
raise InvalidStringData("strings in documents must be valid " "UTF-8: %r" % string)
raise InvalidStringData("strings in documents must be valid UTF-8: %r" % string)
else:
return cast(bytes, _utf_8_encode(string)[0]) + b"\x00"

@@ -572,7 +572,7 @@ def _make_name(string: str) -> bytes:
"""Make a 'C' string suitable for a BSON key."""
# Keys can only be text in python 3.
if "\x00" in string:
raise InvalidDocument("BSON keys / regex patterns must not " "contain a NUL character")
raise InvalidDocument("BSON keys / regex patterns must not contain a NUL character")
return cast(bytes, _utf_8_encode(string)[0]) + b"\x00"


@@ -847,7 +847,7 @@ def _name_value_to_bson(
def _element_to_bson(key: Any, value: Any, check_keys: bool, opts: Any) -> bytes:
"""Encode a single key, value pair."""
if not isinstance(key, str):
raise InvalidDocument("documents must have only string keys, " "key was %r" % (key,))
raise InvalidDocument("documents must have only string keys, key was %r" % (key,))
if check_keys:
if key.startswith("$"):
raise InvalidDocument("key %r must not start with '$'" % (key,))
@@ -877,7 +877,7 @@ def _dict_to_bson(doc: Any, check_keys: bool, opts: Any, top_level: bool = True)


if _USE_C:
_dict_to_bson = _cbson._dict_to_bson
_dict_to_bson = _cbson._dict_to_bson # noqa: F811


def _millis_to_datetime(millis: int, opts: Any) -> datetime.datetime:
@@ -1033,7 +1033,7 @@ def decode_all(


if _USE_C:
decode_all = _cbson.decode_all
decode_all = _cbson.decode_all # noqa: F811


def _decode_selective(rawdoc: Any, fields: Any, codec_options: Any) -> Mapping[Any, Any]:
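
The # noqa: F811 markers in the hunks above silence an intentional pattern: the pure-Python implementation is defined first and then rebound to the C-extension version when it is available, which pyflakes reports as a redefinition. A minimal sketch of the idea, with a hypothetical extension module standing in for bson._cbson:

def _decode(data: bytes) -> str:
    """Pure-Python implementation, kept as a fallback."""
    return data.decode("utf-8")

try:
    import _speedups  # hypothetical compiled extension
    _USE_C = True
except ImportError:
    _USE_C = False

if _USE_C:
    _decode = _speedups._decode  # noqa: F811 -- intentional rebinding, as in bson/__init__.py
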
4 changes: 2 additions & 2 deletions bson/binary.py
@@ -260,7 +260,7 @@ def from_uuid(

if uuid_representation not in ALL_UUID_REPRESENTATIONS:
raise ValueError(
"uuid_representation must be a value " "from bson.binary.UuidRepresentation"
"uuid_representation must be a value from bson.binary.UuidRepresentation"
)

if uuid_representation == UuidRepresentation.UNSPECIFIED:
@@ -310,7 +310,7 @@ def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUI

if uuid_representation not in ALL_UUID_REPRESENTATIONS:
raise ValueError(
"uuid_representation must be a value from " "bson.binary.UuidRepresentation"
"uuid_representation must be a value from bson.binary.UuidRepresentation"
)

if uuid_representation == UuidRepresentation.UNSPECIFIED:
6 changes: 2 additions & 4 deletions bson/codec_options.py
@@ -23,12 +23,10 @@
Any,
Callable,
Dict,
Generic,
Iterable,
MutableMapping,
Optional,
Type,
TypeVar,
Union,
cast,
)
@@ -312,10 +310,10 @@ def __new__(
raise TypeError("tz_aware must be True or False")
if uuid_representation not in ALL_UUID_REPRESENTATIONS:
raise ValueError(
"uuid_representation must be a value " "from bson.binary.UuidRepresentation"
"uuid_representation must be a value from bson.binary.UuidRepresentation"
)
if not isinstance(unicode_decode_error_handler, (str, None)): # type: ignore
raise ValueError("unicode_decode_error_handler must be a string " "or None")
raise ValueError("unicode_decode_error_handler must be a string or None")
if tzinfo is not None:
if not isinstance(tzinfo, datetime.tzinfo):
raise TypeError("tzinfo must be an instance of datetime.tzinfo")
4 changes: 2 additions & 2 deletions bson/dbref.py
@@ -35,7 +35,7 @@ def __init__(
collection: str,
id: Any,
database: Optional[str] = None,
_extra: Mapping[str, Any] = {},
_extra: Optional[Mapping[str, Any]] = None,
**kwargs: Any
) -> None:
"""Initialize a new :class:`DBRef`.
@@ -63,7 +63,7 @@ def __init__(
self.__collection = collection
self.__id = id
self.__database = database
kwargs.update(_extra)
kwargs.update(_extra or {})
self.__kwargs = kwargs

@property
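
The _extra change above is the classic mutable-default-argument fix (flake8-bugbear reports this class of problem as B006): a {} default is created once at function definition time and shared by every call. A minimal sketch of the failure mode, with hypothetical names:

def tag(item, extra={}):  # one shared dict for every call
    extra.setdefault("seen", 0)
    extra["seen"] += 1
    return extra

print(tag("a"))  # {'seen': 1}
print(tag("b"))  # {'seen': 2}  -- state leaked in from the previous call

def tag_fixed(item, extra=None):  # the pattern DBRef.__init__ now uses
    extra = dict(extra or {})  # fresh dict on every call
    extra.setdefault("seen", 0)
    extra["seen"] += 1
    return extra
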
12 changes: 5 additions & 7 deletions bson/json_util.py
@@ -283,7 +283,7 @@ def __new__(
self.json_mode = json_mode
if self.json_mode == JSONMode.RELAXED:
if strict_number_long:
raise ValueError("Cannot specify strict_number_long=True with" " JSONMode.RELAXED")
raise ValueError("Cannot specify strict_number_long=True with JSONMode.RELAXED")
if datetime_representation not in (None, DatetimeRepresentation.ISO8601):
raise ValueError(
"datetime_representation must be DatetimeRepresentation."
@@ -296,7 +296,7 @@ def __new__(
self.strict_uuid = True
elif self.json_mode == JSONMode.CANONICAL:
if strict_number_long not in (None, True):
raise ValueError("Cannot specify strict_number_long=False with" " JSONMode.RELAXED")
raise ValueError("Cannot specify strict_number_long=False with JSONMode.RELAXED")
if datetime_representation not in (None, DatetimeRepresentation.NUMBERLONG):
raise ValueError(
"datetime_representation must be DatetimeRepresentation."
@@ -581,11 +581,9 @@ def _parse_canonical_binary(doc: Any, json_options: JSONOptions) -> Union[Binary
if not isinstance(b64, str):
raise TypeError("$binary base64 must be a string: %s" % (doc,))
if not isinstance(subtype, str) or len(subtype) > 2:
raise TypeError("$binary subType must be a string at most 2 " "characters: %s" % (doc,))
raise TypeError("$binary subType must be a string at most 2 characters: %s" % (doc,))
if len(binary) != 2:
raise TypeError(
'$binary must include only "base64" and "subType" ' "components: %s" % (doc,)
)
raise TypeError('$binary must include only "base64" and "subType" components: %s' % (doc,))

data = base64.b64decode(b64.encode())
return _binary_or_uuid(data, int(subtype, 16), json_options)
@@ -686,7 +684,7 @@ def _parse_canonical_regex(doc: Any) -> Regex:
opts = regex["options"]
if not isinstance(opts, str):
raise TypeError(
"Bad $regularExpression options, options must be " "string, was type %s" % (type(opts))
"Bad $regularExpression options, options must be string, was type %s" % (type(opts))
)
return Regex(regex["pattern"], opts)

2 changes: 1 addition & 1 deletion bson/objectid.py
@@ -204,7 +204,7 @@ def __validate(self, oid: Any) -> None:
_raise_invalid_id(oid)
else:
raise TypeError(
"id must be an instance of (bytes, str, ObjectId), " "not %s" % (type(oid),)
"id must be an instance of (bytes, str, ObjectId), not %s" % (type(oid),)
)

@property
3 changes: 1 addition & 2 deletions bson/raw_bson.py
@@ -51,8 +51,7 @@
overhead of decoding or encoding BSON.
"""

from collections.abc import Mapping as _Mapping
from typing import Any, ItemsView, Iterator, Mapping, Optional, cast
from typing import Any, ItemsView, Iterator, Mapping, Optional

from bson import _get_object_size, _raw_to_dict
from bson.codec_options import _RAW_BSON_DOCUMENT_MARKER
2 changes: 1 addition & 1 deletion bson/tz_util.py
@@ -15,7 +15,7 @@
"""Timezone related utilities for BSON."""

from datetime import datetime, timedelta, tzinfo
from typing import Any, Optional, Tuple, Union
from typing import Optional, Tuple, Union

ZERO: timedelta = timedelta(0)

2 changes: 1 addition & 1 deletion doc/conf.py
@@ -9,7 +9,7 @@

sys.path[0:0] = [os.path.abspath("..")]

import pymongo
import pymongo # noqa

# -- General configuration -----------------------------------------------------

4 changes: 2 additions & 2 deletions green_framework_test.py
@@ -59,7 +59,7 @@ def run(framework_name, *args):

# Run the tests.
sys.argv[:] = ["setup.py", "test"] + list(args)
import setup
import setup # noqa


def main():
@@ -87,7 +87,7 @@ def main():
list_frameworks()
sys.exit()
else:
assert False, "unhandled option"
raise AssertionError("unhandled option")

if not args:
print(usage)
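
The assert False replacement above corresponds to bugbear's B011, also listed in the test/* ignores in .flake8: assert statements are stripped when Python runs with -O, so an assert False guard silently disappears under optimization, while raise AssertionError(...) always executes. A minimal sketch with hypothetical option handling:

def handle(option):
    if option == "--list-frameworks":
        return "list"
    # Under "python -O" an assert is removed entirely, so this guard would vanish:
    #     assert False, "unhandled option"
    # Raising explicitly runs with or without -O:
    raise AssertionError("unhandled option")
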
7 changes: 3 additions & 4 deletions gridfs/__init__.py
@@ -35,11 +35,10 @@
)
from pymongo import ASCENDING, DESCENDING
from pymongo.client_session import ClientSession
from pymongo.collation import Collation
from pymongo.collection import Collection
from pymongo.common import UNAUTHORIZED_CODES, validate_string
from pymongo.common import validate_string
from pymongo.database import Database
from pymongo.errors import ConfigurationError, OperationFailure
from pymongo.errors import ConfigurationError
from pymongo.read_preferences import _ServerMode
from pymongo.write_concern import WriteConcern

@@ -83,7 +82,7 @@ def __init__(self, database: Database, collection: str = "fs"):
database = _clear_entity_type_registry(database)

if not database.write_concern.acknowledged:
raise ConfigurationError("database must use " "acknowledged write_concern")
raise ConfigurationError("database must use acknowledged write_concern")

self.__collection = database[collection]
self.__files = self.__collection.files