Skip to content

Commit 491f93e

Browse files
Some pylint and pyupgrade cleanups (#29)
* Fix line endings of empty files
* Use `not in` for membership test
* Remove unused imports
* Fix implicit string concatenation
* Remove python 2 coding statement
* Change python2's Text into str
* Remove useless linebreaks
* Remove useless parentheses
* Use new-style dictionary and set constructors
* There is no more IOError (alias to OSError now)
* Use .format instead of % string interpolation
* Use super() without redundant arguments
* Remove redundant read flag from open
* Avoid building lists unnecessarily
* Use f-strings with python 3.6
* Black reformat

Co-authored-by: Michael Osthege <[email protected]>
1 parent 2d0ae94 commit 491f93e

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

93 files changed: +398 additions, −467 deletions

doc/conf.py

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,3 @@
1-
# -*- coding: utf-8 -*-
2-
#
31
# pytensor documentation build configuration file, created by
42
# sphinx-quickstart on Tue Oct 7 16:34:06 2008.
53
#

doc/extending/extending_pytensor_solution_1.py

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -73,7 +73,6 @@ def grad(self, inputs, output_grads):
7373
import numpy as np
7474

7575
from tests import unittest_tools as utt
76-
from pytensor import function, printing
7776
from pytensor import tensor as at
7877
from pytensor.graph.basic import Apply
7978
from pytensor.graph.op import Op

doc/generate_dtype_tensor_table.py

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,3 @@
1-
2-
31
letters = [
42
('b', 'int8'),
53
('w', 'int16'),

doc/scripts/docgen.py

Lines changed: 1 addition & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,6 @@
1-
21
import sys
32
import os
43
import shutil
5-
import inspect
64
import getopt
75
from collections import defaultdict
86

@@ -16,7 +14,7 @@
1614
sys.argv[1:],
1715
'o:f:',
1816
['rst', 'help', 'nopdf', 'cache', 'check', 'test'])
19-
options.update(dict([x, y or True] for x, y in opts))
17+
options.update({x: y or True for x, y in opts})
2018
if options['--help']:
2119
print(f'Usage: {sys.argv[0]} [OPTIONS] [files...]')
2220
print(' -o <dir>: output the html files in the specified dir')
@@ -100,8 +98,6 @@ def call_sphinx(builder, workdir):
10098
shutil.rmtree(workdir)
10199
except OSError as e:
102100
print('OSError:', e)
103-
except IOError as e:
104-
print('IOError:', e)
105101

106102
if options['--test']:
107103
mkdir("doc")

pytensor/_version.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -105,7 +105,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
105105
return None, None
106106
else:
107107
if verbose:
108-
print("unable to find command, tried %s" % (commands,))
108+
print(f"unable to find command, tried {commands}")
109109
return None, None
110110
stdout = process.communicate()[0].strip().decode()
111111
if process.returncode != 0:
@@ -155,7 +155,7 @@ def git_get_keywords(versionfile_abs):
155155
# _version.py.
156156
keywords = {}
157157
try:
158-
with open(versionfile_abs, "r") as fobj:
158+
with open(versionfile_abs) as fobj:
159159
for line in fobj:
160160
if line.strip().startswith("git_refnames ="):
161161
mo = re.search(r'=\s*"(.*)"', line)
@@ -351,7 +351,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
351351
if verbose:
352352
fmt = "tag '%s' doesn't start with prefix '%s'"
353353
print(fmt % (full_tag, tag_prefix))
354-
pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
354+
pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
355355
full_tag,
356356
tag_prefix,
357357
)

pytensor/compile/debugmode.py

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -1604,7 +1604,7 @@ def f():
16041604
# storage will be None
16051605
if thunk_py:
16061606
_logger.debug(
1607-
f"{i} - running thunk_py with None as " "output storage"
1607+
f"{i} - running thunk_py with None as output storage"
16081608
)
16091609
try:
16101610
thunk_py()
@@ -2063,15 +2063,15 @@ def __init__(
20632063
infolog = StringIO()
20642064
print("Optimization process is unstable...", file=infolog)
20652065
print(
2066-
" (HINT: Ops that the nodes point to must compare " "equal)",
2066+
" (HINT: Ops that the nodes point to must compare equal)",
20672067
file=infolog,
20682068
)
20692069
print(
2070-
"(event index) (one event trace) (other event " "trace)",
2070+
"(event index) (one event trace) (other event trace)",
20712071
file=infolog,
20722072
)
20732073
print(
2074-
"-------------------------------------------------" "----",
2074+
"-----------------------------------------------------",
20752075
file=infolog,
20762076
)
20772077
for j in range(max(len(li), len(l0))):
@@ -2292,7 +2292,7 @@ def __init__(
22922292

22932293
if not isinstance(linker, _DummyLinker):
22942294
raise Exception(
2295-
"DebugMode can only use its own linker! You " "should not provide one.",
2295+
"DebugMode can only use its own linker! You should not provide one.",
22962296
linker,
22972297
)
22982298

@@ -2318,7 +2318,7 @@ def __init__(
23182318
self.require_matching_strides = require_matching_strides
23192319

23202320
if not (self.check_c_code or self.check_py_code):
2321-
raise ValueError("DebugMode has to check at least one of c and py " "code")
2321+
raise ValueError("DebugMode has to check at least one of c and py code")
23222322

23232323
def __str__(self):
23242324
return "DebugMode(linker={}, optimizer={})".format(

pytensor/compile/function/__init__.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -300,9 +300,7 @@ def opt_log1p(node):
300300
if uses_tuple:
301301
# we must use old semantics in this case.
302302
if profile:
303-
raise NotImplementedError(
304-
"profiling not supported in old-style " "function"
305-
)
303+
raise NotImplementedError("profiling not supported in old-style function")
306304
if uses_updates or uses_givens:
307305
raise NotImplementedError(
308306
"In() instances and tuple inputs trigger the old "

pytensor/compile/function/pfunc.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -181,7 +181,7 @@ def clone_inputs(i):
181181
raise TypeError("update target must be a SharedVariable", store_into)
182182
if store_into in update_d:
183183
raise ValueError(
184-
"this shared variable already has an update " "expression",
184+
"this shared variable already has an update expression",
185185
(store_into, update_d[store_into]),
186186
)
187187

pytensor/compile/nanguardmode.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -225,7 +225,7 @@ def do_check_on(value, nd, var=None):
225225
print(pytensor.printing.debugprint(nd, file="str"), file=sio)
226226
else:
227227
print(
228-
"NanGuardMode found an error in an input of the " "graph.",
228+
"NanGuardMode found an error in an input of the graph.",
229229
file=sio,
230230
)
231231
# Add the stack trace

pytensor/compile/profiling.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1308,7 +1308,7 @@ def compute_max_stats(running_memory, stats):
13081308

13091309
if len(fct_memory) > 1:
13101310
print(
1311-
"Memory Profile (the max between all functions in " "that profile)",
1311+
"Memory Profile (the max between all functions in that profile)",
13121312
file=file,
13131313
)
13141314
else:

pytensor/compile/sharedvalue.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -72,7 +72,7 @@ def __init__(
7272
self.container = container
7373
if (value is not None) or (strict is not None):
7474
raise TypeError(
75-
"value and strict are ignored if you pass " "a container here"
75+
"value and strict are ignored if you pass a container here"
7676
)
7777
else:
7878
self.container = Container(

pytensor/configdefaults.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -585,7 +585,7 @@ def add_compile_configvars():
585585

586586
config.add(
587587
"cmodule__age_thresh_use",
588-
"In seconds. The time after which " "PyTensor won't reuse a compile c module.",
588+
"In seconds. The time after which PyTensor won't reuse a compile c module.",
589589
# 24 days
590590
IntParam(60 * 60 * 24 * 24, mutable=False),
591591
in_c_key=False,
@@ -1004,7 +1004,7 @@ def add_testvalue_and_checking_configvars():
10041004

10051005
config.add(
10061006
"on_shape_error",
1007-
"warn: print a warning and use the default" " value. raise: raise an error",
1007+
"warn: print a warning and use the default value. raise: raise an error",
10081008
EnumStr("warn", ["raise"]),
10091009
in_c_key=False,
10101010
)
@@ -1149,14 +1149,14 @@ def add_metaopt_configvars():
11491149

11501150
config.add(
11511151
"metaopt__optimizer_excluding",
1152-
("exclude optimizers with these tags. " "Separate tags with ':'."),
1152+
("exclude optimizers with these tags. Separate tags with ':'."),
11531153
StrParam(""),
11541154
in_c_key=False,
11551155
)
11561156

11571157
config.add(
11581158
"metaopt__optimizer_including",
1159-
("include optimizers with these tags. " "Separate tags with ':'."),
1159+
("include optimizers with these tags. Separate tags with ':'."),
11601160
StrParam(""),
11611161
in_c_key=False,
11621162
)

pytensor/configparser.py

Lines changed: 1 addition & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -125,10 +125,7 @@ def get_config_hash(self):
125125
)
126126
return hash_from_code(
127127
"\n".join(
128-
[
129-
"{} = {}".format(cv.name, cv.__get__(self, self.__class__))
130-
for cv in all_opts
131-
]
128+
[f"{cv.name} = {cv.__get__(self, self.__class__)}" for cv in all_opts]
132129
)
133130
)
134131

pytensor/gradient.py

Lines changed: 14 additions & 24 deletions
Original file line number | Diff line number | Diff line change
@@ -91,10 +91,8 @@ def grad_not_implemented(op, x_pos, x, comment=""):
9191

9292
return (
9393
NullType(
94-
(
95-
"This variable is Null because the grad method for "
96-
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
97-
)
94+
"This variable is Null because the grad method for "
95+
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
9896
)
9997
)()
10098

@@ -114,10 +112,8 @@ def grad_undefined(op, x_pos, x, comment=""):
114112

115113
return (
116114
NullType(
117-
(
118-
"This variable is Null because the grad method for "
119-
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
120-
)
115+
"This variable is Null because the grad method for "
116+
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
121117
)
122118
)()
123119

@@ -1275,14 +1271,12 @@ def try_to_copy_if_needed(var):
12751271
# We therefore don't allow it because its usage has become
12761272
# so muddied.
12771273
raise TypeError(
1278-
(
1279-
f"{node.op}.grad returned None for a gradient term, "
1280-
"this is prohibited. Instead of None,"
1281-
"return zeros_like(input), disconnected_type(),"
1282-
" or a NullType variable such as those made with "
1283-
"the grad_undefined or grad_unimplemented helper "
1284-
"functions."
1285-
)
1274+
f"{node.op}.grad returned None for a gradient term, "
1275+
"this is prohibited. Instead of None,"
1276+
"return zeros_like(input), disconnected_type(),"
1277+
" or a NullType variable such as those made with "
1278+
"the grad_undefined or grad_unimplemented helper "
1279+
"functions."
12861280
)
12871281

12881282
# Check that the gradient term for this input
@@ -1402,10 +1396,8 @@ def access_grad_cache(var):
14021396

14031397
if hasattr(var, "ndim") and term.ndim != var.ndim:
14041398
raise ValueError(
1405-
(
1406-
f"{node.op}.grad returned a term with"
1407-
f" {int(term.ndim)} dimensions, but {int(var.ndim)} are required."
1408-
)
1399+
f"{node.op}.grad returned a term with"
1400+
f" {int(term.ndim)} dimensions, but {int(var.ndim)} are required."
14091401
)
14101402

14111403
terms.append(term)
@@ -1767,10 +1759,8 @@ def verify_grad(
17671759
for i, p in enumerate(pt):
17681760
if p.dtype not in ("float16", "float32", "float64"):
17691761
raise TypeError(
1770-
(
1771-
"verify_grad can work only with floating point "
1772-
f'inputs, but input {i} has dtype "{p.dtype}".'
1773-
)
1762+
"verify_grad can work only with floating point "
1763+
f'inputs, but input {i} has dtype "{p.dtype}".'
17741764
)
17751765

17761766
_type_tol = dict( # relative error tolerances for different types

pytensor/graph/basic.py

Lines changed: 3 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -1184,11 +1184,9 @@ def clone_replace(
11841184
items = []
11851185
else:
11861186
raise ValueError(
1187-
(
1188-
"replace is neither a dictionary, list, "
1189-
f"tuple or None ! The value provided is {replace},"
1190-
f"of type {type(replace)}"
1191-
)
1187+
"replace is neither a dictionary, list, "
1188+
f"tuple or None ! The value provided is {replace},"
1189+
f"of type {type(replace)}"
11921190
)
11931191
tmp_replace = [(x, x.type()) for x, y in items]
11941192
new_replace = [(x, y) for ((_, x), (_, y)) in zip(tmp_replace, items)]

pytensor/graph/features.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -679,7 +679,7 @@ def on_detach(self, fgraph):
679679
"""
680680
if self.fgraph is not fgraph:
681681
raise Exception(
682-
"This NodeFinder instance was not attached to the" " provided fgraph."
682+
"This NodeFinder instance was not attached to the provided fgraph."
683683
)
684684
self.fgraph = None
685685
del fgraph.get_nodes

pytensor/graph/op.py

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -10,7 +10,6 @@
1010
List,
1111
Optional,
1212
Sequence,
13-
Text,
1413
Tuple,
1514
TypeVar,
1615
Union,
@@ -496,7 +495,7 @@ def prepare_node(
496495
node: Apply,
497496
storage_map: Optional[StorageMapType],
498497
compute_map: Optional[ComputeMapType],
499-
impl: Optional[Text],
498+
impl: Optional[str],
500499
) -> None:
501500
"""Make any special modifications that the `Op` needs before doing :meth:`Op.make_thunk`.
502501
@@ -573,7 +572,7 @@ def make_thunk(
573572
storage_map: StorageMapType,
574573
compute_map: ComputeMapType,
575574
no_recycling: List[Variable],
576-
impl: Optional[Text] = None,
575+
impl: Optional[str] = None,
577576
) -> ThunkType:
578577
r"""Create a thunk.
579578
@@ -676,7 +675,7 @@ def get_test_value(v: Any) -> Any:
676675
return v.get_test_value()
677676

678677

679-
def missing_test_message(msg: Text) -> None:
678+
def missing_test_message(msg: str) -> None:
680679
"""Display a message saying that some test_value is missing.
681680
682681
This uses the appropriate form based on ``config.compute_test_value``:

pytensor/graph/rewriting/utils.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -114,7 +114,7 @@ def is_same_graph_with_merge(var1, var2, givens=None):
114114
# We also need to make sure we replace a Variable if it is present in
115115
# `givens`.
116116
vars_replaced = [givens.get(v, v) for v in fgraph.outputs]
117-
o1, o2 = [v.owner for v in vars_replaced]
117+
o1, o2 = (v.owner for v in vars_replaced)
118118
if o1 is None and o2 is None:
119119
# Comparing two single-Variable graphs: they are equal if they are
120120
# the same Variable.

pytensor/graph/type.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
from abc import abstractmethod
2-
from typing import Any, Generic, Optional, Text, Tuple, TypeVar, Union
2+
from typing import Any, Generic, Optional, Tuple, TypeVar, Union
33

44
from typing_extensions import TypeAlias
55

@@ -188,7 +188,7 @@ def is_valid_value(self, data: D, strict: bool = True) -> bool:
188188
except (TypeError, ValueError):
189189
return False
190190

191-
def make_variable(self, name: Optional[Text] = None) -> variable_type:
191+
def make_variable(self, name: Optional[str] = None) -> variable_type:
192192
"""Return a new `Variable` instance of this `Type`.
193193
194194
Parameters
@@ -199,7 +199,7 @@ def make_variable(self, name: Optional[Text] = None) -> variable_type:
199199
"""
200200
return self.variable_type(self, None, name=name)
201201

202-
def make_constant(self, value: D, name: Optional[Text] = None) -> constant_type:
202+
def make_constant(self, value: D, name: Optional[str] = None) -> constant_type:
203203
"""Return a new `Constant` instance of this `Type`.
204204
205205
Parameters
@@ -216,7 +216,7 @@ def clone(self, *args, **kwargs) -> "Type":
216216
"""Clone a copy of this type with the given arguments/keyword values, if any."""
217217
return type(self)(*args, **kwargs)
218218

219-
def __call__(self, name: Optional[Text] = None) -> variable_type:
219+
def __call__(self, name: Optional[str] = None) -> variable_type:
220220
"""Return a new `Variable` instance of Type `self`.
221221
222222
Parameters

pytensor/graph/utils.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -245,9 +245,7 @@ def __str__(self):
245245
def __str__(self):
246246
return "{}{{{}}}".format(
247247
self.__class__.__name__,
248-
", ".join(
249-
"{}={!r}".format(p, getattr(self, p)) for p in props
250-
),
248+
", ".join(f"{p}={getattr(self, p)!r}" for p in props),
251249
)
252250

253251
dct["__str__"] = __str__

0 commit comments

Comments (0)