Skip to content

Commit 27bd9aa

Browse files
committed
Fix UP031: Use format specifiers instead of percent format
1 parent 75b5706 commit 27bd9aa

15 files changed

+102
-96
lines changed

pytensor/bin/pytensor_cache.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -75,9 +75,9 @@ def main():
7575
if items:
7676
_logger.warning(
7777
"There remain elements in the cache dir that you may "
78-
"need to erase manually. The cache dir is:\n %s\n"
78+
f"need to erase manually. The cache dir is:\n {config.compiledir}\n"
7979
'You can also call "pytensor-cache purge" to '
80-
"remove everything from that directory." % config.compiledir
80+
"remove everything from that directory."
8181
)
8282
_logger.debug(f"Remaining elements ({len(items)}): {', '.join(items)}")
8383
elif sys.argv[1] == "list":

pytensor/breakpoint.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def perform(self, node, inputs, output_storage):
105105
except Exception:
106106
raise ValueError(
107107
"Some of the inputs to the PdbBreakpoint op "
108-
"'%s' could not be casted to NumPy arrays" % self.name
108+
f"'{self.name}' could not be casted to NumPy arrays"
109109
)
110110

111111
print("\n")

pytensor/compile/builders.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -709,7 +709,7 @@ def _recompute_rop_op(self):
709709
if not isinstance(roverrides_l, list):
710710
raise TypeError(
711711
"Rop overriding function should return a list, "
712-
'got "%s"' % type(roverrides_l)
712+
f'got "{type(roverrides_l)}"'
713713
)
714714
all_rops_l, all_rops_ov_l = zip(
715715
*[

pytensor/compile/ops.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -252,7 +252,7 @@ def __hash__(self):
252252
return hash(type(self)) ^ hash(self.__fn)
253253

254254
def __str__(self):
255-
return "FromFunctionOp{%s}" % self.__fn.__name__
255+
return f"FromFunctionOp{{{self.__fn.__name__}}}"
256256

257257
def perform(self, node, inputs, outputs):
258258
outs = self.__fn(*inputs)

pytensor/link/c/params_type.py

+21-16
Original file line numberDiff line numberDiff line change
@@ -262,8 +262,13 @@ def __init__(self, params_type, **kwargs):
262262
self.__dict__.update(__params_type__=params_type, __signatures__=None)
263263

264264
def __repr__(self):
265-
return "Params(%s)" % ", ".join(
266-
[(f"{k}:{type(self[k]).__name__}:{self[k]}") for k in sorted(self.keys())]
265+
return "Params({})".format(
266+
", ".join(
267+
[
268+
(f"{k}:{type(self[k]).__name__}:{self[k]}")
269+
for k in sorted(self.keys())
270+
]
271+
)
267272
)
268273

269274
def __getattr__(self, key):
@@ -346,13 +351,11 @@ def __init__(self, **kwargs):
346351
for attribute_name in kwargs:
347352
if re.match("^[A-Za-z_][A-Za-z0-9_]*$", attribute_name) is None:
348353
raise AttributeError(
349-
'ParamsType: attribute "%s" should be a valid identifier.'
350-
% attribute_name
354+
f'ParamsType: attribute "{attribute_name}" should be a valid identifier.'
351355
)
352356
if attribute_name in c_cpp_keywords:
353357
raise SyntaxError(
354-
'ParamsType: "%s" is a potential C/C++ keyword and should not be used as attribute name.'
355-
% attribute_name
358+
f'ParamsType: "{attribute_name}" is a potential C/C++ keyword and should not be used as attribute name.'
356359
)
357360
type_instance = kwargs[attribute_name]
358361
type_name = type_instance.__class__.__name__
@@ -424,8 +427,10 @@ def __getattr__(self, key):
424427
return super().__getattr__(self, key)
425428

426429
def __repr__(self):
427-
return "ParamsType<%s>" % ", ".join(
428-
[(f"{self.fields[i]}:{self.types[i]}") for i in range(self.length)]
430+
return "ParamsType<{}>".format(
431+
", ".join(
432+
[(f"{self.fields[i]}:{self.types[i]}") for i in range(self.length)]
433+
)
429434
)
430435

431436
def __eq__(self, other):
@@ -733,18 +738,18 @@ def c_support_code(self, **kwargs):
733738
struct_cleanup = "\n".join(c_cleanup_list)
734739
struct_extract = "\n\n".join(c_extract_list)
735740
struct_extract_method = """
736-
void extract(PyObject* object, int field_pos) {
737-
switch(field_pos) {
741+
void extract(PyObject* object, int field_pos) {{
742+
switch(field_pos) {{
738743
// Extraction cases.
739-
%s
744+
{}
740745
// Default case.
741746
default:
742-
PyErr_Format(PyExc_TypeError, "ParamsType: no extraction defined for a field %%d.", field_pos);
747+
PyErr_Format(PyExc_TypeError, "ParamsType: no extraction defined for a field %d.", field_pos);
743748
this->setErrorOccurred();
744749
break;
745-
}
746-
}
747-
""" % (
750+
}}
751+
}}
752+
""".format(
748753
"\n".join(
749754
[
750755
("case %d: extract_%s(object); break;" % (i, self.fields[i]))
@@ -866,7 +871,7 @@ def c_extract(self, name, sub, check_input=True, **kwargs):
866871
struct_name=self.name,
867872
length=self.length,
868873
fail=sub["fail"],
869-
fields_list='"%s"' % '", "'.join(self.fields),
874+
fields_list='"{}"'.format('", "'.join(self.fields)),
870875
)
871876
)
872877

pytensor/link/utils.py

+7-2
Original file line numberDiff line numberDiff line change
@@ -355,8 +355,13 @@ def raise_with_op(
355355
+ f"\nInputs values: {scalar_values}"
356356
)
357357
if verbosity == "high":
358-
detailed_err_msg += "\nInputs type_num: %s" % str(
359-
[getattr(getattr(i[0], "dtype", ""), "num", "") for i in thunk.inputs]
358+
detailed_err_msg += "\nInputs type_num: {}".format(
359+
str(
360+
[
361+
getattr(getattr(i[0], "dtype", ""), "num", "")
362+
for i in thunk.inputs
363+
]
364+
)
360365
)
361366

362367
detailed_err_msg += f"\nOutputs clients: {clients}\n"

pytensor/scalar/basic.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -475,7 +475,7 @@ def c_extract(self, name, sub, check_input=True, **kwargs):
475475
sub,
476476
name=name,
477477
dtype=specs[1],
478-
pyarr_type="Py%sArrType_Type" % specs[2],
478+
pyarr_type=f"Py{specs[2]}ArrType_Type",
479479
)
480480
)
481481
else:

pytensor/tensor/blas.py

+8-7
Original file line numberDiff line numberDiff line change
@@ -180,9 +180,9 @@ def __init__(self, inplace):
180180

181181
def __str__(self):
182182
if self.inplace:
183-
return "%s{inplace}" % self.__class__.__name__
183+
return f"{self.__class__.__name__}{{inplace}}"
184184
else:
185-
return "%s{no_inplace}" % self.__class__.__name__
185+
return f"{self.__class__.__name__}{{no_inplace}}"
186186

187187
def make_node(self, y, alpha, A, x, beta):
188188
y = ptb.as_tensor_variable(y)
@@ -279,9 +279,9 @@ def __init__(self, destructive):
279279

280280
def __str__(self):
281281
if self.destructive:
282-
return "%s{destructive}" % self.__class__.__name__
282+
return f"{self.__class__.__name__}{{destructive}}"
283283
else:
284-
return "%s{non-destructive}" % self.__class__.__name__
284+
return f"{self.__class__.__name__}{{non-destructive}}"
285285

286286
def make_node(self, A, alpha, x, y):
287287
A = ptb.as_tensor_variable(A)
@@ -1811,9 +1811,10 @@ def contiguous(var, ndim):
18111811
f"{strides}[{i}] > 0 && {strides}[{i}] % type_size == 0"
18121812
for i in range(1, ndim)
18131813
),
1814-
"(%s)"
1815-
% " || ".join(
1816-
f"{strides}[{i}] == type_size" for i in range(1, ndim)
1814+
"({})".format(
1815+
" || ".join(
1816+
f"{strides}[{i}] == type_size" for i in range(1, ndim)
1817+
)
18171818
),
18181819
]
18191820
)

pytensor/tensor/elemwise.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -1098,14 +1098,14 @@ def _c_all(self, node, nodename, inames, onames, sub):
10981098
all_broadcastable = all(s == 1 for s in var.type.shape)
10991099
cond1 = " && ".join(
11001100
[
1101-
"PyArray_ISCONTIGUOUS(%s)" % arr
1101+
f"PyArray_ISCONTIGUOUS({arr})"
11021102
for arr, var in z
11031103
if not all_broadcastable
11041104
]
11051105
)
11061106
cond2 = " && ".join(
11071107
[
1108-
"PyArray_ISFORTRAN(%s)" % arr
1108+
f"PyArray_ISFORTRAN({arr})"
11091109
for arr, var in z
11101110
if not all_broadcastable
11111111
]

pytensor/tensor/extra_ops.py

+12-12
Original file line numberDiff line numberDiff line change
@@ -652,8 +652,8 @@ def make_node(self, x, repeats):
652652
if repeats.dtype in numpy_unsupported_dtypes:
653653
raise TypeError(
654654
(
655-
"dtypes %s are not supported by numpy.repeat "
656-
"for the 'repeats' parameter, " % str(numpy_unsupported_dtypes)
655+
f"dtypes {numpy_unsupported_dtypes!s} are not supported by numpy.repeat "
656+
"for the 'repeats' parameter, "
657657
),
658658
repeats.dtype,
659659
)
@@ -882,8 +882,8 @@ def make_node(self, a, val):
882882
val = ptb.as_tensor_variable(val)
883883
if a.ndim < 2:
884884
raise TypeError(
885-
"%s: first parameter must have at least"
886-
" two dimensions" % self.__class__.__name__
885+
f"{self.__class__.__name__}: first parameter must have at least"
886+
" two dimensions"
887887
)
888888
elif val.ndim != 0:
889889
raise TypeError(
@@ -892,8 +892,8 @@ def make_node(self, a, val):
892892
val = ptb.cast(val, dtype=upcast(a.dtype, val.dtype))
893893
if val.dtype != a.dtype:
894894
raise TypeError(
895-
"%s: type of second parameter must be the same as"
896-
" the first's" % self.__class__.__name__
895+
f"{self.__class__.__name__}: type of second parameter must be the same as"
896+
" the first's"
897897
)
898898
return Apply(self, [a, val], [a.type()])
899899

@@ -926,8 +926,8 @@ def grad(self, inp, cost_grad):
926926
return [None, None]
927927
elif a.ndim > 2:
928928
raise NotImplementedError(
929-
"%s: gradient is currently implemented"
930-
" for matrices only" % self.__class__.__name__
929+
f"{self.__class__.__name__}: gradient is currently implemented"
930+
" for matrices only"
931931
)
932932
wr_a = fill_diagonal(grad, 0) # valid for any number of dimensions
933933
# diag is only valid for matrices
@@ -984,8 +984,8 @@ def make_node(self, a, val, offset):
984984
offset = ptb.as_tensor_variable(offset)
985985
if a.ndim != 2:
986986
raise TypeError(
987-
"%s: first parameter must have exactly"
988-
" two dimensions" % self.__class__.__name__
987+
f"{self.__class__.__name__}: first parameter must have exactly"
988+
" two dimensions"
989989
)
990990
elif val.ndim != 0:
991991
raise TypeError(
@@ -998,8 +998,8 @@ def make_node(self, a, val, offset):
998998
val = ptb.cast(val, dtype=upcast(a.dtype, val.dtype))
999999
if val.dtype != a.dtype:
10001000
raise TypeError(
1001-
"%s: type of second parameter must be the same"
1002-
" as the first's" % self.__class__.__name__
1001+
f"{self.__class__.__name__}: type of second parameter must be the same"
1002+
" as the first's"
10031003
)
10041004
elif offset.dtype not in integer_dtypes:
10051005
raise TypeError(

pytensor/tensor/fft.py

+5-6
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,7 @@ def make_node(self, a, s=None):
2020
a = as_tensor_variable(a)
2121
if a.ndim < 2:
2222
raise TypeError(
23-
"%s: input must have dimension > 2, with first dimension batches"
24-
% self.__class__.__name__
23+
f"{self.__class__.__name__}: input must have dimension > 2, with first dimension batches"
2524
)
2625

2726
if s is None:
@@ -31,8 +30,8 @@ def make_node(self, a, s=None):
3130
s = as_tensor_variable(s)
3231
if s.dtype not in integer_dtypes:
3332
raise TypeError(
34-
"%s: length of the transformed axis must be"
35-
" of type integer" % self.__class__.__name__
33+
f"{self.__class__.__name__}: length of the transformed axis must be"
34+
" of type integer"
3635
)
3736
return Apply(self, [a, s], [self.output_type(a)()])
3837

@@ -92,8 +91,8 @@ def make_node(self, a, s=None):
9291
s = as_tensor_variable(s)
9392
if s.dtype not in integer_dtypes:
9493
raise TypeError(
95-
"%s: length of the transformed axis must be"
96-
" of type integer" % self.__class__.__name__
94+
f"{self.__class__.__name__}: length of the transformed axis must be"
95+
" of type integer"
9796
)
9897
return Apply(self, [a, s], [self.output_type(a)()])
9998

pytensor/tensor/io.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ def __init__(self, dtype, shape, mmap_mode=None):
2828
if mmap_mode not in (None, "c"):
2929
raise ValueError(
3030
"The only supported values for mmap_mode "
31-
"are None and 'c', got %s" % mmap_mode
31+
f"are None and 'c', got {mmap_mode}"
3232
)
3333
self.mmap_mode = mmap_mode
3434

pytensor/tensor/math.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1540,7 +1540,7 @@ def __init__(self, axis=None):
15401540

15411541
def __str__(self):
15421542
if self.axis is not None:
1543-
return "Mean{%s}" % (", ".join(str(x) for x in self.axis))
1543+
return "Mean{{{}}}".format(", ".join(str(x) for x in self.axis))
15441544
else:
15451545
return "Mean"
15461546

pytensor/tensor/subtensor.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2174,7 +2174,7 @@ def __str__(self):
21742174
else:
21752175
msg += ",inc"
21762176

2177-
return self.__class__.__name__ + "{%s}" % msg
2177+
return self.__class__.__name__ + f"{{{msg}}}"
21782178

21792179
def make_node(self, x, y, ilist):
21802180
x_ = as_tensor_variable(x)

0 commit comments

Comments (0)