Skip to content

Commit 95770df

Browse files
naomi172839 authored and WillAyd committed
Replace "foo!r" to "repr(foo)" syntax #29886 (#30502)
1 parent cb5f9d1 commit 95770df

File tree

6 files changed

+49
-69
lines changed

6 files changed

+49
-69
lines changed

pandas/_version.py

+24-30
Original file line numberDiff line numberDiff line change
@@ -79,17 +79,17 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
7979
if e.errno == errno.ENOENT:
8080
continue
8181
if verbose:
82-
print("unable to run {dispcmd}".format(dispcmd=dispcmd))
82+
print(f"unable to run {dispcmd}")
8383
print(e)
8484
return None
8585
else:
8686
if verbose:
87-
print("unable to find command, tried %s" % (commands,))
87+
print(f"unable to find command, tried {commands}")
8888
return None
8989
stdout = p.communicate()[0].strip().decode()
9090
if p.returncode != 0:
9191
if verbose:
92-
print("unable to run {dispcmd} (error)".format(dispcmd=dispcmd))
92+
print(f"unable to run {dispcmd} (error)")
9393
return None
9494
return stdout
9595

@@ -101,10 +101,8 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
101101
if not dirname.startswith(parentdir_prefix):
102102
if verbose:
103103
print(
104-
"guessing rootdir is '{root}', but '{dirname}' "
105-
"doesn't start with prefix '{parentdir_prefix}'".format(
106-
root=root, dirname=dirname, parentdir_prefix=parentdir_prefix
107-
)
104+
f"guessing rootdir is '{root}', but '{dirname}' "
105+
f"doesn't start with prefix '{parentdir_prefix}'"
108106
)
109107
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
110108
return {
@@ -163,15 +161,15 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
163161
# "stabilization", as well as "HEAD" and "master".
164162
tags = {r for r in refs if re.search(r"\d", r)}
165163
if verbose:
166-
print("discarding '{}', no digits".format(",".join(refs - tags)))
164+
print(f"discarding '{','.join(refs - tags)}', no digits")
167165
if verbose:
168-
print("likely tags: {}".format(",".join(sorted(tags))))
166+
print(f"likely tags: {','.join(sorted(tags))}")
169167
for ref in sorted(tags):
170168
# sorting will prefer e.g. "2.0" over "2.0rc1"
171169
if ref.startswith(tag_prefix):
172170
r = ref[len(tag_prefix) :]
173171
if verbose:
174-
print("picking {r}".format(r=r))
172+
print(f"picking {r}")
175173
return {
176174
"version": r,
177175
"full-revisionid": keywords["full"].strip(),
@@ -198,7 +196,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
198196

199197
if not os.path.exists(os.path.join(root, ".git")):
200198
if verbose:
201-
print("no .git in {root}".format(root=root))
199+
print(f"no .git in {root}")
202200
raise NotThisMethod("no .git directory")
203201

204202
GITS = ["git"]
@@ -240,17 +238,13 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
240238
mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
241239
if not mo:
242240
# unparseable. Maybe git-describe is misbehaving?
243-
pieces["error"] = (
244-
"unable to parse git-describe output: "
245-
"'{describe_out}'".format(describe_out=describe_out)
246-
)
241+
pieces["error"] = f"unable to parse git-describe output: '{describe_out}'"
247242
return pieces
248243

249244
# tag
250245
full_tag = mo.group(1)
251246
if not full_tag.startswith(tag_prefix):
252-
fmt = "tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
253-
msg = fmt.format(full_tag=full_tag, tag_prefix=tag_prefix)
247+
msg = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
254248
if verbose:
255249
print(msg)
256250
pieces["error"] = msg
@@ -291,12 +285,12 @@ def render_pep440(pieces):
291285
rendered = pieces["closest-tag"]
292286
if pieces["distance"] or pieces["dirty"]:
293287
rendered += plus_or_dot(pieces)
294-
rendered += "{:d}.g{}".format(pieces["distance"], pieces["short"])
288+
rendered += f"{pieces['distance']:d}.g{pieces['short']}"
295289
if pieces["dirty"]:
296290
rendered += ".dirty"
297291
else:
298292
# exception #1
299-
rendered = "0+untagged.{:d}.g{}".format(pieces["distance"], pieces["short"])
293+
rendered = f"0+untagged.{pieces['distance']:d}.g{pieces['short']}"
300294
if pieces["dirty"]:
301295
rendered += ".dirty"
302296
return rendered
@@ -311,10 +305,10 @@ def render_pep440_pre(pieces):
311305
if pieces["closest-tag"]:
312306
rendered = pieces["closest-tag"]
313307
if pieces["distance"]:
314-
rendered += ".post.dev%d" % pieces["distance"]
308+
rendered += f".post.dev{pieces['distance']:d}"
315309
else:
316310
# exception #1
317-
rendered = "0.post.dev%d" % pieces["distance"]
311+
rendered = f"0.post.dev{pieces['distance']:d}"
318312
return rendered
319313

320314

@@ -330,17 +324,17 @@ def render_pep440_post(pieces):
330324
if pieces["closest-tag"]:
331325
rendered = pieces["closest-tag"]
332326
if pieces["distance"] or pieces["dirty"]:
333-
rendered += ".post{:d}".format(pieces["distance"])
327+
rendered += f".post{pieces['distance']:d}"
334328
if pieces["dirty"]:
335329
rendered += ".dev0"
336330
rendered += plus_or_dot(pieces)
337-
rendered += "g{}".format(pieces["short"])
331+
rendered += f"g{pieces['short']}"
338332
else:
339333
# exception #1
340-
rendered = "0.post%d" % pieces["distance"]
334+
rendered = f"0.post{pieces['distance']:d}"
341335
if pieces["dirty"]:
342336
rendered += ".dev0"
343-
rendered += "+g{}".format(pieces["short"])
337+
rendered += f"+g{pieces['short']}"
344338
return rendered
345339

346340

@@ -353,12 +347,12 @@ def render_pep440_old(pieces):
353347
if pieces["closest-tag"]:
354348
rendered = pieces["closest-tag"]
355349
if pieces["distance"] or pieces["dirty"]:
356-
rendered += ".post%d" % pieces["distance"]
350+
rendered += f".post{pieces['distance']:d}"
357351
if pieces["dirty"]:
358352
rendered += ".dev0"
359353
else:
360354
# exception #1
361-
rendered = "0.post%d" % pieces["distance"]
355+
rendered = f"0.post{pieces['distance']:d}"
362356
if pieces["dirty"]:
363357
rendered += ".dev0"
364358
return rendered
@@ -374,7 +368,7 @@ def render_git_describe(pieces):
374368
if pieces["closest-tag"]:
375369
rendered = pieces["closest-tag"]
376370
if pieces["distance"]:
377-
rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"])
371+
rendered += f"-{pieces['distance']:d}-g{pieces['short']}"
378372
else:
379373
# exception #1
380374
rendered = pieces["short"]
@@ -392,7 +386,7 @@ def render_git_describe_long(pieces):
392386

393387
if pieces["closest-tag"]:
394388
rendered = pieces["closest-tag"]
395-
rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"])
389+
rendered += f"-{pieces['distance']:d}-g{pieces['short']}"
396390
else:
397391
# exception #1
398392
rendered = pieces["short"]
@@ -426,7 +420,7 @@ def render(pieces, style):
426420
elif style == "git-describe-long":
427421
rendered = render_git_describe_long(pieces)
428422
else:
429-
raise ValueError("unknown style '{style}'".format(style=style))
423+
raise ValueError(f"unknown style '{style}'")
430424

431425
return {
432426
"version": rendered,

pandas/io/formats/style.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -255,7 +255,7 @@ def _translate(self):
255255
BLANK_VALUE = ""
256256

257257
def format_attr(pair):
258-
return "{key}={value}".format(**pair)
258+
return f"{pair['key']}={pair['value']}"
259259

260260
# for sparsifying a MultiIndex
261261
idx_lengths = _get_level_lengths(self.index)

pandas/tests/indexes/period/test_period.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ def test_difference_freq(self, sort):
126126

127127
def test_hash_error(self):
128128
index = period_range("20010101", periods=10)
129-
msg = "unhashable type: '{}'".format(type(index).__name__)
129+
msg = f"unhashable type: '{type(index).__name__}'"
130130
with pytest.raises(TypeError, match=msg):
131131
hash(index)
132132

pandas/tests/io/test_sql.py

+16-30
Original file line numberDiff line numberDiff line change
@@ -215,9 +215,7 @@ def teardown_method(self, method):
215215
class MySQLMixIn(MixInBase):
216216
def drop_table(self, table_name):
217217
cur = self.conn.cursor()
218-
cur.execute(
219-
"DROP TABLE IF EXISTS {}".format(sql._get_valid_mysql_name(table_name))
220-
)
218+
cur.execute(f"DROP TABLE IF EXISTS {sql._get_valid_mysql_name(table_name)}")
221219
self.conn.commit()
222220

223221
def _get_all_tables(self):
@@ -237,7 +235,7 @@ def _close_conn(self):
237235
class SQLiteMixIn(MixInBase):
238236
def drop_table(self, table_name):
239237
self.conn.execute(
240-
"DROP TABLE IF EXISTS {}".format(sql._get_valid_sqlite_name(table_name))
238+
f"DROP TABLE IF EXISTS {sql._get_valid_sqlite_name(table_name)}"
241239
)
242240
self.conn.commit()
243241

@@ -405,11 +403,7 @@ def _load_raw_sql(self):
405403
def _count_rows(self, table_name):
406404
result = (
407405
self._get_exec()
408-
.execute(
409-
"SELECT count(*) AS count_1 FROM {table_name}".format(
410-
table_name=table_name
411-
)
412-
)
406+
.execute(f"SELECT count(*) AS count_1 FROM {table_name}")
413407
.fetchone()
414408
)
415409
return result[0]
@@ -1207,7 +1201,7 @@ def _get_sqlite_column_type(self, schema, column):
12071201
for col in schema.split("\n"):
12081202
if col.split()[0].strip('""') == column:
12091203
return col.split()[1]
1210-
raise ValueError("Column {column} not found".format(column=column))
1204+
raise ValueError(f"Column {column} not found")
12111205

12121206
def test_sqlite_type_mapping(self):
12131207

@@ -1272,7 +1266,7 @@ def setup_connect(self):
12721266
# to test if connection can be made:
12731267
self.conn.connect()
12741268
except sqlalchemy.exc.OperationalError:
1275-
pytest.skip("Can't connect to {0} server".format(self.flavor))
1269+
pytest.skip(f"Can't connect to {self.flavor} server")
12761270

12771271
def test_read_sql(self):
12781272
self._read_sql_iris()
@@ -1414,7 +1408,7 @@ def check(col):
14141408

14151409
else:
14161410
raise AssertionError(
1417-
"DateCol loaded with incorrect type -> {0}".format(col.dtype)
1411+
f"DateCol loaded with incorrect type -> {col.dtype}"
14181412
)
14191413

14201414
# GH11216
@@ -2051,15 +2045,13 @@ def psql_insert_copy(table, conn, keys, data_iter):
20512045
writer.writerows(data_iter)
20522046
s_buf.seek(0)
20532047

2054-
columns = ", ".join('"{}"'.format(k) for k in keys)
2048+
columns = ", ".join(f'"{k}"' for k in keys)
20552049
if table.schema:
2056-
table_name = "{}.{}".format(table.schema, table.name)
2050+
table_name = f"{table.schema}.{table.name}"
20572051
else:
20582052
table_name = table.name
20592053

2060-
sql_query = "COPY {} ({}) FROM STDIN WITH CSV".format(
2061-
table_name, columns
2062-
)
2054+
sql_query = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV"
20632055
cur.copy_expert(sql=sql_query, file=s_buf)
20642056

20652057
expected = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]})
@@ -2199,14 +2191,12 @@ def test_datetime_time(self):
21992191
def _get_index_columns(self, tbl_name):
22002192
ixs = sql.read_sql_query(
22012193
"SELECT * FROM sqlite_master WHERE type = 'index' "
2202-
+ "AND tbl_name = '{tbl_name}'".format(tbl_name=tbl_name),
2194+
+ f"AND tbl_name = '{tbl_name}'",
22032195
self.conn,
22042196
)
22052197
ix_cols = []
22062198
for ix_name in ixs.name:
2207-
ix_info = sql.read_sql_query(
2208-
"PRAGMA index_info({ix_name})".format(ix_name=ix_name), self.conn
2209-
)
2199+
ix_info = sql.read_sql_query(f"PRAGMA index_info({ix_name})", self.conn)
22102200
ix_cols.append(ix_info.name.tolist())
22112201
return ix_cols
22122202

@@ -2217,15 +2207,11 @@ def test_transactions(self):
22172207
self._transaction_test()
22182208

22192209
def _get_sqlite_column_type(self, table, column):
2220-
recs = self.conn.execute("PRAGMA table_info({table})".format(table=table))
2210+
recs = self.conn.execute(f"PRAGMA table_info({table})")
22212211
for cid, name, ctype, not_null, default, pk in recs:
22222212
if name == column:
22232213
return ctype
2224-
raise ValueError(
2225-
"Table {table}, column {column} not found".format(
2226-
table=table, column=column
2227-
)
2228-
)
2214+
raise ValueError(f"Table {table}, column {column} not found")
22292215

22302216
def test_dtype(self):
22312217
if self.flavor == "mysql":
@@ -2295,7 +2281,7 @@ def test_illegal_names(self):
22952281
sql.table_exists(weird_name, self.conn)
22962282

22972283
df2 = DataFrame([[1, 2], [3, 4]], columns=["a", weird_name])
2298-
c_tbl = "test_weird_col_name{ndx:d}".format(ndx=ndx)
2284+
c_tbl = f"test_weird_col_name{ndx:d}"
22992285
df2.to_sql(c_tbl, self.conn)
23002286
sql.table_exists(c_tbl, self.conn)
23012287

@@ -2500,7 +2486,7 @@ def test_if_exists(self):
25002486
df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]})
25012487
df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]})
25022488
table_name = "table_if_exists"
2503-
sql_select = "SELECT * FROM {table_name}".format(table_name=table_name)
2489+
sql_select = f"SELECT * FROM {table_name}"
25042490

25052491
def clean_up(test_table_to_drop):
25062492
"""
@@ -2788,7 +2774,7 @@ def test_if_exists(self):
27882774
df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]})
27892775
df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]})
27902776
table_name = "table_if_exists"
2791-
sql_select = "SELECT * FROM {table_name}".format(table_name=table_name)
2777+
sql_select = f"SELECT * FROM {table_name}"
27922778

27932779
def clean_up(test_table_to_drop):
27942780
"""

pandas/tests/scalar/timedelta/test_timedelta.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -399,7 +399,7 @@ def test_unit_parser(self, units, np_unit, wrapper):
399399
[np.timedelta64(i, "m") for i in np.arange(5).tolist()]
400400
)
401401

402-
str_repr = ["{}{}".format(x, unit) for x in np.arange(5)]
402+
str_repr = [f"{x}{unit}" for x in np.arange(5)]
403403
result = to_timedelta(wrapper(str_repr))
404404
tm.assert_index_equal(result, expected)
405405
result = TimedeltaIndex(wrapper(str_repr))
@@ -416,9 +416,9 @@ def test_unit_parser(self, units, np_unit, wrapper):
416416
if unit == "M":
417417
expected = Timedelta(np.timedelta64(2, "m").astype("timedelta64[ns]"))
418418

419-
result = to_timedelta("2{}".format(unit))
419+
result = to_timedelta(f"2{unit}")
420420
assert result == expected
421-
result = Timedelta("2{}".format(unit))
421+
result = Timedelta(f"2{unit}")
422422
assert result == expected
423423

424424
@pytest.mark.parametrize("unit", ["Y", "y", "M"])

pandas/tests/series/test_missing.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -275,7 +275,7 @@ def test_datetime64_tz_fillna(self):
275275
["2011-01-01 10:00", pd.NaT, "2011-01-03 10:00", pd.NaT], tz=tz
276276
)
277277
s = pd.Series(idx)
278-
assert s.dtype == "datetime64[ns, {0}]".format(tz)
278+
assert s.dtype == f"datetime64[ns, {tz}]"
279279
tm.assert_series_equal(pd.isna(s), null_loc)
280280

281281
result = s.fillna(pd.Timestamp("2011-01-02 10:00"))
@@ -1284,7 +1284,7 @@ def test_interpolate_invalid_float_limit(self, nontemporal_method):
12841284
def test_interp_invalid_method(self, invalid_method):
12851285
s = Series([1, 3, np.nan, 12, np.nan, 25])
12861286

1287-
msg = "method must be one of.* Got '{}' instead".format(invalid_method)
1287+
msg = f"method must be one of.* Got '{invalid_method}' instead"
12881288
with pytest.raises(ValueError, match=msg):
12891289
s.interpolate(method=invalid_method)
12901290

@@ -1608,9 +1608,9 @@ def test_interp_non_timedelta_index(self, interp_methods_ind, ind):
16081608
else:
16091609
expected_error = (
16101610
"Index column must be numeric or datetime type when "
1611-
"using {method} method other than linear. "
1611+
f"using {method} method other than linear. "
16121612
"Try setting a numeric or datetime index column before "
1613-
"interpolating.".format(method=method)
1613+
"interpolating."
16141614
)
16151615
with pytest.raises(ValueError, match=expected_error):
16161616
df[0].interpolate(method=method, **kwargs)

0 commit comments

Comments
 (0)