Commit b95d3e5

CLN: str.format -> f-strings for io/sas
1 parent a2bbdb5 commit b95d3e5
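
For reference, the whole commit follows one pattern: explicit str.format() calls are replaced by f-string literals that embed the expressions directly. A minimal standalone sketch of that conversion (the variable names below are illustrative, not taken from the pandas source):

fmt, count = "sas7bdat", 3

# before: template string with an explicit .format() call
old = "read {n} pages from a {kind} file".format(n=count, kind=fmt)

# after: the expressions are embedded directly in the literal
new = f"read {count} pages from a {fmt} file"

assert old == new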

File tree

3 files changed: +21 -26 lines

pandas/io/sas/sas7bdat.py (+11 -16)
@@ -169,7 +169,7 @@ def _get_properties(self):
         if buf in const.encoding_names:
             self.file_encoding = const.encoding_names[buf]
         else:
-            self.file_encoding = "unknown (code={name!s})".format(name=buf)
+            self.file_encoding = f"unknown (code={str(buf)})"
 
         # Get platform information
         buf = self._read_bytes(const.platform_offset, const.platform_length)
@@ -293,8 +293,8 @@ def _read_bytes(self, offset, length):
             buf = self._path_or_buf.read(length)
             if len(buf) < length:
                 self.close()
-                msg = "Unable to read {:d} bytes from file position {:d}."
-                raise ValueError(msg.format(length, offset))
+                msg = f"Unable to read {length:d} bytes from file position {offset:d}."
+                raise ValueError(msg)
             return buf
         else:
             if offset + length > len(self._cached_page):
@@ -456,14 +456,9 @@ def _process_columnsize_subheader(self, offset, length):
         offset += int_len
         self.column_count = self._read_int(offset, int_len)
         if self.col_count_p1 + self.col_count_p2 != self.column_count:
-            print(
-                "Warning: column count mismatch ({p1} + {p2} != "
-                "{column_count})\n".format(
-                    p1=self.col_count_p1,
-                    p2=self.col_count_p2,
-                    column_count=self.column_count,
-                )
-            )
+            print(f"Warning: column count mismatch ({self.col_count_p1} + "
+                  f"{self.col_count_p2} != "
+                  f"{self.column_count})\n")
 
     # Unknown purpose
     def _process_subheader_counts(self, offset, length):
@@ -672,8 +667,10 @@ def _read_next_page(self):
             return True
         elif len(self._cached_page) != self._page_length:
             self.close()
-            msg = "failed to read complete page from file (read {:d} of {:d} bytes)"
-            raise ValueError(msg.format(len(self._cached_page), self._page_length))
+            msg = ("failed to read complete page from file (read "
+                   f"{len(self._cached_page):d} of "
+                   f"{self._page_length:d} bytes)")
+            raise ValueError(msg)
 
         self._read_page_header()
         page_type = self._current_page_type
@@ -725,8 +722,6 @@ def _chunk_to_dataframe(self):
                 js += 1
             else:
                 self.close()
-                raise ValueError(
-                    "unknown column type {type}".format(type=self._column_types[j])
-                )
+                raise ValueError(f"unknown column type {self._column_types[j]}")
 
         return rslt
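
A note on the multi-line messages in the hunks above: adjacent string literals are concatenated at compile time, and only the pieces containing placeholders need the f prefix; format specs such as :d carry over unchanged from str.format. A small sketch of that behavior (the byte counts are made up for illustration):

read, expected = 512, 1024

# plain and f-string literals placed side by side concatenate into one message
msg = ("failed to read complete page from file (read "
       f"{read:d} of "
       f"{expected:d} bytes)")

assert msg == "failed to read complete page from file (read 512 of 1024 bytes)"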

pandas/io/sas/sas_xport.py (+2 -2)
@@ -367,8 +367,8 @@ def _read_header(self):
             fl = field["field_length"]
             if field["ntype"] == "numeric" and ((fl < 2) or (fl > 8)):
                 self.close()
-                msg = "Floating field width {0} is not between 2 and 8."
-                raise TypeError(msg.format(fl))
+                msg = f"Floating field width {fl} is not between 2 and 8."
+                raise TypeError(msg)
 
             for k, v in field.items():
                 try:
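
One detail worth noting for this hunk: the f prefix must sit outside the opening quote; placed inside the quotes it is ordinary text and the braces are not interpolated. A quick illustration (the value of fl is made up):

fl = 12
plain = "Floating field width {fl} is not between 2 and 8."
interp = f"Floating field width {fl} is not between 2 and 8."

# the plain literal keeps the braces verbatim; the f-string substitutes fl
assert plain == "Floating field width {fl} is not between 2 and 8."
assert interp == "Floating field width 12 is not between 2 and 8."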

pandas/tests/io/sas/test_sas7bdat.py (+8 -8)
@@ -21,7 +21,7 @@ def setup_method(self, datapath):
         self.data = []
         self.test_ix = [list(range(1, 16)), [16]]
         for j in 1, 2:
-            fname = os.path.join(self.dirpath, "test_sas7bdat_{j}.csv".format(j=j))
+            fname = os.path.join(self.dirpath, f"test_sas7bdat_{j}.csv")
             df = pd.read_csv(fname)
             epoch = pd.datetime(1960, 1, 1)
             t1 = pd.to_timedelta(df["Column4"], unit="d")
@@ -38,15 +38,15 @@ def test_from_file(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                 df = pd.read_sas(fname, encoding="utf-8")
                 tm.assert_frame_equal(df, df0)
 
     def test_from_buffer(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                 with open(fname, "rb") as f:
                     byts = f.read()
                 buf = io.BytesIO(byts)
@@ -61,7 +61,7 @@ def test_from_iterator(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                 rdr = pd.read_sas(fname, iterator=True, encoding="utf-8")
                 df = rdr.read(2)
                 tm.assert_frame_equal(df, df0.iloc[0:2, :])
@@ -73,7 +73,7 @@ def test_path_pathlib(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = Path(os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k)))
+                fname = Path(os.path.join(self.dirpath, f"test{k}.sas7bdat"))
                 df = pd.read_sas(fname, encoding="utf-8")
                 tm.assert_frame_equal(df, df0)
 
@@ -85,7 +85,7 @@ def test_path_localpath(self):
             df0 = self.data[j]
             for k in self.test_ix[j]:
                 fname = LocalPath(
-                    os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                    os.path.join(self.dirpath, f"test{k}.sas7bdat")
                 )
                 df = pd.read_sas(fname, encoding="utf-8")
                 tm.assert_frame_equal(df, df0)
@@ -95,7 +95,7 @@ def test_iterator_loop(self):
         for j in 0, 1:
             for k in self.test_ix[j]:
                 for chunksize in 3, 5, 10, 11:
-                    fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                    fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                     rdr = pd.read_sas(fname, chunksize=10, encoding="utf-8")
                     y = 0
                     for x in rdr:
@@ -106,7 +106,7 @@ def test_iterator_loop(self):
     def test_iterator_read_too_much(self):
         # github #14734
         k = self.test_ix[0][0]
-        fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+        fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
         rdr = pd.read_sas(fname, format="sas7bdat", iterator=True, encoding="utf-8")
         d1 = rdr.read(rdr.row_count + 20)
         rdr.close()

0 commit comments