Commit f97a462

jyscao authored and jbrockmendel committed
CLN: str.format -> f-strings for io/sas (#30409)
* CLN: str.format -> f-strings for `io/sas`
* Apply black style
* Fix syntax error
* Remove `str()` call
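
For context, the rewrite applied throughout this commit is the plain str.format -> f-string conversion, with any format spec (such as :d) carried over into the braces. A minimal sketch of the pattern, reusing the message from _read_bytes in the diff below; the concrete values are made up for illustration:

    length, offset = 8, 1024

    # Before: template string plus an explicit .format() call
    old = "Unable to read {:d} bytes from file position {:d}.".format(length, offset)

    # After: the same format specs inlined in an f-string
    new = f"Unable to read {length:d} bytes from file position {offset:d}."

    assert old == new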
1 parent 6c91033 commit f97a462

3 files changed: +23 -26 lines changed

pandas/io/sas/sas7bdat.py (+13 -14)
@@ -170,7 +170,7 @@ def _get_properties(self):
         if buf in const.encoding_names:
             self.file_encoding = const.encoding_names[buf]
         else:
-            self.file_encoding = "unknown (code={name!s})".format(name=buf)
+            self.file_encoding = f"unknown (code={buf})"
 
         # Get platform information
         buf = self._read_bytes(const.platform_offset, const.platform_length)
@@ -294,8 +294,8 @@ def _read_bytes(self, offset, length):
             buf = self._path_or_buf.read(length)
             if len(buf) < length:
                 self.close()
-                msg = "Unable to read {:d} bytes from file position {:d}."
-                raise ValueError(msg.format(length, offset))
+                msg = f"Unable to read {length:d} bytes from file position {offset:d}."
+                raise ValueError(msg)
             return buf
         else:
             if offset + length > len(self._cached_page):
@@ -458,12 +458,9 @@ def _process_columnsize_subheader(self, offset, length):
         self.column_count = self._read_int(offset, int_len)
         if self.col_count_p1 + self.col_count_p2 != self.column_count:
             print(
-                "Warning: column count mismatch ({p1} + {p2} != "
-                "{column_count})\n".format(
-                    p1=self.col_count_p1,
-                    p2=self.col_count_p2,
-                    column_count=self.column_count,
-                )
+                f"Warning: column count mismatch ({self.col_count_p1} + "
+                f"{self.col_count_p2} != "
+                f"{self.column_count})\n"
             )
 
         # Unknown purpose
@@ -673,8 +670,12 @@ def _read_next_page(self):
             return True
         elif len(self._cached_page) != self._page_length:
             self.close()
-            msg = "failed to read complete page from file (read {:d} of {:d} bytes)"
-            raise ValueError(msg.format(len(self._cached_page), self._page_length))
+            msg = (
+                "failed to read complete page from file (read "
+                f"{len(self._cached_page):d} of "
+                f"{self._page_length:d} bytes)"
+            )
+            raise ValueError(msg)
 
         self._read_page_header()
         page_type = self._current_page_type
@@ -726,8 +727,6 @@ def _chunk_to_dataframe(self):
                 js += 1
             else:
                 self.close()
-                raise ValueError(
-                    "unknown column type {type}".format(type=self._column_types[j])
-                )
+                raise ValueError(f"unknown column type {self._column_types[j]}")
 
         return rslt
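
Where a message no longer fits on one line after the conversion, the commit wraps it in parentheses and relies on implicit concatenation of adjacent string literals, with the f prefix only on the pieces that interpolate values (the "Apply black style" bullet above). A small sketch of that wrapping, using illustrative values rather than the reader's real attributes:

    read, expected = 100, 4096

    msg = (
        "failed to read complete page from file (read "
        f"{read:d} of "
        f"{expected:d} bytes)"
    )

    assert msg == "failed to read complete page from file (read 100 of 4096 bytes)"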

pandas/io/sas/sas_xport.py (+2 -2)
@@ -367,8 +367,8 @@ def _read_header(self):
             fl = field["field_length"]
             if field["ntype"] == "numeric" and ((fl < 2) or (fl > 8)):
                 self.close()
-                msg = "Floating field width {0} is not between 2 and 8."
-                raise TypeError(msg.format(fl))
+                msg = f"Floating field width {fl} is not between 2 and 8."
+                raise TypeError(msg)
 
             for k, v in field.items():
                 try:

pandas/tests/io/sas/test_sas7bdat.py (+8 -10)
@@ -21,7 +21,7 @@ def setup_method(self, datapath):
         self.data = []
         self.test_ix = [list(range(1, 16)), [16]]
         for j in 1, 2:
-            fname = os.path.join(self.dirpath, "test_sas7bdat_{j}.csv".format(j=j))
+            fname = os.path.join(self.dirpath, f"test_sas7bdat_{j}.csv")
             df = pd.read_csv(fname)
             epoch = pd.datetime(1960, 1, 1)
             t1 = pd.to_timedelta(df["Column4"], unit="d")
@@ -38,15 +38,15 @@ def test_from_file(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                 df = pd.read_sas(fname, encoding="utf-8")
                 tm.assert_frame_equal(df, df0)
 
     def test_from_buffer(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                 with open(fname, "rb") as f:
                     byts = f.read()
                 buf = io.BytesIO(byts)
@@ -61,7 +61,7 @@ def test_from_iterator(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                 rdr = pd.read_sas(fname, iterator=True, encoding="utf-8")
                 df = rdr.read(2)
                 tm.assert_frame_equal(df, df0.iloc[0:2, :])
@@ -73,7 +73,7 @@ def test_path_pathlib(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = Path(os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k)))
+                fname = Path(os.path.join(self.dirpath, f"test{k}.sas7bdat"))
                 df = pd.read_sas(fname, encoding="utf-8")
                 tm.assert_frame_equal(df, df0)
 
@@ -84,9 +84,7 @@ def test_path_localpath(self):
         for j in 0, 1:
             df0 = self.data[j]
             for k in self.test_ix[j]:
-                fname = LocalPath(
-                    os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
-                )
+                fname = LocalPath(os.path.join(self.dirpath, f"test{k}.sas7bdat"))
                 df = pd.read_sas(fname, encoding="utf-8")
                 tm.assert_frame_equal(df, df0)
 
@@ -95,7 +93,7 @@ def test_iterator_loop(self):
         for j in 0, 1:
             for k in self.test_ix[j]:
                 for chunksize in 3, 5, 10, 11:
-                    fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+                    fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
                     rdr = pd.read_sas(fname, chunksize=10, encoding="utf-8")
                     y = 0
                     for x in rdr:
@@ -106,7 +104,7 @@ def test_iterator_loop(self):
     def test_iterator_read_too_much(self):
         # github #14734
         k = self.test_ix[0][0]
-        fname = os.path.join(self.dirpath, "test{k}.sas7bdat".format(k=k))
+        fname = os.path.join(self.dirpath, f"test{k}.sas7bdat")
         rdr = pd.read_sas(fname, format="sas7bdat", iterator=True, encoding="utf-8")
         d1 = rdr.read(rdr.row_count + 20)
         rdr.close()
