@@ -105,7 +105,7 @@ def test_frame_non_unique_index(self, orient):
     @pytest.mark.parametrize("orient", ["index", "columns"])
     def test_frame_non_unique_index_raises(self, orient):
         df = DataFrame([["a", "b"], ["c", "d"]], index=[1, 1], columns=["x", "y"])
-        msg = "DataFrame index must be unique for orient='{}'".format(orient)
+        msg = f"DataFrame index must be unique for orient='{orient}'"
         with pytest.raises(ValueError, match=msg):
             df.to_json(orient=orient)
 
@@ -142,7 +142,7 @@ def test_frame_non_unique_columns(self, orient, data):
     def test_frame_non_unique_columns_raises(self, orient):
         df = DataFrame([["a", "b"], ["c", "d"]], index=[1, 2], columns=["x", "x"])
 
-        msg = "DataFrame columns must be unique for orient='{}'".format(orient)
+        msg = f"DataFrame columns must be unique for orient='{orient}'"
         with pytest.raises(ValueError, match=msg):
             df.to_json(orient=orient)
 
@@ -225,13 +225,11 @@ def test_roundtrip_str_axes(self, orient, convert_axes, numpy, dtype):
     def test_roundtrip_categorical(self, orient, convert_axes, numpy):
         # TODO: create a better frame to test with and improve coverage
         if orient in ("index", "columns"):
-            pytest.xfail(
-                "Can't have duplicate index values for orient '{}')".format(orient)
-            )
+            pytest.xfail(f"Can't have duplicate index values for orient '{orient}')")
 
         data = self.categorical.to_json(orient=orient)
         if numpy and orient in ("records", "values"):
-            pytest.xfail("Orient {} is broken with numpy=True".format(orient))
+            pytest.xfail(f"Orient {orient} is broken with numpy=True")
 
         result = pd.read_json(
             data, orient=orient, convert_axes=convert_axes, numpy=numpy
@@ -399,7 +397,7 @@ def test_frame_infinity(self, orient, inf, dtype):
     def test_frame_to_json_float_precision(self, value, precision, expected_val):
         df = pd.DataFrame([dict(a_float=value)])
         encoded = df.to_json(double_precision=precision)
-        assert encoded == '{{"a_float":{{"0":{}}}}}'.format(expected_val)
+        assert encoded == f'{{"a_float":{{"0":{expected_val}}}}}'
 
     def test_frame_to_json_except(self):
         df = DataFrame([1, 2, 3])
@@ -593,7 +591,7 @@ def __str__(self) -> str:
 
         # verify the proper conversion of printable content
         df_printable = DataFrame({"A": [binthing.hexed]})
-        assert df_printable.to_json() == '{{"A":{{"0":"{hex}"}}}}'.format(hex=hexed)
+        assert df_printable.to_json() == f'{{"A":{{"0":"{hexed}"}}}}'
 
         # check if non-printable content throws appropriate Exception
         df_nonprintable = DataFrame({"A": [binthing]})
@@ -607,19 +605,19 @@ def __str__(self) -> str:
             df_mixed.to_json()
 
         # default_handler should resolve exceptions for non-string types
-        assert df_nonprintable.to_json(
-            default_handler=str
-        ) == '{{"A":{{"0":"{hex}"}}}}'.format(hex=hexed)
-        assert df_mixed.to_json(
-            default_handler=str
-        ) == '{{"A":{{"0":"{hex}"}},"B":{{"0":1}}}}'.format(hex=hexed)
+        result = df_nonprintable.to_json(default_handler=str)
+        expected = f'{{"A":{{"0":"{hexed}"}}}}'
+        assert result == expected
+        assert (
+            df_mixed.to_json(default_handler=str)
+            == f'{{"A":{{"0":"{hexed}"}},"B":{{"0":1}}}}'
+        )
 
     def test_label_overflow(self):
         # GH14256: buffer length not checked when writing label
-        df = pd.DataFrame({"bar" * 100000: [1], "foo": [1337]})
-        assert df.to_json() == '{{"{bar}":{{"0":1}},"foo":{{"0":1337}}}}'.format(
-            bar=("bar" * 100000)
-        )
+        result = pd.DataFrame({"bar" * 100000: [1], "foo": [1337]}).to_json()
+        expected = f'{{"{"bar" * 100000}":{{"0":1}},"foo":{{"0":1337}}}}'
+        assert result == expected
 
     def test_series_non_unique_index(self):
         s = Series(["a", "b"], index=[1, 1])
@@ -1431,7 +1429,7 @@ def test_read_timezone_information(self):
     )
     def test_timedelta_as_label(self, date_format, key):
         df = pd.DataFrame([[1]], columns=[pd.Timedelta("1D")])
-        expected = '{{"{key}":{{"0":1}}}}'.format(key=key)
+        expected = f'{{"{key}":{{"0":1}}}}'
         result = df.to_json(date_format=date_format)
 
         assert result == expected
@@ -1460,7 +1458,7 @@ def test_to_json_indent(self, indent):
 
         result = df.to_json(indent=indent)
         spaces = " " * indent
-        expected = """{{
+        expected = f"""{{
 {spaces}"a":{{
 {spaces}{spaces}"0":"foo",
 {spaces}{spaces}"1":"baz"
@@ -1469,9 +1467,7 @@ def test_to_json_indent(self, indent):
 {spaces}{spaces}"0":"bar",
 {spaces}{spaces}"1":"qux"
 {spaces}}}
-}}""".format(
-            spaces=spaces
-        )
+}}"""
 
         assert result == expected
 