diff --git a/pymysqlreplication/binlogstream.py b/pymysqlreplication/binlogstream.py
index 98876b39..feaeff3f 100644
--- a/pymysqlreplication/binlogstream.py
+++ b/pymysqlreplication/binlogstream.py
@@ -81,11 +81,11 @@ def __init__(self, value):
             self.hostname = value
 
     def __repr__(self):
-        return "<ReportSlave hostname=%s username=%s password=%s port=%d>" % (
-            self.hostname,
-            self.username,
-            self.password,
-            self.port,
+        return (
+            f"<ReportSlave hostname={self.hostname} "
+            f"username={self.username} "
+            f"password={self.password} "
+            f"port={self.port}>"
         )
 
     def encoded(self, server_id, master_id=0):
@@ -367,7 +367,7 @@ def __connect_to_stream(self):
             # master_heartbeat_period is nanoseconds
             heartbeat = int(heartbeat * 1000000000)
             cur = self._stream_connection.cursor()
-            cur.execute("SET @master_heartbeat_period= %d" % heartbeat)
+            cur.execute(f"SET @master_heartbeat_period= {heartbeat}")
             cur.close()
 
         # When replicating from Mariadb 10.6.12 using binlog coordinates, a slave capability < 4 triggers a bug in
diff --git a/pymysqlreplication/event.py b/pymysqlreplication/event.py
index 82b89974..2d649bb6 100644
--- a/pymysqlreplication/event.py
+++ b/pymysqlreplication/event.py
@@ -472,7 +472,7 @@ def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs)
 
     def _dump(self):
         super()._dump()
-        print(f"Schema: {self.schema}" % (self.schema))
+        print(f"Schema: {self.schema}")
         print(f"Execution time: {self.execution_time}")
         print(f"Query: {self.query}")
 
diff --git a/pymysqlreplication/tests/test_basic.py b/pymysqlreplication/tests/test_basic.py
index ee6bfe62..26deaf21 100644
--- a/pymysqlreplication/tests/test_basic.py
+++ b/pymysqlreplication/tests/test_basic.py
@@ -13,7 +13,6 @@
 from pymysql.protocol import MysqlPacket
 import pytest
 
-
 __all__ = [
     "TestBasicBinLogStreamReader",
     "TestMultipleRowBinLogStreamReader",
diff --git a/pymysqlreplication/tests/test_data_type.py b/pymysqlreplication/tests/test_data_type.py
index 4c03ab1c..9483f818 100644
--- a/pymysqlreplication/tests/test_data_type.py
+++ b/pymysqlreplication/tests/test_data_type.py
@@ -607,12 +607,10 @@ def test_json_array(self):
 
     def test_json_large(self):
         data = dict(
-            [("foooo%i" % i, "baaaaar%i" % i) for i in range(2560)]
+            [(f"foooo{i}", f"baaaaar{i}") for i in range(2560)]
         )  # Make it large enough to reach 2^16 length
         create_query = "CREATE TABLE test (id int, value json);"
-        insert_query = (
-            """INSERT INTO test (id, value) VALUES (1, '%s');""" % json.dumps(data)
-        )
+        insert_query = f"INSERT INTO test (id, value) VALUES (1, '{json.dumps(data)}');"
         event = self.create_and_insert_value(create_query, insert_query)
         if event.table_map[event.table_id].column_name_flag:
             self.assertEqual(event.rows[0]["values"]["value"], to_binary_dict(data))
@@ -621,8 +619,8 @@ def test_json_large_array(self):
         "Test json array larger than 64k bytes"
         create_query = "CREATE TABLE test (id int, value json);"
         large_array = dict(my_key=[i for i in range(100000)])
-        insert_query = "INSERT INTO test (id, value) VALUES (1, '%s');" % (
-            json.dumps(large_array),
+        insert_query = (
+            f"INSERT INTO test (id, value) VALUES (1, '{json.dumps(large_array)}');"
         )
         event = self.create_and_insert_value(create_query, insert_query)
         if event.table_map[event.table_id].column_name_flag:
@@ -632,12 +630,10 @@ def test_json_large_with_literal(self):
         data = dict(
-            [("foooo%i" % i, "baaaaar%i" % i) for i in range(2560)], literal=True
+            [(f"foooo{i}", f"baaaaar{i}") for i in range(2560)], literal=True
         )  # Make it large with literal
         create_query = "CREATE TABLE test (id int, value json);"
-        insert_query = (
-            """INSERT INTO test (id, value) VALUES (1, '%s');""" % json.dumps(data)
-        )
+        insert_query = f"INSERT INTO test (id, value) VALUES (1, '{json.dumps(data)}');"
         event = self.create_and_insert_value(create_query, insert_query)
         if event.table_map[event.table_id].column_name_flag:
             self.assertEqual(event.rows[0]["values"]["value"], to_binary_dict(data))
@@ -661,7 +657,7 @@ def test_json_types(self):
             data = {"foo": t}
             create_query = "CREATE TABLE test (id int, value json);"
             insert_query = (
-                """INSERT INTO test (id, value) VALUES (1, '%s');""" % json.dumps(data)
+                f"INSERT INTO test (id, value) VALUES (1, '{json.dumps(data)}');"
             )
             event = self.create_and_insert_value(create_query, insert_query)
             if event.table_map[event.table_id].column_name_flag:
@@ -687,7 +683,7 @@ def test_json_basic(self):
         for data in types:
             create_query = "CREATE TABLE test (id int, value json);"
             insert_query = (
-                """INSERT INTO test (id, value) VALUES (1, '%s');""" % json.dumps(data)
+                f"INSERT INTO test (id, value) VALUES (1, '{json.dumps(data)}');"
             )
             event = self.create_and_insert_value(create_query, insert_query)
             if event.table_map[event.table_id].column_name_flag:
@@ -709,10 +705,7 @@ def test_json_long_string(self):
         create_query = "CREATE TABLE test (id int, value json);"
         # The string length needs to be larger than what can fit in a single byte.
         string_value = "super_long_string" * 100
-        insert_query = (
-            'INSERT INTO test (id, value) VALUES (1, \'{"my_key": "%s"}\');'
-            % (string_value,)
-        )
+        insert_query = f'INSERT INTO test (id, value) VALUES (1, \'{{"my_key": "{string_value}"}}\')'
         event = self.create_and_insert_value(create_query, insert_query)
         if event.table_map[event.table_id].column_name_flag:
             self.assertEqual(