@@ -149,8 +149,6 @@ def create_and_load_iris(conn, iris_file: Path, dialect: str):
     from sqlalchemy.engine import Engine
 
     iris = iris_table_metadata(dialect)
-    iris.drop(conn, checkfirst=True)
-    iris.create(bind=conn)
 
     with iris_file.open(newline=None) as csvfile:
         reader = csv.reader(csvfile)
@@ -160,9 +158,14 @@ def create_and_load_iris(conn, iris_file: Path, dialect: str):
         if isinstance(conn, Engine):
             with conn.connect() as conn:
                 with conn.begin():
+                    iris.drop(conn, checkfirst=True)
+                    iris.create(bind=conn)
                     conn.execute(stmt)
         else:
-            conn.execute(stmt)
+            with conn.begin():
+                iris.drop(conn, checkfirst=True)
+                iris.create(bind=conn)
+                conn.execute(stmt)
 
 
 def create_and_load_iris_view(conn):
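The change above is the core pattern repeated throughout this diff: table setup (drop/create) and the insert are moved inside an explicit transaction. A minimal sketch of that pattern, assuming a throwaway in-memory SQLite engine and a hypothetical demo table (neither is part of the test suite): on a SQLAlchemy 2.0-style Connection there is no implicit autocommit, so DDL and DML issued on the connection only persist when wrapped in conn.begin(), which commits on success and rolls back on error.

from sqlalchemy import Column, Integer, MetaData, Table, create_engine, insert

engine = create_engine("sqlite://")  # assumption: any SQLAlchemy engine works the same way here
metadata = MetaData()
demo = Table("demo", metadata, Column("x", Integer))  # hypothetical table for illustration only

with engine.connect() as conn:
    with conn.begin():  # explicit transaction: commits on success, rolls back on error
        demo.drop(conn, checkfirst=True)  # drop the table if it already exists
        demo.create(bind=conn)            # recreate it
        conn.execute(insert(demo).values([{"x": 1}, {"x": 2}]))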
@@ -180,7 +183,8 @@ def create_and_load_iris_view(conn):
                 with conn.begin():
                     conn.execute(stmt)
         else:
-            conn.execute(stmt)
+            with conn.begin():
+                conn.execute(stmt)
 
 
 def types_table_metadata(dialect: str):
@@ -243,16 +247,19 @@ def create_and_load_types(conn, types_data: list[dict], dialect: str):
     from sqlalchemy.engine import Engine
 
     types = types_table_metadata(dialect)
-    types.drop(conn, checkfirst=True)
-    types.create(bind=conn)
 
     stmt = insert(types).values(types_data)
     if isinstance(conn, Engine):
         with conn.connect() as conn:
             with conn.begin():
+                types.drop(conn, checkfirst=True)
+                types.create(bind=conn)
                 conn.execute(stmt)
     else:
-        conn.execute(stmt)
+        with conn.begin():
+            types.drop(conn, checkfirst=True)
+            types.create(bind=conn)
+            conn.execute(stmt)
 
 
 def check_iris_frame(frame: DataFrame):
@@ -713,7 +720,8 @@ def test_read_procedure(conn, request):
             with engine_conn.begin():
                 engine_conn.execute(proc)
     else:
-        conn.execute(proc)
+        with conn.begin():
+            conn.execute(proc)
 
     res1 = sql.read_sql_query("CALL get_testdb();", conn)
     tm.assert_frame_equal(df, res1)
@@ -995,8 +1003,6 @@ class _TestSQLApi(PandasSQLTest):
     @pytest.fixture(autouse=True)
     def setup_method(self, iris_path, types_data):
         self.conn = self.connect()
-        if not isinstance(self.conn, sqlite3.Connection):
-            self.conn.begin()
         self.load_iris_data(iris_path)
         self.load_types_data(types_data)
         self.load_test_data_and_sql()
@@ -1492,7 +1498,8 @@ def test_not_reflect_all_tables(self):
                     with conn.begin():
                         conn.execute(query)
             else:
-                self.conn.execute(query)
+                with self.conn.begin():
+                    self.conn.execute(query)
 
         with tm.assert_produces_warning(None):
             sql.read_sql_table("other_table", self.conn)
@@ -1742,7 +1749,6 @@ def setup_class(cls):
     def setup_method(self, iris_path, types_data):
         try:
             self.conn = self.engine.connect()
-            self.conn.begin()
             self.pandasSQL = sql.SQLDatabase(self.conn)
         except sqlalchemy.exc.OperationalError:
             pytest.skip(f"Can't connect to {self.flavor} server")
@@ -2146,7 +2152,6 @@ def _get_index_columns(self, tbl_name):
     def test_to_sql_save_index(self):
         self._to_sql_save_index()
 
-    @pytest.mark.xfail(reason="Nested transactions rollbacks don't work with Pandas")
     def test_transactions(self):
         self._transaction_test()
 
@@ -2168,7 +2173,8 @@ def test_get_schema_create_table(self, test_frame3):
                 with conn.begin():
                     conn.execute(create_sql)
         else:
-            self.conn.execute(create_sql)
+            with self.conn.begin():
+                self.conn.execute(create_sql)
         returned_df = sql.read_sql_table(tbl, self.conn)
         tm.assert_frame_equal(returned_df, blank_test_df, check_index_type=False)
         self.drop_table(tbl, self.conn)
@@ -2578,7 +2584,8 @@ class Test(BaseModel):
             id = Column(Integer, primary_key=True)
             string_column = Column(String(50))
 
-        BaseModel.metadata.create_all(self.conn)
+        with self.conn.begin():
+            BaseModel.metadata.create_all(self.conn)
         Session = sessionmaker(bind=self.conn)
         with Session() as session:
             df = DataFrame({"id": [0, 1], "string_column": ["hello", "world"]})
@@ -2660,8 +2667,9 @@ def test_schema_support(self):
         df = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]})
 
         # create a schema
-        self.conn.exec_driver_sql("DROP SCHEMA IF EXISTS other CASCADE;")
-        self.conn.exec_driver_sql("CREATE SCHEMA other;")
+        with self.conn.begin():
+            self.conn.exec_driver_sql("DROP SCHEMA IF EXISTS other CASCADE;")
+            self.conn.exec_driver_sql("CREATE SCHEMA other;")
 
         # write dataframe to different schema's
         assert df.to_sql("test_schema_public", self.conn, index=False) == 2
@@ -2693,8 +2701,9 @@ def test_schema_support(self):
         # different if_exists options
 
         # create a schema
-        self.conn.exec_driver_sql("DROP SCHEMA IF EXISTS other CASCADE;")
-        self.conn.exec_driver_sql("CREATE SCHEMA other;")
+        with self.conn.begin():
+            self.conn.exec_driver_sql("DROP SCHEMA IF EXISTS other CASCADE;")
+            self.conn.exec_driver_sql("CREATE SCHEMA other;")
 
         # write dataframe with different if_exists options
         assert (
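The last two hunks apply the same rule to raw driver-level SQL: exec_driver_sql() on a 2.0-style Connection does not commit on its own, so the schema setup is wrapped in conn.begin() as well. A minimal sketch, assuming a placeholder PostgreSQL DSN (DROP SCHEMA ... CASCADE is PostgreSQL syntax; the tests use their own fixture-managed connection):

from sqlalchemy import create_engine

# assumption: placeholder credentials, not the test suite's connection setup
engine = create_engine("postgresql+psycopg2://user:password@localhost/testdb")

with engine.connect() as conn:
    with conn.begin():  # without this the schema changes are rolled back when the connection closes
        conn.exec_driver_sql("DROP SCHEMA IF EXISTS other CASCADE;")
        conn.exec_driver_sql("CREATE SCHEMA other;")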