|
17 | 17 |
|
18 | 18 | import numpy as np
|
19 | 19 | import pytest
|
| 20 | +import xdist |
20 | 21 |
|
21 | 22 | from pandas._config import using_string_dtype
|
22 | 23 |
|
@@ -601,11 +602,11 @@ def drop_view(
|
601 | 602 |
|
602 | 603 |
|
603 | 604 | @pytest.fixture
|
604 |
| -def mysql_pymysql_engine(): |
| 605 | +def mysql_pymysql_engine(worker_name): |
605 | 606 | sqlalchemy = pytest.importorskip("sqlalchemy")
|
606 | 607 | pymysql = pytest.importorskip("pymysql")
|
607 | 608 | engine = sqlalchemy.create_engine(
|
608 |
| - "mysql+pymysql://root@localhost:3306/pandas", |
| 609 | + f"mysql+pymysql://root@localhost:3306/pandas{worker_name}", |
609 | 610 | connect_args={"client_flag": pymysql.constants.CLIENT.MULTI_STATEMENTS},
|
610 | 611 | poolclass=sqlalchemy.pool.NullPool,
|
611 | 612 | )
|
@@ -649,11 +650,11 @@ def mysql_pymysql_conn_types(mysql_pymysql_engine_types):
|
649 | 650 |
|
650 | 651 |
|
651 | 652 | @pytest.fixture
|
652 |
| -def postgresql_psycopg2_engine(): |
| 653 | +def postgresql_psycopg2_engine(worker_name): |
653 | 654 | sqlalchemy = pytest.importorskip("sqlalchemy")
|
654 | 655 | pytest.importorskip("psycopg2")
|
655 | 656 | engine = sqlalchemy.create_engine(
|
656 |
| - "postgresql+psycopg2://postgres:postgres@localhost:5432/pandas", |
| 657 | + f"postgresql+psycopg2://postgres:postgres@localhost:5432/pandas{worker_name}", |
657 | 658 | poolclass=sqlalchemy.pool.NullPool,
|
658 | 659 | )
|
659 | 660 | yield engine
|
@@ -684,12 +685,12 @@ def postgresql_psycopg2_conn(postgresql_psycopg2_engine):
|
684 | 685 |
|
685 | 686 |
|
686 | 687 | @pytest.fixture
|
687 |
| -def postgresql_adbc_conn(): |
| 688 | +def postgresql_adbc_conn(worker_name): |
688 | 689 | pytest.importorskip("pyarrow")
|
689 | 690 | pytest.importorskip("adbc_driver_postgresql")
|
690 | 691 | from adbc_driver_postgresql import dbapi
|
691 | 692 |
|
692 |
| - uri = "postgresql://postgres:postgres@localhost:5432/pandas" |
| 693 | + uri = f"postgresql://postgres:postgres@localhost:5432/pandas{worker_name}" |
693 | 694 | with dbapi.connect(uri) as conn:
|
694 | 695 | yield conn
|
695 | 696 | for view in get_all_views(conn):
|
@@ -748,10 +749,10 @@ def postgresql_psycopg2_conn_types(postgresql_psycopg2_engine_types):
|
748 | 749 |
|
749 | 750 |
|
750 | 751 | @pytest.fixture
|
751 |
| -def sqlite_str(): |
| 752 | +def sqlite_str(worker_name): |
752 | 753 | pytest.importorskip("sqlalchemy")
|
753 | 754 | with tm.ensure_clean() as name:
|
754 |
| - yield f"sqlite:///{name}" |
| 755 | + yield f"sqlite:///{name}{worker_name}" |
755 | 756 |
|
756 | 757 |
|
757 | 758 | @pytest.fixture
|
@@ -816,14 +817,14 @@ def sqlite_conn_types(sqlite_engine_types):
|
816 | 817 | yield conn
|
817 | 818 |
|
818 | 819 |
|
819 |
| -@pytest.fixture |
820 |
| -def sqlite_adbc_conn(): |
| 820 | +@pytest.fixture(scope="function") |
| 821 | +def sqlite_adbc_conn(worker_name): |
821 | 822 | pytest.importorskip("pyarrow")
|
822 | 823 | pytest.importorskip("adbc_driver_sqlite")
|
823 | 824 | from adbc_driver_sqlite import dbapi
|
824 | 825 |
|
825 | 826 | with tm.ensure_clean() as name:
|
826 |
| - uri = f"file:{name}" |
| 827 | + uri = f"file:{name}{worker_name}" |
827 | 828 | with dbapi.connect(uri) as conn:
|
828 | 829 | yield conn
|
829 | 830 | for view in get_all_views(conn):
|
@@ -894,6 +895,131 @@ def sqlite_buildin_types(sqlite_buildin, types_data):
|
894 | 895 | return sqlite_buildin
|
895 | 896 |
|
896 | 897 |
|
| 898 | +@pytest.fixture(scope="session") |
| 899 | +def worker_name(request): |
| 900 | + """ |
| 901 | + Creates a unique schema name for Postgres to use, in order to |
| 902 | + isolate tests for parallelization. |
| 903 | + :return: Name to use for creating an isolated schema |
| 904 | + :rtype: str |
| 905 | + """ |
| 906 | + return xdist.get_xdist_worker_id(request) |
| 907 | + |
| 908 | + |
| 909 | +@pytest.fixture(scope="session") |
| 910 | +def create_engines(): |
| 911 | + # Indirectly import dependencies. To avoid being picked up by depdency scanning software. |
| 912 | + sqlalchemy = pytest.importorskip("sqlalchemy") |
| 913 | + pymysql = pytest.importorskip("pymysql") |
| 914 | + |
| 915 | + # Round robin creation of DB connections. |
| 916 | + create_engine_commands = [ |
| 917 | + lambda : sqlalchemy.create_engine("mysql+pymysql://root@localhost:3306/pandas", connect_args={"client_flag": pymysql.constants.CLIENT.MULTI_STATEMENTS}, poolclass=sqlalchemy.pool.NullPool), |
| 918 | + lambda : sqlalchemy.create_engine("postgresql+psycopg2://postgres:postgres@localhost:5432/pandas", poolclass=sqlalchemy.pool.NullPool, isolation_level="AUTOCOMMIT") |
| 919 | + ] |
| 920 | + return create_engine_commands |
| 921 | + |
| 922 | + |
| 923 | +@pytest.fixture(scope="session") |
| 924 | +def round_robin_ordering(worker_number): |
| 925 | + round_robin_order = [(worker_number+i)%len(create_engine_commands) for i in range(len(create_engine_commands))] |
| 926 | + |
| 927 | + |
| 928 | +@pytest.fixture(scope="session") |
| 929 | +def worker_number(worker_name): |
| 930 | + if worker_name == 'master': |
| 931 | + worker_number = 1 |
| 932 | + else: |
| 933 | + worker_number = int(worker_name[2:]) |
| 934 | + return worker_number |
| 935 | + |
| 936 | + |
| 937 | +@pytest.fixture(scope="session") |
| 938 | +def create_db_string(): |
| 939 | + return [ |
| 940 | + f"""CREATE DATABASE IF NOT EXISTS pandas{worker_name}""", |
| 941 | + f"""CREATE DATABASE pandas{worker_name}""" |
| 942 | + ] |
| 943 | + |
| 944 | + |
| 945 | +@pytest.fixture(scope="session") |
| 946 | +def execute_db_command(): |
| 947 | + for i in range(len(create_engine_commands)): |
| 948 | + engine=create_engines()[round_robin_order()[i]]() |
| 949 | + connection = engine.connect() |
| 950 | + connection.execute(sqlalchemy.text(create_db_string())) |
| 951 | + |
| 952 | + |
| 953 | +@pytest.fixture(scope="session", autouse=True) |
| 954 | +def prepare_db_setup(request, worker_name): |
| 955 | + worker_number = worker_number |
| 956 | + create_engine_commands = create_engines() |
| 957 | + create_db_command = create_db_string() |
| 958 | + assert len(create_engine_commands) == len(create_db_command) |
| 959 | + |
| 960 | + round_robin_order = round_robin_ordering() |
| 961 | + |
| 962 | + for i in range(len(create_engine_commands)): |
| 963 | + engine = create_engine_commands[round_robin_order[i]]() |
| 964 | + connection = engine.connect() |
| 965 | + connection.execute(sqlalchemy.text(create_db_string[round_robin_order[i]])) |
| 966 | + engine.dispose() |
| 967 | + yield |
| 968 | + teardown_db_string = [ |
| 969 | + f"""DROP DATABASE IF EXISTS pandas{worker_name}""", |
| 970 | + f"""DROP DATABASE IF EXISTS pandas{worker_name}""" |
| 971 | + ] |
| 972 | + |
| 973 | + for i in range(len(create_engine_commands)): |
| 974 | + engine = create_engine_commands[round_robin_order[i]]() |
| 975 | + connection = engine.connect() |
| 976 | + connection.execute(sqlalchemy.text(teardown_db_string[round_robin_order[i]])) |
| 977 | + engine.dispose() |
| 978 | + |
| 979 | + |
| 980 | + |
| 981 | + |
| 982 | +# @pytest.fixture(scope="session") |
| 983 | +# def parallelize_mysql(): |
| 984 | +# sqlalchemy = pytest.importorskip("sqlalchemy") |
| 985 | +# pymysql = pytest.importorskip("pymysql") |
| 986 | +# |
| 987 | +# engine = sqlalchemy.create_engine( |
| 988 | +# connection_string, |
| 989 | +# connect_args={"client_flag": pymysql.constants.CLIENT.MULTI_STATEMENTS}, |
| 990 | +# poolclass=sqlalchemy.pool.NullPool, |
| 991 | +# ) |
| 992 | +# with engine.connect() as connection: |
| 993 | +# connection.execute(sqlalchemy.text( |
| 994 | +# f""" |
| 995 | +# CREATE DATABASE IF NOT EXISTS pandas{worker_name}; |
| 996 | +# """ |
| 997 | +# )) |
| 998 | +# # connection.commit() |
| 999 | +# # connection.close() |
| 1000 | +# yield |
| 1001 | +# engine.dispose() |
| 1002 | +# |
| 1003 | +# pass |
| 1004 | + |
| 1005 | + |
| 1006 | + |
| 1007 | + |
| 1008 | +# @pytest.fixture(scope="session", autouse=True) |
| 1009 | +# def set_up_dbs(parallelize_mysql_dbs, request): |
| 1010 | +# if hasattr(request.config, "workerinput"): |
| 1011 | +# # The tests are multi-threaded |
| 1012 | +# worker_name = xdist.get_xdist_worker_id(request) |
| 1013 | +# worker_count = request.config.workerinput["workercount"] |
| 1014 | +# print(worker_name, worker_count) |
| 1015 | +# parallelize_mysql_dbs(request, worker_name, worker_count) |
| 1016 | +# else: |
| 1017 | +# quit(1) |
| 1018 | + # parallelize_mysql_dbs |
| 1019 | + |
| 1020 | + |
| 1021 | + |
| 1022 | + |
897 | 1023 | mysql_connectable = [
|
898 | 1024 | pytest.param("mysql_pymysql_engine", marks=pytest.mark.db),
|
899 | 1025 | pytest.param("mysql_pymysql_conn", marks=pytest.mark.db),
|
@@ -978,8 +1104,11 @@ def sqlite_buildin_types(sqlite_buildin, types_data):
|
978 | 1104 | sqlalchemy_connectable_types + ["sqlite_buildin_types"] + adbc_connectable_types
|
979 | 1105 | )
|
980 | 1106 |
|
981 |
| - |
982 |
| -@pytest.mark.parametrize("conn", all_connectable) |
| 1107 | +#TODO fix |
| 1108 | +@pytest.mark.parametrize("conn", [ |
| 1109 | + #pytest.param("mysql_pymysql_engine", marks=pytest.mark.db), |
| 1110 | + pytest.param("mysql_pymysql_conn", marks=pytest.mark.db), |
| 1111 | +]) |
983 | 1112 | def test_dataframe_to_sql(conn, test_frame1, request):
|
984 | 1113 | # GH 51086 if conn is sqlite_engine
|
985 | 1114 | conn = request.getfixturevalue(conn)
|
|
0 commit comments