
Commit ee3b62a

Backport PR #45581: CI: Fix actions-310 testing 3.9 instead of 3.10 (#45755)
Co-authored-by: Matthew Roeschke <[email protected]>
Parent: 615f032 | Commit: ee3b62a

4 files changed (+49, -8 lines)

ci/deps/actions-310.yaml (+1, -1)
@@ -2,7 +2,7 @@ name: pandas-dev
 channels:
   - conda-forge
 dependencies:
-  - python=3.9
+  - python=3.10
 
   # test dependencies
   - cython=0.29.24
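
The environment change above is the core of the fix: the actions-310 file pinned python=3.9, so the "3.10" CI job was silently running Python 3.9. The test changes below then mark fastparquet-backed tests as expected failures, since the job now genuinely exercises 3.10. A one-line interpreter check like the following illustrative sketch (my own addition, not part of the commit or of pandas' CI) is enough to confirm which version a job actually resolves:

import sys

# Sanity check: the actions-310 environment should now resolve Python 3.10,
# not 3.9 (the mismatch this commit fixes).
assert sys.version_info[:2] == (3, 10), sys.version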

pandas/tests/io/test_fsspec.py (+2)
@@ -3,6 +3,7 @@
 import numpy as np
 import pytest
 
+from pandas.compat import PY310
 from pandas.compat._optional import VERSIONS
 
 from pandas import (
@@ -181,6 +182,7 @@ def test_arrowparquet_options(fsspectest):
 
 @td.skip_array_manager_not_yet_implemented  # TODO(ArrayManager) fastparquet
 @td.skip_if_no("fastparquet")
+@pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
 def test_fastparquet_options(fsspectest):
     """Regression test for writing to a not-yet-existent GCS Parquet file."""
     df = DataFrame({"a": [0]})

pandas/tests/io/test_parquet.py (+37, -5)
@@ -13,6 +13,7 @@
 
 from pandas._config import get_option
 
+from pandas.compat import PY310
 from pandas.compat.pyarrow import (
     pa_version_under2p0,
     pa_version_under5p0,
@@ -261,6 +262,7 @@ def test_options_py(df_compat, pa):
     check_round_trip(df_compat)
 
 
+@pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
 def test_options_fp(df_compat, fp):
     # use the set option
 
@@ -338,6 +340,7 @@ def test_get_engine_auto_error_message():
         get_engine("auto")
 
 
+@pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
 def test_cross_engine_pa_fp(df_cross_compat, pa, fp):
     # cross-compat with differing reading/writing engines
 
@@ -404,7 +407,11 @@ def test_error(self, engine):
         msg = "to_parquet only supports IO with DataFrames"
         self.check_error_on_write(obj, engine, ValueError, msg)
 
-    def test_columns_dtypes(self, engine):
+    def test_columns_dtypes(self, request, engine):
+        if PY310 and engine == "fastparquet":
+            request.node.add_marker(
+                pytest.mark.xfail(reason="fastparquet failing on 3.10")
+            )
         df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))})
 
         # unicode
@@ -431,27 +438,39 @@ def test_columns_dtypes_invalid(self, engine):
         self.check_error_on_write(df, engine, ValueError, msg)
 
     @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"])
-    def test_compression(self, engine, compression):
+    def test_compression(self, engine, compression, request):
 
         if compression == "snappy":
             pytest.importorskip("snappy")
 
         elif compression == "brotli":
             pytest.importorskip("brotli")
 
+        if PY310 and engine == "fastparquet":
+            request.node.add_marker(
+                pytest.mark.xfail(reason="fastparquet failing on 3.10")
+            )
         df = pd.DataFrame({"A": [1, 2, 3]})
         check_round_trip(df, engine, write_kwargs={"compression": compression})
 
-    def test_read_columns(self, engine):
+    def test_read_columns(self, engine, request):
         # GH18154
+        if PY310 and engine == "fastparquet":
+            request.node.add_marker(
+                pytest.mark.xfail(reason="fastparquet failing on 3.10")
+            )
         df = pd.DataFrame({"string": list("abc"), "int": list(range(1, 4))})
 
         expected = pd.DataFrame({"string": list("abc")})
         check_round_trip(
             df, engine, expected=expected, read_kwargs={"columns": ["string"]}
         )
 
-    def test_write_index(self, engine):
+    def test_write_index(self, engine, request):
+        if PY310 and engine == "fastparquet":
+            request.node.add_marker(
+                pytest.mark.xfail(reason="fastparquet failing on 3.10")
+            )
         check_names = engine != "fastparquet"
 
         df = pd.DataFrame({"A": [1, 2, 3]})
@@ -500,9 +519,13 @@ def test_multiindex_with_columns(self, pa):
             df, engine, read_kwargs={"columns": ["A", "B"]}, expected=df[["A", "B"]]
         )
 
-    def test_write_ignoring_index(self, engine):
+    def test_write_ignoring_index(self, engine, request):
         # ENH 20768
         # Ensure index=False omits the index from the written Parquet file.
+        if PY310 and engine == "fastparquet":
+            request.node.add_marker(
+                pytest.mark.xfail(reason="fastparquet failing on 3.10")
+            )
         df = pd.DataFrame({"a": [1, 2, 3], "b": ["q", "r", "s"]})
 
         write_kwargs = {"compression": None, "index": False}
@@ -958,6 +981,7 @@ def test_read_parquet_manager(self, pa, using_array_manager):
 
 
 class TestParquetFastParquet(Base):
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_basic(self, fp, df_full):
         df = df_full
 
@@ -975,6 +999,7 @@ def test_duplicate_columns(self, fp):
         msg = "Cannot create parquet dataset with duplicate column names"
         self.check_error_on_write(df, fp, ValueError, msg)
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_bool_with_none(self, fp):
         df = pd.DataFrame({"a": [True, None, False]})
         expected = pd.DataFrame({"a": [1.0, np.nan, 0.0]}, dtype="float16")
@@ -994,10 +1019,12 @@ def test_unsupported(self, fp):
         msg = "Can't infer object conversion type"
         self.check_error_on_write(df, fp, ValueError, msg)
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
    def test_categorical(self, fp):
         df = pd.DataFrame({"a": pd.Categorical(list("abc"))})
         check_round_trip(df, fp)
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_filter_row_groups(self, fp):
         d = {"a": list(range(0, 3))}
         df = pd.DataFrame(d)
@@ -1016,6 +1043,7 @@ def test_s3_roundtrip(self, df_compat, s3_resource, fp, s3so):
             write_kwargs={"compression": None, "storage_options": s3so},
         )
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_partition_cols_supported(self, fp, df_full):
         # GH #23283
         partition_cols = ["bool", "int"]
@@ -1033,6 +1061,7 @@ def test_partition_cols_supported(self, fp, df_full):
             actual_partition_cols = fastparquet.ParquetFile(path, False).cats
             assert len(actual_partition_cols) == 2
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_partition_cols_string(self, fp, df_full):
         # GH #27117
         partition_cols = "bool"
@@ -1050,6 +1079,7 @@ def test_partition_cols_string(self, fp, df_full):
             actual_partition_cols = fastparquet.ParquetFile(path, False).cats
             assert len(actual_partition_cols) == 1
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_partition_on_supported(self, fp, df_full):
         # GH #23283
         partition_cols = ["bool", "int"]
@@ -1085,13 +1115,15 @@ def test_error_on_using_partition_cols_and_partition_on(self, fp, df_full):
                 partition_cols=partition_cols,
             )
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_empty_dataframe(self, fp):
         # GH #27339
         df = pd.DataFrame()
         expected = df.copy()
         expected.index.name = "index"
         check_round_trip(df, fp, expected=expected)
 
+    @pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
     def test_timezone_aware_index(self, fp, timezone_aware_date_list):
         idx = 5 * [timezone_aware_date_list]
 
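
The changes above rely on two xfail patterns. Below is a minimal, self-contained sketch of both, assuming only pytest; PY310 and the engine parametrization are stand-ins for pandas.compat.PY310 and pandas' pyarrow/fastparquet fixtures, not the actual pandas code:

import sys

import pytest

# Stand-in for pandas.compat.PY310: True when running on Python 3.10+.
PY310 = sys.version_info >= (3, 10)


# Pattern 1: decorator-level conditional xfail, used for tests that always go
# through fastparquet, so the whole test is expected to fail on 3.10.
@pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10")
def test_always_fastparquet():
    ...  # real test body would go here


# Pattern 2: runtime marker via request.node.add_marker, used for tests that
# are parametrized over engines, so only the fastparquet case is marked.
@pytest.mark.parametrize("engine", ["pyarrow", "fastparquet"])
def test_any_engine(request, engine):
    if PY310 and engine == "fastparquet":
        request.node.add_marker(
            pytest.mark.xfail(reason="fastparquet failing on 3.10")
        )
    ...  # real test body would go here

The runtime form is needed because a plain decorator would xfail every parametrization of the same test, including the pyarrow ones.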

pandas/tests/io/test_user_agent.py (+9, -2)
@@ -11,6 +11,7 @@
 
 import pytest
 
+from pandas.compat import PY310
 import pandas.util._test_decorators as td
 
 import pandas as pd
@@ -242,7 +243,10 @@ def responder(request):
             pd.read_parquet,
             "fastparquet",
             # TODO(ArrayManager) fastparquet
-            marks=td.skip_array_manager_not_yet_implemented,
+            marks=[
+                td.skip_array_manager_not_yet_implemented,
+                pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10"),
+            ],
         ),
         (PickleUserAgentResponder, pd.read_pickle, None),
         (StataUserAgentResponder, pd.read_stata, None),
@@ -277,7 +281,10 @@ def test_server_and_default_headers(responder, read_method, parquet_engine):
             pd.read_parquet,
             "fastparquet",
             # TODO(ArrayManager) fastparquet
-            marks=td.skip_array_manager_not_yet_implemented,
+            marks=[
+                td.skip_array_manager_not_yet_implemented,
+                pytest.mark.xfail(PY310, reason="fastparquet failing on 3.10"),
+            ],
         ),
         (PickleUserAgentResponder, pd.read_pickle, None),
         (StataUserAgentResponder, pd.read_stata, None),
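
Here the fastparquet parametrization already carried one mark, so marks= is switched from a single mark to a list; pytest.param accepts either. A minimal sketch of stacking marks this way, with a placeholder condition and test rather than the pandas fixtures:

import pytest

ON_PY310 = True  # placeholder for a real condition such as pandas.compat.PY310


@pytest.mark.parametrize(
    "engine",
    [
        "pyarrow",
        pytest.param(
            "fastparquet",
            # Several marks can be attached to a single parametrization.
            marks=[
                pytest.mark.skipif(False, reason="placeholder skip"),
                pytest.mark.xfail(ON_PY310, reason="placeholder xfail"),
            ],
        ),
    ],
)
def test_stacked_marks(engine):
    assert isinstance(engine, str)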
