Commit 4313267

CI: Bump s3fs (#29573)

TomAugspurger authored and jreback committed
1 parent 207ab74, commit 4313267

9 files changed: +19 -9 lines
ci/deps/travis-36-cov.yaml (+1 -1)

@@ -29,7 +29,7 @@ dependencies:
   - python-snappy
   - python=3.6.*
   - pytz
-  - s3fs<0.3
+  - s3fs
   - scikit-learn
   - scipy
   - sqlalchemy

ci/deps/travis-36-locale.yaml (+1 -1)

@@ -26,7 +26,7 @@ dependencies:
   - python-dateutil
   - python=3.6.*
   - pytz
-  - s3fs=0.0.8
+  - s3fs=0.3.0
   - scipy
   - sqlalchemy=1.1.4
   - xarray=0.10

ci/deps/travis-36-slow.yaml (+1 -1)

@@ -18,7 +18,7 @@ dependencies:
   - python-dateutil
   - python=3.6.*
   - pytz
-  - s3fs<0.3
+  - s3fs
   - scipy
   - sqlalchemy
   - xlrd

ci/deps/travis-37.yaml (+1 -1)

@@ -17,7 +17,7 @@ dependencies:
   - pytest-xdist>=1.29.0
   - pytest-mock
   - hypothesis>=3.58.0
-  - s3fs<0.3
+  - s3fs
   - pip
   - pyreadstat
   - pip:
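
Across the four Travis environments above, the same change is applied: the "s3fs<0.3" ceiling is removed (or, for the locale build, the exact pin is moved up to 0.3.0) so the resolver installs a modern s3fs. A hypothetical CI sanity check for such an environment, not part of this commit, could assert that the resolved version meets the new floor:

    # Hedged sketch: confirm the resolved CI environment satisfies the new
    # s3fs floor of 0.3.0. The version numbers mirror this commit; the check
    # itself is illustrative, not pandas code.
    from distutils.version import LooseVersion

    import s3fs

    assert LooseVersion(s3fs.__version__) >= LooseVersion("0.3.0"), s3fs.__version__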

doc/source/getting_started/install.rst (+1 -1)

@@ -263,7 +263,7 @@ pymysql 0.7.11 MySQL engine for sqlalchemy
 pyreadstat          SPSS files (.sav) reading
 pytables   3.4.2    HDF5 reading / writing
 qtpy                Clipboard I/O
-s3fs       0.0.8    Amazon S3 access
+s3fs       0.3.0    Amazon S3 access
 xarray     0.8.2    pandas-like API for N-dimensional data
 xclip               Clipboard I/O on linux
 xlrd       1.1.0    Excel reading

doc/source/whatsnew/v1.0.0.rst (+1 -0)

@@ -183,6 +183,7 @@ Backwards incompatible API changes
 Other API changes
 ^^^^^^^^^^^^^^^^^

+- Bumped the minimum supported version of ``s3fs`` from 0.0.8 to 0.3.0 (:issue:`28616`)
 - :class:`pandas.core.groupby.GroupBy.transform` now raises on invalid operation names (:issue:`27489`)
 - :meth:`pandas.api.types.infer_dtype` will now return "integer-na" for integer and ``np.nan`` mix (:issue:`27283`)
 - :meth:`MultiIndex.from_arrays` will no longer infer names from arrays if ``names=None`` is explicitly provided (:issue:`27292`)

pandas/compat/_optional.py (+1 -1)

@@ -18,7 +18,7 @@
     "pandas_gbq": "0.8.0",
     "pyarrow": "0.9.0",
     "pytables": "3.4.2",
-    "s3fs": "0.0.8",
+    "s3fs": "0.3.0",
     "scipy": "0.19.0",
     "sqlalchemy": "1.1.4",
     "tables": "3.4.2",

pandas/tests/io/conftest.py (+2 -1)

@@ -40,7 +40,7 @@ def s3_resource(tips_file, jsonl_file):
     A private bucket "cant_get_it" is also created. The boto3 s3 resource
     is yielded by the fixture.
     """
-    pytest.importorskip("s3fs")
+    s3fs = pytest.importorskip("s3fs")
     boto3 = pytest.importorskip("boto3")

     with tm.ensure_safe_environment_variables():
@@ -77,6 +77,7 @@ def add_tips_files(bucket_name):

         conn.create_bucket(Bucket="cant_get_it", ACL="private")
         add_tips_files("cant_get_it")
+        s3fs.S3FileSystem.clear_instance_cache()
         yield conn
     finally:
         s3.stop()
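
The fixture now clears s3fs's instance cache after seeding the mock buckets. In s3fs 0.3+, ``S3FileSystem`` instances are cached per constructor arguments, so a stale instance (with its cached directory listings) can leak across tests. A small sketch of that behavior, assuming s3fs>=0.3 caching semantics:

    # Sketch, assuming s3fs>=0.3 instance caching: identical constructor
    # arguments return the same cached object until the cache is cleared.
    import s3fs

    fs_a = s3fs.S3FileSystem(anon=True)
    fs_b = s3fs.S3FileSystem(anon=True)
    assert fs_a is fs_b  # same cached instance, so state is shared

    s3fs.S3FileSystem.clear_instance_cache()
    assert s3fs.S3FileSystem(anon=True) is not fs_a  # fresh instance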

pandas/tests/io/parser/test_network.py (+10 -2)

@@ -166,7 +166,7 @@ def test_s3_fails(self):
         # Receive a permission error when trying to read a private bucket.
         # It's irrelevant here that this isn't actually a table.
         with pytest.raises(IOError):
-            read_csv("s3://cant_get_it/")
+            read_csv("s3://cant_get_it/file.csv")

     def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file):
         # see gh-16135
@@ -184,6 +184,8 @@ def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file):

     def test_read_csv_chunked_download(self, s3_resource, caplog):
         # 8 MB, s3fs uses 5 MB chunks
+        import s3fs
+
         df = DataFrame(np.random.randn(100000, 4), columns=list("abcd"))
         buf = BytesIO()
         str_buf = StringIO()
@@ -194,7 +196,13 @@ def test_read_csv_chunked_download(self, s3_resource, caplog):

         s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf)

-        with caplog.at_level(logging.DEBUG, logger="s3fs.core"):
+        # Possibly some state leaking in between tests.
+        # If we don't clear this cache, we saw `GetObject operation: Forbidden`.
+        # Presumably the s3fs instance is being cached, with the directory listing
+        # from *before* we add the large-file.csv in the pandas-test bucket.
+        s3fs.S3FileSystem.clear_instance_cache()
+
+        with caplog.at_level(logging.DEBUG, logger="s3fs"):
             read_csv("s3://pandas-test/large-file.csv", nrows=5)
             # log of fetch_range (start, stop)
             assert (0, 5505024) in {x.args[-2:] for x in caplog.records}
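
The logger name also changes from "s3fs.core" to "s3fs" to match where newer s3fs emits its fetch-range debug records. A self-contained sketch of the caplog pattern the assertion relies on (the debug call below is an illustrative stand-in; the real record comes from s3fs itself):

    import logging

    def test_fetch_range_is_logged(caplog):
        # Stand-in for s3fs's internal fetch-range debug log.
        with caplog.at_level(logging.DEBUG, logger="s3fs"):
            logging.getLogger("s3fs").debug("Fetch: %s, %s", 0, 5505024)
        # Each captured record keeps its raw args, so the (start, stop)
        # byte range is recoverable from the last two arguments.
        assert (0, 5505024) in {rec.args[-2:] for rec in caplog.records}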
