@@ -166,7 +166,7 @@ def test_s3_fails(self):
        # Receive a permission error when trying to read a private bucket.
        # It's irrelevant here that this isn't actually a table.
        with pytest.raises(IOError):
-            read_csv("s3://cant_get_it/")
+            read_csv("s3://cant_get_it/file.csv")

    def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file):
        # see gh-16135
@@ -184,6 +184,8 @@ def test_read_csv_handles_boto_s3_object(self, s3_resource, tips_file):

    def test_read_csv_chunked_download(self, s3_resource, caplog):
        # 8 MB, S3FS uses 5MB chunks
+        import s3fs
+
        df = DataFrame(np.random.randn(100000, 4), columns=list("abcd"))
        buf = BytesIO()
        str_buf = StringIO()
@@ -194,7 +196,13 @@ def test_read_csv_chunked_download(self, s3_resource, caplog):

        s3_resource.Bucket("pandas-test").put_object(Key="large-file.csv", Body=buf)

-        with caplog.at_level(logging.DEBUG, logger="s3fs.core"):
+        # Possibly some state leaking between tests.
+        # Without clearing this cache, we saw `GetObject operation: Forbidden`.
+        # Presumably the s3fs instance is being cached, with the directory listing
+        # from *before* we added large-file.csv to the pandas-test bucket.
+        s3fs.S3FileSystem.clear_instance_cache()
+
+        with caplog.at_level(logging.DEBUG, logger="s3fs"):
            read_csv("s3://pandas-test/large-file.csv", nrows=5)
            # log of fetch_range (start, stop)
            assert (0, 5505024) in {x.args[-2:] for x in caplog.records}
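
A minimal sketch (not part of the patch) of the instance caching that motivates the clear_instance_cache() call above, assuming an s3fs version built on fsspec: constructing S3FileSystem with the same arguments returns the same cached object, so a directory listing taken before large-file.csv existed can be reused by a later test. The anon=False argument is only illustrative.

    # Illustrative sketch of fsspec/s3fs instance caching (not part of the patch).
    import s3fs

    # Same constructor arguments -> the same cached filesystem instance,
    # so any directory listing it already holds is reused across calls.
    fs_a = s3fs.S3FileSystem(anon=False)
    fs_b = s3fs.S3FileSystem(anon=False)
    assert fs_a is fs_b

    # Clearing the instance cache forces a fresh instance (and fresh listings).
    s3fs.S3FileSystem.clear_instance_cache()
    fs_c = s3fs.S3FileSystem(anon=False)
    assert fs_c is not fs_a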