 from pandas.util.testing import assert_frame_equal, assert_raises_regex


-def test_compression_roundtrip(compression):
+def test_compression_roundtrip(compression_no_zip):
     df = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                        [12.32112, 123123.2, 321321.2]],
                       index=['A', 'B'], columns=['X', 'Y', 'Z'])

-    if compression != 'zip':
-        with tm.ensure_clean() as path:
-            df.to_json(path, compression=compression)
-            assert_frame_equal(df, pd.read_json(path, compression=compression))
+    with tm.ensure_clean() as path:
+        df.to_json(path, compression=compression_no_zip)
+        assert_frame_equal(df, pd.read_json(path,
+                                            compression=compression_no_zip))

-            # explicitly ensure file was compressed.
-            with tm.decompress_file(path, compression) as fh:
-                result = fh.read().decode('utf8')
-                assert_frame_equal(df, pd.read_json(result))
+        # explicitly ensure file was compressed.
+        with tm.decompress_file(path, compression_no_zip) as fh:
+            result = fh.read().decode('utf8')
+            assert_frame_equal(df, pd.read_json(result))


 def test_compress_zip_value_error():
@@ -41,7 +41,7 @@ def test_read_zipped_json():
     assert_frame_equal(uncompressed_df, compressed_df)


-def test_with_s3_url(compression):
+def test_with_s3_url(compression_no_zip):
     boto3 = pytest.importorskip('boto3')
     pytest.importorskip('s3fs')
     moto = pytest.importorskip('moto')
@@ -52,39 +52,37 @@ def test_with_s3_url(compression):
         bucket = conn.create_bucket(Bucket="pandas-test")

         with tm.ensure_clean() as path:
-            df.to_json(path, compression=compression)
+            df.to_json(path, compression=compression_no_zip)
             with open(path, 'rb') as f:
                 bucket.put_object(Key='test-1', Body=f)

         roundtripped_df = pd.read_json('s3://pandas-test/test-1',
-                                       compression=compression)
+                                       compression=compression_no_zip)
         assert_frame_equal(df, roundtripped_df)


-def test_lines_with_compression(compression):
+def test_lines_with_compression(compression_no_zip):

-    if compression != 'zip':
-        with tm.ensure_clean() as path:
-            df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
-            df.to_json(path, orient='records', lines=True,
-                       compression=compression)
-            roundtripped_df = pd.read_json(path, lines=True,
-                                           compression=compression)
-            assert_frame_equal(df, roundtripped_df)
+    with tm.ensure_clean() as path:
+        df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
+        df.to_json(path, orient='records', lines=True,
+                   compression=compression_no_zip)
+        roundtripped_df = pd.read_json(path, lines=True,
+                                       compression=compression_no_zip)
+        assert_frame_equal(df, roundtripped_df)


-def test_chunksize_with_compression(compression):
+def test_chunksize_with_compression(compression_no_zip):

-    if compression != 'zip':
-        with tm.ensure_clean() as path:
-            df = pd.read_json('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}')
-            df.to_json(path, orient='records', lines=True,
-                       compression=compression)
-
-            roundtripped_df = pd.concat(pd.read_json(path, lines=True,
-                                                     chunksize=1,
-                                                     compression=compression))
-            assert_frame_equal(df, roundtripped_df)
+    with tm.ensure_clean() as path:
+        df = pd.read_json('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}')
+        df.to_json(path, orient='records', lines=True,
+                   compression=compression_no_zip)
+
+        res = pd.read_json(path, lines=True, chunksize=1,
+                           compression=compression_no_zip)
+        roundtripped_df = pd.concat(res)
+        assert_frame_equal(df, roundtripped_df)


 def test_write_unsupported_compression_type():
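Note: these tests now take a compression_no_zip pytest fixture in place of the old compression fixture, which is why the per-test if compression != 'zip': guards are dropped. The fixture itself is defined elsewhere in the test suite and is not shown in this diff. A minimal sketch of what such a fixture could look like, assuming it simply parametrizes over the formats that to_json can write (the exact parameter list is an assumption):

    # Hypothetical conftest.py sketch; the real fixture definition may differ.
    import pytest

    @pytest.fixture(params=[None, 'gzip', 'bz2', 'xz'])
    def compression_no_zip(request):
        # Compression formats exercised by the JSON round-trip tests,
        # excluding 'zip', which to_json cannot write.
        return request.param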