|
| 1 | +import pytest |
| 2 | +import moto |
| 3 | + |
| 4 | +import pandas as pd |
| 5 | +from pandas import compat |
| 6 | +import pandas.util.testing as tm |
| 7 | +from pandas.util.testing import assert_frame_equal, assert_raises_regex |
| 8 | + |
| 9 | + |
| 10 | +COMPRESSION_TYPES = [None, 'bz2', 'gzip', 'xz'] |
| 11 | + |
| 12 | + |
def test_compress_gzip():
    # Round-trip a small frame through gzip-compressed JSON and verify
    # the on-disk bytes are genuinely gzip.
    expected = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                             [12.32112, 123123.2, 321321.2]],
                            index=['A', 'B'], columns=['X', 'Y', 'Z'])

    with tm.ensure_clean() as path:
        expected.to_json(path, compression='gzip')
        roundtripped = pd.read_json(path, compression='gzip')
        assert_frame_equal(expected, roundtripped)

        # explicitly make sure file is gzipped
        import gzip
        with gzip.open(path, 'rb') as fh:
            contents = fh.read().decode('utf8')
        assert_frame_equal(expected, pd.read_json(contents))
| 27 | + |
| 28 | + |
def test_compress_bz2():
    # Round-trip a small frame through bz2-compressed JSON and verify
    # the on-disk bytes are genuinely bz2.
    expected = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                             [12.32112, 123123.2, 321321.2]],
                            index=['A', 'B'], columns=['X', 'Y', 'Z'])

    with tm.ensure_clean() as path:
        expected.to_json(path, compression='bz2')
        roundtripped = pd.read_json(path, compression='bz2')
        assert_frame_equal(expected, roundtripped)

        # explicitly make sure file is bz2ed
        import bz2
        with bz2.BZ2File(path, 'rb') as fh:
            contents = fh.read().decode('utf8')
        assert_frame_equal(expected, pd.read_json(contents))
| 43 | + |
| 44 | + |
def test_compress_xz():
    # Round-trip a small frame through xz-compressed JSON and verify
    # the on-disk bytes are genuinely xz (lzma).
    tm._skip_if_no_lzma()

    expected = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                             [12.32112, 123123.2, 321321.2]],
                            index=['A', 'B'], columns=['X', 'Y', 'Z'])

    with tm.ensure_clean() as path:
        expected.to_json(path, compression='xz')
        roundtripped = pd.read_json(path, compression='xz')
        assert_frame_equal(expected, roundtripped)

        # explicitly make sure file is xzipped
        lzma = compat.import_lzma()
        with lzma.open(path, 'rb') as fh:
            contents = fh.read().decode('utf8')
        assert_frame_equal(expected, pd.read_json(contents))
| 61 | + |
| 62 | + |
def test_compress_zip_value_error():
    # Writing with zip compression is unsupported; it should raise.
    import zipfile

    df = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                       [12.32112, 123123.2, 321321.2]],
                      index=['A', 'B'], columns=['X', 'Y', 'Z'])

    with tm.ensure_clean() as path:
        with pytest.raises(zipfile.BadZipfile):
            df.to_json(path, compression="zip")
| 71 | + |
| 72 | + |
def test_read_zipped_json():
    # A zip-compressed fixture must parse identically to its plain twin.
    plain = pd.read_json(tm.get_data_path("tsframe_v012.json"))
    zipped = pd.read_json(tm.get_data_path("tsframe_v012.json.zip"),
                          compression='zip')
    assert_frame_equal(plain, zipped)
| 81 | + |
| 82 | + |
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_with_file_url(compression):
    # read_json must honor the compression argument for file:// URLs.
    if compression == 'xz':
        tm._skip_if_no_lzma()

    frame = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
    with tm.ensure_clean() as path:
        frame.to_json(path, compression=compression)
        url = 'file://localhost' + path
        assert_frame_equal(frame, pd.read_json(url, compression=compression))
| 93 | + |
| 94 | + |
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_with_s3_url(compression):
    # read_json must honor the compression argument for s3:// URLs
    # (S3 is mocked with moto; boto3/s3fs are optional dependencies).
    boto3 = pytest.importorskip('boto3')
    pytest.importorskip('s3fs')
    if compression == 'xz':
        tm._skip_if_no_lzma()

    frame = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
    with moto.mock_s3():
        conn = boto3.resource("s3", region_name="us-east-1")
        bucket = conn.create_bucket(Bucket="pandas-test")

        # upload a compressed copy of the frame to the mocked bucket
        with tm.ensure_clean() as path:
            frame.to_json(path, compression=compression)
            with open(path, 'rb') as fh:
                bucket.put_object(Key='test-1', Body=fh)

        result = pd.read_json('s3://pandas-test/test-1',
                              compression=compression)
        assert_frame_equal(frame, result)
| 114 | + |
| 115 | + |
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_lines_with_compression(compression):
    # Line-delimited JSON should round-trip under every compression type.
    if compression == 'xz':
        tm._skip_if_no_lzma()

    frame = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
    with tm.ensure_clean() as path:
        frame.to_json(path, orient='records', lines=True,
                      compression=compression)
        result = pd.read_json(path, lines=True, compression=compression)
        assert_frame_equal(frame, result)
| 127 | + |
| 128 | + |
@pytest.mark.parametrize('compression', COMPRESSION_TYPES)
def test_chunksize_with_compression(compression):
    # Chunked line-delimited reads must decompress correctly chunk by chunk;
    # concatenating the chunks should reproduce the original frame.
    if compression == 'xz':
        tm._skip_if_no_lzma()

    frame = pd.read_json('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}')
    with tm.ensure_clean() as path:
        frame.to_json(path, orient='records', lines=True,
                      compression=compression)
        reader = pd.read_json(path, lines=True, chunksize=1,
                              compression=compression)
        assert_frame_equal(frame, pd.concat(reader))
| 141 | + |
| 142 | + |
def test_write_unsupported_compression_type():
    # An unknown compression name should raise ValueError on write.
    df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
    with tm.ensure_clean() as path:
        msg = "Unrecognized compression type: unsupported"
        with assert_raises_regex(ValueError, msg):
            df.to_json(path, compression="unsupported")
| 149 | + |
| 150 | + |
def test_read_unsupported_compression_type():
    # An unknown compression name should raise ValueError on read.
    with tm.ensure_clean() as path:
        msg = "Unrecognized compression type: unsupported"
        with assert_raises_regex(ValueError, msg):
            pd.read_json(path, compression="unsupported")