Skip to content

Commit 4775cac

Browse files
committed
refactor tests
1 parent 3a29ab3 commit 4775cac

File tree

5 files changed: +22 additions, -64 deletions

pandas/tests/frame/test_to_csv.py

-16
@@ -943,22 +943,6 @@ def test_to_csv_compression(self, compression):
         with tm.decompress_file(filename, compression) as fh:
             assert_frame_equal(df, read_csv(fh, index_col=0))

-    def test_to_csv_compression_size(self, compression):
-
-        df = pd.concat(100 * [DataFrame([[0.123456, 0.234567, 0.567567],
-                                         [12.32112, 123123.2, 321321.2]],
-                                        columns=['X', 'Y', 'Z'])])
-
-        with ensure_clean() as filename:
-            import os
-            df.to_csv(filename, compression=compression)
-            file_size = os.path.getsize(filename)
-
-            if compression:
-                df.to_csv(filename, compression=None)
-                uncompressed_file_size = os.path.getsize(filename)
-                assert uncompressed_file_size > file_size
-
     def test_to_csv_date_format(self):
         with ensure_clean('__tmp_to_csv_date_format__') as path:
             dt_index = self.tsframe.index

pandas/tests/io/json/test_compression.py

-18
@@ -21,24 +21,6 @@ def test_compression_roundtrip(compression):
     assert_frame_equal(df, pd.read_json(result))


-def test_to_json_compression_size(compression):
-
-    df = pd.concat(100 * [pd.DataFrame([[0.123456, 0.234567, 0.567567],
-                                        [12.32112, 123123.2, 321321.2]],
-                                       columns=['X', 'Y', 'Z'])],
-                   ignore_index=True)
-
-    with tm.ensure_clean() as filename:
-        import os
-        df.to_json(filename, compression=compression)
-        file_size = os.path.getsize(filename)
-
-        if compression:
-            df.to_json(filename, compression=None)
-            uncompressed_file_size = os.path.getsize(filename)
-            assert uncompressed_file_size > file_size
-
-
 def test_read_zipped_json():
     uncompressed_path = tm.get_data_path("tsframe_v012.json")
     uncompressed_df = pd.read_json(uncompressed_path)

pandas/tests/io/test_pickle.py

-15
@@ -457,21 +457,6 @@ def test_read_infer(self, ext, get_random_path):

         tm.assert_frame_equal(df, df2)

-    def test_compression_size(self, compression):
-
-        df = pd.concat(100 * [pd.DataFrame([[0.123456, 0.234567, 0.567567],
-                                            [12.32112, 123123.2, 321321.2]],
-                                           columns=['X', 'Y', 'Z'])])
-
-        with tm.ensure_clean() as filename:
-            df.to_pickle(filename, compression=compression)
-            file_size = os.path.getsize(filename)
-
-            if compression:
-                df.to_pickle(filename, compression=None)
-                uncompressed_file_size = os.path.getsize(filename)
-                assert uncompressed_file_size > file_size
-

 # ---------------------
 # test pickle compression

pandas/tests/series/test_io.py

-14
@@ -161,20 +161,6 @@ def test_to_csv_compression(self, compression):
                                  index_col=0,
                                  squeeze=True))

-    def test_to_csv_compression_size(self, compression):
-
-        s = Series(100 * [0.123456, 0.234567, 0.567567], name='X')
-
-        with ensure_clean() as filename:
-            import os
-            s.to_csv(filename, compression=compression, header=True)
-            file_size = os.path.getsize(filename)
-
-            if compression:
-                s.to_csv(filename, compression=None, header=True)
-                uncompressed_file_size = os.path.getsize(filename)
-                assert uncompressed_file_size > file_size
-

 class TestSeriesIO(TestData):
pandas/tests/test_common.py

+22-1
@@ -1,12 +1,14 @@
 # -*- coding: utf-8 -*-

 import pytest
+import os
 import collections
 from functools import partial

 import numpy as np

-from pandas import Series, Timestamp
+import pandas as pd
+from pandas import Series, DataFrame, Timestamp
 from pandas.compat import range, lmap
 import pandas.core.common as com
 from pandas.core import ops
@@ -222,3 +224,22 @@ def test_standardize_mapping():

     dd = collections.defaultdict(list)
     assert isinstance(com.standardize_mapping(dd), partial)
+
+
+@pytest.mark.parametrize('method', ['to_pickle', 'to_json', 'to_csv'])
+def test_compression_size(method, compression):
+
+    df = pd.concat(100 * [DataFrame([[0.123456, 0.234567, 0.567567],
+                                     [12.32112, 123123.2, 321321.2]],
+                                    columns=['X', 'Y', 'Z'])],
+                   ignore_index=True)
+    s = df.iloc[:, 0]
+
+    with tm.ensure_clean() as filename:
+        for obj in [df, s]:
+            getattr(obj, method)(filename, compression=compression)
+            file_size = os.path.getsize(filename)
+            getattr(obj, method)(filename, compression=None)
+            uncompressed_file_size = os.path.getsize(filename)
+            if compression:
+                assert uncompressed_file_size > file_size

0 commit comments

Comments (0)