@@ -226,20 +226,20 @@ def test_standardize_mapping():
    assert isinstance(com.standardize_mapping(dd), partial)


+@pytest.mark.parametrize('frame', [
+    pd.concat(100 * [DataFrame([[0.123456, 0.234567, 0.567567],
+                                [12.32112, 123123.2, 321321.2]],
+                               columns=['X', 'Y', 'Z'])], ignore_index=True),
+    pd.concat(100 * [Series([0.123456, 0.234567, 0.567567], name='X')],
+              ignore_index=True)])
@pytest.mark.parametrize('method', ['to_pickle', 'to_json', 'to_csv'])
-def test_compression_size(method, compression):
-
-    df = pd.concat(100 * [DataFrame([[0.123456, 0.234567, 0.567567],
-                                     [12.32112, 123123.2, 321321.2]],
-                                    columns=['X', 'Y', 'Z'])],
-                   ignore_index=True)
-    s = df.iloc[:, 0]
+def test_compression_size(frame, method, compression):
+    if not compression:
+        pytest.skip("only test compression case.")

    with tm.ensure_clean() as filename:
-        for obj in [df, s]:
-            getattr(obj, method)(filename, compression=compression)
-            file_size = os.path.getsize(filename)
-            getattr(obj, method)(filename, compression=None)
-            uncompressed_file_size = os.path.getsize(filename)
-            if compression:
-                assert uncompressed_file_size > file_size
+        getattr(frame, method)(filename, compression=compression)
+        compressed = os.path.getsize(filename)
+        getattr(frame, method)(filename, compression=None)
+        uncompressed = os.path.getsize(filename)
+        assert uncompressed > compressed
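For reference, a minimal standalone sketch (outside the pandas test suite) of the size check this parametrized test performs; the temporary path and the explicit 'gzip' choice are illustrative, not taken from the PR:

# Build a frame of 100 repeated rows so compression has something to work with.
import os
import tempfile

import pandas as pd

df = pd.DataFrame([[0.123456, 0.234567, 0.567567],
                   [12.32112, 123123.2, 321321.2]],
                  columns=['X', 'Y', 'Z'])
frame = pd.concat(100 * [df], ignore_index=True)

with tempfile.TemporaryDirectory() as tmpdir:
    path = os.path.join(tmpdir, 'data.csv')
    frame.to_csv(path, compression='gzip')  # compressed output
    compressed = os.path.getsize(path)
    frame.to_csv(path, compression=None)    # plain-text output
    uncompressed = os.path.getsize(path)
    assert uncompressed > compressed        # repeated rows compress well

This mirrors the test body: write the same object twice, once compressed and once not, and compare the on-disk sizes.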