Commit a9ccd04

CLN: Add teardowns for some benchmarks (#17616)
Added teardowns for hdfstore, io and packers benchmarks.
1 parent 1647a72 commit a9ccd04
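
For context: airspeed velocity (asv) calls a benchmark class's setup() before a benchmark runs and teardown() after it finishes, so a benchmark that writes a scratch file should delete it in teardown() rather than leave it in the working tree. A minimal sketch of the pattern this commit applies throughout; the class and file names below are illustrative, not taken from the benchmark suite:

    import os

    import numpy as np
    from pandas import DataFrame


    class ExampleCSVBenchmark(object):
        # 0.2 matches the goal_time used by the existing benchmarks
        goal_time = 0.2
        fname = '__example__.csv'  # illustrative scratch file name

        def setup(self):
            # runs before the benchmark is timed
            self.df = DataFrame(np.random.randn(1000, 10))

        def time_write_csv(self):
            # the body asv times
            self.df.to_csv(self.fname)

        def teardown(self):
            # runs after the benchmark; drop the file it wrote
            if os.path.exists(self.fname):
                os.remove(self.fname)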

File tree (4 files changed: +47 -30 lines changed)

    asv_bench/benchmarks/hdfstore_bench.py   +2
    asv_bench/benchmarks/io_bench.py         +37 -9
    asv_bench/benchmarks/packers.py          +7 -20
    doc/source/whatsnew/v0.22.0.txt          +1 -1


asv_bench/benchmarks/hdfstore_bench.py (+2)

@@ -40,6 +40,7 @@ def setup(self):
 
     def teardown(self):
         self.store.close()
+        self.remove(self.f)
 
     def remove(self, f):
         try:
@@ -115,6 +116,7 @@ def setup(self):
 
     def teardown(self):
         self.store.close()
+        self.remove(self.f)
 
     def remove(self, f):
         try:
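
The two hunks above reuse the class's existing remove() helper so that teardown() both closes the HDFStore and deletes the backing file. A rough, self-contained sketch of how those pieces fit together; the class name, setup body, and timed method are assumed here rather than copied from hdfstore_bench.py:

    import os

    import numpy as np
    from pandas import DataFrame, HDFStore


    class HDFStoreBenchSketch(object):
        goal_time = 0.2

        def setup(self):
            self.f = '__test__.h5'   # scratch HDF5 file, name assumed
            self.df = DataFrame(np.random.randn(1000, 10))
            self.remove(self.f)      # clear leftovers from an interrupted run
            self.store = HDFStore(self.f)
            self.store.put('df', self.df)

        def time_read_store(self):
            self.store.get('df')

        def teardown(self):
            self.store.close()
            self.remove(self.f)      # the cleanup step added by this commit

        def remove(self, f):
            try:
                os.remove(f)
            except OSError:
                pass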

asv_bench/benchmarks/io_bench.py (+37 -9)

@@ -10,16 +10,21 @@
 
 class frame_to_csv(object):
     goal_time = 0.2
+    fname = '__test__.csv'
 
     def setup(self):
         self.df = DataFrame(np.random.randn(3000, 30))
 
     def time_frame_to_csv(self):
-        self.df.to_csv('__test__.csv')
+        self.df.to_csv(self.fname)
+
+    def teardown(self):
+        os.remove(self.fname)
 
 
 class frame_to_csv2(object):
     goal_time = 0.2
+    fname = '__test__.csv'
 
     def setup(self):
         self.df = DataFrame({'A': range(50000), })
@@ -28,22 +33,30 @@ def setup(self):
         self.df['D'] = (self.df.A + 3.0)
 
     def time_frame_to_csv2(self):
-        self.df.to_csv('__test__.csv')
+        self.df.to_csv(self.fname)
+
+    def teardown(self):
+        os.remove(self.fname)
 
 
 class frame_to_csv_date_formatting(object):
     goal_time = 0.2
+    fname = '__test__.csv'
 
     def setup(self):
         self.rng = date_range('1/1/2000', periods=1000)
         self.data = DataFrame(self.rng, index=self.rng)
 
     def time_frame_to_csv_date_formatting(self):
-        self.data.to_csv('__test__.csv', date_format='%Y%m%d')
+        self.data.to_csv(self.fname, date_format='%Y%m%d')
+
+    def teardown(self):
+        os.remove(self.fname)
 
 
 class frame_to_csv_mixed(object):
     goal_time = 0.2
+    fname = '__test__.csv'
 
     def setup(self):
         self.df_float = DataFrame(np.random.randn(5000, 5), dtype='float64', columns=self.create_cols('float'))
@@ -55,11 +68,14 @@ def setup(self):
         self.df = concat([self.df_float, self.df_int, self.df_bool, self.df_object, self.df_dt], axis=1)
 
     def time_frame_to_csv_mixed(self):
-        self.df.to_csv('__test__.csv')
+        self.df.to_csv(self.fname)
 
     def create_cols(self, name):
         return [('%s%03d' % (name, i)) for i in range(5)]
 
+    def teardown(self):
+        os.remove(self.fname)
+
 
 class read_csv_infer_datetime_format_custom(object):
     goal_time = 0.2
@@ -96,26 +112,34 @@ def time_read_csv_infer_datetime_format_ymd(self):
 
 class read_csv_skiprows(object):
     goal_time = 0.2
+    fname = '__test__.csv'
 
     def setup(self):
         self.index = tm.makeStringIndex(20000)
         self.df = DataFrame({'float1': randn(20000), 'float2': randn(20000), 'string1': (['foo'] * 20000), 'bool1': ([True] * 20000), 'int1': np.random.randint(0, 200000, size=20000), }, index=self.index)
-        self.df.to_csv('__test__.csv')
+        self.df.to_csv(self.fname)
 
     def time_read_csv_skiprows(self):
-        read_csv('__test__.csv', skiprows=10000)
+        read_csv(self.fname, skiprows=10000)
+
+    def teardown(self):
+        os.remove(self.fname)
 
 
 class read_csv_standard(object):
     goal_time = 0.2
+    fname = '__test__.csv'
 
     def setup(self):
         self.index = tm.makeStringIndex(10000)
         self.df = DataFrame({'float1': randn(10000), 'float2': randn(10000), 'string1': (['foo'] * 10000), 'bool1': ([True] * 10000), 'int1': np.random.randint(0, 100000, size=10000), }, index=self.index)
-        self.df.to_csv('__test__.csv')
+        self.df.to_csv(self.fname)
 
     def time_read_csv_standard(self):
-        read_csv('__test__.csv')
+        read_csv(self.fname)
+
+    def teardown(self):
+        os.remove(self.fname)
 
 
 class read_parse_dates_iso8601(object):
@@ -154,13 +178,17 @@ def time_read_uint64_na_values(self):
 
 class write_csv_standard(object):
     goal_time = 0.2
+    fname = '__test__.csv'
 
     def setup(self):
         self.index = tm.makeStringIndex(10000)
         self.df = DataFrame({'float1': randn(10000), 'float2': randn(10000), 'string1': (['foo'] * 10000), 'bool1': ([True] * 10000), 'int1': np.random.randint(0, 100000, size=10000), }, index=self.index)
 
     def time_write_csv_standard(self):
-        self.df.to_csv('__test__.csv')
+        self.df.to_csv(self.fname)
+
+    def teardown(self):
+        os.remove(self.fname)
 
 
 class read_csv_from_s3(object):
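
Taken together, every class touched in io_bench.py now follows the same shape: the scratch path lives in a fname class attribute, the setup and timed methods write or read it, and teardown() unlinks it. A self-contained version of that pattern, with an illustrative class name:

    import os

    import numpy as np
    from pandas import DataFrame, read_csv


    class CSVRoundTripSketch(object):
        goal_time = 0.2
        fname = '__test__.csv'   # scratch file, as in io_bench.py

        def setup(self):
            # regenerate the input file so the timed read has something to parse
            self.df = DataFrame(np.random.randn(10000, 5))
            self.df.to_csv(self.fname)

        def time_read_csv(self):
            read_csv(self.fname)

        def teardown(self):
            # remove the scratch file once the benchmark has run
            os.remove(self.fname)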

asv_bench/benchmarks/packers.py (+7 -20)

@@ -9,6 +9,7 @@
 import numpy as np
 from random import randrange
 
+
 class _Packers(object):
     goal_time = 0.2
 
@@ -28,8 +29,11 @@ def remove(self, f):
         except:
             pass
 
+    def teardown(self):
+        self.remove(self.f)
+
+
 class Packers(_Packers):
-    goal_time = 0.2
 
     def setup(self):
         self._setup()
@@ -38,8 +42,8 @@ def setup(self):
     def time_packers_read_csv(self):
         pd.read_csv(self.f)
 
+
 class packers_read_excel(_Packers):
-    goal_time = 0.2
 
     def setup(self):
         self._setup()
@@ -54,7 +58,6 @@ def time_packers_read_excel(self):
 
 
 class packers_read_hdf_store(_Packers):
-    goal_time = 0.2
 
     def setup(self):
         self._setup()
@@ -115,6 +118,7 @@ def setup(self):
     def time_packers_read_pickle(self):
         pd.read_pickle(self.f)
 
+
 class packers_read_sql(_Packers):
 
     def setup(self):
@@ -177,9 +181,6 @@ def setup(self):
     def time_write_csv(self):
         self.df.to_csv(self.f)
 
-    def teardown(self):
-        self.remove(self.f)
-
 
 class Excel(_Packers):
 
@@ -217,8 +218,6 @@ def time_write_hdf_store(self):
     def time_write_hdf_table(self):
         self.df2.to_hdf(self.f, 'df', table=True)
 
-    def teardown(self):
-        self.remove(self.f)
 
 class JSON(_Packers):
 
@@ -259,9 +258,6 @@ def time_write_json_mixed_float_int_str(self):
     def time_write_json_lines(self):
         self.df.to_json(self.f, orient="records", lines=True)
 
-    def teardown(self):
-        self.remove(self.f)
-
 
 class MsgPack(_Packers):
 
@@ -271,9 +267,6 @@ def setup(self):
     def time_write_msgpack(self):
         self.df2.to_msgpack(self.f)
 
-    def teardown(self):
-        self.remove(self.f)
-
 
 class Pickle(_Packers):
 
@@ -283,9 +276,6 @@ def setup(self):
     def time_write_pickle(self):
         self.df2.to_pickle(self.f)
 
-    def teardown(self):
-        self.remove(self.f)
-
 
 class SQL(_Packers):
 
@@ -313,6 +303,3 @@ def time_write_stata(self):
 
     def time_write_stata_with_validation(self):
         self.df3.to_stata(self.f, {'index': 'tc', })
-
-    def teardown(self):
-        self.remove(self.f)
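
The net effect in packers.py is that teardown() is defined once on the _Packers base class and inherited everywhere, so the per-subclass copies could be deleted, and the redundant goal_time overrides go with them. A condensed sketch of that arrangement; the _setup() body is a placeholder because its real contents are not part of this diff:

    import os

    import numpy as np
    from pandas import DataFrame


    class _Packers(object):
        goal_time = 0.2

        def _setup(self):
            # placeholder: the real _setup builds the frames and sets self.f
            self.f = '__test__.csv'
            self.df = DataFrame(np.random.randn(1000, 2))

        def remove(self, f):
            try:
                os.remove(f)
            except OSError:
                pass

        def teardown(self):
            # inherited by every subclass; no per-class teardown needed
            self.remove(self.f)


    class WriteCSVSketch(_Packers):

        def setup(self):
            self._setup()

        def time_write_csv(self):
            self.df.to_csv(self.f)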

doc/source/whatsnew/v0.22.0.txt (+1 -1)

@@ -167,5 +167,5 @@ Other
 ^^^^^
 
 - Improved error message when attempting to use a Python keyword as an identifier in a numexpr query (:issue:`18221`)
--
+- Added teardown functions in asv benchmarks for hdfstore, io and packers benches.
 -
