@@ -284,7 +284,19 @@ def to_hdf(
         f(path_or_buf)
 
 
-def read_hdf(path_or_buf, key=None, mode: str = "r", **kwargs):
+def read_hdf(
+    path_or_buf,
+    key=None,
+    mode: str = "r",
+    errors: str = "strict",
+    where=None,
+    start: Optional[int] = None,
+    stop: Optional[int] = None,
+    columns=None,
+    iterator=False,
+    chunksize: Optional[int] = None,
+    **kwargs,
+):
     """
     Read from the store, close it if we opened it.
 
@@ -350,15 +362,18 @@ def read_hdf(path_or_buf, key=None, mode: str = "r", **kwargs):
     >>> df.to_hdf('./store.h5', 'data')
     >>> reread = pd.read_hdf('./store.h5')
     """
+    assert not kwargs, kwargs
+    # NB: in principle more kwargs could be passed to HDFStore, but in
+    # tests none are.
 
     if mode not in ["r", "r+", "a"]:
         raise ValueError(
             f"mode {mode} is not allowed while performing a read. "
             f"Allowed modes are r, r+ and a."
         )
     # grab the scope
-    if "where" in kwargs:
-        kwargs["where"] = _ensure_term(kwargs["where"], scope_level=1)
+    if where is not None:
+        where = _ensure_term(where, scope_level=1)
 
     if isinstance(path_or_buf, HDFStore):
         if not path_or_buf.is_open:
@@ -382,7 +397,7 @@ def read_hdf(path_or_buf, key=None, mode: str = "r", **kwargs):
         if not exists:
             raise FileNotFoundError(f"File {path_or_buf} does not exist")
 
-        store = HDFStore(path_or_buf, mode=mode, **kwargs)
+        store = HDFStore(path_or_buf, mode=mode, errors=errors, **kwargs)
         # can't auto open/close if we are using an iterator
         # so delegate to the iterator
         auto_close = True
@@ -405,7 +420,16 @@ def read_hdf(path_or_buf, key=None, mode: str = "r", **kwargs):
                         "contains multiple datasets."
                     )
             key = candidate_only_group._v_pathname
-        return store.select(key, auto_close=auto_close, **kwargs)
+        return store.select(
+            key,
+            where=where,
+            start=start,
+            stop=stop,
+            columns=columns,
+            iterator=iterator,
+            chunksize=chunksize,
+            auto_close=auto_close,
+        )
     except (ValueError, TypeError, KeyError):
         if not isinstance(path_or_buf, HDFStore):
             # if there is an error, close the store if we opened it.
@@ -734,7 +758,6 @@ def select(
         iterator=False,
         chunksize=None,
         auto_close: bool = False,
-        **kwargs,
     ):
         """
         Retrieve pandas object stored in file, optionally based on where criteria.
@@ -850,7 +873,6 @@ def select_as_multiple(
         iterator=False,
         chunksize=None,
         auto_close: bool = False,
-        **kwargs,
     ):
         """
        Retrieve pandas objects from multiple tables.
@@ -888,7 +910,7 @@ def select_as_multiple(
                 stop=stop,
                 iterator=iterator,
                 chunksize=chunksize,
-                **kwargs,
+                auto_close=auto_close,
             )
 
         if not isinstance(keys, (list, tuple)):
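For reference, a minimal sketch of how the now-explicit keywords flow through read_hdf into HDFStore.select (the file name and key below are illustrative, not taken from this diff):

    import pandas as pd

    df = pd.DataFrame({"a": range(10)})
    # format="table" so that `where` queries are supported on read
    df.to_hdf("store.h5", "data", format="table")

    # where/columns/start/stop/iterator/chunksize are now named parameters
    # of read_hdf rather than entries in **kwargs
    subset = pd.read_hdf("store.h5", "data", where="index < 5", columns=["a"])

    # Any leftover **kwargs (e.g. a misspelled keyword) now trips the
    # `assert not kwargs, kwargs` added above:
    # pd.read_hdf("store.h5", "data", chunk_size=2)  # AssertionError: {'chunk_size': 2}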