Skip to content

Commit 7de90a7

Browse files
jbrockmendel authored and proost committed
CLN: explicit signature for to_hdf (pandas-dev#29939)
1 parent 63dfbb6 commit 7de90a7

File tree

2 files changed

+64
-29
lines changed

2 files changed

+64
-29
lines changed

pandas/core/generic.py

+47-20
Original file line numberDiff line numberDiff line change
@@ -2403,7 +2403,19 @@ def to_json(
24032403
indent=indent,
24042404
)
24052405

2406-
def to_hdf(self, path_or_buf, key, **kwargs):
2406+
def to_hdf(
2407+
self,
2408+
path_or_buf,
2409+
key: str,
2410+
mode: str = "a",
2411+
complevel: Optional[int] = None,
2412+
complib: Optional[str] = None,
2413+
append: bool_t = False,
2414+
format: Optional[str] = None,
2415+
errors: str = "strict",
2416+
encoding: str = "UTF-8",
2417+
**kwargs,
2418+
):
24072419
"""
24082420
Write the contained data to an HDF5 file using HDFStore.
24092421
@@ -2431,40 +2443,43 @@ def to_hdf(self, path_or_buf, key, **kwargs):
24312443
- 'a': append, an existing file is opened for reading and
24322444
writing, and if the file does not exist it is created.
24332445
- 'r+': similar to 'a', but the file must already exist.
2434-
format : {'fixed', 'table'}, default 'fixed'
2446+
complevel : {0-9}, optional
2447+
Specifies a compression level for data.
2448+
A value of 0 disables compression.
2449+
complib : {'zlib', 'lzo', 'bzip2', 'blosc'}, default 'zlib'
2450+
Specifies the compression library to be used.
2451+
As of v0.20.2 these additional compressors for Blosc are supported
2452+
(default if no compressor specified: 'blosc:blosclz'):
2453+
{'blosc:blosclz', 'blosc:lz4', 'blosc:lz4hc', 'blosc:snappy',
2454+
'blosc:zlib', 'blosc:zstd'}.
2455+
Specifying a compression library which is not available issues
2456+
a ValueError.
2457+
append : bool, default False
2458+
For Table formats, append the input data to the existing.
2459+
format : {'fixed', 'table', None}, default 'fixed'
24352460
Possible values:
24362461
24372462
- 'fixed': Fixed format. Fast writing/reading. Not-appendable,
24382463
nor searchable.
24392464
- 'table': Table format. Write as a PyTables Table structure
24402465
which may perform worse but allow more flexible operations
24412466
like searching / selecting subsets of the data.
2442-
append : bool, default False
2443-
For Table formats, append the input data to the existing.
2467+
- If None, pd.get_option('io.hdf.default_format') is checked,
2468+
followed by fallback to "fixed"
2469+
errors : str, default 'strict'
2470+
Specifies how encoding and decoding errors are to be handled.
2471+
See the errors argument for :func:`open` for a full list
2472+
of options.
2473+
encoding : str, default "UTF-8"
24442474
data_columns : list of columns or True, optional
24452475
List of columns to create as indexed data columns for on-disk
24462476
queries, or True to use all columns. By default only the axes
24472477
of the object are indexed. See :ref:`io.hdf5-query-data-columns`.
24482478
Applicable only to format='table'.
2449-
complevel : {0-9}, optional
2450-
Specifies a compression level for data.
2451-
A value of 0 disables compression.
2452-
complib : {'zlib', 'lzo', 'bzip2', 'blosc'}, default 'zlib'
2453-
Specifies the compression library to be used.
2454-
As of v0.20.2 these additional compressors for Blosc are supported
2455-
(default if no compressor specified: 'blosc:blosclz'):
2456-
{'blosc:blosclz', 'blosc:lz4', 'blosc:lz4hc', 'blosc:snappy',
2457-
'blosc:zlib', 'blosc:zstd'}.
2458-
Specifying a compression library which is not available issues
2459-
a ValueError.
24602479
fletcher32 : bool, default False
24612480
If applying compression use the fletcher32 checksum.
24622481
dropna : bool, default False
24632482
If true, ALL nan rows will not be written to store.
2464-
errors : str, default 'strict'
2465-
Specifies how encoding and decoding errors are to be handled.
2466-
See the errors argument for :func:`open` for a full list
2467-
of options.
24682483
24692484
See Also
24702485
--------
@@ -2506,7 +2521,19 @@ def to_hdf(self, path_or_buf, key, **kwargs):
25062521
"""
25072522
from pandas.io import pytables
25082523

2509-
pytables.to_hdf(path_or_buf, key, self, **kwargs)
2524+
pytables.to_hdf(
2525+
path_or_buf,
2526+
key,
2527+
self,
2528+
mode=mode,
2529+
complevel=complevel,
2530+
complib=complib,
2531+
append=append,
2532+
format=format,
2533+
errors=errors,
2534+
encoding=encoding,
2535+
**kwargs,
2536+
)
25102537

25112538
def to_msgpack(self, path_or_buf=None, encoding="utf-8", **kwargs):
25122539
"""

pandas/io/pytables.py

+17-9
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,7 @@
5353
concat,
5454
isna,
5555
)
56+
from pandas._typing import FrameOrSeries
5657
from pandas.core.arrays.categorical import Categorical
5758
import pandas.core.common as com
5859
from pandas.core.computation.pytables import PyTablesExpr, maybe_expression
@@ -251,20 +252,27 @@ def _tables():
251252

252253
def to_hdf(
253254
path_or_buf,
254-
key,
255-
value,
256-
mode=None,
255+
key: str,
256+
value: FrameOrSeries,
257+
mode: str = "a",
257258
complevel: Optional[int] = None,
258-
complib=None,
259-
append=None,
259+
complib: Optional[str] = None,
260+
append: bool = False,
261+
format: Optional[str] = None,
262+
errors: str = "strict",
263+
encoding: str = "UTF-8",
260264
**kwargs,
261265
):
262266
""" store this object, close it if we opened it """
263267

264268
if append:
265-
f = lambda store: store.append(key, value, **kwargs)
269+
f = lambda store: store.append(
270+
key, value, format=format, errors=errors, encoding=encoding, **kwargs
271+
)
266272
else:
267-
f = lambda store: store.put(key, value, **kwargs)
273+
f = lambda store: store.put(
274+
key, value, format=format, errors=errors, encoding=encoding, **kwargs
275+
)
268276

269277
path_or_buf = _stringify_path(path_or_buf)
270278
if isinstance(path_or_buf, str):
@@ -1042,7 +1050,7 @@ def append(
10421050
format=None,
10431051
append=True,
10441052
columns=None,
1045-
dropna=None,
1053+
dropna: Optional[bool] = None,
10461054
**kwargs,
10471055
):
10481056
"""
@@ -1070,7 +1078,7 @@ def append(
10701078
chunksize : size to chunk the writing
10711079
expectedrows : expected TOTAL row size of this table
10721080
encoding : default None, provide an encoding for strings
1073-
dropna : bool, default False
1081+
dropna : bool, default False
10741082
Do not write an ALL nan row to the store settable
10751083
by the option 'io.hdf.dropna_table'.
10761084

0 commit comments

Comments (0)