@@ -1399,7 +1399,7 @@ Writing to a file, with a date index and a date column
    dfj2['date'] = Timestamp('20130101')
    dfj2['ints'] = list(range(5))
    dfj2['bools'] = True
-   dfj2.index = date_range('20130101', periods=5)
+   dfj2.index = pd.date_range('20130101', periods=5)
    dfj2.to_json('test.json')
    open('test.json').read()
 
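A minimal sketch of the round trip this hunk documents, assuming ``pandas`` is imported as ``pd``; the frame contents and the ``test.json`` path are illustrative only:

    import pandas as pd

    # mirror dfj2: integer and boolean columns, a date column, and a date index
    df = pd.DataFrame({'ints': list(range(5))})
    df['bools'] = True
    df['date'] = pd.Timestamp('20130101')
    df.index = pd.date_range('20130101', periods=5)

    # write to JSON and read it back
    df.to_json('test.json')
    roundtrip = pd.read_json('test.json')
    print(roundtrip)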
@@ -2562,7 +2562,7 @@ dict:
 .. ipython:: python
 
    np.random.seed(1234)
-   index = date_range('1/1/2000', periods=8)
+   index = pd.date_range('1/1/2000', periods=8)
    s = Series(randn(5), index=['a', 'b', 'c', 'd', 'e'])
    df = DataFrame(randn(8, 3), index=index,
                   columns=['A', 'B', 'C'])
@@ -2754,7 +2754,7 @@ enable ``put/append/to_hdf`` to by default store in the ``table`` format.
 
 .. ipython:: python
 
-   store = HDFStore('store.h5')
+   store = pd.HDFStore('store.h5')
    df1 = df[0:4]
    df2 = df[4:]
 
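A sketch of how the two slices set up here are usually written into a single ``table``-format node and read back (this assumes PyTables is installed; the file name ``store.h5`` and the key ``'df'`` are placeholders):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame(np.random.randn(8, 3), columns=['A', 'B', 'C'],
                      index=pd.date_range('1/1/2000', periods=8))
    df1 = df[0:4]
    df2 = df[4:]

    store = pd.HDFStore('store.h5')
    store.append('df', df1)      # first append creates the table node
    store.append('df', df2)      # subsequent appends add rows to it
    result = store.select('df')  # read the combined frame back
    store.close()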
@@ -2801,6 +2801,22 @@ everything in the sub-store and BELOW, so be *careful*.
 
 .. _io.hdf5-types:
 
+.. warning:: Hierarchical keys cannot be retrieved via dotted (attribute) access as described above for items stored under the root node.
+
+   .. ipython:: python
+
+      store.foo.bar.bah
+      AttributeError: 'HDFStore' object has no attribute 'foo'
+
+      store.root.foo.bar.bah
+      /foo/bar/bah (Group) ''
+        children := ['block0_items' (Array), 'axis1' (Array), 'axis0' (Array), 'block0_values' (Array)]
+
+
+
+
+
+
 Storing Types
 '''''''''''''
 
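As the new warning says, dotted access only reaches items stored directly under the root node; hierarchical keys are addressed with dictionary-style access instead. A rough sketch (the ``foo/bar/bah`` path follows the example above; the frame and file name are illustrative):

    import pandas as pd

    store = pd.HDFStore('store.h5')
    df = pd.DataFrame({'A': [1, 2, 3]})

    # store under a hierarchical key
    store.put('foo/bar/bah', df)

    # dictionary-style access works; store.foo would raise AttributeError
    retrieved = store['foo/bar/bah']

    store.close()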
@@ -3364,7 +3380,7 @@ Compression for all objects within the file
 
 .. code-block:: python
 
-   store_compressed = HDFStore('store_compressed.h5', complevel=9, complib='blosc')
+   store_compressed = pd.HDFStore('store_compressed.h5', complevel=9, complib='blosc')
 
 Or on-the-fly compression (this only applies to tables). You can turn
 off file compression for a specific table by passing ``complevel=0``
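The same ``complevel``/``complib`` options can also be applied per object when writing, for example through ``to_hdf``; a sketch (the file name, key, and choice of ``blosc`` are placeholders, and ``blosc`` support depends on the installed PyTables):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame(np.random.randn(1000, 3), columns=['A', 'B', 'C'])

    # write this one object compressed; on-the-fly compression applies to table format
    df.to_hdf('compressed.h5', 'df', format='table', complevel=9, complib='blosc')

    back = pd.read_hdf('compressed.h5', 'df')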
@@ -3573,7 +3589,7 @@ It is possible to write an ``HDFStore`` object that can easily be imported into
                         index=range(100))
    df_for_r.head()
 
-   store_export = HDFStore('export.h5')
+   store_export = pd.HDFStore('export.h5')
    store_export.append('df_for_r', df_for_r, data_columns=df_dc.columns)
    store_export
 
@@ -3662,7 +3678,7 @@ number of options, please see the docstring.
 .. ipython:: python
 
    # a legacy store
-   legacy_store = HDFStore(legacy_file_path, 'r')
+   legacy_store = pd.HDFStore(legacy_file_path, 'r')
    legacy_store
 
    # copy (and return the new handle)
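The trailing ``# copy (and return the new handle)`` comment refers to ``HDFStore.copy``, which writes the contents into a new file and hands back a store for it; a sketch (the legacy path and the target name ``store_new.h5`` are placeholders):

    import pandas as pd

    legacy_file_path = 'legacy_table.h5'  # placeholder: an existing legacy-format file

    # open read-only, copy everything into a new store, and get its handle back
    legacy_store = pd.HDFStore(legacy_file_path, 'r')
    new_store = legacy_store.copy('store_new.h5')

    legacy_store.close()
    new_store.close()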