@@ -197,19 +197,19 @@ def __init__(self, path, mode='a', complevel=None, complib=None,
         except ImportError:  # pragma: no cover
             raise Exception('HDFStore requires PyTables')

-        self.path = path
-        self.mode = mode
-        self.handle = None
-        self.complevel = complevel
-        self.complib = complib
-        self.fletcher32 = fletcher32
-        self.filters = None
+        self._path = path
+        self._mode = mode
+        self._handle = None
+        self._complevel = complevel
+        self._complib = complib
+        self._fletcher32 = fletcher32
+        self._filters = None
         self.open(mode=mode, warn=False)

     @property
     def root(self):
         """ return the root node """
-        return self.handle.root
+        return self._handle.root

     def __getitem__(self, key):
         return self.get(key)
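Usage is not changed by the rename above; only the attribute names become private. An illustrative sketch (not from this commit; the 'example.h5' path is hypothetical) of the constructor arguments that now land in the underscore-prefixed attributes:

    from pandas import DataFrame, HDFStore

    # complevel/complib/fletcher32 are the __init__ arguments that are now
    # stored as _complevel/_complib/_fletcher32 on the store.
    store = HDFStore('example.h5', mode='a', complevel=9, complib='zlib', fletcher32=False)
    try:
        store['df'] = DataFrame({'A': [1, 2, 3]})  # dict-like write via __setitem__
        root = store.root                          # property backed by the private _handle
    finally:
        store.close()                              # closes the underlying PyTables handle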
@@ -220,10 +220,19 @@ def __setitem__(self, key, value):
     def __delitem__(self, key):
         return self.remove(key)

+    def __getattr__(self, name):
+        """ allow attribute access to get stores """
+        try:
+            return self.get(name)
+        except:
+            pass
+        raise AttributeError("'%s' object has no attribute '%s'" %
+                             (type(self).__name__, name))
+
     def __contains__(self, key):
         """ check for existance of this key
             can match the exact pathname or the pathnm w/o the leading '/'
-            """
+            """
         node = self.get_node(key)
         if node is not None:
             name = node._v_pathname
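An illustrative sketch of the new __getattr__ fallback (not from this commit; the file name and keys are hypothetical). Attribute access simply retries get() and turns a miss into AttributeError:

    from pandas import DataFrame, HDFStore

    store = HDFStore('example.h5')
    store['df'] = DataFrame({'A': [1, 2, 3]})
    assert 'df' in store                           # __contains__ matches '/df' or 'df'
    assert (store.df == store['df']).all().all()   # store.df routes through get('df')
    try:
        store.not_a_key                            # missing key -> AttributeError
    except AttributeError:
        pass
    store.close()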
@@ -234,7 +243,7 @@ def __len__(self):
         return len(self.groups())

     def __repr__(self):
-        output = '%s\nFile path: %s\n' % (type(self), self.path)
+        output = '%s\nFile path: %s\n' % (type(self), self._path)

         if len(self.keys()):
             keys = []
@@ -277,7 +286,7 @@ def open(self, mode='a', warn=True):
         mode : {'a', 'w', 'r', 'r+'}, default 'a'
             See HDFStore docstring or tables.openFile for info about modes
         """
-        self.mode = mode
+        self._mode = mode
         if warn and mode == 'w':  # pragma: no cover
             while True:
                 response = raw_input("Re-opening as mode='w' will delete the "
@@ -286,36 +295,36 @@ def open(self, mode='a', warn=True):
                     break
                 elif response == 'n':
                     return
-        if self.handle is not None and self.handle.isopen:
-            self.handle.close()
+        if self._handle is not None and self._handle.isopen:
+            self._handle.close()

-        if self.complib is not None:
-            if self.complevel is None:
-                self.complevel = 9
-            self.filters = _tables().Filters(self.complevel,
-                                             self.complib,
-                                             fletcher32=self.fletcher32)
+        if self._complib is not None:
+            if self._complevel is None:
+                self._complevel = 9
+            self._filters = _tables().Filters(self._complevel,
+                                              self._complib,
+                                              fletcher32=self._fletcher32)

         try:
-            self.handle = h5_open(self.path, self.mode)
+            self._handle = h5_open(self._path, self._mode)
         except IOError, e:  # pragma: no cover
             if 'can not be written' in str(e):
-                print 'Opening %s in read-only mode' % self.path
-                self.handle = h5_open(self.path, 'r')
+                print 'Opening %s in read-only mode' % self._path
+                self._handle = h5_open(self._path, 'r')
             else:
                 raise

     def close(self):
         """
         Close the PyTables file handle
         """
-        self.handle.close()
+        self._handle.close()

     def flush(self):
         """
         Force all buffered modifications to be written to disk
         """
-        self.handle.flush()
+        self._handle.flush()

     def get(self, key):
         """
@@ -617,14 +626,14 @@ def create_table_index(self, key, **kwargs):
     def groups(self):
         """ return a list of all the top-level nodes (that are not themselves a pandas storage object) """
         _tables()
-        return [ g for g in self.handle.walkNodes() if getattr(g._v_attrs, 'pandas_type', None) or getattr(g, 'table', None) or (isinstance(g, _table_mod.table.Table) and g._v_name != 'table') ]
+        return [ g for g in self._handle.walkNodes() if getattr(g._v_attrs, 'pandas_type', None) or getattr(g, 'table', None) or (isinstance(g, _table_mod.table.Table) and g._v_name != 'table') ]

     def get_node(self, key):
         """ return the node with the key or None if it does not exist """
         try:
             if not key.startswith('/'):
                 key = '/' + key
-            return self.handle.getNode(self.root, key)
+            return self._handle.getNode(self.root, key)
         except:
             return None

@@ -751,7 +760,7 @@ def _write_to_group(self, key, value, index=True, table=False, append=False, com

         # remove the node if we are not appending
         if group is not None and not append:
-            self.handle.removeNode(group, recursive=True)
+            self._handle.removeNode(group, recursive=True)
             group = None

         if group is None:
@@ -768,7 +777,7 @@ def _write_to_group(self, key, value, index=True, table=False, append=False, com
                 new_path += p
                 group = self.get_node(new_path)
                 if group is None:
-                    group = self.handle.createGroup(path, p)
+                    group = self._handle.createGroup(path, p)
                 path = new_path

         s = self._create_storer(group, value, table=table, append=append, **kwargs)
@@ -1304,28 +1313,28 @@ def pathname(self):
         return self.group._v_pathname

     @property
-    def handle(self):
-        return self.parent.handle
+    def _handle(self):
+        return self.parent._handle

     @property
     def _quiet(self):
         return self.parent._quiet

     @property
-    def filters(self):
-        return self.parent.filters
+    def _filters(self):
+        return self.parent._filters

     @property
-    def complevel(self):
-        return self.parent.complevel
+    def _complevel(self):
+        return self.parent._complevel

     @property
-    def fletcher32(self):
-        return self.parent.fletcher32
+    def _fletcher32(self):
+        return self.parent._fletcher32

     @property
-    def complib(self):
-        return self.parent.complib
+    def _complib(self):
+        return self.parent._complib

     @property
     def attrs(self):
@@ -1380,7 +1389,7 @@ def write(self, **kwargs):
     def delete(self, where=None, **kwargs):
         """ support fully deleting the node in its entirety (only) - where specification must be None """
         if where is None:
-            self.handle.removeNode(self.group, recursive=True)
+            self._handle.removeNode(self.group, recursive=True)
             return None

         raise NotImplementedError("cannot delete on an abstract storer")
@@ -1583,7 +1592,7 @@ def read_index_node(self, node):

     def write_array(self, key, value):
         if key in self.group:
-            self.handle.removeNode(self.group, key)
+            self._handle.removeNode(self.group, key)

         # Transform needed to interface with pytables row/col notation
         empty_array = any(x == 0 for x in value.shape)
@@ -1593,7 +1602,7 @@ def write_array(self, key, value):
             value = value.T
             transposed = True

-        if self.filters is not None:
+        if self._filters is not None:
             atom = None
             try:
                 # get the atom for this datatype
@@ -1603,9 +1612,9 @@ def write_array(self, key, value):

             if atom is not None:
                 # create an empty chunked array and fill it from value
-                ca = self.handle.createCArray(self.group, key, atom,
+                ca = self._handle.createCArray(self.group, key, atom,
                                               value.shape,
-                                              filters=self.filters)
+                                              filters=self._filters)
                 ca[:] = value
                 getattr(self.group, key)._v_attrs.transposed = transposed
                 return
@@ -1622,21 +1631,21 @@ def write_array(self, key, value):
                 ws = performance_doc % (inferred_type, key)
                 warnings.warn(ws, PerformanceWarning)

-            vlarr = self.handle.createVLArray(self.group, key,
+            vlarr = self._handle.createVLArray(self.group, key,
                                               _tables().ObjectAtom())
             vlarr.append(value)
         elif value.dtype.type == np.datetime64:
-            self.handle.createArray(self.group, key, value.view('i8'))
+            self._handle.createArray(self.group, key, value.view('i8'))
             getattr(self.group, key)._v_attrs.value_type = 'datetime64'
         else:
             if empty_array:
                 # ugly hack for length 0 axes
                 arr = np.empty((1,) * value.ndim)
-                self.handle.createArray(self.group, key, arr)
+                self._handle.createArray(self.group, key, arr)
                 getattr(self.group, key)._v_attrs.value_type = str(value.dtype)
                 getattr(self.group, key)._v_attrs.shape = value.shape
             else:
-                self.handle.createArray(self.group, key, value)
+                self._handle.createArray(self.group, key, value)

         getattr(self.group, key)._v_attrs.transposed = transposed

@@ -1729,7 +1738,7 @@ def write(self, obj, **kwargs):
         for name, ss in obj.iteritems():
             key = 'sparse_series_%s' % name
             if key not in self.group._v_children:
-                node = self.handle.createGroup(self.group, key)
+                node = self._handle.createGroup(self.group, key)
             else:
                 node = getattr(self.group, key)
             s = SparseSeriesStorer(self.parent, node)
@@ -1763,7 +1772,7 @@ def write(self, obj, **kwargs):
         for name, sdf in obj.iteritems():
             key = 'sparse_frame_%s' % name
             if key not in self.group._v_children:
-                node = self.handle.createGroup(self.group, key)
+                node = self._handle.createGroup(self.group, key)
             else:
                 node = getattr(self.group, key)
             s = SparseFrameStorer(self.parent, node)
@@ -2293,13 +2302,13 @@ def create_description(self, complib=None, complevel=None, fletcher32=False, exp

         if complib:
             if complevel is None:
-                complevel = self.complevel or 9
+                complevel = self._complevel or 9
             filters = _tables().Filters(complevel=complevel,
                                         complib=complib,
-                                        fletcher32=fletcher32 or self.fletcher32)
+                                        fletcher32=fletcher32 or self._fletcher32)
             d['filters'] = filters
-        elif self.filters is not None:
-            d['filters'] = self.filters
+        elif self._filters is not None:
+            d['filters'] = self._filters

         return d

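Per-table compression overrides the store-wide settings that create_description falls back to here. An illustrative sketch (not from this commit; the file name and keys are hypothetical, and it assumes complib/complevel passed to append() are forwarded into the table write path shown in the following hunks):

    from pandas import DataFrame, HDFStore

    # store-wide defaults end up in _complevel/_complib and in the _filters built by open()
    store = HDFStore('example.h5', complevel=1, complib='zlib')
    df = DataFrame({'A': [1, 2, 3]})

    store.append('df_default', df)                           # uses the store-wide filters
    store.append('df_blosc', df, complib='blosc', complevel=9)  # per-table override via create_description
    store.close()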
@@ -2484,7 +2493,7 @@ def write(self, obj, axes=None, append=False, complib=None,
               expectedrows=None, **kwargs):

         if not append and self.is_exists:
-            self.handle.removeNode(self.group, 'table')
+            self._handle.removeNode(self.group, 'table')

         # create the axes
         self.create_axes(axes=axes, obj=obj, validate=append,
@@ -2502,7 +2511,7 @@ def write(self, obj, axes=None, append=False, complib=None,
             self.set_attrs()

             # create the table
-            table = self.handle.createTable(self.group, **options)
+            table = self._handle.createTable(self.group, **options)

         else:
             table = self.table
@@ -2579,7 +2588,7 @@ def delete(self, where=None, **kwargs):
         # delete all rows (and return the nrows)
         if where is None or not len(where):
             nrows = self.nrows
-            self.handle.removeNode(self.group, recursive=True)
+            self._handle.removeNode(self.group, recursive=True)
             return nrows

         # infer the data kind