@@ -151,8 +151,8 @@ def _tables():
 def h5_open(path, mode):
     tables = _tables()
     return tables.openFile(path, mode)
-
-
+
+
 @contextmanager
 def get_store(path, mode = 'a', complevel = None, complib = None,
               fletcher32 = False):
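The `@contextmanager` decorator above is what lets `get_store` be used in a `with` block, closing the underlying PyTables file on exit. A minimal usage sketch, assuming a writable local file named `store.h5`:

```python
import pandas as pd
from pandas.io.pytables import get_store

df = pd.DataFrame({'a': [1, 2, 3]})

# the context manager opens the file, yields the store, and closes it on exit
with get_store('store.h5', mode='a') as store:
    store['df'] = df              # dict-like write
    roundtripped = store['df']    # dict-like read
```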
@@ -217,7 +217,7 @@ def read_hdf(path_or_buf, key, **kwargs):
 
     # a passed store; user controls open/close
     f(path_or_buf, False)
-
+
 class HDFStore(object):
     """
     dict-like IO interface for storing pandas objects in PyTables
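As the comment notes, `read_hdf` only manages open/close itself when given a path; a passed-in store is left to the caller. A rough sketch of the two paths, assuming a local file `data.h5`:

```python
import pandas as pd
from pandas.io.pytables import HDFStore, read_hdf

df = pd.DataFrame({'a': range(5)})

# caller-managed store: open, write, and close explicitly
store = HDFStore('data.h5')
store['df'] = df
store.close()

# path-based read: read_hdf opens the file, fetches the key, and closes it again
same = read_hdf('data.h5', 'df')
```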
@@ -757,7 +757,7 @@ def get_node(self, key):
     def get_storer(self, key):
         """ return the storer object for a key, raise if not in the file """
         group = self.get_node(key)
-        if group is None:
+        if group is None:
             return None
         s = self._create_storer(group)
         s.infer_axes()
@@ -810,9 +810,9 @@ def _create_storer(self, group, value = None, table = False, append = False, **k
         """ return a suitable Storer class to operate """

         def error(t):
-            raise TypeError("cannot properly create the storer for: [%s] [group->%s,value->%s,table->%s,append->%s,kwargs->%s]" %
+            raise TypeError("cannot properly create the storer for: [%s] [group->%s,value->%s,table->%s,append->%s,kwargs->%s]" %
                             (t, group, type(value), table, append, kwargs))
-
+
         pt = _ensure_decoded(getattr(group._v_attrs, 'pandas_type', None))
         tt = _ensure_decoded(getattr(group._v_attrs, 'table_type', None))

@@ -863,7 +863,7 @@ def error(t):
                     tt = u'appendable_ndim'

             else:
-
+
                 # distiguish between a frame/table
                 tt = u'legacy_panel'
                 try:
@@ -930,7 +930,7 @@ def _read_group(self, group, **kwargs):

 class TableIterator(object):
     """ define the iteration interface on a table
-
+
         Parameters
         ----------

@@ -974,7 +974,7 @@ def __iter__(self):
             yield v

         self.close()
-
+
     def close(self):
         if self.auto_close:
             self.store.close()
@@ -1003,7 +1003,7 @@ class IndexCol(object):
     _info_fields = ['freq','tz','index_name']

     def __init__(self, values = None, kind = None, typ = None, cname = None, itemsize = None,
-                 name = None, axis = None, kind_attr = None, pos = None, freq = None, tz = None,
+                 name = None, axis = None, kind_attr = None, pos = None, freq = None, tz = None,
                  index_name = None, **kwargs):
         self.values = values
         self.kind = kind
@@ -1088,21 +1088,27 @@ def convert(self, values, nan_rep, encoding):
         except:
             pass

+        values = _maybe_convert(values, self.kind, encoding)
+
         kwargs = dict()
         if self.freq is not None:
             kwargs['freq'] = _ensure_decoded(self.freq)
-        if self.tz is not None:
-            kwargs['tz'] = _ensure_decoded(self.tz)
         if self.index_name is not None:
             kwargs['name'] = _ensure_decoded(self.index_name)
         try:
-            self.values = Index(_maybe_convert(values, self.kind, self.encoding), **kwargs)
+            self.values = Index(values, **kwargs)
         except:

             # if the output freq is different that what we recorded, then infer it
             if 'freq' in kwargs:
                 kwargs['freq'] = 'infer'
             self.values = Index(_maybe_convert(values, self.kind, encoding), **kwargs)
+
+        # set the timezone if indicated
+        # we stored in utc, so reverse to local timezone
+        if self.tz is not None:
+            self.values = self.values.tz_localize('UTC').tz_convert(_ensure_decoded(self.tz))
+
         return self

     def take_data(self):
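The new read path assumes datetimes were written as naive UTC values, so `convert` rebuilds the index first and only then reapplies the stored zone with a localize/convert pair. A small sketch of that last step on a plain `DatetimeIndex` (the zone name is illustrative):

```python
import pandas as pd

# values as they come back from the table: naive, but known to be UTC
idx = pd.DatetimeIndex(['2013-01-01 15:00', '2013-01-01 16:00'])

# what IndexCol.convert now does when self.tz is set
restored = idx.tz_localize('UTC').tz_convert('US/Eastern')
```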
@@ -1189,7 +1195,7 @@ def update_info(self, info):
                 idx = info[self.name]
             except:
                 idx = info[self.name] = dict()
-
+
             existing_value = idx.get(key)
             if key in idx and value is not None and existing_value != value:

@@ -1235,7 +1241,7 @@ def is_indexed(self):

     def convert(self, values, nan_rep, encoding):
         """ set the values from this selection: take = take ownership """
-
+
         self.values = Int64Index(np.arange(self.table.nrows))
         return self

@@ -1359,7 +1365,13 @@ def set_atom(self, block, existing_col, min_itemsize, nan_rep, info, encoding=No
                         "invalid timezone specification")

                 values = index.tz_convert('UTC').values.view('i8')
-                self.tz = tz
+
+                # store a converted timezone
+                zone = tslib.get_timezone(index.tz)
+                if zone is None:
+                    zone = tslib.tot_seconds(index.tz.utcoffset())
+                self.tz = zone
+
                 self.update_info(info)
                 self.set_atom_datetime64(block, values.reshape(block.values.shape))

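On the write side the zone itself is normalized before being stored: a named zone keeps its string identifier, while a fixed-offset zone (which has no name) falls back to its UTC offset in seconds, which is what the `tslib.get_timezone`/`tslib.tot_seconds` pair computes. A rough equivalent using only `pytz` (the zones here are illustrative):

```python
from datetime import datetime
import pytz

named = pytz.timezone('US/Eastern')   # has a usable name -> stored as the string 'US/Eastern'
fixed = pytz.FixedOffset(-300)        # offset-only zone, no name to store

# fallback: persist the total offset in seconds (what tot_seconds() amounts to)
offset = fixed.utcoffset(datetime(2013, 1, 1))
seconds = offset.days * 86400 + offset.seconds   # -18000 for UTC-05:00
```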
@@ -1398,7 +1410,7 @@ def set_atom_string(self, block, existing_col, min_itemsize, nan_rep, encoding):
                 inferred_type = lib.infer_dtype(col.ravel())
                 if inferred_type != 'string':
                     raise TypeError("Cannot serialize the column [%s] because\n"
-                                    "its data contents are [%s] object dtype" %
+                                    "its data contents are [%s] object dtype" %
                                     (item, inferred_type))


@@ -1607,7 +1619,7 @@ def __repr__(self):
                 s = "[%s]" % ','.join([ str(x) for x in s ])
             return "%-12.12s (shape->%s)" % (self.pandas_type, s)
         return self.pandas_type
-
+
     def __str__(self):
         return self.__repr__()

@@ -1929,7 +1941,7 @@ def write_array_empty(self, key, value):
         self._handle.createArray(self.group, key, arr)
         getattr(self.group, key)._v_attrs.value_type = str(value.dtype)
         getattr(self.group, key)._v_attrs.shape = value.shape
-
+
     def write_array(self, key, value, items = None):
         if key in self.group:
             self._handle.removeNode(self.group, key)
@@ -2142,7 +2154,7 @@ def shape(self):
         try:
             ndim = self.ndim

-            # items
+            # items
             items = 0
             for i in range(self.nblocks):
                 node = getattr(self.group, 'block%d_items' % i)
@@ -2212,7 +2224,7 @@ class PanelStorer(BlockManagerStorer):
     pandas_kind = u'wide'
     obj_type = Panel
     is_shape_reversed = True
-
+
     def write(self, obj, **kwargs):
         obj._consolidate_inplace()
         return super(PanelStorer, self).write(obj, **kwargs)
@@ -2270,7 +2282,7 @@ def __repr__(self):
                 self.ncols,
                 ','.join([ a.name for a in self.index_axes ]),
                 dc)
-
+
     def __getitem__(self, c):
         """ return the axis for c """
         for a in self.axes:
@@ -2568,7 +2580,7 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, data_columns=None,
             try:
                 axes = _AXES_MAP[type(obj)]
             except:
-                raise TypeError("cannot properly create the storer for: [group->%s,value->%s]" %
+                raise TypeError("cannot properly create the storer for: [group->%s,value->%s]" %
                                 (self.group._v_name, type(obj)))

         # map axes to numbers
@@ -2597,7 +2609,7 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, data_columns=None,
         # nan_representation
         if nan_rep is None:
             nan_rep = 'nan'
-
+
         self.nan_rep = nan_rep

         # create axes to index and non_index
@@ -2665,7 +2677,7 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, data_columns=None,
                 name = b.items[0]
                 self.data_columns.append(name)

-            # make sure that we match up the existing columns
+            # make sure that we match up the existing columns
             # if we have an existing table
             if existing_table is not None and validate:
                 try:
@@ -2740,7 +2752,7 @@ def process_filter(field, filt):
                         return obj.ix._getitem_axis(takers, axis = axis_number)

                 raise ValueError("cannot find the field [%s] for filtering!" % field)
-
+
             obj = process_filter(field, filt)

         return obj
@@ -3053,7 +3065,7 @@ def write_data_chunk(self, indexes, mask, search, values):
             self.table.flush()
         except (Exception), detail:
             raise Exception("tables cannot write this data -> %s" % str(detail))
-
+
     def delete(self, where = None, **kwargs):

         # delete all rows (and return the nrows)
@@ -3113,7 +3125,7 @@ class AppendableFrameTable(AppendableTable):
     table_type = u'appendable_frame'
     ndim = 2
     obj_type = DataFrame
-
+
     @property
     def is_transposed(self):
         return self.index_axes[0].axis == 1
@@ -3266,7 +3278,7 @@ def _convert_index(index, encoding=None):

     if isinstance(index, DatetimeIndex):
         converted = index.asi8
-        return IndexCol(converted, 'datetime64', _tables().Int64Col(),
+        return IndexCol(converted, 'datetime64', _tables().Int64Col(),
                         freq = getattr(index, 'freq', None), tz = getattr(index, 'tz', None),
                         index_name = index_name)
     elif isinstance(index, (Int64Index, PeriodIndex)):
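For a `DatetimeIndex` the values actually written to the table are the int64 epoch nanoseconds (`asi8`); `freq` and `tz` ride along as separate attributes on the `IndexCol`. A quick illustration:

```python
import pandas as pd

idx = pd.date_range('2013-01-01', periods=3, freq='D')

ints = idx.asi8    # int64 nanoseconds since the epoch -- what gets stored
freq = idx.freq    # the offset carried as the 'freq' attribute
```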
@@ -3382,7 +3394,7 @@ def _unconvert_string_array(data, nan_rep=None, encoding=None):

     if nan_rep is None:
         nan_rep = 'nan'
-
+
     data = lib.string_array_replace_from_nan_rep(data, nan_rep)
     return data.reshape(shape)

@@ -3421,7 +3433,7 @@ class Term(object):
     value : a value or list of values (required)
     queryables : a kinds map (dict of column name -> kind), or None if column is non-indexable
     encoding : an encoding that will encode the query terms
-
+
     Returns
     -------
     a Term object
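The fields described above can also be packed into a single expression string; both forms below should build the same term (a sketch, assuming a table whose index column is named `index`):

```python
from pandas.io.pytables import Term

t1 = Term('index', '>', '20121114')   # field, op, value passed separately
t2 = Term('index>20121114')           # the same constraint as one string
```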
@@ -3582,7 +3594,7 @@ def eval(self):
         if self.is_in_table:

             self.condition = self.generate(values[0])
-
+
         else:

             raise TypeError("passing a filterable condition to a non-table indexer [%s]" % str(self))