@@ -458,9 +458,9 @@ def select_as_multiple(self, keys, where=None, selector=None, columns=None, star
        nrows = tbls[0].nrows
        for t in tbls:
            if t.nrows != nrows:
-                raise Exception("all tables must have exactly the same nrows!")
+                raise ValueError("all tables must have exactly the same nrows!")
            if not t.is_table:
-                raise Exception("object [%s] is not a table, and cannot be used in all select as multiple" % t.pathname)
+                raise TypeError("object [%s] is not a table, and cannot be used in all select as multiple" % t.pathname)

        # select coordinates from the selector table
        c = self.select_as_coordinates(selector, where, start=start, stop=stop)
@@ -526,7 +526,7 @@ def remove(self, key, where=None, start=None, stop=None):
        except:

            if where is not None:
-                raise Exception("trying to remove a node with a non-None where clause!")
+                raise ValueError("trying to remove a node with a non-None where clause!")

            # we are actually trying to remove a node (with children)
            s = self.get_node(key)
@@ -544,7 +544,7 @@ def remove(self, key, where=None, start=None, stop=None):
            # delete from the table
            else:
                if not s.is_table:
-                    raise Exception('can only remove with where on objects written as tables')
+                    raise ValueError('can only remove with where on objects written as tables')
                return s.delete(where=where, start=start, stop=stop)

    def append(self, key, value, columns=None, **kwargs):
@@ -597,10 +597,10 @@ def append_to_multiple(self, d, value, selector, data_columns=None, axes=None, *
            raise Exception("axes is currently not accepted as a paremter to append_to_multiple; you can create the tables indepdently instead")

        if not isinstance(d, dict):
-            raise Exception("append_to_multiple must have a dictionary specified as the way to split the value")
+            raise ValueError("append_to_multiple must have a dictionary specified as the way to split the value")

        if selector not in d:
-            raise Exception("append_to_multiple requires a selector that is in passed dict")
+            raise ValueError("append_to_multiple requires a selector that is in passed dict")

        # figure out the splitting axis (the non_index_axis)
        axis = list(set(range(value.ndim)) - set(_AXES_MAP[type(value)]))[0]
@@ -611,7 +611,7 @@ def append_to_multiple(self, d, value, selector, data_columns=None, axes=None, *
        for k, v in d.items():
            if v is None:
                if remain_key is not None:
-                    raise Exception("append_to_multiple can only have one value in d that is None")
+                    raise ValueError("append_to_multiple can only have one value in d that is None")
                remain_key = k
            else:
                remain_values.extend(v)
@@ -655,7 +655,7 @@ def create_table_index(self, key, **kwargs):
        if s is None: return

        if not s.is_table:
-            raise Exception("cannot create table index on a non-table")
+            raise TypeError("cannot create table index on a non-table")
        s.create_index(**kwargs)

    def groups(self):
@@ -727,8 +727,8 @@ def _create_storer(self, group, value = None, table = False, append = False, **k
        """ return a suitable Storer class to operate """

        def error(t):
-            raise NotImplementedError("cannot properly create the storer for: [%s] [group->%s,value->%s,table->%s,append->%s,kwargs->%s]" %
-                                      (t,group,type(value),table,append,kwargs))
+            raise TypeError("cannot properly create the storer for: [%s] [group->%s,value->%s,table->%s,append->%s,kwargs->%s]" %
+                            (t,group,type(value),table,append,kwargs))

        pt = getattr(group._v_attrs,'pandas_type',None)
        tt = getattr(group._v_attrs,'table_type',None)
@@ -742,7 +742,7 @@ def error(t):
                    pt = 'frame_table'
                    tt = 'generic_table'
                else:
-                    raise Exception("cannot create a storer if the object is not existing nor a value are passed")
+                    raise TypeError("cannot create a storer if the object is not existing nor a value are passed")
            else:

                try:
@@ -1044,8 +1044,10 @@ def validate_col(self, itemsize=None):
                if itemsize is None:
                    itemsize = self.itemsize
                if c.itemsize < itemsize:
-                    raise Exception("[%s] column has a min_itemsize of [%s] but itemsize [%s] is required!"
-                                    % (self.cname, itemsize, c.itemsize))
+                    raise ValueError("Trying to store a string with len [%s] in [%s] column but\n"
+                                     "this column has a limit of [%s]!\n"
+                                     "Consider using min_itemsize to preset the sizes on these columns"
+                                     % (itemsize,self.cname, c.itemsize))
                return c.itemsize

        return None
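The reworded message above fires when an appended string is wider than the column width fixed on the first append. A minimal caller-side sketch of the new ValueError, not part of the patch (assumes pandas built against PyTables; the file name, key, and sizes are illustrative):

import pandas as pd

# first append fixes the string column width at 3 characters
store = pd.HDFStore('example.h5')
store.append('df', pd.DataFrame({'A': ['a', 'bb', 'ccc']}), min_itemsize={'values': 3})

try:
    # a wider string no longer fits and now raises ValueError (was a bare Exception)
    store.append('df', pd.DataFrame({'A': ['a much longer string']}))
except ValueError as err:
    print(err)

store.close()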
@@ -1176,11 +1178,11 @@ def set_atom(self, block, existing_col, min_itemsize, nan_rep, **kwargs):
        if inferred_type == 'datetime64':
            self.set_atom_datetime64(block)
        elif inferred_type == 'date':
-            raise NotImplementedError(
-                "date is not implemented as a table column")
+            raise TypeError(
+                "[date] is not implemented as a table column")
        elif inferred_type == 'unicode':
-            raise NotImplementedError(
-                "unicode is not implemented as a table column")
+            raise TypeError(
+                "[unicode] is not implemented as a table column")

        # this is basically a catchall; if say a datetime64 has nans then will
        # end up here ###
@@ -1209,9 +1211,9 @@ def set_atom_string(self, block, existing_col, min_itemsize, nan_rep):
                col = block.get(item)
                inferred_type = lib.infer_dtype(col.ravel())
                if inferred_type != 'string':
-                    raise NotImplementedError("cannot serialize the column [%s] because "
-                                              "its data contents are [%s] object dtype" %
-                                              (item,inferred_type))
+                    raise TypeError("Cannot serialize the column [%s] because\n"
+                                    "its data contents are [%s] object dtype" %
+                                    (item,inferred_type))


        # itemsize is the maximum length of a string (along any dimension)
@@ -1268,13 +1270,13 @@ def validate_attr(self, append):
            existing_fields = getattr(self.attrs, self.kind_attr, None)
            if (existing_fields is not None and
                    existing_fields != list(self.values)):
-                raise Exception("appended items do not match existing items"
+                raise ValueError("appended items do not match existing items"
                                 " in table!")

            existing_dtype = getattr(self.attrs, self.dtype_attr, None)
            if (existing_dtype is not None and
                    existing_dtype != self.dtype):
-                raise Exception("appended items dtype do not match existing items dtype"
+                raise ValueError("appended items dtype do not match existing items dtype"
                                 " in table!")

    def convert(self, values, nan_rep):
@@ -1497,7 +1499,7 @@ def delete(self, where = None, **kwargs):
            self._handle.removeNode(self.group, recursive=True)
            return None

-        raise NotImplementedError("cannot delete on an abstract storer")
+        raise TypeError("cannot delete on an abstract storer")

class GenericStorer(Storer):
    """ a generified storer version """
@@ -2045,7 +2047,7 @@ def validate(self, other):

        for c in ['index_axes','non_index_axes','values_axes']:
            if getattr(self,c,None) != getattr(other,c,None):
-                raise Exception("invalid combinate of [%s] on appending data [%s] vs current table [%s]" % (c,getattr(self,c,None),getattr(other,c,None)))
+                raise ValueError("invalid combinate of [%s] on appending data [%s] vs current table [%s]" % (c,getattr(self,c,None),getattr(other,c,None)))

    @property
    def nrows_expected(self):
@@ -2132,6 +2134,21 @@ def validate_version(self, where = None):
                ws = incompatibility_doc % '.'.join([ str(x) for x in self.version ])
                warnings.warn(ws, IncompatibilityWarning)

+    def validate_min_itemsize(self, min_itemsize):
+        """ validate the min_itemisze doesn't contain items that are not in the axes
+            this needs data_columns to be defined """
+        if min_itemsize is None: return
+        if not isinstance(min_itemsize, dict): return
+
+        q = self.queryables()
+        for k, v in min_itemsize.items():
+
+            # ok, apply generally
+            if k == 'values':
+                continue
+            if k not in q:
+                raise ValueError("min_itemsize has [%s] which is not an axis or data_column" % k)
+
    @property
    def indexables(self):
        """ create/cache the indexables if they don't exist """
@@ -2262,8 +2279,8 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, data_columns=None,
            try:
                axes = _AXES_MAP[type(obj)]
            except:
-                raise NotImplementedError("cannot properly create the storer for: [group->%s,value->%s]" %
-                                          (self.group._v_name,type(obj)))
+                raise TypeError("cannot properly create the storer for: [group->%s,value->%s]" %
+                                (self.group._v_name,type(obj)))

        # map axes to numbers
        axes = [obj._get_axis_number(a) for a in axes]
@@ -2280,7 +2297,7 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, data_columns=None,

        # currently support on ndim-1 axes
        if len(axes) != self.ndim - 1:
-            raise Exception("currently only support ndim-1 indexers in an AppendableTable")
+            raise ValueError("currently only support ndim-1 indexers in an AppendableTable")

        # create according to the new data
        self.non_index_axes = []
@@ -2370,7 +2387,7 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, data_columns=None,
                try:
                    existing_col = existing_table.values_axes[i]
                except:
-                    raise Exception("Incompatible appended table [%s] with existing table [%s]" %
+                    raise ValueError("Incompatible appended table [%s] with existing table [%s]" %
                                     (blocks,existing_table.values_axes))
            else:
                existing_col = None
@@ -2386,12 +2403,15 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, data_columns=None,
                col.set_pos(j)

                self.values_axes.append(col)
-            except (NotImplementedError):
-                raise
+            except (NotImplementedError, ValueError, TypeError), e:
+                raise e
            except (Exception), detail:
                raise Exception("cannot find the correct atom type -> [dtype->%s,items->%s] %s" % (b.dtype.name, b.items, str(detail)))
            j += 1

+        # validate our min_itemsize
+        self.validate_min_itemsize(min_itemsize)
+
        # validate the axes if we have an existing table
        if validate:
            self.validate(existing_table)
@@ -2433,7 +2453,7 @@ def process_filter(field, filt):
                        takers = op(values,filt)
                        return obj.ix._getitem_axis(takers,axis=axis_number)

-                raise Exception("cannot find the field [%s] for filtering!" % field)
+                raise ValueError("cannot find the field [%s] for filtering!" % field)

            obj = process_filter(field, filt)

@@ -3111,12 +3131,12 @@ def __init__(self, field, op=None, value=None, queryables=None):
            self.value = op

        else:
-            raise Exception(
+            raise ValueError(
                "Term does not understand the supplied field [%s]" % field)

        # we have valid fields
        if self.field is None or self.op is None or self.value is None:
-            raise Exception("Could not create this term [%s]" % str(self))
+            raise ValueError("Could not create this term [%s]" % str(self))

        # = vs ==
        if self.op == '=':
@@ -3125,7 +3145,7 @@ def __init__(self, field, op=None, value=None, queryables=None):
        # we have valid conditions
        if self.op in ['>', '>=', '<', '<=']:
            if hasattr(self.value, '__iter__') and len(self.value) > 1:
-                raise Exception("an inequality condition cannot have multiple values [%s]" % str(self))
+                raise ValueError("an inequality condition cannot have multiple values [%s]" % str(self))

        if not hasattr(self.value, '__iter__'):
            self.value = [self.value]
@@ -3157,7 +3177,7 @@ def eval(self):
        """ set the numexpr expression for this term """

        if not self.is_valid:
-            raise Exception("query term is not valid [%s]" % str(self))
+            raise ValueError("query term is not valid [%s]" % str(self))

        # convert values if we are in the table
        if self.is_in_table:
@@ -3199,7 +3219,7 @@ def eval(self):

        else:

-            raise Exception("passing a filterable condition to a non-table indexer [%s]" % str(self))
+            raise TypeError("passing a filterable condition to a non-table indexer [%s]" % str(self))

    def convert_value(self, v):
        """ convert the expression that is in the term to something that is accepted by pytables """