@@ -101,8 +101,12 @@ def test_frame_non_unique_index(self):
         df = DataFrame([['a', 'b'], ['c', 'd']], index=[1, 1],
                        columns=['x', 'y'])

-        pytest.raises(ValueError, df.to_json, orient='index')
-        pytest.raises(ValueError, df.to_json, orient='columns')
+        msg = "DataFrame index must be unique for orient='index'"
+        with pytest.raises(ValueError, match=msg):
+            df.to_json(orient='index')
+        msg = "DataFrame index must be unique for orient='columns'"
+        with pytest.raises(ValueError, match=msg):
+            df.to_json(orient='columns')

         assert_frame_equal(df, read_json(df.to_json(orient='split'),
                                          orient='split'))
@@ -116,9 +120,15 @@ def test_frame_non_unique_columns(self):
         df = DataFrame([['a', 'b'], ['c', 'd']], index=[1, 2],
                        columns=['x', 'x'])

-        pytest.raises(ValueError, df.to_json, orient='index')
-        pytest.raises(ValueError, df.to_json, orient='columns')
-        pytest.raises(ValueError, df.to_json, orient='records')
+        msg = "DataFrame columns must be unique for orient='index'"
+        with pytest.raises(ValueError, match=msg):
+            df.to_json(orient='index')
+        msg = "DataFrame columns must be unique for orient='columns'"
+        with pytest.raises(ValueError, match=msg):
+            df.to_json(orient='columns')
+        msg = "DataFrame columns must be unique for orient='records'"
+        with pytest.raises(ValueError, match=msg):
+            df.to_json(orient='records')

         assert_frame_equal(df, read_json(df.to_json(orient='split'),
                                          orient='split', dtype=False))
@@ -156,13 +166,16 @@ def _check_orient(df, orient, dtype=None, numpy=False,
             # if we are not unique, then check that we are raising ValueError
             # for the appropriate orients
             if not df.index.is_unique and orient in ['index', 'columns']:
-                pytest.raises(
-                    ValueError, lambda: df.to_json(orient=orient))
+                msg = ("DataFrame index must be unique for orient='{}'"
+                       .format(orient))
+                with pytest.raises(ValueError, match=msg):
+                    df.to_json(orient=orient)
                 return
             if (not df.columns.is_unique and
                     orient in ['index', 'columns', 'records']):
-                pytest.raises(
-                    ValueError, lambda: df.to_json(orient=orient))
+                # TODO: not executed. fix this.
+                with pytest.raises(ValueError, match='ksjkajksfjksjfkjs'):
+                    df.to_json(orient=orient)
                 return

             dfjson = df.to_json(orient=orient)
@@ -326,21 +339,24 @@ def _check_all_orients(df, dtype=None, convert_axes=True,
         _check_orient(df.transpose().transpose(), "index", dtype=False)

     def test_frame_from_json_bad_data(self):
-        pytest.raises(ValueError, read_json, StringIO('{"key":b:a:d}'))
+        with pytest.raises(ValueError, match='Expected object or value'):
+            read_json(StringIO('{"key":b:a:d}'))

         # too few indices
         json = StringIO('{"columns":["A","B"],'
                         '"index":["2","3"],'
                         '"data":[[1.0,"1"],[2.0,"2"],[null,"3"]]}')
-        pytest.raises(ValueError, read_json, json,
-                      orient="split")
+        msg = r"Shape of passed values is \(2, 3\), indices imply \(2, 2\)"
+        with pytest.raises(ValueError, match=msg):
+            read_json(json, orient="split")

         # too many columns
         json = StringIO('{"columns":["A","B","C"],'
                         '"index":["1","2","3"],'
                         '"data":[[1.0,"1"],[2.0,"2"],[null,"3"]]}')
-        pytest.raises(AssertionError, read_json, json,
-                      orient="split")
+        msg = "3 columns passed, passed data had 2 columns"
+        with pytest.raises(AssertionError, match=msg):
+            read_json(json, orient="split")

         # bad key
         json = StringIO('{"badkey":["A","B"],'
@@ -414,7 +430,9 @@ def test_frame_to_json_float_precision(self):

     def test_frame_to_json_except(self):
         df = DataFrame([1, 2, 3])
-        pytest.raises(ValueError, df.to_json, orient="garbage")
+        msg = "Invalid value 'garbage' for option 'orient'"
+        with pytest.raises(ValueError, match=msg):
+            df.to_json(orient="garbage")

     def test_frame_empty(self):
         df = DataFrame(columns=['jim', 'joe'])
@@ -540,7 +558,8 @@ def __str__(self):

         # check if non-printable content throws appropriate Exception
         df_nonprintable = DataFrame({'A': [binthing]})
-        with pytest.raises(OverflowError):
+        msg = "Unsupported UTF-8 sequence length when encoding string"
+        with pytest.raises(OverflowError, match=msg):
             df_nonprintable.to_json()

         # the same with multiple columns threw segfaults
@@ -565,7 +584,9 @@ def test_label_overflow(self):
     def test_series_non_unique_index(self):
         s = Series(['a', 'b'], index=[1, 1])

-        pytest.raises(ValueError, s.to_json, orient='index')
+        msg = "Series index must be unique for orient='index'"
+        with pytest.raises(ValueError, match=msg):
+            s.to_json(orient='index')

         assert_series_equal(s, read_json(s.to_json(orient='split'),
                                          orient='split', typ='series'))
@@ -637,7 +658,9 @@ def _check_all_orients(series, dtype=None, check_index_type=True):

     def test_series_to_json_except(self):
         s = Series([1, 2, 3])
-        pytest.raises(ValueError, s.to_json, orient="garbage")
+        msg = "Invalid value 'garbage' for option 'orient'"
+        with pytest.raises(ValueError, match=msg):
+            s.to_json(orient="garbage")

     def test_series_from_json_precise_float(self):
         s = Series([4.56, 4.56, 4.56])
@@ -752,8 +775,9 @@ def test_w_date(date, date_unit=None):
         test_w_date('20130101 20:43:42.123456', date_unit='us')
         test_w_date('20130101 20:43:42.123456789', date_unit='ns')

-        pytest.raises(ValueError, df.to_json, date_format='iso',
-                      date_unit='foo')
+        msg = "Invalid value 'foo' for option 'date_unit'"
+        with pytest.raises(ValueError, match=msg):
+            df.to_json(date_format='iso', date_unit='foo')

     def test_date_format_series(self):
         def test_w_date(date, date_unit=None):
@@ -774,8 +798,9 @@ def test_w_date(date, date_unit=None):
         test_w_date('20130101 20:43:42.123456789', date_unit='ns')

         ts = Series(Timestamp('20130101 20:43:42.123'), index=self.ts.index)
-        pytest.raises(ValueError, ts.to_json, date_format='iso',
-                      date_unit='foo')
+        msg = "Invalid value 'foo' for option 'date_unit'"
+        with pytest.raises(ValueError, match=msg):
+            ts.to_json(date_format='iso', date_unit='foo')

     def test_date_unit(self):
         df = self.tsframe.copy()
@@ -940,14 +965,16 @@ def test_default_handler_numpy_unsupported_dtype(self):
         assert df.to_json(default_handler=str, orient="values") == expected

     def test_default_handler_raises(self):
+        msg = "raisin"
+
         def my_handler_raises(obj):
-            raise TypeError("raisin")
-        pytest.raises(TypeError,
-                      DataFrame({'a': [1, 2, object()]}).to_json,
-                      default_handler=my_handler_raises)
-        pytest.raises(TypeError,
-                      DataFrame({'a': [1, 2, complex(4, -5)]}).to_json,
-                      default_handler=my_handler_raises)
+            raise TypeError(msg)
+        with pytest.raises(TypeError, match=msg):
+            DataFrame({'a': [1, 2, object()]}).to_json(
+                default_handler=my_handler_raises)
+        with pytest.raises(TypeError, match=msg):
+            DataFrame({'a': [1, 2, complex(4, -5)]}).to_json(
+                default_handler=my_handler_raises)

     def test_categorical(self):
         # GH4377 df.to_json segfaults with non-ndarray blocks
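
Not part of the diff itself: a minimal, self-contained sketch of the pytest.raises(..., match=...) idiom these changes adopt. The match argument is searched against the exception's string representation with re.search, which is why the shape-mismatch message above escapes its parentheses in a raw string. The divide function and its message below are hypothetical, used only to illustrate the pattern.

import re

import pytest


def divide(a, b):
    # Hypothetical helper used only to demonstrate the idiom.
    if b == 0:
        raise ValueError("division by zero (b=0) is not allowed")
    return a / b


def test_divide_raises():
    # match= is treated as a regular expression, so escape the
    # parentheses in the expected message before passing it along.
    msg = re.escape("division by zero (b=0) is not allowed")
    with pytest.raises(ValueError, match=msg):
        divide(1, 0)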