@@ -580,63 +580,59 @@ def _extract_index(data) -> Index:
     """
     Try to infer an Index from the passed data, raise ValueError on failure.
     """
-    index = None
+    index: Index
     if len(data) == 0:
-        index = Index([])
-    else:
-        raw_lengths = []
-        indexes: list[list[Hashable] | Index] = []
-
-        have_raw_arrays = False
-        have_series = False
-        have_dicts = False
-
-        for val in data:
-            if isinstance(val, ABCSeries):
-                have_series = True
-                indexes.append(val.index)
-            elif isinstance(val, dict):
-                have_dicts = True
-                indexes.append(list(val.keys()))
-            elif is_list_like(val) and getattr(val, "ndim", 1) == 1:
-                have_raw_arrays = True
-                raw_lengths.append(len(val))
-            elif isinstance(val, np.ndarray) and val.ndim > 1:
-                raise ValueError("Per-column arrays must each be 1-dimensional")
-
-        if not indexes and not raw_lengths:
-            raise ValueError("If using all scalar values, you must pass an index")
+        return Index([])

-        if have_series:
-            index = union_indexes(indexes)
-        elif have_dicts:
-            index = union_indexes(indexes, sort=False)
+    raw_lengths = []
+    indexes: list[list[Hashable] | Index] = []

-        if have_raw_arrays:
-            lengths = list(set(raw_lengths))
-            if len(lengths) > 1:
-                raise ValueError("All arrays must be of the same length")
+    have_raw_arrays = False
+    have_series = False
+    have_dicts = False

-            if have_dicts:
-                raise ValueError(
-                    "Mixing dicts with non-Series may lead to ambiguous ordering."
-                )
+    for val in data:
+        if isinstance(val, ABCSeries):
+            have_series = True
+            indexes.append(val.index)
+        elif isinstance(val, dict):
+            have_dicts = True
+            indexes.append(list(val.keys()))
+        elif is_list_like(val) and getattr(val, "ndim", 1) == 1:
+            have_raw_arrays = True
+            raw_lengths.append(len(val))
+        elif isinstance(val, np.ndarray) and val.ndim > 1:
+            raise ValueError("Per-column arrays must each be 1-dimensional")
+
+    if not indexes and not raw_lengths:
+        raise ValueError("If using all scalar values, you must pass an index")
+
+    if have_series:
+        index = union_indexes(indexes)
+    elif have_dicts:
+        index = union_indexes(indexes, sort=False)
+
+    if have_raw_arrays:
+        lengths = list(set(raw_lengths))
+        if len(lengths) > 1:
+            raise ValueError("All arrays must be of the same length")
+
+        if have_dicts:
+            raise ValueError(
+                "Mixing dicts with non-Series may lead to ambiguous ordering."
+            )

-            if have_series:
-                assert index is not None  # for mypy
-                if lengths[0] != len(index):
-                    msg = (
-                        f"array length {lengths[0]} does not match index "
-                        f"length {len(index)}"
-                    )
-                    raise ValueError(msg)
-            else:
-                index = default_index(lengths[0])
+        if have_series:
+            if lengths[0] != len(index):
+                msg = (
+                    f"array length {lengths[0]} does not match index "
+                    f"length {len(index)}"
+                )
+                raise ValueError(msg)
+        else:
+            index = default_index(lengths[0])

-    # error: Argument 1 to "ensure_index" has incompatible type "Optional[Index]";
-    # expected "Union[Union[Union[ExtensionArray, ndarray], Index, Series],
-    # Sequence[Any]]"
-    return ensure_index(index)  # type: ignore[arg-type]
+    return ensure_index(index)


 def reorder_arrays(
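For context, a minimal sketch of the constructor behaviors this refactor preserves (assuming a recent pandas; `_extract_index` is private, so the public `pd.DataFrame` constructor is used here to exercise it):

```python
import pandas as pd

# Empty mapping: hits the new early `return Index([])` path; the frame's index is empty.
print(pd.DataFrame({}).index)

# A Series contributes its index; a plain list must match that index's length.
df = pd.DataFrame({"a": pd.Series([1, 2], index=["x", "y"]), "b": [3, 4]})
print(df.index)  # Index(['x', 'y'], dtype='object')

# All-scalar input without an explicit index still raises.
try:
    pd.DataFrame({"a": 1, "b": 2})
except ValueError as err:
    print(err)  # If using all scalar values, you must pass an index

# Columns of different lengths still raise.
try:
    pd.DataFrame({"a": [1, 2], "b": [1, 2, 3]})
except ValueError as err:
    print(err)  # All arrays must be of the same length
```

The early return for empty data means `index` is never left as `None`, so `ensure_index` no longer needs the `assert index is not None` guard or the `type: ignore[arg-type]` comment that the old nested `else:` version required.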