@@ -12,12 +12,12 @@ use crate::MemFlags;
 use rustc_ast as ast;
 use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_hir::lang_items::LangItem;
-use rustc_middle::mir::{self, AssertKind, SwitchTargets, UnwindTerminateReason};
+use rustc_middle::mir::{self, AssertKind, BasicBlock, SwitchTargets, UnwindTerminateReason};
 use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, ValidityRequirement};
 use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
 use rustc_middle::ty::{self, Instance, Ty};
 use rustc_session::config::OptLevel;
-use rustc_span::{source_map::Spanned, sym, Span, Symbol};
+use rustc_span::{source_map::Spanned, sym, Span};
 use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg};
 use rustc_target::abi::{self, HasDataLayout, WrappingRange};
 use rustc_target::spec::abi::Abi;
@@ -680,7 +680,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         &mut self,
         helper: &TerminatorCodegenHelper<'tcx>,
         bx: &mut Bx,
-        intrinsic: Option<Symbol>,
+        intrinsic: Option<ty::IntrinsicDef>,
         instance: Option<Instance<'tcx>>,
         source_info: mir::SourceInfo,
         target: Option<mir::BasicBlock>,
@@ -690,7 +690,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // Emit a panic or a no-op for `assert_*` intrinsics.
         // These are intrinsics that compile to panics so that we can get a message
         // which mentions the offending type, even from a const context.
-        let panic_intrinsic = intrinsic.and_then(|s| ValidityRequirement::from_intrinsic(s));
+        let panic_intrinsic = intrinsic.and_then(|i| ValidityRequirement::from_intrinsic(i.name));
         if let Some(requirement) = panic_intrinsic {
             let ty = instance.unwrap().args.type_at(0);
@@ -826,14 +826,20 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // The arguments we'll be passing. Plus one to account for outptr, if used.
         let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;

-        if intrinsic == Some(sym::caller_location) {
+        if matches!(intrinsic, Some(ty::IntrinsicDef { name: sym::caller_location, .. })) {
             return if let Some(target) = target {
                 let location =
                     self.get_caller_location(bx, mir::SourceInfo { span: fn_span, ..source_info });

                 let mut llargs = Vec::with_capacity(arg_count);
-                let ret_dest =
-                    self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, true, true);
+                let ret_dest = self.make_return_dest(
+                    bx,
+                    destination,
+                    &fn_abi.ret,
+                    &mut llargs,
+                    intrinsic,
+                    Some(target),
+                );
                 assert_eq!(llargs, []);
                 if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
                     location.val.store(bx, tmp);
@@ -846,16 +852,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         }

         let instance = match intrinsic {
-            None | Some(sym::drop_in_place) => instance,
+            None | Some(ty::IntrinsicDef { name: sym::drop_in_place, .. }) => instance,
             Some(intrinsic) => {
                 let mut llargs = Vec::with_capacity(1);
                 let ret_dest = self.make_return_dest(
                     bx,
                     destination,
                     &fn_abi.ret,
                     &mut llargs,
-                    true,
-                    target.is_some(),
+                    Some(intrinsic),
+                    target,
                 );
                 let dest = match ret_dest {
                     _ if fn_abi.ret.is_indirect() => llargs[0],
@@ -873,7 +879,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         // The indices passed to simd_shuffle in the
                         // third argument must be constant. This is
                         // checked by the type-checker.
-                        if i == 2 && intrinsic == sym::simd_shuffle {
+                        if i == 2 && intrinsic.name == sym::simd_shuffle {
                             if let mir::Operand::Constant(constant) = &arg.node {
                                 let (llval, ty) = self.simd_shuffle_indices(bx, constant);
                                 return OperandRef {
@@ -903,14 +909,33 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                             MergingSucc::False
                         };
                     }
-                    Err(instance) => Some(instance),
+                    Err(instance) => {
+                        if intrinsic.must_be_overridden {
+                            span_bug!(
+                                span,
+                                "intrinsic {} must be overridden by codegen backend, but isn't",
+                                intrinsic.name,
+                            );
+                        }
+                        Some(instance)
+                    }
                 }
             }
         };

         let mut llargs = Vec::with_capacity(arg_count);
         let destination = target.as_ref().map(|&target| {
-            (self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, false, true), target)
+            (
+                self.make_return_dest(
+                    bx,
+                    destination,
+                    &fn_abi.ret,
+                    &mut llargs,
+                    None,
+                    Some(target),
+                ),
+                target,
+            )
         });

         // Split the rust-call tupled arguments off.
@@ -1643,10 +1668,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         dest: mir::Place<'tcx>,
         fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
         llargs: &mut Vec<Bx::Value>,
-        is_intrinsic: bool,
-        has_target: bool,
+        intrinsic: Option<ty::IntrinsicDef>,
+        target: Option<BasicBlock>,
     ) -> ReturnDest<'tcx, Bx::Value> {
-        if !has_target {
+        if target.is_none() {
             return ReturnDest::Nothing;
         }
         // If the return is ignored, we can just return a do-nothing `ReturnDest`.
@@ -1667,7 +1692,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         tmp.storage_live(bx);
                         llargs.push(tmp.llval);
                         ReturnDest::IndirectOperand(tmp, index)
-                    } else if is_intrinsic {
+                    } else if intrinsic.is_some() {
                         // Currently, intrinsics always need a location to store
                         // the result, so we create a temporary `alloca` for the
                         // result.
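
Reviewer note: the diff threads the full `ty::IntrinsicDef` (and the actual `Option<BasicBlock>` return target) through `make_return_dest` in place of the old `is_intrinsic`/`has_target` booleans, so call sites keep the intrinsic's `name` and `must_be_overridden` data where they need it. The sketch below is illustrative only and not rustc code; `IntrinsicDef`, `describe`, and the block numbers are made up to show the flag-to-`Option` pattern on its own.

```rust
// Illustrative sketch only (hypothetical types/names, not rustc internals):
// replacing paired boolean flags with Options that carry the gated data.

#[derive(Clone, Copy, Debug)]
struct IntrinsicDef {
    name: &'static str,
    must_be_overridden: bool,
}

/// `target.is_none()` plays the role of the old `!has_target` check and
/// `intrinsic.is_some()` the role of the old `is_intrinsic` flag, while the
/// payload (name, override requirement) stays available when it is needed.
fn describe(intrinsic: Option<IntrinsicDef>, target: Option<u32>) -> String {
    match (intrinsic, target) {
        (_, None) => "no return target, nothing to store".to_string(),
        (Some(def), Some(bb)) => {
            // Mirrors the diff's `must_be_overridden` sanity check.
            assert!(!def.must_be_overridden, "backend must override `{}`", def.name);
            format!("intrinsic `{}` returning to block {bb}", def.name)
        }
        (None, Some(bb)) => format!("ordinary call returning to block {bb}"),
    }
}

fn main() {
    let caller_location = IntrinsicDef { name: "caller_location", must_be_overridden: false };
    println!("{}", describe(Some(caller_location), Some(3)));
    println!("{}", describe(None, None));
}
```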