@@ -12,12 +12,12 @@ use crate::MemFlags;
 use rustc_ast as ast;
 use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_hir::lang_items::LangItem;
-use rustc_middle::mir::{self, AssertKind, SwitchTargets, UnwindTerminateReason};
+use rustc_middle::mir::{self, AssertKind, BasicBlock, SwitchTargets, UnwindTerminateReason};
 use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, ValidityRequirement};
 use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
 use rustc_middle::ty::{self, Instance, Ty};
 use rustc_session::config::OptLevel;
-use rustc_span::{source_map::Spanned, sym, Span, Symbol};
+use rustc_span::{source_map::Spanned, sym, Span};
 use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg};
 use rustc_target::abi::{self, HasDataLayout, WrappingRange};
 use rustc_target::spec::abi::Abi;
@@ -672,7 +672,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         &mut self,
         helper: &TerminatorCodegenHelper<'tcx>,
         bx: &mut Bx,
-        intrinsic: Option<Symbol>,
+        intrinsic: Option<ty::IntrinsicDef>,
         instance: Option<Instance<'tcx>>,
         source_info: mir::SourceInfo,
         target: Option<mir::BasicBlock>,
@@ -682,7 +682,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // Emit a panic or a no-op for `assert_*` intrinsics.
         // These are intrinsics that compile to panics so that we can get a message
         // which mentions the offending type, even from a const context.
-        let panic_intrinsic = intrinsic.and_then(|s| ValidityRequirement::from_intrinsic(s));
+        let panic_intrinsic = intrinsic.and_then(|i| ValidityRequirement::from_intrinsic(i.name));
         if let Some(requirement) = panic_intrinsic {
             let ty = instance.unwrap().args.type_at(0);
 
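
Note: the remaining hunks read two fields off the new `ty::IntrinsicDef` parameter, `name` and `must_be_overridden`. As a minimal sketch of the shape this change assumes (the authoritative definition lives in `rustc_middle::ty` and may carry more fields than shown here):

    use rustc_span::Symbol;

    // Sketch only, not the rustc_middle definition verbatim.
    pub struct IntrinsicDef {
        /// The intrinsic's name, which the old code passed around as a bare `Symbol`.
        pub name: Symbol,
        /// Whether the intrinsic has no fallback body and must therefore be
        /// lowered by the codegen backend itself (see the new `span_bug!` below).
        pub must_be_overridden: bool,
    }
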
@@ -818,14 +818,20 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // The arguments we'll be passing. Plus one to account for outptr, if used.
         let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;
 
-        if intrinsic == Some(sym::caller_location) {
+        if matches!(intrinsic, Some(ty::IntrinsicDef { name: sym::caller_location, .. })) {
             return if let Some(target) = target {
                 let location =
                     self.get_caller_location(bx, mir::SourceInfo { span: fn_span, ..source_info });
 
                 let mut llargs = Vec::with_capacity(arg_count);
-                let ret_dest =
-                    self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, true, true);
+                let ret_dest = self.make_return_dest(
+                    bx,
+                    destination,
+                    &fn_abi.ret,
+                    &mut llargs,
+                    intrinsic,
+                    Some(target),
+                );
                 assert_eq!(llargs, []);
                 if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
                     location.val.store(bx, tmp);
@@ -838,16 +844,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         }
 
         let instance = match intrinsic {
-            None | Some(sym::drop_in_place) => instance,
+            None | Some(ty::IntrinsicDef { name: sym::drop_in_place, .. }) => instance,
             Some(intrinsic) => {
                 let mut llargs = Vec::with_capacity(1);
                 let ret_dest = self.make_return_dest(
                     bx,
                     destination,
                     &fn_abi.ret,
                     &mut llargs,
-                    true,
-                    target.is_some(),
+                    Some(intrinsic),
+                    target,
                 );
                 let dest = match ret_dest {
                     _ if fn_abi.ret.is_indirect() => llargs[0],
@@ -865,7 +871,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         // The indices passed to simd_shuffle in the
                         // third argument must be constant. This is
                         // checked by the type-checker.
-                        if i == 2 && intrinsic == sym::simd_shuffle {
+                        if i == 2 && intrinsic.name == sym::simd_shuffle {
                             if let mir::Operand::Constant(constant) = &arg.node {
                                 let (llval, ty) = self.simd_shuffle_indices(bx, constant);
                                 return OperandRef {
@@ -895,14 +901,33 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                             MergingSucc::False
                         };
                     }
-                    Err(instance) => Some(instance),
+                    Err(instance) => {
+                        if intrinsic.must_be_overridden {
+                            span_bug!(
+                                span,
+                                "intrinsic {} must be overridden by codegen backend, but isn't",
+                                intrinsic.name,
+                            );
+                        }
+                        Some(instance)
+                    }
                 }
             }
         };
 
         let mut llargs = Vec::with_capacity(arg_count);
         let destination = target.as_ref().map(|&target| {
-            (self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, false, true), target)
+            (
+                self.make_return_dest(
+                    bx,
+                    destination,
+                    &fn_abi.ret,
+                    &mut llargs,
+                    None,
+                    Some(target),
+                ),
+                target,
+            )
         });
 
         // Split the rust-call tupled arguments off.
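
The new `Err` arm above distinguishes intrinsics the backend is required to lower from those that can fall back to an ordinary call of their fallback body. As an illustration only, outside the context of this diff (the helper and the `tcx.intrinsic` query usage are assumptions, not something this patch introduces):

    use rustc_hir::def_id::DefId;
    use rustc_middle::ty::TyCtxt;

    // Hypothetical helper: can codegen treat this def as a plain function call
    // when the backend has no special lowering for it?
    fn can_fall_back_to_call<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
        match tcx.intrinsic(def_id) {
            // Not an intrinsic: always a plain call.
            None => true,
            // Intrinsic with a fallback body: calling it is fine; otherwise the
            // backend must override it, mirroring the `span_bug!` above.
            Some(def) => !def.must_be_overridden,
        }
    }
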
@@ -1635,10 +1660,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         dest: mir::Place<'tcx>,
         fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
         llargs: &mut Vec<Bx::Value>,
-        is_intrinsic: bool,
-        has_target: bool,
+        intrinsic: Option<ty::IntrinsicDef>,
+        target: Option<BasicBlock>,
     ) -> ReturnDest<'tcx, Bx::Value> {
-        if !has_target {
+        if target.is_none() {
             return ReturnDest::Nothing;
         }
         // If the return is ignored, we can just return a do-nothing `ReturnDest`.
@@ -1659,7 +1684,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         tmp.storage_live(bx);
                         llargs.push(tmp.llval);
                         ReturnDest::IndirectOperand(tmp, index)
-                    } else if is_intrinsic {
+                    } else if intrinsic.is_some() {
                         // Currently, intrinsics always need a location to store
                         // the result, so we create a temporary `alloca` for the
                         // result.
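
The last two hunks replace the `is_intrinsic`/`has_target` booleans on `make_return_dest` with the values they were derived from, so callers now pass the intrinsic and target directly. A rough mapping, illustrative only (the free function and its name are hypothetical, not part of this patch):

    use rustc_middle::{mir::BasicBlock, ty};

    // How the removed flags relate to the new parameters.
    fn old_flags(intrinsic: Option<ty::IntrinsicDef>, target: Option<BasicBlock>) -> (bool, bool) {
        // (is_intrinsic, has_target), as the old `make_return_dest` signature took them.
        (intrinsic.is_some(), target.is_some())
    }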