@@ -75,13 +75,13 @@ fn get_simple_intrinsic(cx: &CodegenCx<'ll, '_>, name: Symbol) -> Option<&'ll Va
 
 impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
     fn codegen_intrinsic_call(
-        &mut self,
+        mut self,
         instance: ty::Instance<'tcx>,
         fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
         args: &[OperandRef<'tcx, &'ll Value>],
         llresult: &'ll Value,
         span: Span,
-    ) {
+    ) -> Self {
         let tcx = self.tcx;
         let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
 
@@ -97,10 +97,10 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
         let name = tcx.item_name(def_id);
         let name_str = &*name.as_str();
 
-        let llret_ty = self.layout_of(ret_ty).llvm_type(self);
+        let llret_ty = self.layout_of(ret_ty).llvm_type(&self);
         let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
 
-        let simple = get_simple_intrinsic(self, name);
+        let simple = get_simple_intrinsic(&self, name);
         let llval = match name {
             _ if simple.is_some() => self.call(
                 simple.unwrap(),
@@ -118,13 +118,13 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
             }
             kw::Try => {
                 try_intrinsic(
-                    self,
+                    &mut self,
                     args[0].immediate(),
                     args[1].immediate(),
                     args[2].immediate(),
                     llresult,
                 );
-                return;
+                return self;
             }
             sym::breakpoint => {
                 let llfn = self.get_intrinsic(&("llvm.debugtrap"));
@@ -135,7 +135,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                 self.call(intrinsic, &[args[0].immediate(), args[1].immediate()], None, None)
             }
             sym::va_arg => {
-                match fn_abi.ret.layout.abi {
+                let (new_bx, val) = match fn_abi.ret.layout.abi {
                     abi::Abi::Scalar(ref scalar) => {
                         match scalar.value {
                             Primitive::Int(..) => {
@@ -144,8 +144,10 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                                     // less than 4 bytes in length. If it is, promote
                                     // the integer to a `i32` and truncate the result
                                     // back to the smaller type.
-                                    let promoted_result = emit_va_arg(self, args[0], tcx.types.i32);
-                                    self.trunc(promoted_result, llret_ty)
+                                    let (mut new_bx, promoted_result) =
+                                        emit_va_arg(self, args[0], tcx.types.i32);
+                                    let val = new_bx.trunc(promoted_result, llret_ty);
+                                    (new_bx, val)
                                 } else {
                                     emit_va_arg(self, args[0], ret_ty)
                                 }
@@ -158,14 +160,16 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                         }
                     }
                     _ => bug!("the va_arg intrinsic does not work with non-scalar types"),
-                }
+                };
+                self = new_bx;
+                val
             }
 
             sym::volatile_load | sym::unaligned_volatile_load => {
                 let tp_ty = substs.type_at(0);
                 let mut ptr = args[0].immediate();
                 if let PassMode::Cast(ty) = fn_abi.ret.mode {
-                    ptr = self.pointercast(ptr, self.type_ptr_to(ty.llvm_type(self)));
+                    ptr = self.pointercast(ptr, self.type_ptr_to(ty.llvm_type(&self)));
                 }
                 let load = self.volatile_load(ptr);
                 let align = if name == sym::unaligned_volatile_load {
@@ -180,13 +184,13 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
             }
             sym::volatile_store => {
                 let dst = args[0].deref(self.cx());
-                args[1].val.volatile_store(self, dst);
-                return;
+                args[1].val.volatile_store(&mut self, dst);
+                return self;
             }
             sym::unaligned_volatile_store => {
                 let dst = args[0].deref(self.cx());
-                args[1].val.unaligned_volatile_store(self, dst);
-                return;
+                args[1].val.unaligned_volatile_store(&mut self, dst);
+                return self;
             }
             sym::prefetch_read_data
             | sym::prefetch_write_data
@@ -224,7 +228,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
             | sym::saturating_add
             | sym::saturating_sub => {
                 let ty = arg_tys[0];
-                match int_type_width_signed(ty, self) {
+                match int_type_width_signed(ty, &self) {
                     Some((width, signed)) => match name {
                         sym::ctlz | sym::cttz => {
                             let y = self.const_bool(false);
@@ -296,15 +300,17 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                                 name, ty
                             ),
                         );
-                        return;
+                        return self;
                     }
                 }
             }
 
             _ if name_str.starts_with("simd_") => {
-                match generic_simd_intrinsic(self, name, callee_ty, args, ret_ty, llret_ty, span) {
+                match generic_simd_intrinsic(
+                    &mut self, name, callee_ty, args, ret_ty, llret_ty, span,
+                ) {
                     Ok(llval) => llval,
-                    Err(()) => return,
+                    Err(()) => return self,
                 }
             }
 
@@ -313,15 +319,17 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
 
         if !fn_abi.ret.is_ignore() {
             if let PassMode::Cast(ty) = fn_abi.ret.mode {
-                let ptr_llty = self.type_ptr_to(ty.llvm_type(self));
+                let ptr_llty = self.type_ptr_to(ty.llvm_type(&self));
                 let ptr = self.pointercast(result.llval, ptr_llty);
                 self.store(llval, ptr, result.align);
             } else {
-                OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
+                OperandRef::from_immediate_or_packed_pair(&mut self, llval, result.layout)
                     .val
-                    .store(self, result);
+                    .store(&mut self, result);
             }
         }
+
+        self
     }
 
     fn abort(&mut self) {
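For context, the signature change above switches `codegen_intrinsic_call` from borrowing the builder (`&mut self`) to consuming and returning it (`mut self ... -> Self`): every early `return` becomes `return self`, call sites that only need temporary access get `&self` or `&mut self`, and `emit_va_arg` now hands back a builder alongside its value. Below is a minimal sketch of that by-value builder pattern using a hypothetical `Builder` type; it illustrates the ownership flow only and is not the rustc API:

```rust
// Hypothetical stand-in for the codegen builder; not the rustc type.
struct Builder {
    insts: Vec<String>,
}

impl Builder {
    // By-value style, as in the diff: take `mut self`, mutate, and return
    // `Self` so the caller rebinds the builder (`) -> Self` / `return self`).
    fn emit(mut self, op: &str) -> Self {
        self.insts.push(op.to_string());
        self
    }
}

// Helpers that only need temporary access still take `&mut`, matching the
// `&mut self` arguments passed to `try_intrinsic` and `volatile_store` above.
fn helper(bx: &mut Builder, op: &str) {
    bx.insts.push(op.to_string());
}

// A helper that may swap in a different builder returns it with its result,
// matching `let (mut new_bx, promoted_result) = emit_va_arg(...)` followed
// by `self = new_bx;` in the `va_arg` arm.
fn produce(bx: Builder, op: &str) -> (Builder, usize) {
    let bx = bx.emit(op);
    let n = bx.insts.len();
    (bx, n)
}

fn main() {
    let mut bx = Builder { insts: Vec::new() };
    bx = bx.emit("add");          // ownership flows in and back out
    helper(&mut bx, "store");     // short-lived mutable borrow
    let (new_bx, count) = produce(bx, "va_arg");
    bx = new_bx;                  // rebind, like `self = new_bx;`
    assert_eq!(count, 3);
    assert_eq!(bx.insts, ["add", "store", "va_arg"]);
}
```

The tuple-return shape is what lets the `va_arg` arm rebind `self = new_bx;` before yielding `val`, keeping a single explicit owner of the builder even when a helper switches basic blocks.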