@@ -6148,15 +6148,15 @@ fn decl_native_fn_and_pair(@crate_ctxt cx,
     auto arg_n = 3u;
     auto pass_task;
 
-    auto lltaskptr = bcx.build.PtrToInt(fcx.lltaskptr, T_int());
+    auto lltaskptr = vp2i(bcx, fcx.lltaskptr);
     alt (abi) {
         case (ast.native_abi_rust) {
             pass_task = true;
             call_args += vec(lltaskptr);
             for each (uint i in _uint.range(0u, num_ty_param)) {
                 auto llarg = llvm.LLVMGetParam(fcx.llfn, arg_n);
                 check (llarg as int != 0);
-                call_args += vec(bcx.build.PointerCast(llarg, T_i32()));
+                call_args += vec(vp2i(bcx, llarg));
                 arg_n += 1u;
             }
         }
@@ -6169,6 +6169,26 @@ fn decl_native_fn_and_pair(@crate_ctxt cx,
         }
     }
 
+    fn push_arg(@block_ctxt cx,
+                &mutable vec[ValueRef] args,
+                ValueRef v,
+                @ty.t t) {
+        if (ty.type_is_integral(t)) {
+            auto lldsttype = T_int();
+            auto llsrctype = type_of(cx.fcx.ccx, t);
+            if (llvm.LLVMGetIntTypeWidth(lldsttype) >
+                    llvm.LLVMGetIntTypeWidth(llsrctype)) {
+                args += vec(cx.build.ZExtOrBitCast(v, T_int()));
+            } else {
+                args += vec(cx.build.TruncOrBitCast(v, T_int()));
+            }
+        } else if (ty.type_is_fp(t)) {
+            args += vec(cx.build.FPToSI(v, T_int()));
+        } else {
+            args += vec(vp2i(cx, v));
+        }
+    }
+
     auto r;
     auto rptr;
     auto args = ty.ty_fn_args(fn_type);
@@ -6192,7 +6212,7 @@ fn decl_native_fn_and_pair(@crate_ctxt cx,
     for (ty.arg arg in args) {
         auto llarg = llvm.LLVMGetParam(fcx.llfn, arg_n);
         check (llarg as int != 0);
-        call_args += vec(bcx.build.PointerCast(llarg, T_i32()));
+        push_arg(bcx, call_args, llarg, arg.ty);
         arg_n += 1u;
     }
 
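A note on the conversion rule this change introduces: native calls here pass every argument as a single machine word, so `push_arg` coerces each Rust-level argument to the word type `T_int()`. Integral arguments are zero-extended when narrower than the word and truncated (or bitcast, when the widths already match) otherwise; floating-point arguments go through `FPToSI`; everything else is reinterpreted as a word via `vp2i` (pointer-to-int). The sketch below restates that decision table in modern Rust; `CastOp`, `choose_cast`, and the explicit bit-width parameters are hypothetical names for illustration, not part of the compiler.

```rust
/// Which LLVM cast `push_arg` would emit for a given argument type.
/// A minimal sketch of the selection logic only; the real code calls
/// the LLVM IR builder instead of returning an enum.
#[derive(Debug, PartialEq)]
enum CastOp {
    ZExtOrBitCast,  // integer narrower than the machine word: widen
    TruncOrBitCast, // integer at least word-sized: narrow (or bitcast if equal)
    FpToSi,         // floating point: convert to a signed word
    PtrToInt,       // anything else: reinterpret the value as a word
}

fn choose_cast(is_integral: bool, is_fp: bool, word_bits: u32, src_bits: u32) -> CastOp {
    if is_integral {
        // Mirrors the LLVMGetIntTypeWidth comparison in push_arg.
        if word_bits > src_bits {
            CastOp::ZExtOrBitCast
        } else {
            CastOp::TruncOrBitCast
        }
    } else if is_fp {
        CastOp::FpToSi
    } else {
        CastOp::PtrToInt
    }
}

fn main() {
    // Assuming a 32-bit machine word, as the removed T_i32() casts suggest:
    assert_eq!(choose_cast(true, false, 32, 8), CastOp::ZExtOrBitCast);
    assert_eq!(choose_cast(true, false, 32, 64), CastOp::TruncOrBitCast);
    assert_eq!(choose_cast(false, true, 32, 64), CastOp::FpToSi);
    assert_eq!(choose_cast(false, false, 32, 32), CastOp::PtrToInt);
}
```

One design note: because the width comparison uses `>`, an argument that is already word-sized takes the `TruncOrBitCast` path, where the cast degenerates to a no-op bitcast, so no separate equal-width case is needed.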