Skip to content

Commit f4f6441

Browse files
authored
Rollup merge of rust-lang#123775 - scottmcm:place-val, r=cjgillot
Make `PlaceRef` and `OperandValue::Ref` share a common `PlaceValue` type Both `PlaceRef` and `OperandValue::Ref` need the triple of the backend pointer immediate, the optional backend metadata for DSTs, and the actual alignment of the place (since it can differ from the ABI alignment). This PR introduces a new `PlaceValue` type for those three values, leaving [`PlaceRef`](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_ssa/mir/place/struct.PlaceRef.html) with the `TyAndLayout` and a `PlaceValue`, just like how [`OperandRef`](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_ssa/mir/operand/struct.OperandRef.html) is a `TyAndLayout` and an `OperandValue`. This means that various places that use `Ref`s as places can just pass the `PlaceValue` along, like in the below excerpt from the diff: ```diff match operand.val { - OperandValue::Ref(ptr, meta, align) => { - debug_assert_eq!(meta, None); + OperandValue::Ref(source_place_val) => { + debug_assert_eq!(source_place_val.llextra, None); debug_assert!(matches!(operand_kind, OperandValueKind::Ref)); - let fake_place = PlaceRef::new_sized_aligned(ptr, cast, align); + let fake_place = PlaceRef { val: source_place_val, layout: cast }; Some(bx.load_operand(fake_place).val) } ``` There's more refactoring that I'd like to do after this, but I wanted to stop the PR here where it's hopefully easy (albeit probably not quick) to review since I tried to keep every change line-by-line clear. (Most are just adding `.val` to get to a field.) You can also go commit-at-a-time if you'd like. Each passed tidy and the codegen tests on my machine (though I didn't run the cg_gcc ones).
2 parents a510cbd + d0ae768 commit f4f6441

File tree

14 files changed

+239
-169
lines changed

14 files changed

+239
-169
lines changed

compiler/rustc_codegen_gcc/src/builder.rs

+13-12
Original file line numberDiff line numberDiff line change
@@ -974,7 +974,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
974974
&mut self,
975975
place: PlaceRef<'tcx, RValue<'gcc>>,
976976
) -> OperandRef<'tcx, RValue<'gcc>> {
977-
assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
977+
assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());
978978

979979
if place.layout.is_zst() {
980980
return OperandRef::zero_sized(place.layout);
@@ -999,10 +999,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
999999
}
10001000
}
10011001

1002-
let val = if let Some(llextra) = place.llextra {
1003-
OperandValue::Ref(place.llval, Some(llextra), place.align)
1002+
let val = if let Some(_) = place.val.llextra {
1003+
// FIXME: Merge with the `else` below?
1004+
OperandValue::Ref(place.val)
10041005
} else if place.layout.is_gcc_immediate() {
1005-
let load = self.load(place.layout.gcc_type(self), place.llval, place.align);
1006+
let load = self.load(place.layout.gcc_type(self), place.val.llval, place.val.align);
10061007
if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
10071008
scalar_load_metadata(self, load, scalar);
10081009
}
@@ -1012,9 +1013,9 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
10121013

10131014
let mut load = |i, scalar: &abi::Scalar, align| {
10141015
let llptr = if i == 0 {
1015-
place.llval
1016+
place.val.llval
10161017
} else {
1017-
self.inbounds_ptradd(place.llval, self.const_usize(b_offset.bytes()))
1018+
self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
10181019
};
10191020
let llty = place.layout.scalar_pair_element_gcc_type(self, i);
10201021
let load = self.load(llty, llptr, align);
@@ -1027,11 +1028,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
10271028
};
10281029

10291030
OperandValue::Pair(
1030-
load(0, a, place.align),
1031-
load(1, b, place.align.restrict_for_offset(b_offset)),
1031+
load(0, a, place.val.align),
1032+
load(1, b, place.val.align.restrict_for_offset(b_offset)),
10321033
)
10331034
} else {
1034-
OperandValue::Ref(place.llval, None, place.align)
1035+
OperandValue::Ref(place.val)
10351036
};
10361037

10371038
OperandRef { val, layout: place.layout }
@@ -1045,8 +1046,8 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
10451046
) {
10461047
let zero = self.const_usize(0);
10471048
let count = self.const_usize(count);
1048-
let start = dest.project_index(self, zero).llval;
1049-
let end = dest.project_index(self, count).llval;
1049+
let start = dest.project_index(self, zero).val.llval;
1050+
let end = dest.project_index(self, count).val.llval;
10501051

10511052
let header_bb = self.append_sibling_block("repeat_loop_header");
10521053
let body_bb = self.append_sibling_block("repeat_loop_body");
@@ -1064,7 +1065,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
10641065
self.cond_br(keep_going, body_bb, next_bb);
10651066

10661067
self.switch_to_block(body_bb);
1067-
let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
1068+
let align = dest.val.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
10681069
cg_elem.val.store(self, PlaceRef::new_sized_aligned(current_val, cg_elem.layout, align));
10691070

10701071
let next = self.inbounds_gep(

compiler/rustc_codegen_gcc/src/intrinsic/mod.rs

+13-8
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ use rustc_codegen_ssa::base::wants_msvc_seh;
1111
use rustc_codegen_ssa::common::IntPredicate;
1212
use rustc_codegen_ssa::errors::InvalidMonomorphization;
1313
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
14-
use rustc_codegen_ssa::mir::place::PlaceRef;
14+
use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
1515
use rustc_codegen_ssa::traits::{
1616
ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods,
1717
};
@@ -354,7 +354,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
354354

355355
let block = self.llbb();
356356
let extended_asm = block.add_extended_asm(None, "");
357-
extended_asm.add_input_operand(None, "r", result.llval);
357+
extended_asm.add_input_operand(None, "r", result.val.llval);
358358
extended_asm.add_clobber("memory");
359359
extended_asm.set_volatile_flag(true);
360360

@@ -388,8 +388,8 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
388388
if !fn_abi.ret.is_ignore() {
389389
if let PassMode::Cast { cast: ty, .. } = &fn_abi.ret.mode {
390390
let ptr_llty = self.type_ptr_to(ty.gcc_type(self));
391-
let ptr = self.pointercast(result.llval, ptr_llty);
392-
self.store(llval, ptr, result.align);
391+
let ptr = self.pointercast(result.val.llval, ptr_llty);
392+
self.store(llval, ptr, result.val.align);
393393
} else {
394394
OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
395395
.val
@@ -502,7 +502,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
502502
return;
503503
}
504504
if self.is_sized_indirect() {
505-
OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
505+
OperandValue::Ref(PlaceValue::new_sized(val, self.layout.align.abi)).store(bx, dst)
506506
} else if self.is_unsized_indirect() {
507507
bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
508508
} else if let PassMode::Cast { ref cast, .. } = self.mode {
@@ -511,7 +511,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
511511
let can_store_through_cast_ptr = false;
512512
if can_store_through_cast_ptr {
513513
let cast_ptr_llty = bx.type_ptr_to(cast.gcc_type(bx));
514-
let cast_dst = bx.pointercast(dst.llval, cast_ptr_llty);
514+
let cast_dst = bx.pointercast(dst.val.llval, cast_ptr_llty);
515515
bx.store(val, cast_dst, self.layout.align.abi);
516516
} else {
517517
// The actual return type is a struct, but the ABI
@@ -539,7 +539,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
539539

540540
// ... and then memcpy it to the intended destination.
541541
bx.memcpy(
542-
dst.llval,
542+
dst.val.llval,
543543
self.layout.align.abi,
544544
llscratch,
545545
scratch_align,
@@ -571,7 +571,12 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
571571
OperandValue::Pair(next(), next()).store(bx, dst);
572572
}
573573
PassMode::Indirect { meta_attrs: Some(_), .. } => {
574-
OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
574+
let place_val = PlaceValue {
575+
llval: next(),
576+
llextra: Some(next()),
577+
align: self.layout.align.abi,
578+
};
579+
OperandValue::Ref(place_val).store(bx, dst);
575580
}
576581
PassMode::Direct(_)
577582
| PassMode::Indirect { meta_attrs: None, .. }

compiler/rustc_codegen_gcc/src/intrinsic/simd.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
8282
let place = PlaceRef::alloca(bx, args[0].layout);
8383
args[0].val.store(bx, place);
8484
let int_ty = bx.type_ix(expected_bytes * 8);
85-
let ptr = bx.pointercast(place.llval, bx.cx.type_ptr_to(int_ty));
85+
let ptr = bx.pointercast(place.val.llval, bx.cx.type_ptr_to(int_ty));
8686
bx.load(int_ty, ptr, Align::ONE)
8787
}
8888
_ => return_error!(InvalidMonomorphization::InvalidBitmask {

compiler/rustc_codegen_llvm/src/abi.rs

+9-4
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ use crate::type_of::LayoutLlvmExt;
77
use crate::value::Value;
88

99
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
10-
use rustc_codegen_ssa::mir::place::PlaceRef;
10+
use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
1111
use rustc_codegen_ssa::traits::*;
1212
use rustc_codegen_ssa::MemFlags;
1313
use rustc_middle::bug;
@@ -207,7 +207,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
207207
// Sized indirect arguments
208208
PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
209209
let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
210-
OperandValue::Ref(val, None, align).store(bx, dst);
210+
OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
211211
}
212212
// Unsized indirect arguments
213213
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
@@ -233,7 +233,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
233233
bx.store(val, llscratch, scratch_align);
234234
// ... and then memcpy it to the intended destination.
235235
bx.memcpy(
236-
dst.llval,
236+
dst.val.llval,
237237
self.layout.align.abi,
238238
llscratch,
239239
scratch_align,
@@ -265,7 +265,12 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
265265
OperandValue::Pair(next(), next()).store(bx, dst);
266266
}
267267
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
268-
OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
268+
let place_val = PlaceValue {
269+
llval: next(),
270+
llextra: Some(next()),
271+
align: self.layout.align.abi,
272+
};
273+
OperandValue::Ref(place_val).store(bx, dst);
269274
}
270275
PassMode::Direct(_)
271276
| PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }

compiler/rustc_codegen_llvm/src/builder.rs

+11-10
Original file line numberDiff line numberDiff line change
@@ -535,7 +535,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
535535
panic!("unsized locals must not be `extern` types");
536536
}
537537
}
538-
assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
538+
assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());
539539

540540
if place.layout.is_zst() {
541541
return OperandRef::zero_sized(place.layout);
@@ -579,13 +579,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
579579
}
580580
}
581581

582-
let val = if let Some(llextra) = place.llextra {
583-
OperandValue::Ref(place.llval, Some(llextra), place.align)
582+
let val = if let Some(_) = place.val.llextra {
583+
// FIXME: Merge with the `else` below?
584+
OperandValue::Ref(place.val)
584585
} else if place.layout.is_llvm_immediate() {
585586
let mut const_llval = None;
586587
let llty = place.layout.llvm_type(self);
587588
unsafe {
588-
if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
589+
if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
589590
if llvm::LLVMIsGlobalConstant(global) == llvm::True {
590591
if let Some(init) = llvm::LLVMGetInitializer(global) {
591592
if self.val_ty(init) == llty {
@@ -596,7 +597,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
596597
}
597598
}
598599
let llval = const_llval.unwrap_or_else(|| {
599-
let load = self.load(llty, place.llval, place.align);
600+
let load = self.load(llty, place.val.llval, place.val.align);
600601
if let abi::Abi::Scalar(scalar) = place.layout.abi {
601602
scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
602603
}
@@ -608,9 +609,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
608609

609610
let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
610611
let llptr = if i == 0 {
611-
place.llval
612+
place.val.llval
612613
} else {
613-
self.inbounds_ptradd(place.llval, self.const_usize(b_offset.bytes()))
614+
self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
614615
};
615616
let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
616617
let load = self.load(llty, llptr, align);
@@ -619,11 +620,11 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
619620
};
620621

621622
OperandValue::Pair(
622-
load(0, a, place.layout, place.align, Size::ZERO),
623-
load(1, b, place.layout, place.align.restrict_for_offset(b_offset), b_offset),
623+
load(0, a, place.layout, place.val.align, Size::ZERO),
624+
load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
624625
)
625626
} else {
626-
OperandValue::Ref(place.llval, None, place.align)
627+
OperandValue::Ref(place.val)
627628
};
628629

629630
OperandRef { val, layout: place.layout }

compiler/rustc_codegen_llvm/src/intrinsic.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -264,7 +264,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
264264
llvm::LLVMSetAlignment(load, align);
265265
}
266266
if !result.layout.is_zst() {
267-
self.store(load, result.llval, result.align);
267+
self.store_to_place(load, result.val);
268268
}
269269
return Ok(());
270270
}
@@ -428,7 +428,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
428428

429429
sym::black_box => {
430430
args[0].val.store(self, result);
431-
let result_val_span = [result.llval];
431+
let result_val_span = [result.val.llval];
432432
// We need to "use" the argument in some way LLVM can't introspect, and on
433433
// targets that support it we can typically leverage inline assembly to do
434434
// this. LLVM's interpretation of inline assembly is that it's, well, a black
@@ -482,7 +482,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
482482

483483
if !fn_abi.ret.is_ignore() {
484484
if let PassMode::Cast { .. } = &fn_abi.ret.mode {
485-
self.store(llval, result.llval, result.align);
485+
self.store(llval, result.val.llval, result.val.align);
486486
} else {
487487
OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
488488
.val
@@ -1065,7 +1065,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
10651065
let place = PlaceRef::alloca(bx, args[0].layout);
10661066
args[0].val.store(bx, place);
10671067
let int_ty = bx.type_ix(expected_bytes * 8);
1068-
bx.load(int_ty, place.llval, Align::ONE)
1068+
bx.load(int_ty, place.val.llval, Align::ONE)
10691069
}
10701070
_ => return_error!(InvalidMonomorphization::InvalidBitmask {
10711071
span,

0 commit comments

Comments
 (0)