Skip to content

Commit d4be8ef

Browse files
committed
Auto merge of rust-lang#110137 - Dylan-DPC:rollup-fdruvwp, r=Dylan-DPC
Rollup of 6 pull requests Successful merges: - rust-lang#109724 (prioritize param env candidates if they don't guide type inference) - rust-lang#110021 (Fix a couple ICEs in the new `CastKind::Transmute` code) - rust-lang#110044 (Avoid some manual slice length calculation) - rust-lang#110115 (compiletest: Use remap-path-prefix only in CI) - rust-lang#110121 (Fix `x check --stage 1` when download-rustc is enabled) - rust-lang#110124 (Some clippy fixes in the compiler) Failed merges: - rust-lang#109752 (Stall auto trait assembly in new solver for int/float vars) r? `@ghost` `@rustbot` modify labels: rollup
2 parents 7f7e8fb + 97921ab commit d4be8ef

File tree

76 files changed

+629
-300
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

76 files changed

+629
-300
lines changed

compiler/rustc_abi/src/lib.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -1176,7 +1176,7 @@ impl FieldsShape {
11761176

11771177
/// Gets source indices of the fields by increasing offsets.
11781178
#[inline]
1179-
pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
1179+
pub fn index_by_increasing_offset(&self) -> impl Iterator<Item = usize> + '_ {
11801180
let mut inverse_small = [0u8; 64];
11811181
let mut inverse_big = IndexVec::new();
11821182
let use_small = self.count() <= inverse_small.len();

compiler/rustc_arena/src/lib.rs

+9
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
#![feature(strict_provenance)]
2323
#![deny(rustc::untranslatable_diagnostic)]
2424
#![deny(rustc::diagnostic_outside_of_impl)]
25+
#![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.
2526

2627
use smallvec::SmallVec;
2728

@@ -568,7 +569,9 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
568569
}
569570

570571
pub trait ArenaAllocatable<'tcx, C = rustc_arena::IsNotCopy>: Sized {
572+
#[allow(clippy::mut_from_ref)]
571573
fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
574+
#[allow(clippy::mut_from_ref)]
572575
fn allocate_from_iter<'a>(
573576
arena: &'a Arena<'tcx>,
574577
iter: impl ::std::iter::IntoIterator<Item = Self>,
@@ -578,10 +581,12 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
578581
// Any type that impls `Copy` can be arena-allocated in the `DroplessArena`.
579582
impl<'tcx, T: Copy> ArenaAllocatable<'tcx, rustc_arena::IsCopy> for T {
580583
#[inline]
584+
#[allow(clippy::mut_from_ref)]
581585
fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
582586
arena.dropless.alloc(self)
583587
}
584588
#[inline]
589+
#[allow(clippy::mut_from_ref)]
585590
fn allocate_from_iter<'a>(
586591
arena: &'a Arena<'tcx>,
587592
iter: impl ::std::iter::IntoIterator<Item = Self>,
@@ -601,6 +606,7 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
601606
}
602607

603608
#[inline]
609+
#[allow(clippy::mut_from_ref)]
604610
fn allocate_from_iter<'a>(
605611
arena: &'a Arena<'tcx>,
606612
iter: impl ::std::iter::IntoIterator<Item = Self>,
@@ -616,19 +622,22 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
616622

617623
impl<'tcx> Arena<'tcx> {
618624
#[inline]
625+
#[allow(clippy::mut_from_ref)]
619626
pub fn alloc<T: ArenaAllocatable<'tcx, C>, C>(&self, value: T) -> &mut T {
620627
value.allocate_on(self)
621628
}
622629

623630
// Any type that impls `Copy` can have slices be arena-allocated in the `DroplessArena`.
624631
#[inline]
632+
#[allow(clippy::mut_from_ref)]
625633
pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
626634
if value.is_empty() {
627635
return &mut [];
628636
}
629637
self.dropless.alloc_slice(value)
630638
}
631639

640+
#[allow(clippy::mut_from_ref)]
632641
pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, C>, C>(
633642
&'a self,
634643
iter: impl ::std::iter::IntoIterator<Item = T>,

compiler/rustc_ast_passes/src/ast_validation.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -691,7 +691,7 @@ fn validate_generic_param_order(
691691
GenericParamKind::Lifetime => (),
692692
GenericParamKind::Const { ty: _, kw_span: _, default: Some(default) } => {
693693
ordered_params += " = ";
694-
ordered_params += &pprust::expr_to_string(&*default.value);
694+
ordered_params += &pprust::expr_to_string(&default.value);
695695
}
696696
GenericParamKind::Const { ty: _, kw_span: _, default: None } => (),
697697
}

compiler/rustc_ast_passes/src/feature_gate.rs

+5-2
Original file line numberDiff line numberDiff line change
@@ -404,11 +404,14 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
404404
);
405405
} else {
406406
// And if it isn't, cancel the early-pass warning.
407-
self.sess
407+
if let Some(err) = self
408+
.sess
408409
.parse_sess
409410
.span_diagnostic
410411
.steal_diagnostic(e.span, StashKey::EarlySyntaxWarning)
411-
.map(|err| err.cancel());
412+
{
413+
err.cancel()
414+
}
412415
}
413416
}
414417
ast::ExprKind::TryBlock(_) => {

compiler/rustc_ast_pretty/src/pprust/state.rs

+4-2
Original file line numberDiff line numberDiff line change
@@ -686,7 +686,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
686686
fn bclose_maybe_open(&mut self, span: rustc_span::Span, empty: bool, close_box: bool) {
687687
let has_comment = self.maybe_print_comment(span.hi());
688688
if !empty || has_comment {
689-
self.break_offset_if_not_bol(1, -(INDENT_UNIT as isize));
689+
self.break_offset_if_not_bol(1, -INDENT_UNIT);
690690
}
691691
self.word("}");
692692
if close_box {
@@ -988,7 +988,9 @@ impl<'a> State<'a> {
988988

989989
pub fn print_assoc_constraint(&mut self, constraint: &ast::AssocConstraint) {
990990
self.print_ident(constraint.ident);
991-
constraint.gen_args.as_ref().map(|args| self.print_generic_args(args, false));
991+
if let Some(args) = constraint.gen_args.as_ref() {
992+
self.print_generic_args(args, false)
993+
}
992994
self.space();
993995
match &constraint.kind {
994996
ast::AssocConstraintKind::Equality { term } => {

compiler/rustc_codegen_ssa/src/mir/operand.rs

+25
Original file line numberDiff line numberDiff line change
@@ -259,6 +259,31 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
259259
}
260260

261261
impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
262+
/// Returns an `OperandValue` that's generally UB to use in any way.
263+
///
264+
/// Depending on the `layout`, returns an `Immediate` or `Pair` containing
265+
/// poison value(s), or a `Ref` containing a poison pointer.
266+
///
267+
/// Supports sized types only.
268+
pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
269+
bx: &mut Bx,
270+
layout: TyAndLayout<'tcx>,
271+
) -> OperandValue<V> {
272+
assert!(layout.is_sized());
273+
if bx.cx().is_backend_immediate(layout) {
274+
let ibty = bx.cx().immediate_backend_type(layout);
275+
OperandValue::Immediate(bx.const_poison(ibty))
276+
} else if bx.cx().is_backend_scalar_pair(layout) {
277+
let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
278+
let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
279+
OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
280+
} else {
281+
let bty = bx.cx().backend_type(layout);
282+
let ptr_bty = bx.cx().type_ptr_to(bty);
283+
OperandValue::Ref(bx.const_poison(ptr_bty), None, layout.align.abi)
284+
}
285+
}
286+
262287
pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
263288
self,
264289
bx: &mut Bx,

compiler/rustc_codegen_ssa/src/mir/rvalue.rs

+71-36
Original file line numberDiff line numberDiff line change
@@ -158,17 +158,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
158158
debug_assert!(src.layout.is_sized());
159159
debug_assert!(dst.layout.is_sized());
160160

161-
if src.layout.size != dst.layout.size
162-
|| src.layout.abi.is_uninhabited()
163-
|| dst.layout.abi.is_uninhabited()
164-
{
165-
// In all of these cases it's UB to run this transmute, but that's
166-
// known statically so might as well trap for it, rather than just
167-
// making it unreachable.
168-
bx.abort();
169-
return;
170-
}
171-
172161
if let Some(val) = self.codegen_transmute_operand(bx, src, dst.layout) {
173162
val.store(bx, dst);
174163
return;
@@ -202,8 +191,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
202191
operand: OperandRef<'tcx, Bx::Value>,
203192
cast: TyAndLayout<'tcx>,
204193
) -> Option<OperandValue<Bx::Value>> {
205-
// Callers already checked that the layout sizes match
206-
debug_assert_eq!(operand.layout.size, cast.size);
194+
// Check for transmutes that are always UB.
195+
if operand.layout.size != cast.size
196+
|| operand.layout.abi.is_uninhabited()
197+
|| cast.abi.is_uninhabited()
198+
{
199+
if !operand.layout.abi.is_uninhabited() {
200+
// Since this is known statically and the input could have existed
201+
// without already having hit UB, might as well trap for it.
202+
bx.abort();
203+
}
204+
205+
// Because this transmute is UB, return something easy to generate,
206+
// since it's fine that later uses of the value are probably UB.
207+
return Some(OperandValue::poison(bx, cast));
208+
}
207209

208210
let operand_kind = self.value_kind(operand.layout);
209211
let cast_kind = self.value_kind(cast);
@@ -222,10 +224,20 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
222224
bug!("Found {operand_kind:?} for operand {operand:?}");
223225
};
224226
if let OperandValueKind::Immediate(out_scalar) = cast_kind {
225-
let cast_bty = bx.backend_type(cast);
226-
Some(OperandValue::Immediate(Self::transmute_immediate(
227-
bx, imm, in_scalar, out_scalar, cast_bty,
228-
)))
227+
match (in_scalar, out_scalar) {
228+
(ScalarOrZst::Zst, ScalarOrZst::Zst) => {
229+
Some(OperandRef::new_zst(bx, cast).val)
230+
}
231+
(ScalarOrZst::Scalar(in_scalar), ScalarOrZst::Scalar(out_scalar))
232+
if in_scalar.size(self.cx) == out_scalar.size(self.cx) =>
233+
{
234+
let cast_bty = bx.backend_type(cast);
235+
Some(OperandValue::Immediate(
236+
self.transmute_immediate(bx, imm, in_scalar, out_scalar, cast_bty),
237+
))
238+
}
239+
_ => None,
240+
}
229241
} else {
230242
None
231243
}
@@ -234,12 +246,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
234246
let OperandValueKind::Pair(in_a, in_b) = operand_kind else {
235247
bug!("Found {operand_kind:?} for operand {operand:?}");
236248
};
237-
if let OperandValueKind::Pair(out_a, out_b) = cast_kind {
249+
if let OperandValueKind::Pair(out_a, out_b) = cast_kind
250+
&& in_a.size(self.cx) == out_a.size(self.cx)
251+
&& in_b.size(self.cx) == out_b.size(self.cx)
252+
{
238253
let out_a_ibty = bx.scalar_pair_element_backend_type(cast, 0, false);
239254
let out_b_ibty = bx.scalar_pair_element_backend_type(cast, 1, false);
240255
Some(OperandValue::Pair(
241-
Self::transmute_immediate(bx, imm_a, in_a, out_a, out_a_ibty),
242-
Self::transmute_immediate(bx, imm_b, in_b, out_b, out_b_ibty),
256+
self.transmute_immediate(bx, imm_a, in_a, out_a, out_a_ibty),
257+
self.transmute_immediate(bx, imm_b, in_b, out_b, out_b_ibty),
243258
))
244259
} else {
245260
None
@@ -254,12 +269,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
254269
/// `to_backend_ty` must be the *non*-immediate backend type (so it will be
255270
/// `i8`, not `i1`, for `bool`-like types.)
256271
fn transmute_immediate(
272+
&self,
257273
bx: &mut Bx,
258274
mut imm: Bx::Value,
259275
from_scalar: abi::Scalar,
260276
to_scalar: abi::Scalar,
261277
to_backend_ty: Bx::Type,
262278
) -> Bx::Value {
279+
debug_assert_eq!(from_scalar.size(self.cx), to_scalar.size(self.cx));
280+
263281
use abi::Primitive::*;
264282
imm = bx.from_immediate(imm);
265283
imm = match (from_scalar.primitive(), to_scalar.primitive()) {
@@ -831,14 +849,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
831849
let operand_ty = operand.ty(self.mir, self.cx.tcx());
832850
let cast_layout = self.cx.layout_of(self.monomorphize(cast_ty));
833851
let operand_layout = self.cx.layout_of(self.monomorphize(operand_ty));
834-
if operand_layout.size != cast_layout.size
835-
|| operand_layout.abi.is_uninhabited()
836-
|| cast_layout.abi.is_uninhabited()
837-
{
838-
// Send UB cases to the full form so the operand version can
839-
// `bitcast` without worrying about malformed IR.
840-
return false;
841-
}
842852

843853
match (self.value_kind(operand_layout), self.value_kind(cast_layout)) {
844854
// Can always load from a pointer as needed
@@ -847,9 +857,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
847857
// Need to generate an `alloc` to get a pointer from an immediate
848858
(OperandValueKind::Immediate(..) | OperandValueKind::Pair(..), OperandValueKind::Ref) => false,
849859

850-
// When we have scalar immediates, we can convert them as needed
851-
(OperandValueKind::Immediate(..), OperandValueKind::Immediate(..)) |
852-
(OperandValueKind::Pair(..), OperandValueKind::Pair(..)) => true,
860+
// When we have scalar immediates, we can only convert things
861+
// where the sizes match, to avoid endianness questions.
862+
(OperandValueKind::Immediate(a), OperandValueKind::Immediate(b)) =>
863+
a.size(self.cx) == b.size(self.cx),
864+
(OperandValueKind::Pair(a0, a1), OperandValueKind::Pair(b0, b1)) =>
865+
a0.size(self.cx) == b0.size(self.cx) && a1.size(self.cx) == b1.size(self.cx),
853866

854867
// Send mixings between scalars and pairs through the memory route
855868
// FIXME: Maybe this could use insertvalue/extractvalue instead?
@@ -887,13 +900,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
887900
if self.cx.is_backend_immediate(layout) {
888901
debug_assert!(!self.cx.is_backend_scalar_pair(layout));
889902
OperandValueKind::Immediate(match layout.abi {
890-
abi::Abi::Scalar(s) => s,
891-
abi::Abi::Vector { element, .. } => element,
892-
x => bug!("Couldn't translate {x:?} as backend immediate"),
903+
abi::Abi::Scalar(s) => ScalarOrZst::Scalar(s),
904+
abi::Abi::Vector { element, .. } => ScalarOrZst::Scalar(element),
905+
_ if layout.is_zst() => ScalarOrZst::Zst,
906+
x => span_bug!(self.mir.span, "Couldn't translate {x:?} as backend immediate"),
893907
})
894908
} else if self.cx.is_backend_scalar_pair(layout) {
895909
let abi::Abi::ScalarPair(s1, s2) = layout.abi else {
896-
bug!("Couldn't translate {:?} as backend scalar pair", layout.abi)
910+
span_bug!(
911+
self.mir.span,
912+
"Couldn't translate {:?} as backend scalar pair",
913+
layout.abi,
914+
);
897915
};
898916
OperandValueKind::Pair(s1, s2)
899917
} else {
@@ -902,9 +920,26 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
902920
}
903921
}
904922

923+
/// The variants of this match [`OperandValue`], giving details about the
924+
/// backend values that will be held in that other type.
905925
#[derive(Debug, Copy, Clone)]
906926
enum OperandValueKind {
907927
Ref,
908-
Immediate(abi::Scalar),
928+
Immediate(ScalarOrZst),
909929
Pair(abi::Scalar, abi::Scalar),
910930
}
931+
932+
#[derive(Debug, Copy, Clone)]
933+
enum ScalarOrZst {
934+
Zst,
935+
Scalar(abi::Scalar),
936+
}
937+
938+
impl ScalarOrZst {
939+
pub fn size(self, cx: &impl abi::HasDataLayout) -> abi::Size {
940+
match self {
941+
ScalarOrZst::Zst => abi::Size::ZERO,
942+
ScalarOrZst::Scalar(s) => s.size(cx),
943+
}
944+
}
945+
}

compiler/rustc_data_structures/src/graph/implementation/mod.rs

+2-8
Original file line numberDiff line numberDiff line change
@@ -206,17 +206,11 @@ impl<N: Debug, E: Debug> Graph<N, E> {
206206
AdjacentEdges { graph: self, direction, next: first_edge }
207207
}
208208

209-
pub fn successor_nodes<'a>(
210-
&'a self,
211-
source: NodeIndex,
212-
) -> impl Iterator<Item = NodeIndex> + 'a {
209+
pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
213210
self.outgoing_edges(source).targets()
214211
}
215212

216-
pub fn predecessor_nodes<'a>(
217-
&'a self,
218-
target: NodeIndex,
219-
) -> impl Iterator<Item = NodeIndex> + 'a {
213+
pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
220214
self.incoming_edges(target).sources()
221215
}
222216

compiler/rustc_data_structures/src/memmap.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ impl Deref for Mmap {
4040

4141
impl AsRef<[u8]> for Mmap {
4242
fn as_ref(&self) -> &[u8] {
43-
&*self.0
43+
&self.0
4444
}
4545
}
4646

compiler/rustc_data_structures/src/profiling.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -778,7 +778,7 @@ pub fn print_time_passes_entry(
778778
"rss_start": start_rss,
779779
"rss_end": end_rss,
780780
});
781-
eprintln!("time: {}", json.to_string());
781+
eprintln!("time: {json}");
782782
return;
783783
}
784784
TimePassesFormat::Text => (),

compiler/rustc_data_structures/src/sharded.rs

+1
Original file line numberDiff line numberDiff line change
@@ -140,6 +140,7 @@ pub fn make_hash<K: Hash + ?Sized>(val: &K) -> u64 {
140140
/// `hash` can be computed with any hasher, so long as that hasher is used
141141
/// consistently for each `Sharded` instance.
142142
#[inline]
143+
#[allow(clippy::modulo_one)]
143144
pub fn get_shard_index_by_hash(hash: u64) -> usize {
144145
let hash_len = mem::size_of::<usize>();
145146
// Ignore the top 7 bits as hashbrown uses these and get the next SHARD_BITS highest bits.

compiler/rustc_data_structures/src/stable_hasher.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -312,14 +312,14 @@ impl<CTX> HashStable<CTX> for ::std::num::NonZeroUsize {
312312

313313
impl<CTX> HashStable<CTX> for f32 {
314314
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
315-
let val: u32 = unsafe { ::std::mem::transmute(*self) };
315+
let val: u32 = self.to_bits();
316316
val.hash_stable(ctx, hasher);
317317
}
318318
}
319319

320320
impl<CTX> HashStable<CTX> for f64 {
321321
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
322-
let val: u64 = unsafe { ::std::mem::transmute(*self) };
322+
let val: u64 = self.to_bits();
323323
val.hash_stable(ctx, hasher);
324324
}
325325
}

0 commit comments

Comments (0)