Skip to content

Commit 88d905c

Browse files
committed
Auto merge of #3210 - rust-lang:rustup-2023-12-05, r=RalfJung
Automatic Rustup
2 parents e27da14 + 0d5fdcc commit 88d905c

File tree

108 files changed

+2301
-769
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

108 files changed

+2301
-769
lines changed

compiler/rustc_arena/src/lib.rs

+25-16
Original file line numberDiff line numberDiff line change
@@ -197,23 +197,24 @@ impl<T> TypedArena<T> {
197197
start_ptr
198198
}
199199

200+
/// Allocates the elements of this iterator into a contiguous slice in the `TypedArena`.
201+
///
202+
/// Note: for reasons of reentrancy and panic safety we collect into a `SmallVec<[_; 8]>` before
203+
/// storing the elements in the arena.
200204
#[inline]
201205
pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
202-
// This implementation is entirely separate to
203-
// `DroplessIterator::alloc_from_iter`, even though conceptually they
204-
// are the same.
206+
// Despite the similarity with `DroplessArena`, we cannot reuse their fast case. The reason
207+
// is subtle: these arenas are reentrant. In other words, `iter` may very well be holding a
208+
// reference to `self` and adding elements to the arena during iteration.
205209
//
206-
// `DroplessIterator` (in the fast case) writes elements from the
207-
// iterator one at a time into the allocated memory. That's easy
208-
// because the elements don't implement `Drop`. But for `TypedArena`
209-
// they do implement `Drop`, which means that if the iterator panics we
210-
// could end up with some allocated-but-uninitialized elements, which
211-
// will then cause UB in `TypedArena::drop`.
210+
// For this reason, if we pre-allocated any space for the elements of this iterator, we'd
211+
// have to track that some uninitialized elements are followed by some initialized elements,
212+
// else we might accidentally drop uninitialized memory if something panics or if the
213+
// iterator doesn't produce as many elements as we expected.
212214
//
213-
// Instead we use an approach where any iterator panic will occur
214-
// before the memory is allocated. This function is much less hot than
215-
// `DroplessArena::alloc_from_iter`, so it doesn't need to be
216-
// hyper-optimized.
215+
// So we collect all the elements beforehand, which takes care of reentrancy and panic
216+
// safety. This function is much less hot than `DroplessArena::alloc_from_iter`, so it
217+
// doesn't need to be hyper-optimized.
217218
assert!(mem::size_of::<T>() != 0);
218219

219220
let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
@@ -485,8 +486,9 @@ impl DroplessArena {
485486

486487
/// # Safety
487488
///
488-
/// The caller must ensure that `mem` is valid for writes up to
489-
/// `size_of::<T>() * len`.
489+
/// The caller must ensure that `mem` is valid for writes up to `size_of::<T>() * len`, and that
490+
/// that memory stays allocated and not shared for the lifetime of `self`. This must hold even
491+
/// if `iter.next()` allocates onto `self`.
490492
#[inline]
491493
unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
492494
&self,
@@ -516,6 +518,8 @@ impl DroplessArena {
516518

517519
#[inline]
518520
pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
521+
// Warning: this function is reentrant: `iter` could hold a reference to `&self` and
522+
// allocate additional elements while we're iterating.
519523
let iter = iter.into_iter();
520524
assert!(mem::size_of::<T>() != 0);
521525
assert!(!mem::needs_drop::<T>());
@@ -524,18 +528,23 @@ impl DroplessArena {
524528

525529
match size_hint {
526530
(min, Some(max)) if min == max => {
527-
// We know the exact number of elements the iterator will produce here
531+
// We know the exact number of elements the iterator expects to produce here.
528532
let len = min;
529533

530534
if len == 0 {
531535
return &mut [];
532536
}
533537

534538
let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
539+
// SAFETY: `write_from_iter` doesn't touch `self`. It only touches the slice we just
540+
// reserved. If the iterator panics or doesn't output `len` elements, this will
541+
// leave some unallocated slots in the arena, which is fine because we do not call
542+
// `drop`.
535543
unsafe { self.write_from_iter(iter, len, mem) }
536544
}
537545
(_, _) => {
538546
outline(move || -> &mut [T] {
547+
// Takes care of reentrancy.
539548
let mut vec: SmallVec<[_; 8]> = iter.collect();
540549
if vec.is_empty() {
541550
return &mut [];

compiler/rustc_ast_lowering/src/asm.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ use rustc_ast::*;
1414
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
1515
use rustc_hir as hir;
1616
use rustc_hir::def::{DefKind, Res};
17-
use rustc_hir::definitions::DefPathData;
1817
use rustc_session::parse::feature_err;
18+
use rustc_span::symbol::kw;
1919
use rustc_span::{sym, Span};
2020
use rustc_target::asm;
2121
use std::collections::hash_map::Entry;
@@ -227,7 +227,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
227227
self.create_def(
228228
parent_def_id.def_id,
229229
node_id,
230-
DefPathData::AnonConst,
230+
kw::Empty,
231231
DefKind::AnonConst,
232232
*op_sp,
233233
);

compiler/rustc_ast_lowering/src/expr.rs

+2-3
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,9 @@ use rustc_ast::*;
1313
use rustc_data_structures::stack::ensure_sufficient_stack;
1414
use rustc_hir as hir;
1515
use rustc_hir::def::{DefKind, Res};
16-
use rustc_hir::definitions::DefPathData;
1716
use rustc_session::errors::report_lit_error;
1817
use rustc_span::source_map::{respan, Spanned};
19-
use rustc_span::symbol::{sym, Ident, Symbol};
18+
use rustc_span::symbol::{kw, sym, Ident, Symbol};
2019
use rustc_span::DUMMY_SP;
2120
use rustc_span::{DesugaringKind, Span};
2221
use thin_vec::{thin_vec, ThinVec};
@@ -376,7 +375,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
376375
self.create_def(
377376
parent_def_id.def_id,
378377
node_id,
379-
DefPathData::AnonConst,
378+
kw::Empty,
380379
DefKind::AnonConst,
381380
f.span,
382381
);

compiler/rustc_ast_lowering/src/item.rs

+3-9
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ use super::ResolverAstLoweringExt;
33
use super::{AstOwner, ImplTraitContext, ImplTraitPosition};
44
use super::{FnDeclKind, LoweringContext, ParamMode};
55

6-
use hir::definitions::DefPathData;
76
use rustc_ast::ptr::P;
87
use rustc_ast::visit::AssocCtxt;
98
use rustc_ast::*;
@@ -1367,7 +1366,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
13671366
let def_id = self.create_def(
13681367
self.local_def_id(parent_node_id),
13691368
param_node_id,
1370-
DefPathData::TypeNs(sym::host),
1369+
sym::host,
13711370
DefKind::ConstParam,
13721371
span,
13731372
);
@@ -1427,13 +1426,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
14271426

14281427
if let Some((span, hir_id, def_id)) = host_param_parts {
14291428
let const_node_id = self.next_node_id();
1430-
let anon_const = self.create_def(
1431-
def_id,
1432-
const_node_id,
1433-
DefPathData::AnonConst,
1434-
DefKind::AnonConst,
1435-
span,
1436-
);
1429+
let anon_const =
1430+
self.create_def(def_id, const_node_id, kw::Empty, DefKind::AnonConst, span);
14371431

14381432
let const_id = self.next_id();
14391433
let const_expr_id = self.next_id();

compiler/rustc_ast_lowering/src/lib.rs

+10-11
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,6 @@ use rustc_errors::{DiagnosticArgFromDisplay, StashKey};
5858
use rustc_hir as hir;
5959
use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res};
6060
use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID, LOCAL_CRATE};
61-
use rustc_hir::definitions::DefPathData;
6261
use rustc_hir::{ConstArg, GenericArg, ItemLocalId, ParamName, TraitCandidate};
6362
use rustc_index::{Idx, IndexSlice, IndexVec};
6463
use rustc_middle::{
@@ -499,20 +498,20 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
499498
&mut self,
500499
parent: LocalDefId,
501500
node_id: ast::NodeId,
502-
data: DefPathData,
501+
name: Symbol,
503502
def_kind: DefKind,
504503
span: Span,
505504
) -> LocalDefId {
506505
debug_assert_ne!(node_id, ast::DUMMY_NODE_ID);
507506
assert!(
508507
self.opt_local_def_id(node_id).is_none(),
509-
"adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
508+
"adding a def'n for node-id {:?} and def kind {:?} but a previous def'n exists: {:?}",
510509
node_id,
511-
data,
510+
def_kind,
512511
self.tcx.hir().def_key(self.local_def_id(node_id)),
513512
);
514513

515-
let def_id = self.tcx.at(span).create_def(parent, data, def_kind).def_id();
514+
let def_id = self.tcx.at(span).create_def(parent, name, def_kind).def_id();
516515

517516
debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
518517
self.resolver.node_id_to_def_id.insert(node_id, def_id);
@@ -809,7 +808,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
809808
let _def_id = self.create_def(
810809
self.current_hir_id_owner.def_id,
811810
param,
812-
DefPathData::LifetimeNs(kw::UnderscoreLifetime),
811+
kw::UnderscoreLifetime,
813812
DefKind::LifetimeParam,
814813
ident.span,
815814
);
@@ -1227,7 +1226,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
12271226
let def_id = self.create_def(
12281227
parent_def_id.def_id,
12291228
node_id,
1230-
DefPathData::AnonConst,
1229+
kw::Empty,
12311230
DefKind::AnonConst,
12321231
span,
12331232
);
@@ -1465,7 +1464,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
14651464
self.create_def(
14661465
self.current_hir_id_owner.def_id,
14671466
*def_node_id,
1468-
DefPathData::TypeNs(ident.name),
1467+
ident.name,
14691468
DefKind::TyParam,
14701469
span,
14711470
);
@@ -1619,7 +1618,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
16191618
let opaque_ty_def_id = self.create_def(
16201619
self.current_hir_id_owner.def_id,
16211620
opaque_ty_node_id,
1622-
DefPathData::ImplTrait,
1621+
kw::Empty,
16231622
DefKind::OpaqueTy,
16241623
opaque_ty_span,
16251624
);
@@ -1674,7 +1673,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
16741673
let duplicated_lifetime_def_id = self.create_def(
16751674
opaque_ty_def_id,
16761675
duplicated_lifetime_node_id,
1677-
DefPathData::LifetimeNs(lifetime.ident.name),
1676+
lifetime.ident.name,
16781677
DefKind::LifetimeParam,
16791678
lifetime.ident.span,
16801679
);
@@ -2549,7 +2548,7 @@ impl<'hir> GenericArgsCtor<'hir> {
25492548
let def_id = lcx.create_def(
25502549
lcx.current_hir_id_owner.def_id,
25512550
id,
2552-
DefPathData::AnonConst,
2551+
kw::Empty,
25532552
DefKind::AnonConst,
25542553
span,
25552554
);

compiler/rustc_codegen_llvm/src/builder.rs

+9
Original file line numberDiff line numberDiff line change
@@ -489,6 +489,15 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
489489

490490
#[instrument(level = "trace", skip(self))]
491491
fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
492+
if place.layout.is_unsized() {
493+
let tail = self.tcx.struct_tail_with_normalize(place.layout.ty, |ty| ty, || {});
494+
if matches!(tail.kind(), ty::Foreign(..)) {
495+
// Unsized locals and, at least conceptually, even unsized arguments must be copied
496+
// around, which requires dynamically determining their size. Therefore, we cannot
497+
// allow `extern` types here. Consult t-opsem before removing this check.
498+
panic!("unsized locals must not be `extern` types");
499+
}
500+
}
492501
assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
493502

494503
if place.layout.is_zst() {

compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -594,7 +594,7 @@ fn push_unqualified_item_name(
594594
DefPathData::CrateRoot => {
595595
output.push_str(tcx.crate_name(def_id.krate).as_str());
596596
}
597-
DefPathData::ClosureExpr => {
597+
DefPathData::Closure => {
598598
let label = coroutine_kind_label(tcx.coroutine_kind(def_id));
599599

600600
push_disambiguated_special_name(

compiler/rustc_codegen_ssa/src/mir/operand.rs

+1
Original file line numberDiff line numberDiff line change
@@ -414,6 +414,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
414414
// value is through `undef`/`poison`, and the store itself is useless.
415415
}
416416
OperandValue::Ref(r, None, source_align) => {
417+
assert!(dest.layout.is_sized(), "cannot directly store unsized values");
417418
if flags.contains(MemFlags::NONTEMPORAL) {
418419
// HACK(nox): This is inefficient but there is no nontemporal memcpy.
419420
let ty = bx.backend_type(dest.layout);

compiler/rustc_codegen_ssa/src/mir/place.rs

+12-10
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
143143
// Simple cases, which don't need DST adjustment:
144144
// * no metadata available - just log the case
145145
// * known alignment - sized types, `[T]`, `str` or a foreign type
146-
// * packed struct - there is no alignment padding
146+
// Note that looking at `field.align` is incorrect since that is not necessarily equal
147+
// to the dynamic alignment of the type.
147148
match field.ty.kind() {
148149
_ if self.llextra.is_none() => {
149150
debug!(
@@ -154,14 +155,6 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
154155
}
155156
_ if field.is_sized() => return simple(),
156157
ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(),
157-
ty::Adt(def, _) => {
158-
if def.repr().packed() {
159-
// FIXME(eddyb) generalize the adjustment when we
160-
// start supporting packing to larger alignments.
161-
assert_eq!(self.layout.align.abi.bytes(), 1);
162-
return simple();
163-
}
164-
}
165158
_ => {}
166159
}
167160

@@ -186,7 +179,16 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
186179
let unaligned_offset = bx.cx().const_usize(offset.bytes());
187180

188181
// Get the alignment of the field
189-
let (_, unsized_align) = glue::size_and_align_of_dst(bx, field.ty, meta);
182+
let (_, mut unsized_align) = glue::size_and_align_of_dst(bx, field.ty, meta);
183+
184+
// For packed types, we need to cap alignment.
185+
if let ty::Adt(def, _) = self.layout.ty.kind()
186+
&& let Some(packed) = def.repr().pack
187+
{
188+
let packed = bx.const_usize(packed.bytes());
189+
let cmp = bx.icmp(IntPredicate::IntULT, unsized_align, packed);
190+
unsized_align = bx.select(cmp, unsized_align, packed)
191+
}
190192

191193
// Bump the unaligned offset up to the appropriate alignment
192194
let offset = round_up_const_value_to_alignment(bx, unaligned_offset, unsized_align);

compiler/rustc_const_eval/src/interpret/eval_context.rs

+2-4
Original file line numberDiff line numberDiff line change
@@ -282,9 +282,7 @@ impl<'mir, 'tcx, Prov: Provenance, Extra> Frame<'mir, 'tcx, Prov, Extra> {
282282
impl<'tcx> fmt::Display for FrameInfo<'tcx> {
283283
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
284284
ty::tls::with(|tcx| {
285-
if tcx.def_key(self.instance.def_id()).disambiguated_data.data
286-
== DefPathData::ClosureExpr
287-
{
285+
if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
288286
write!(f, "inside closure")
289287
} else {
290288
// Note: this triggers a `good_path_delayed_bug` state, which means that if we ever
@@ -299,7 +297,7 @@ impl<'tcx> fmt::Display for FrameInfo<'tcx> {
299297
impl<'tcx> FrameInfo<'tcx> {
300298
pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote {
301299
let span = self.span;
302-
if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
300+
if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
303301
errors::FrameNote { where_: "closure", span, instance: String::new(), times: 0 }
304302
} else {
305303
let instance = format!("{}", self.instance);

0 commit comments

Comments
 (0)