Skip to content

Commit 97f6e6e

Browse files
authored
Rollup merge of #141627 - nnethercote:drop-cleanups, r=matthewjasper
Drop-build cleanups. Some cleanups I made while trying to speed up the program in #134404. r? ```@matthewjasper```
2 parents ae95246 + ec8baa5 commit 97f6e6e

File tree

1 file changed

+59
-53
lines changed
  • compiler/rustc_mir_build/src/builder

1 file changed

+59
-53
lines changed

compiler/rustc_mir_build/src/builder/scope.rs

Lines changed: 59 additions & 53 deletions
Original file line number | Diff line number | Diff line change
@@ -209,7 +209,7 @@ const ROOT_NODE: DropIdx = DropIdx::ZERO;
209209
#[derive(Debug)]
210210
struct DropTree {
211211
/// Nodes in the drop tree, containing drop data and a link to the next node.
212-
drops: IndexVec<DropIdx, DropNode>,
212+
drop_nodes: IndexVec<DropIdx, DropNode>,
213213
/// Map for finding the index of an existing node, given its contents.
214214
existing_drops_map: FxHashMap<DropNodeKey, DropIdx>,
215215
/// Edges into the `DropTree` that need to be added once it's lowered.
@@ -230,7 +230,6 @@ struct DropNode {
230230
struct DropNodeKey {
231231
next: DropIdx,
232232
local: Local,
233-
kind: DropKind,
234233
}
235234

236235
impl Scope {
@@ -278,8 +277,8 @@ impl DropTree {
278277
let fake_source_info = SourceInfo::outermost(DUMMY_SP);
279278
let fake_data =
280279
DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
281-
let drops = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
282-
Self { drops, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
280+
let drop_nodes = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
281+
Self { drop_nodes, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
283282
}
284283

285284
/// Adds a node to the drop tree, consisting of drop data and the index of
@@ -288,20 +287,20 @@ impl DropTree {
288287
/// If there is already an equivalent node in the tree, nothing is added, and
289288
/// that node's index is returned. Otherwise, the new node's index is returned.
290289
fn add_drop(&mut self, data: DropData, next: DropIdx) -> DropIdx {
291-
let drops = &mut self.drops;
290+
let drop_nodes = &mut self.drop_nodes;
292291
*self
293292
.existing_drops_map
294-
.entry(DropNodeKey { next, local: data.local, kind: data.kind })
293+
.entry(DropNodeKey { next, local: data.local })
295294
// Create a new node, and also add its index to the map.
296-
.or_insert_with(|| drops.push(DropNode { data, next }))
295+
.or_insert_with(|| drop_nodes.push(DropNode { data, next }))
297296
}
298297

299298
/// Registers `from` as an entry point to this drop tree, at `to`.
300299
///
301300
/// During [`Self::build_mir`], `from` will be linked to the corresponding
302301
/// block within the drop tree.
303302
fn add_entry_point(&mut self, from: BasicBlock, to: DropIdx) {
304-
debug_assert!(to < self.drops.next_index());
303+
debug_assert!(to < self.drop_nodes.next_index());
305304
self.entry_points.push((to, from));
306305
}
307306

@@ -341,10 +340,10 @@ impl DropTree {
341340
Own,
342341
}
343342

344-
let mut blocks = IndexVec::from_elem(None, &self.drops);
343+
let mut blocks = IndexVec::from_elem(None, &self.drop_nodes);
345344
blocks[ROOT_NODE] = root_node;
346345

347-
let mut needs_block = IndexVec::from_elem(Block::None, &self.drops);
346+
let mut needs_block = IndexVec::from_elem(Block::None, &self.drop_nodes);
348347
if root_node.is_some() {
349348
// In some cases (such as drops for `continue`) the root node
350349
// already has a block. In this case, make sure that we don't
@@ -356,7 +355,7 @@ impl DropTree {
356355
let entry_points = &mut self.entry_points;
357356
entry_points.sort();
358357

359-
for (drop_idx, drop_node) in self.drops.iter_enumerated().rev() {
358+
for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
360359
if entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
361360
let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
362361
needs_block[drop_idx] = Block::Own;
@@ -396,7 +395,7 @@ impl DropTree {
396395
cfg: &mut CFG<'tcx>,
397396
blocks: &IndexSlice<DropIdx, Option<BasicBlock>>,
398397
) {
399-
for (drop_idx, drop_node) in self.drops.iter_enumerated().rev() {
398+
for (drop_idx, drop_node) in self.drop_nodes.iter_enumerated().rev() {
400399
let Some(block) = blocks[drop_idx] else { continue };
401400
match drop_node.data.kind {
402401
DropKind::Value => {
@@ -726,11 +725,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
726725
drops
727726
};
728727

729-
let drop_idx = self.scopes.scopes[scope_index + 1..]
730-
.iter()
731-
.flat_map(|scope| &scope.drops)
732-
.fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
733-
728+
let mut drop_idx = ROOT_NODE;
729+
for scope in &self.scopes.scopes[scope_index + 1..] {
730+
for drop in &scope.drops {
731+
drop_idx = drops.add_drop(*drop, drop_idx);
732+
}
733+
}
734734
drops.add_entry_point(block, drop_idx);
735735

736736
// `build_drop_trees` doesn't have access to our source_info, so we
@@ -829,9 +829,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
829829
// `unwind_to` should drop the value that we're about to
830830
// schedule. If dropping this value panics, then we continue
831831
// with the *next* value on the unwind path.
832-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
833-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
834-
unwind_to = unwind_drops.drops[unwind_to].next;
832+
debug_assert_eq!(
833+
unwind_drops.drop_nodes[unwind_to].data.local,
834+
drop_data.local
835+
);
836+
debug_assert_eq!(
837+
unwind_drops.drop_nodes[unwind_to].data.kind,
838+
drop_data.kind
839+
);
840+
unwind_to = unwind_drops.drop_nodes[unwind_to].next;
835841

836842
let mut unwind_entry_point = unwind_to;
837843

@@ -1551,14 +1557,14 @@ where
15511557
//
15521558
// We adjust this BEFORE we create the drop (e.g., `drops[n]`)
15531559
// because `drops[n]` should unwind to `drops[n-1]`.
1554-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
1555-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
1556-
unwind_to = unwind_drops.drops[unwind_to].next;
1560+
debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.local, drop_data.local);
1561+
debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
1562+
unwind_to = unwind_drops.drop_nodes[unwind_to].next;
15571563

15581564
if let Some(idx) = dropline_to {
1559-
debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local);
1560-
debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind);
1561-
dropline_to = Some(coroutine_drops.drops[idx].next);
1565+
debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
1566+
debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
1567+
dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
15621568
}
15631569

15641570
// If the operand has been moved, and we are not on an unwind
@@ -1598,9 +1604,12 @@ where
15981604
// cases we emit things ALSO on the unwind path, so we need to adjust
15991605
// `unwind_to` in that case.
16001606
if storage_dead_on_unwind {
1601-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
1602-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
1603-
unwind_to = unwind_drops.drops[unwind_to].next;
1607+
debug_assert_eq!(
1608+
unwind_drops.drop_nodes[unwind_to].data.local,
1609+
drop_data.local
1610+
);
1611+
debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
1612+
unwind_to = unwind_drops.drop_nodes[unwind_to].next;
16041613
}
16051614

16061615
// If the operand has been moved, and we are not on an unwind
@@ -1629,14 +1638,17 @@ where
16291638
// the storage-dead has completed, we need to adjust the `unwind_to` pointer
16301639
// so that any future drops we emit will not register storage-dead.
16311640
if storage_dead_on_unwind {
1632-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
1633-
debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
1634-
unwind_to = unwind_drops.drops[unwind_to].next;
1641+
debug_assert_eq!(
1642+
unwind_drops.drop_nodes[unwind_to].data.local,
1643+
drop_data.local
1644+
);
1645+
debug_assert_eq!(unwind_drops.drop_nodes[unwind_to].data.kind, drop_data.kind);
1646+
unwind_to = unwind_drops.drop_nodes[unwind_to].next;
16351647
}
16361648
if let Some(idx) = dropline_to {
1637-
debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local);
1638-
debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind);
1639-
dropline_to = Some(coroutine_drops.drops[idx].next);
1649+
debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.local, drop_data.local);
1650+
debug_assert_eq!(coroutine_drops.drop_nodes[idx].data.kind, drop_data.kind);
1651+
dropline_to = Some(coroutine_drops.drop_nodes[idx].next);
16401652
}
16411653
// Only temps and vars need their storage dead.
16421654
assert!(local.index() > arg_count);
@@ -1663,10 +1675,10 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
16631675
let is_coroutine = self.coroutine.is_some();
16641676

16651677
// Link the exit drop tree to unwind drop tree.
1666-
if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
1678+
if drops.drop_nodes.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
16671679
let unwind_target = self.diverge_cleanup_target(else_scope, span);
16681680
let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
1669-
for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) {
1681+
for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated().skip(1) {
16701682
match drop_node.data.kind {
16711683
DropKind::Storage | DropKind::ForLint => {
16721684
if is_coroutine {
@@ -1695,35 +1707,29 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
16951707
}
16961708
// Link the exit drop tree to dropline drop tree (coroutine drop path) for async drops
16971709
if is_coroutine
1698-
&& drops.drops.iter().any(|DropNode { data, next: _ }| {
1710+
&& drops.drop_nodes.iter().any(|DropNode { data, next: _ }| {
16991711
data.kind == DropKind::Value && self.is_async_drop(data.local)
17001712
})
17011713
{
17021714
let dropline_target = self.diverge_dropline_target(else_scope, span);
17031715
let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1);
1704-
for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) {
1716+
for (drop_idx, drop_data) in drops.drop_nodes.iter_enumerated().skip(1) {
1717+
let coroutine_drop = self
1718+
.scopes
1719+
.coroutine_drops
1720+
.add_drop(drop_data.data, dropline_indices[drop_data.next]);
17051721
match drop_data.data.kind {
1706-
DropKind::Storage | DropKind::ForLint => {
1707-
let coroutine_drop = self
1708-
.scopes
1709-
.coroutine_drops
1710-
.add_drop(drop_data.data, dropline_indices[drop_data.next]);
1711-
dropline_indices.push(coroutine_drop);
1712-
}
1722+
DropKind::Storage | DropKind::ForLint => {}
17131723
DropKind::Value => {
1714-
let coroutine_drop = self
1715-
.scopes
1716-
.coroutine_drops
1717-
.add_drop(drop_data.data, dropline_indices[drop_data.next]);
17181724
if self.is_async_drop(drop_data.data.local) {
17191725
self.scopes.coroutine_drops.add_entry_point(
17201726
blocks[drop_idx].unwrap(),
17211727
dropline_indices[drop_data.next],
17221728
);
17231729
}
1724-
dropline_indices.push(coroutine_drop);
17251730
}
17261731
}
1732+
dropline_indices.push(coroutine_drop);
17271733
}
17281734
}
17291735
blocks[ROOT_NODE].map(BasicBlock::unit)
@@ -1769,11 +1775,11 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
17691775
// prevent drop elaboration from creating drop flags that would have
17701776
// to be captured by the coroutine. I'm not sure how important this
17711777
// optimization is, but it is here.
1772-
for (drop_idx, drop_node) in drops.drops.iter_enumerated() {
1778+
for (drop_idx, drop_node) in drops.drop_nodes.iter_enumerated() {
17731779
if let DropKind::Value = drop_node.data.kind
17741780
&& let Some(bb) = blocks[drop_idx]
17751781
{
1776-
debug_assert!(drop_node.next < drops.drops.next_index());
1782+
debug_assert!(drop_node.next < drops.drop_nodes.next_index());
17771783
drops.entry_points.push((drop_node.next, bb));
17781784
}
17791785
}

0 commit comments

Comments (0)