
Commit b2501ef

Merge pull request #19252 from flodiebold/fix-fixup-delimiters
Fix syntax fixup producing invalid punctuation
2 parents f36e2ea + fee83ba commit b2501ef
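
In short: rust-analyzer's syntax fixup patches incomplete code (an `if` without a then-branch, a `loop` or `while` without a block, a `match` without arms) by appending bare `{` / `}` characters as `Punct` leaves, but in the token-tree model delimiters are only valid as subtrees, so those characters are not valid punctuation. The sketch below is a toy model of that invariant, not rust-analyzer's actual `tt` crate; `fixed_up_if_without_body` and `is_valid_punct` are illustrative names only.

// Toy model (not rust-analyzer's `tt` crate) of the invariant this commit restores:
// delimiters only ever appear as part of a subtree, never as `Punct` leaves, yet
// syntax fixup emits bare '{' / '}' characters for incomplete constructs.
#[derive(Debug, Clone, Copy, PartialEq)]
enum DelimiterKind {
    Parenthesis,
    Brace,
    Bracket,
}

#[derive(Debug, Clone)]
enum TokenTree {
    Ident(String),
    // A punctuation character; delimiter characters must never show up here.
    Punct(char),
    Subtree(DelimiterKind, Vec<TokenTree>),
}

// Roughly what fixup hands downstream for `if cond` with a missing block:
// the identifiers plus bare brace *puncts* standing in for the block.
fn fixed_up_if_without_body() -> Vec<TokenTree> {
    vec![
        TokenTree::Ident("if".into()),
        TokenTree::Ident("cond".into()),
        TokenTree::Punct('{'),
        TokenTree::Punct('}'),
    ]
}

// The check the new test in fixup.rs performs over the flattened tree:
// no delimiter character may survive conversion as a `Punct`.
fn is_valid_punct(tt: &TokenTree) -> bool {
    !matches!(tt, TokenTree::Punct('{' | '}' | '(' | ')' | '[' | ']'))
}

fn main() {
    let tokens = fixed_up_if_without_body();
    // Without the converter change in this commit, such braces could leak through as-is.
    assert!(tokens.iter().any(|tt| !is_valid_punct(tt)));
}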

File tree: 5 files changed, +105 -88 lines
Diff for: src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs

+4 -4

@@ -93,15 +93,15 @@ fn broken_parenthesis_sequence() {
 macro_rules! m1 { ($x:ident) => { ($x } }
 macro_rules! m2 { ($x:ident) => {} }

-m1!();
-m2!(x
+fn f1() { m1!(x); }
+fn f2() { m2!(x }
 "#,
         expect![[r#"
 macro_rules! m1 { ($x:ident) => { ($x } }
 macro_rules! m2 { ($x:ident) => {} }

-/* error: macro definition has parse errors */
-/* error: expected ident */
+fn f1() { (x); }
+fn f2() { /* error: expected ident */ }
 "#]],
     )
 }

Diff for: src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs

+26 -42

@@ -148,7 +148,6 @@ pub(crate) fn fixup_syntax(
 }
 if it.then_branch().is_none() {
     append.insert(node.clone().into(), vec![
-        // FIXME: THis should be a subtree no?
         Leaf::Punct(Punct {
             char: '{',
             spacing: Spacing::Alone,
@@ -179,7 +178,6 @@ pub(crate) fn fixup_syntax(
 }
 if it.loop_body().is_none() {
     append.insert(node.clone().into(), vec![
-        // FIXME: THis should be a subtree no?
         Leaf::Punct(Punct {
             char: '{',
             spacing: Spacing::Alone,
@@ -196,7 +194,6 @@ pub(crate) fn fixup_syntax(
 ast::LoopExpr(it) => {
     if it.loop_body().is_none() {
         append.insert(node.clone().into(), vec![
-            // FIXME: THis should be a subtree no?
             Leaf::Punct(Punct {
                 char: '{',
                 spacing: Spacing::Alone,
@@ -228,7 +225,6 @@ pub(crate) fn fixup_syntax(
 if it.match_arm_list().is_none() {
     // No match arms
     append.insert(node.clone().into(), vec![
-        // FIXME: THis should be a subtree no?
         Leaf::Punct(Punct {
             char: '{',
             spacing: Spacing::Alone,
@@ -269,7 +265,6 @@ pub(crate) fn fixup_syntax(

 if it.loop_body().is_none() {
     append.insert(node.clone().into(), vec![
-        // FIXME: THis should be a subtree no?
         Leaf::Punct(Punct {
             char: '{',
             spacing: Spacing::Alone,
@@ -309,28 +304,6 @@ pub(crate) fn fixup_syntax(
         }
     }
 },
-ast::ArgList(it) => {
-    if it.r_paren_token().is_none() {
-        append.insert(node.into(), vec![
-            Leaf::Punct(Punct {
-                span: fake_span(node_range),
-                char: ')',
-                spacing: Spacing::Alone
-            })
-        ]);
-    }
-},
-ast::ArgList(it) => {
-    if it.r_paren_token().is_none() {
-        append.insert(node.into(), vec![
-            Leaf::Punct(Punct {
-                span: fake_span(node_range),
-                char: ')',
-                spacing: Spacing::Alone
-            })
-        ]);
-    }
-},
 ast::ClosureExpr(it) => {
     if it.body().is_none() {
         append.insert(node.into(), vec![
@@ -476,12 +449,12 @@ fn reverse_fixups_(tt: &mut TopSubtree, undo_info: &[TopSubtree]) {
     }
 }
 tt::TokenTree::Subtree(tt) => {
+    // fixup should only create matching delimiters, but proc macros
+    // could just copy the span to one of the delimiters. We don't want
+    // to leak the dummy ID, so we remove both.
     if tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
         || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
     {
-        // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
-        // might copy them if the proc-macro asks for it, so we need to filter those out
-        // here as well.
         return TransformTtAction::remove();
     }
     TransformTtAction::Keep
@@ -571,6 +544,17 @@ mod tests {
     parse.syntax_node()
 );

+// the fixed-up tree should not contain braces as punct
+// FIXME: should probably instead check that it's a valid punctuation character
+for x in tt.token_trees().flat_tokens() {
+    match x {
+        ::tt::TokenTree::Leaf(::tt::Leaf::Punct(punct)) => {
+            assert!(!matches!(punct.char, '{' | '}' | '(' | ')' | '[' | ']'))
+        }
+        _ => (),
+    }
+}
+
 reverse_fixups(&mut tt, &fixups.undo_info);

 // the fixed-up + reversed version should be equivalent to the original input
@@ -596,7 +580,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {for _ in __ra_fixup { }}
+fn foo () {for _ in __ra_fixup {}}
 "#]],
     )
 }
@@ -624,7 +608,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {for bar in qux { }}
+fn foo () {for bar in qux {}}
 "#]],
     )
 }
@@ -655,7 +639,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {match __ra_fixup { }}
+fn foo () {match __ra_fixup {}}
 "#]],
     )
 }
@@ -687,7 +671,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {match __ra_fixup { }}
+fn foo () {match __ra_fixup {}}
 "#]],
     )
 }
@@ -802,7 +786,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {if a { }}
+fn foo () {if a {}}
 "#]],
     )
 }
@@ -816,7 +800,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {if __ra_fixup { }}
+fn foo () {if __ra_fixup {}}
 "#]],
     )
 }
@@ -830,7 +814,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {if __ra_fixup {} { }}
+fn foo () {if __ra_fixup {} {}}
 "#]],
     )
 }
@@ -844,7 +828,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {while __ra_fixup { }}
+fn foo () {while __ra_fixup {}}
 "#]],
     )
 }
@@ -858,7 +842,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {while foo { }}
+fn foo () {while foo {}}
 "#]],
     )
 }
@@ -885,7 +869,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () {loop { }}
+fn foo () {loop {}}
 "#]],
     )
 }
@@ -941,7 +925,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () { foo ( a ) }
+fn foo () {foo (a)}
 "#]],
     );
     check(
@@ -951,7 +935,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-fn foo () { bar . foo ( a ) }
+fn foo () {bar . foo (a)}
 "#]],
     );
 }
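
A note on the `reverse_fixups_` hunk above: the behaviour is unchanged, only the comment is, and it now explains that a subtree is dropped when either of its delimiters carries the dummy fixup AST id, because a proc macro may copy the fixup span onto just one side. Below is a self-contained sketch of that filtering rule using toy stand-ins (`Span`, `TokenTree`, `strip_fixup_subtrees`), not rust-analyzer's real `tt`/`span` types.

// Simplified sketch of the rule documented in reverse_fixups_: remove a subtree
// whenever either delimiter still carries the dummy fixup id.
const FIXUP_DUMMY_AST_ID: u32 = u32::MAX;

#[derive(Debug, Clone, Copy)]
struct Span {
    ast_id: u32,
}

#[derive(Debug, Clone)]
enum TokenTree {
    Leaf(String, Span),
    Subtree { open: Span, close: Span, children: Vec<TokenTree> },
}

fn strip_fixup_subtrees(trees: Vec<TokenTree>) -> Vec<TokenTree> {
    trees
        .into_iter()
        .filter_map(|tt| match tt {
            TokenTree::Subtree { open, close, children } => {
                // Drop the subtree if *either* delimiter carries the dummy fixup id:
                // a proc macro may have copied the fixup span to just one side.
                if open.ast_id == FIXUP_DUMMY_AST_ID || close.ast_id == FIXUP_DUMMY_AST_ID {
                    None
                } else {
                    Some(TokenTree::Subtree {
                        open,
                        close,
                        children: strip_fixup_subtrees(children),
                    })
                }
            }
            leaf => Some(leaf),
        })
        .collect()
}

fn main() {
    let dummy = Span { ast_id: FIXUP_DUMMY_AST_ID };
    let real = Span { ast_id: 1 };
    let input = vec![
        TokenTree::Leaf("__ra_fixup".into(), real),
        // Synthesized block whose closing delimiter still carries the dummy id.
        TokenTree::Subtree { open: real, close: dummy, children: vec![] },
    ];
    // The tainted subtree disappears, the ordinary leaf stays.
    assert_eq!(strip_fixup_subtrees(input).len(), 1);
}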

Diff for: src/tools/rust-analyzer/crates/mbe/src/tests.rs

+17 -0

@@ -99,6 +99,23 @@ fn check(
     );
 }

+#[test]
+fn unbalanced_brace() {
+    check(
+        Edition::CURRENT,
+        Edition::CURRENT,
+        r#"
+() => { { }
+"#,
+        r#""#,
+        expect![[r#"
+
+
+
+{}"#]],
+    );
+}
+
 #[test]
 fn token_mapping_smoke_test() {
     check(

Diff for: src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs

+54 -16

@@ -12,7 +12,7 @@ use syntax::{
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
 };
-use tt::{buffer::Cursor, token_to_literal};
+use tt::{buffer::Cursor, token_to_literal, Punct};

 pub mod prettify_macro_expansion;
 mod to_parser_input;
@@ -217,8 +217,39 @@
     tt::TopSubtreeBuilder::new(tt::Delimiter::invisible_spanned(conv.call_site()));

 while let Some((token, abs_range)) = conv.bump() {
-    let delimiter = builder.expected_delimiter().map(|it| it.kind);
     let tt = match token.as_leaf() {
+        // These delimiters are not actually valid punctuation, but we produce them in syntax fixup.
+        // So we need to handle them specially here.
+        Some(&tt::Leaf::Punct(Punct {
+            char: char @ ('(' | ')' | '{' | '}' | '[' | ']'),
+            span,
+            spacing: _,
+        })) => {
+            let found_expected_delimiter =
+                builder.expected_delimiters().enumerate().find(|(_, delim)| match delim.kind {
+                    tt::DelimiterKind::Parenthesis => char == ')',
+                    tt::DelimiterKind::Brace => char == '}',
+                    tt::DelimiterKind::Bracket => char == ']',
+                    tt::DelimiterKind::Invisible => false,
+                });
+            if let Some((idx, _)) = found_expected_delimiter {
+                for _ in 0..=idx {
+                    builder.close(span);
+                }
+                continue;
+            }
+
+            let delim = match char {
+                '(' => tt::DelimiterKind::Parenthesis,
+                '{' => tt::DelimiterKind::Brace,
+                '[' => tt::DelimiterKind::Bracket,
+                _ => panic!("unmatched closing delimiter from syntax fixup"),
+            };
+
+            // Start a new subtree
+            builder.open(delim, span);
+            continue;
+        }
         Some(leaf) => leaf.clone(),
         None => match token.kind(conv) {
             // Desugar doc comments into doc attributes
@@ -228,17 +259,24 @@
             continue;
         }
         kind if kind.is_punct() && kind != UNDERSCORE => {
-            let expected = match delimiter {
-                Some(tt::DelimiterKind::Parenthesis) => Some(T![')']),
-                Some(tt::DelimiterKind::Brace) => Some(T!['}']),
-                Some(tt::DelimiterKind::Bracket) => Some(T![']']),
-                Some(tt::DelimiterKind::Invisible) | None => None,
-            };
+            let found_expected_delimiter =
+                builder.expected_delimiters().enumerate().find(|(_, delim)| {
+                    match delim.kind {
+                        tt::DelimiterKind::Parenthesis => kind == T![')'],
+                        tt::DelimiterKind::Brace => kind == T!['}'],
+                        tt::DelimiterKind::Bracket => kind == T![']'],
+                        tt::DelimiterKind::Invisible => false,
+                    }
+                });

             // Current token is a closing delimiter that we expect, fix up the closing span
-            // and end the subtree here
-            if matches!(expected, Some(expected) if expected == kind) {
-                builder.close(conv.span_for(abs_range));
+            // and end the subtree here.
+            // We also close any open inner subtrees that might be missing their delimiter.
+            if let Some((idx, _)) = found_expected_delimiter {
+                for _ in 0..=idx {
+                    // FIXME: record an error somewhere if we're closing more than one tree here?
+                    builder.close(conv.span_for(abs_range));
+                }
                 continue;
             }

@@ -262,6 +300,7 @@ where
         let Some(char) = token.to_char(conv) else {
             panic!("Token from lexer must be single char: token = {token:#?}")
         };
+        // FIXME: this might still be an unmatched closing delimiter? Maybe we should assert here
         tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
     }
     kind => {
@@ -317,11 +356,10 @@ where
     builder.push(tt);
 }

-// If we get here, we've consumed all input tokens.
-// We might have more than one subtree in the stack, if the delimiters are improperly balanced.
-// Merge them so we're left with one.
-builder.flatten_unclosed_subtrees();
-
+while builder.expected_delimiters().next().is_some() {
+    // FIXME: record an error somewhere?
+    builder.close(conv.call_site());
+}
 builder.build_skip_top_subtree()
 }
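
The converter change above is the heart of the fix: the bare delimiter `Punct`s produced by fixup are now turned into real subtrees, a matching closing token closes every open subtree up to and including the innermost one of the right kind (not just the innermost subtree), and whatever is still open at the end of input is closed at the call site instead of being flattened. The following standalone sketch illustrates that balancing strategy with toy types (`Builder`, `Delim`, `Tree`); it is not rust-analyzer's `tt::TopSubtreeBuilder` API.

// Toy token-tree builder demonstrating the delimiter-balancing strategy:
// a stray closing delimiter closes every open subtree up to and including the
// innermost matching one, a stray opening delimiter starts a new subtree, and
// anything still open at end of input is closed.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Delim {
    Paren,
    Brace,
    Bracket,
}

#[derive(Debug)]
enum Tree {
    Token(char),
    Subtree(Delim, Vec<Tree>),
}

#[derive(Default)]
struct Builder {
    // Innermost open subtree last; plays the role of the expected-delimiter stack.
    stack: Vec<(Delim, Vec<Tree>)>,
    top: Vec<Tree>,
}

impl Builder {
    fn open(&mut self, delim: Delim) {
        self.stack.push((delim, Vec::new()));
    }

    fn close(&mut self) {
        if let Some((delim, children)) = self.stack.pop() {
            self.push(Tree::Subtree(delim, children));
        }
    }

    fn push(&mut self, tree: Tree) {
        match self.stack.last_mut() {
            Some((_, children)) => children.push(tree),
            None => self.top.push(tree),
        }
    }

    fn finish(mut self) -> Vec<Tree> {
        // Close any subtrees that never saw their closing delimiter.
        while !self.stack.is_empty() {
            self.close();
        }
        self.top
    }
}

fn convert(input: &str) -> Vec<Tree> {
    let mut builder = Builder::default();
    for ch in input.chars().filter(|ch| !ch.is_whitespace()) {
        let open = match ch {
            '(' => Some(Delim::Paren),
            '{' => Some(Delim::Brace),
            '[' => Some(Delim::Bracket),
            _ => None,
        };
        if let Some(delim) = open {
            builder.open(delim);
            continue;
        }
        let closes = match ch {
            ')' => Some(Delim::Paren),
            '}' => Some(Delim::Brace),
            ']' => Some(Delim::Bracket),
            _ => None,
        };
        if let Some(kind) = closes {
            // Find the innermost open subtree with a matching kind...
            let found = builder.stack.iter().rev().position(|(d, _)| *d == kind);
            if let Some(idx) = found {
                // ...and close it together with everything opened inside it.
                for _ in 0..=idx {
                    builder.close();
                }
            }
            // An unmatched closing delimiter is simply dropped in this sketch.
            continue;
        }
        builder.push(Tree::Token(ch));
    }
    builder.finish()
}

fn main() {
    // The stray '}' closes both the unfinished '(' group and the '{' group.
    println!("{:?}", convert("{ ( a }"));
}

For example, `convert("{ ( a }")` yields a brace subtree wrapping a parenthesis subtree around `a`: the stray `}` closes both the unfinished `(` group and the `{` group, the same kind of recovery the new `unbalanced_brace` test in crates/mbe relies on.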
