
Commit 29554c0

Auto merge of rust-lang#98463 - mystor:expand_expr_bool, r=eddyb
proc_macro: Fix expand_expr expansion of bool literals

Previously, the expand_expr method would expand bool literals as a `Literal` token containing a `LitKind::Bool`, rather than as an `Ident`. This is not a valid token, and the `LitKind::Bool` case needs to be handled separately. Tests that compare the expanded token streams more deeply were added to the expand-expr test suite to catch mistakes like this in the future.
2 parents: 268be96 + fb5b7b4
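
As background for the fix below, here is a minimal sketch, not part of this commit, of what a consuming proc macro should now observe: expanding a bool expression yields a `TokenTree::Ident` spelling `true` or `false`, not a `TokenTree::Literal`. It assumes a nightly toolchain with the unstable `proc_macro_expand` feature and a proc-macro crate; the macro name is made up for illustration.

    #![feature(proc_macro_expand)]
    extern crate proc_macro;
    use proc_macro::{TokenStream, TokenTree};

    // Hypothetical helper macro: expand the input as an expression and check
    // that a bool result surfaces as an `Ident` token, as this commit ensures.
    #[proc_macro]
    pub fn assert_bool_expands_to_ident(input: TokenStream) -> TokenStream {
        let expanded = input.expand_expr().expect("expand_expr failed");
        match expanded.into_iter().next() {
            Some(TokenTree::Ident(ident)) => {
                let s = ident.to_string();
                assert!(s == "true" || s == "false", "unexpected ident: {}", s);
            }
            other => panic!("expected an Ident token for a bool expression, got {:?}", other),
        }
        TokenStream::new()
    }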

2 files changed (+74 -1 lines)

compiler/rustc_expand/src/proc_macro_server.rs (+4)
@@ -426,6 +426,10 @@ impl server::TokenStream for Rustc<'_, '_> {
         // We don't use `TokenStream::from_ast` as the tokenstream currently cannot
         // be recovered in the general case.
         match &expr.kind {
+            ast::ExprKind::Lit(l) if l.token.kind == token::Bool => {
+                Ok(tokenstream::TokenTree::token(token::Ident(l.token.symbol, false), l.span)
+                    .into())
+            }
             ast::ExprKind::Lit(l) => {
                 Ok(tokenstream::TokenTree::token(token::Literal(l.token), l.span).into())
             }
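
Two details worth noting about the fix: the new arm has to come before the generic `ast::ExprKind::Lit(l)` arm, since match arms are tried in order and the generic arm would otherwise keep emitting an invalid `token::Literal` carrying a bool; and the `false` passed to `token::Ident` is the raw-identifier flag, which is always cleared here because `true` and `false` are keywords rather than raw identifiers.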

src/test/ui/proc-macro/auxiliary/expand-expr.rs (+70 -1)
@@ -10,6 +10,72 @@ extern crate proc_macro;
 use proc_macro::*;
 use std::str::FromStr;
 
+// Flatten the TokenStream, removing any toplevel `Delimiter::None`s for
+// comparison.
+fn flatten(ts: TokenStream) -> Vec<TokenTree> {
+    ts.into_iter()
+        .flat_map(|tt| match &tt {
+            TokenTree::Group(group) if group.delimiter() == Delimiter::None => {
+                flatten(group.stream())
+            }
+            _ => vec![tt],
+        })
+        .collect()
+}
+
+// Assert that two TokenStream values are roughly equal to one-another.
+fn assert_ts_eq(lhs: &TokenStream, rhs: &TokenStream) {
+    let ltts = flatten(lhs.clone());
+    let rtts = flatten(rhs.clone());
+
+    if ltts.len() != rtts.len() {
+        panic!(
+            "expected the same number of tts ({} == {})\nlhs:\n{:#?}\nrhs:\n{:#?}",
+            ltts.len(),
+            rtts.len(),
+            lhs,
+            rhs
+        )
+    }
+
+    for (ltt, rtt) in ltts.iter().zip(&rtts) {
+        match (ltt, rtt) {
+            (TokenTree::Group(l), TokenTree::Group(r)) => {
+                assert_eq!(
+                    l.delimiter(),
+                    r.delimiter(),
+                    "expected delimiters to match for {:?} and {:?}",
+                    l,
+                    r
+                );
+                assert_ts_eq(&l.stream(), &r.stream());
+            }
+            (TokenTree::Punct(l), TokenTree::Punct(r)) => assert_eq!(
+                (l.as_char(), l.spacing()),
+                (r.as_char(), r.spacing()),
+                "expected punct to match for {:?} and {:?}",
+                l,
+                r
+            ),
+            (TokenTree::Ident(l), TokenTree::Ident(r)) => assert_eq!(
+                l.to_string(),
+                r.to_string(),
+                "expected ident to match for {:?} and {:?}",
+                l,
+                r
+            ),
+            (TokenTree::Literal(l), TokenTree::Literal(r)) => assert_eq!(
+                l.to_string(),
+                r.to_string(),
+                "expected literal to match for {:?} and {:?}",
+                l,
+                r
+            ),
+            (l, r) => panic!("expected type to match for {:?} and {:?}", l, r),
+        }
+    }
+}
+
 #[proc_macro]
 pub fn expand_expr_is(input: TokenStream) -> TokenStream {
     let mut iter = input.into_iter();
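
The `Delimiter::None` groups that `flatten` strips are the invisible delimiters rustc wraps around expressions interpolated from macro metavariables; two streams that stringify identically can still differ in this wrapping, so the comparison flattens both sides before walking them token by token.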
@@ -31,6 +97,9 @@ pub fn expand_expr_is(input: TokenStream) -> TokenStream {
         expanded.to_string()
     );
 
+    // Also compare the raw tts to make sure they line up.
+    assert_ts_eq(&expected, &expanded);
+
     TokenStream::new()
 }
 
@@ -48,7 +117,7 @@ pub fn check_expand_expr_file(ts: TokenStream) -> TokenStream {
     // invocation expand to the same literal.
     let input_t = ts.expand_expr().expect("expand_expr failed on macro input").to_string();
     let parse_t = TokenStream::from_str("file!{}")
-        .unwrap()
+        .unwrap()
         .expand_expr()
         .expect("expand_expr failed on internal macro")
         .to_string();

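For orientation, a hedged sketch of how the driver test (src/test/ui/proc-macro/expand-expr.rs, not shown in this diff) invokes the auxiliary macro: the tokens before the comma are the expected expansion, and everything after it is the expression handed to `expand_expr`. The concrete cases below are illustrative assumptions, not lines quoted from that file.

    // Illustrative invocations; see src/test/ui/proc-macro/expand-expr.rs for
    // the real test cases.
    expand_expr_is!(true, true);
    expand_expr_is!("hello", concat!("hel", "lo"));

With the new `assert_ts_eq` check in place, the bool case would fail if `expand_expr` handed back a `Literal` token carrying `LitKind::Bool` instead of an `Ident`, which is exactly the regression this commit fixes.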