Skip to content

Commit b04c532

Browse files
committed
Auto merge of rust-lang#96210 - nnethercote:speed-up-TokenCursor, r=petrochenkov
Speed up `TokenCursor`. Plus a few related clean-ups. r? `@petrochenkov`
2 parents: 1dec35a + 643e9f7 — commit b04c532

File tree

4 files changed

+118
-140
lines changed

4 files changed

+118
-140
lines changed

compiler/rustc_ast/src/tokenstream.rs

+12-15
Original file line number | Diff line number | Diff line change
@@ -94,16 +94,6 @@ impl TokenTree {
9494
TokenTree::Token(Token::new(kind, span))
9595
}
9696

97-
/// Returns the opening delimiter as a token tree.
98-
pub fn open_tt(span: DelimSpan, delim: DelimToken) -> TokenTree {
99-
TokenTree::token(token::OpenDelim(delim), span.open)
100-
}
101-
102-
/// Returns the closing delimiter as a token tree.
103-
pub fn close_tt(span: DelimSpan, delim: DelimToken) -> TokenTree {
104-
TokenTree::token(token::CloseDelim(delim), span.close)
105-
}
106-
10797
pub fn uninterpolate(self) -> TokenTree {
10898
match self {
10999
TokenTree::Token(token) => TokenTree::Token(token.uninterpolate().into_owned()),
@@ -585,13 +575,20 @@ impl Cursor {
585575
Cursor { stream, index: 0 }
586576
}
587577

578+
#[inline]
588579
pub fn next_with_spacing(&mut self) -> Option<TreeAndSpacing> {
589-
if self.index < self.stream.len() {
580+
self.stream.0.get(self.index).map(|tree| {
590581
self.index += 1;
591-
Some(self.stream.0[self.index - 1].clone())
592-
} else {
593-
None
594-
}
582+
tree.clone()
583+
})
584+
}
585+
586+
#[inline]
587+
pub fn next_with_spacing_ref(&mut self) -> Option<&TreeAndSpacing> {
588+
self.stream.0.get(self.index).map(|tree| {
589+
self.index += 1;
590+
tree
591+
})
595592
}
596593

597594
pub fn index(&self) -> usize {

compiler/rustc_parse/src/parser/attr_wrapper.rs

+6-11
Original file line numberDiff line numberDiff line change
@@ -100,21 +100,16 @@ rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
100100

101101
impl CreateTokenStream for LazyTokenStreamImpl {
102102
fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
103-
// The token produced by the final call to `{,inlined_}next` or
104-
// `{,inlined_}next_desugared` was not actually consumed by the
105-
// callback. The combination of chaining the initial token and using
106-
// `take` produces the desired result - we produce an empty
107-
// `TokenStream` if no calls were made, and omit the final token
108-
// otherwise.
103+
// The token produced by the final call to `{,inlined_}next` was not
104+
// actually consumed by the callback. The combination of chaining the
105+
// initial token and using `take` produces the desired result - we
106+
// produce an empty `TokenStream` if no calls were made, and omit the
107+
// final token otherwise.
109108
let mut cursor_snapshot = self.cursor_snapshot.clone();
110109
let tokens =
111110
std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
112111
.chain((0..self.num_calls).map(|_| {
113-
let token = if cursor_snapshot.desugar_doc_comments {
114-
cursor_snapshot.next_desugared()
115-
} else {
116-
cursor_snapshot.next()
117-
};
112+
let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments);
118113
(FlatToken::Token(token.0), token.1)
119114
}))
120115
.take(self.num_calls);

0 commit comments

Comments (0)