This repository was archived by the owner on May 28, 2025. It is now read-only.

Commit ab3a0f6

Impl Copy for Token and TokenKind.
1 parent 42265a8 commit ab3a0f6

19 files changed (+69 lines, -70 lines)

compiler/rustc_ast/src/token.rs

Lines changed: 2 additions & 2 deletions

@@ -279,7 +279,7 @@ impl From<IdentIsRaw> for bool {
     }
 }
 
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     /// `=`
@@ -377,7 +377,7 @@ pub enum TokenKind {
     Eof,
 }
 
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
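
The rest of the commit is the cleanup this derive enables: once `TokenKind` and `Token` are `Copy`, call sites can copy tokens with `*token` or a plain assignment instead of calling `.clone()`. A minimal sketch of the difference, using simplified stand-in types rather than the real rustc definitions (which also derive `Encodable`, `Decodable`, and `HashStable_Generic`):

// Simplified stand-ins for rustc_ast's Token and TokenKind, for illustration only.
#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenKind {
    Eq,
    Eof,
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct Span {
    lo: u32,
    hi: u32,
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
}

fn main() {
    let token = Token { kind: TokenKind::Eq, span: Span { lo: 0, hi: 1 } };
    let by_ref: &Token = &token;

    // Before the derive this needed `by_ref.clone()`; with Copy, a plain
    // dereference yields an independent copy of the token.
    let copied: Token = *by_ref;

    // Assignment also copies rather than moves, so `token` stays usable.
    let also_copied = token;
    assert_eq!(copied, also_copied);
    assert_eq!(token.kind, TokenKind::Eq);
}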

compiler/rustc_ast/src/tokenstream.rs

Lines changed: 1 addition & 1 deletion

@@ -475,7 +475,7 @@ impl TokenStream {
                 Delimiter::Invisible(InvisibleOrigin::FlattenToken),
                 TokenStream::token_alone(token::Lifetime(ident.name), ident.span),
             ),
-            _ => TokenTree::Token(token.clone(), spacing),
+            _ => TokenTree::Token(*token, spacing),
         }
     }

compiler/rustc_expand/src/mbe/diagnostics.rs

Lines changed: 1 addition & 1 deletion

@@ -153,7 +153,7 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
                 .map_or(true, |failure| failure.is_better_position(*approx_position))
         {
             self.best_failure = Some(BestFailure {
-                token: token.clone(),
+                token: *token,
                 position_in_tokenstream: *approx_position,
                 msg,
                 remaining_matcher: self

compiler/rustc_expand/src/mbe/macro_parser.rs

Lines changed: 2 additions & 2 deletions

@@ -181,7 +181,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
     for tt in tts {
         match tt {
             TokenTree::Token(token) => {
-                locs.push(MatcherLoc::Token { token: token.clone() });
+                locs.push(MatcherLoc::Token { token: *token });
             }
             TokenTree::Delimited(span, _, delimited) => {
                 let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -648,7 +648,7 @@ impl TtParser {
                 // There are no possible next positions AND we aren't waiting for the black-box
                 // parser: syntax error.
                 return Failure(T::build_failure(
-                    parser.token.clone(),
+                    parser.token,
                     parser.approx_token_stream_pos(),
                     "no rules expected this token in macro call",
                 ));

compiler/rustc_expand/src/mbe/macro_rules.rs

Lines changed: 4 additions & 4 deletions

@@ -790,7 +790,7 @@ impl<'tt> FirstSets<'tt> {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -853,7 +853,7 @@ impl<'tt> FirstSets<'tt> {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
 
                         assert!(first.maybe_empty);
@@ -929,7 +929,7 @@ impl<'tt> Clone for TtHandle<'tt> {
             // This variant *must* contain a `mbe::TokenTree::Token`, and not
             // any other variant of `mbe::TokenTree`.
             TtHandle::Token(mbe::TokenTree::Token(tok)) => {
-                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+                TtHandle::Token(mbe::TokenTree::Token(*tok))
             }
 
             _ => unreachable!(),
@@ -1105,7 +1105,7 @@ fn check_matcher_core<'tt>(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(*sep));
                     &new
                 } else {
                     &suffix_first

compiler/rustc_expand/src/mbe/quoted.rs

Lines changed: 2 additions & 2 deletions

@@ -311,7 +311,7 @@ fn parse_tree<'a>(
         }
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -349,7 +349,7 @@ fn parse_kleene_op<'a>(
     match input.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
            Some(op) => Ok(Ok((op, token.span))),
-           None => Ok(Err(token.clone())),
+           None => Ok(Err(*token)),
        },
        tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
    }

compiler/rustc_expand/src/mbe/transcribe.rs

Lines changed: 2 additions & 2 deletions

@@ -158,7 +158,7 @@ pub(super) fn transcribe<'a>(
                 if repeat_idx < repeat_len {
                     frame.idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
+                        result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -377,7 +377,7 @@ pub(super) fn transcribe<'a>(
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             mbe::TokenTree::Token(token) => {
-                let mut token = token.clone();
+                let mut token = *token;
                 mut_visit::visit_token(&mut token, &mut marker);
                 let tt = TokenTree::Token(token, Spacing::Alone);
                 result.push(tt);

compiler/rustc_parse/src/lexer/unicode_chars.rs

Lines changed: 1 addition & 1 deletion

@@ -377,7 +377,7 @@ pub(super) fn check_for_substitution(
             ascii_name,
         })
     };
-    (token.clone(), sugg)
+    (*token, sugg)
 }
 
 /// Extract string if found at current position with given delimiters

compiler/rustc_parse/src/parser/attr_wrapper.rs

Lines changed: 7 additions & 8 deletions

@@ -104,13 +104,12 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens =
-            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain(std::iter::repeat_with(|| {
-                    let token = cursor_snapshot.next();
-                    (FlatToken::Token(token.0), token.1)
-                }))
-                .take(self.num_calls);
+        let tokens = std::iter::once((FlatToken::Token(self.start_token.0), self.start_token.1))
+            .chain(std::iter::repeat_with(|| {
+                let token = cursor_snapshot.next();
+                (FlatToken::Token(token.0), token.1)
+            }))
+            .take(self.num_calls);
 
         if !self.replace_ranges.is_empty() {
             let mut tokens: Vec<_> = tokens.collect();
@@ -215,7 +214,7 @@ impl<'a> Parser<'a> {
             return Ok(f(self, attrs.attrs)?.0);
         }
 
-        let start_token = (self.token.clone(), self.token_spacing);
+        let start_token = (self.token, self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;
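
The reshaped iterator chain keeps the same replay logic: emit the saved start token first, then keep pulling tokens from the cursor snapshot, and cap the sequence at the recorded number of calls. A rough, self-contained sketch of that once/chain/repeat_with/take shape, using plain integers in place of the real FlatTokens:

use std::iter;

fn main() {
    // Hypothetical snapshot state: a saved start token plus a cursor to poll.
    let start_token = 1u32;
    let mut cursor = 2u32..;
    let num_calls = 4;

    // Same shape as the patched chain: once(start) -> repeat_with(next) -> take(n).
    let tokens: Vec<u32> = iter::once(start_token)
        .chain(iter::repeat_with(|| cursor.next().unwrap()))
        .take(num_calls)
        .collect();

    assert_eq!(tokens, vec![1, 2, 3, 4]);
}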

compiler/rustc_parse/src/parser/diagnostics.rs

Lines changed: 8 additions & 8 deletions

@@ -287,7 +287,7 @@ impl<'a> Parser<'a> {
         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
         // the diagnostic, regardless of eager recovery.
-        let bad_token = self.token.clone();
+        let bad_token = self.token;
 
         // suggest prepending a keyword in identifier position with `r#`
         let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
@@ -347,7 +347,7 @@ impl<'a> Parser<'a> {
         // if the previous token is a valid keyword
         // that might use a generic, then suggest a correct
         // generic placement (later on)
-        let maybe_keyword = self.prev_token.clone();
+        let maybe_keyword = self.prev_token;
         if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
             // if we have a valid keyword, attempt to parse generics
             // also obtain the keywords symbol
@@ -463,7 +463,7 @@ impl<'a> Parser<'a> {
                 false
             }
 
-            if **token != parser::TokenType::Token(self.token.kind.clone()) {
+            if **token != parser::TokenType::Token(self.token.kind) {
                 let eq = is_ident_eq_keyword(&self.token.kind, &token);
                 // If the suggestion is a keyword and the found token is an ident,
                 // the content of which are equal to the suggestion's content,
@@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
             // let y = 42;
             let guar = self.dcx().emit_err(ExpectedSemi {
                 span: self.token.span,
-                token: self.token.clone(),
+                token: self.token,
                 unexpected_token_label: None,
                 sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
             });
@@ -552,7 +552,7 @@ impl<'a> Parser<'a> {
             let span = self.prev_token.span.shrink_to_hi();
             let guar = self.dcx().emit_err(ExpectedSemi {
                 span,
-                token: self.token.clone(),
+                token: self.token,
                 unexpected_token_label: Some(self.token.span),
                 sugg: ExpectedSemiSugg::AddSemi(span),
             });
@@ -748,7 +748,7 @@ impl<'a> Parser<'a> {
             let span = self.prev_token.span.shrink_to_hi();
             let mut err = self.dcx().create_err(ExpectedSemi {
                 span,
-                token: self.token.clone(),
+                token: self.token,
                 unexpected_token_label: Some(self.token.span),
                 sugg: ExpectedSemiSugg::AddSemi(span),
             });
@@ -2366,7 +2366,7 @@ impl<'a> Parser<'a> {
         // code was interpreted. This helps the user realize when a macro argument of one type is
         // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
        // in a subsequent macro invocation (#71039).
-        let mut tok = self.token.clone();
+        let mut tok = self.token;
         let mut labels = vec![];
         while let TokenKind::Interpolated(nt) = &tok.kind {
             let tokens = nt.tokens();
@@ -2376,7 +2376,7 @@ impl<'a> Parser<'a> {
                 && let tokens = tokens.0.deref()
                 && let [AttrTokenTree::Token(token, _)] = &tokens[..]
             {
-                tok = token.clone();
+                tok = *token;
             } else {
                 break;
             }

compiler/rustc_parse/src/parser/expr.rs

Lines changed: 10 additions & 10 deletions

@@ -409,7 +409,7 @@ impl<'a> Parser<'a> {
     fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
         self.dcx().emit_err(errors::FoundExprWouldBeStmt {
             span: self.token.span,
-            token: self.token.clone(),
+            token: self.token,
             suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
         });
     }
@@ -484,7 +484,7 @@ impl<'a> Parser<'a> {
         cur_op_span: Span,
     ) -> PResult<'a, P<Expr>> {
         let rhs = if self.is_at_start_of_range_notation_rhs() {
-            let maybe_lt = self.token.clone();
+            let maybe_lt = self.token;
             let attrs = self.parse_outer_attributes()?;
             Some(
                 self.parse_expr_assoc_with(prec + 1, LhsExpr::Unparsed { attrs })
@@ -678,7 +678,7 @@ impl<'a> Parser<'a> {
 
     /// Recover on `not expr` in favor of `!expr`.
     fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        let negated_token = self.look_ahead(1, |t| t.clone());
+        let negated_token = self.look_ahead(1, |t| *t);
 
         let sub_diag = if negated_token.is_numeric_lit() {
             errors::NotAsNegationOperatorSub::SuggestNotBitwise
@@ -1634,7 +1634,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
-        let maybe_eq_tok = self.prev_token.clone();
+        let maybe_eq_tok = self.prev_token;
         let (qself, path) = if self.eat_lt() {
             let lt_span = self.prev_token.span;
             let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {
@@ -2079,7 +2079,7 @@ impl<'a> Parser<'a> {
         &mut self,
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
     ) -> PResult<'a, L> {
-        let token = self.token.clone();
+        let token = self.token;
         let err = |self_: &Self| {
             let msg = format!("unexpected token: {}", super::token_descr(&token));
             self_.dcx().struct_span_err(token.span, msg)
@@ -2422,7 +2422,7 @@ impl<'a> Parser<'a> {
     fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
-        let before = self.prev_token.clone();
+        let before = self.prev_token;
         let binder = if self.check_keyword(kw::For) {
             let lo = self.token.span;
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
@@ -2453,8 +2453,8 @@ impl<'a> Parser<'a> {
             FnRetTy::Default(_) => {
                 let restrictions =
                     self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
-                let prev = self.prev_token.clone();
-                let token = self.token.clone();
+                let prev = self.prev_token;
+                let token = self.token;
                 let attrs = self.parse_outer_attributes()?;
                 match self.parse_expr_res(restrictions, attrs) {
                     Ok(expr) => expr,
@@ -2652,7 +2652,7 @@ impl<'a> Parser<'a> {
             }
         } else {
             let attrs = self.parse_outer_attributes()?; // For recovery.
-            let maybe_fatarrow = self.token.clone();
+            let maybe_fatarrow = self.token;
             let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
                 self.parse_block()?
             } else {
@@ -3851,7 +3851,7 @@ impl<'a> Parser<'a> {
                 return Err(this.dcx().create_err(errors::ExpectedStructField {
                     span: this.look_ahead(1, |t| t.span),
                     ident_span: this.token.span,
-                    token: this.look_ahead(1, |t| t.clone()),
+                    token: this.look_ahead(1, |t| *t),
                 }));
             }
             let (ident, expr) = if is_shorthand {

compiler/rustc_parse/src/parser/item.rs

Lines changed: 2 additions & 3 deletions

@@ -1703,8 +1703,7 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             body
         } else {
-            let err =
-                errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+            let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token);
             return Err(self.dcx().create_err(err));
         };
 
@@ -2233,7 +2232,7 @@ impl<'a> Parser<'a> {
             || self.token.is_keyword(kw::Union))
             && self.look_ahead(1, |t| t.is_ident())
         {
-            let kw_token = self.token.clone();
+            let kw_token = self.token;
             let kw_str = pprust::token_to_string(&kw_token);
             let item = self.parse_item(ForceCollect::No)?;
             self.dcx().emit_err(errors::NestedAdt {

compiler/rustc_parse/src/parser/mod.rs

Lines changed: 10 additions & 9 deletions

@@ -309,12 +309,12 @@ impl TokenCursor {
         // below can be removed.
         if let Some(tree) = self.tree_cursor.next_ref() {
             match tree {
-                &TokenTree::Token(ref token, spacing) => {
+                &TokenTree::Token(token, spacing) => {
                     debug_assert!(!matches!(
                         token.kind,
                         token::OpenDelim(_) | token::CloseDelim(_)
                     ));
-                    return (token.clone(), spacing);
+                    return (token, spacing);
                 }
                 &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                     let trees = tts.clone().into_trees();
@@ -593,7 +593,7 @@ impl<'a> Parser<'a> {
     fn check(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.token == *tok;
         if !is_present {
-            self.expected_tokens.push(TokenType::Token(tok.clone()));
+            self.expected_tokens.push(TokenType::Token(*tok));
         }
         is_present
     }
@@ -1460,7 +1460,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let prev_spacing = self.token_spacing;
                 self.bump();
-                TokenTree::Token(self.prev_token.clone(), prev_spacing)
+                TokenTree::Token(self.prev_token, prev_spacing)
             }
         }
     }
@@ -1645,13 +1645,14 @@ impl<'a> Parser<'a> {
             // we don't need N spans, but we want at least one, so print all of prev_token
             dbg_fmt.field("prev_token", &parser.prev_token);
             // make it easier to peek farther ahead by taking TokenKinds only until EOF
-            let tokens = (0..*lookahead)
-                .map(|i| parser.look_ahead(i, |tok| tok.kind.clone()))
-                .scan(parser.prev_token == TokenKind::Eof, |eof, tok| {
-                    let current = eof.then_some(tok.clone()); // include a trailing EOF token
+            let tokens = (0..*lookahead).map(|i| parser.look_ahead(i, |tok| tok.kind)).scan(
+                parser.prev_token == TokenKind::Eof,
+                |eof, tok| {
+                    let current = eof.then_some(tok); // include a trailing EOF token
                     *eof |= &tok == &TokenKind::Eof;
                     current
-                });
+                },
+            );
             dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
             dbg_fmt.field("approx_token_stream_pos", &parser.num_bump_calls);
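
The TokenCursor hunk also shows the second thing the derive buys: with a Copy token type, matching `&TokenTree::Token(token, spacing)` binds `token` by copy straight out of the borrowed tree, so the old `ref token` binding and the later `token.clone()` are no longer needed. A small sketch of that pattern, again with simplified stand-in types rather than the real rustc ones:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Token {
    kind: u8, // stand-in for the real TokenKind
}

enum TokenTree {
    Token(Token, bool),        // (token, spacing), simplified
    Delimited(Vec<TokenTree>), // stand-in for the delimited variant
}

fn next_token(tree: &TokenTree) -> Option<(Token, bool)> {
    match tree {
        // Because `Token` is Copy, this binds `token` by copy even though
        // `tree` is only borrowed; without Copy it would need
        // `&TokenTree::Token(ref token, spacing)` plus `token.clone()`.
        &TokenTree::Token(token, spacing) => Some((token, spacing)),
        &TokenTree::Delimited(_) => None,
    }
}

fn main() {
    let tree = TokenTree::Token(Token { kind: 7 }, true);
    assert_eq!(next_token(&tree), Some((Token { kind: 7 }, true)));
}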
