Skip to content

Commit 5e69353

Browse files
committed
syntax: Add some helper methods to Token
1 parent aa6fba9 commit 5e69353

File tree

9 files changed

+64
-62
lines changed

9 files changed

+64
-62
lines changed

src/libsyntax/attr/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -604,8 +604,8 @@ impl NestedMetaItem {
604604
fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
605605
where I: Iterator<Item = TokenTree>,
606606
{
607-
if let Some(TokenTree::Token(token)) = tokens.peek().cloned() {
608-
if let Ok(lit) = Lit::from_token(&token, token.span) {
607+
if let Some(TokenTree::Token(token)) = tokens.peek() {
608+
if let Ok(lit) = Lit::from_token(token, token.span) {
609609
tokens.next();
610610
return Some(NestedMetaItem::Literal(lit));
611611
}

src/libsyntax/ext/tt/macro_parser.rs

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -727,13 +727,12 @@ pub fn parse(
727727
"ambiguity: multiple successful parses".to_string(),
728728
);
729729
} else {
730-
let span = if parser.span.is_dummy() {
731-
parser.span
732-
} else {
733-
sess.source_map().next_point(parser.span)
734-
};
735730
return Failure(
736-
Token { kind: token::Eof, span },
731+
Token::new(token::Eof, if parser.span.is_dummy() {
732+
parser.span
733+
} else {
734+
sess.source_map().next_point(parser.span)
735+
}),
737736
"missing tokens in macro arguments",
738737
);
739738
}
@@ -771,7 +770,7 @@ pub fn parse(
771770
// then there is a syntax error.
772771
else if bb_items.is_empty() && next_items.is_empty() {
773772
return Failure(
774-
parser.token.clone(),
773+
parser.token.take(),
775774
"no rules expected this token in macro call",
776775
);
777776
}

src/libsyntax/ext/tt/quoted.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -154,7 +154,7 @@ impl TokenTree {
154154
}
155155

156156
crate fn token(span: Span, kind: TokenKind) -> TokenTree {
157-
TokenTree::Token(Token { kind, span })
157+
TokenTree::Token(Token::new(kind, span))
158158
}
159159
}
160160

src/libsyntax/parse/lexer/mod.rs

Lines changed: 24 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ impl<'a> StringReader<'a> {
8888
/// Returns the next token. EFFECT: advances the string_reader.
8989
pub fn try_next_token(&mut self) -> Result<Token, ()> {
9090
assert!(self.fatal_errs.is_empty());
91-
let ret_val = self.peek_token.clone();
91+
let ret_val = self.peek_token.take();
9292
self.advance_token()?;
9393
Ok(ret_val)
9494
}
@@ -205,8 +205,7 @@ impl<'a> StringReader<'a> {
205205
ch: Some('\n'),
206206
source_file,
207207
end_src_index: src.len(),
208-
// dummy values; not read
209-
peek_token: Token { kind: token::Eof, span: syntax_pos::DUMMY_SP },
208+
peek_token: Token::dummy(),
210209
peek_span_src_raw: syntax_pos::DUMMY_SP,
211210
src,
212211
fatal_errs: Vec::new(),
@@ -320,21 +319,15 @@ impl<'a> StringReader<'a> {
320319
self.peek_token = comment;
321320
}
322321
None => {
323-
if self.is_eof() {
324-
325-
let (real, raw) = self.mk_sp_and_raw(
326-
self.source_file.end_pos,
327-
self.source_file.end_pos,
328-
);
329-
self.peek_token = Token { kind: token::Eof, span: real };
330-
self.peek_span_src_raw = raw;
322+
let (kind, start_pos, end_pos) = if self.is_eof() {
323+
(token::Eof, self.source_file.end_pos, self.source_file.end_pos)
331324
} else {
332-
let start_bytepos = self.pos;
333-
let kind = self.next_token_inner()?;
334-
let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos);
335-
self.peek_token = Token { kind, span: real };
336-
self.peek_span_src_raw = raw;
325+
let start_pos = self.pos;
326+
(self.next_token_inner()?, start_pos, self.pos)
337327
};
328+
let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
329+
self.peek_token = Token::new(kind, real);
330+
self.peek_span_src_raw = raw;
338331
}
339332
}
340333

@@ -544,7 +537,7 @@ impl<'a> StringReader<'a> {
544537
} else {
545538
token::Comment
546539
};
547-
Some(Token { kind, span: self.mk_sp(start_bpos, self.pos) })
540+
Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
548541
}
549542
Some('*') => {
550543
self.bump();
@@ -568,10 +561,10 @@ impl<'a> StringReader<'a> {
568561
while !self.ch_is('\n') && !self.is_eof() {
569562
self.bump();
570563
}
571-
return Some(Token {
572-
kind: token::Shebang(self.name_from(start)),
573-
span: self.mk_sp(start, self.pos),
574-
});
564+
return Some(Token::new(
565+
token::Shebang(self.name_from(start)),
566+
self.mk_sp(start, self.pos),
567+
));
575568
}
576569
}
577570
None
@@ -596,10 +589,7 @@ impl<'a> StringReader<'a> {
596589
while is_pattern_whitespace(self.ch) {
597590
self.bump();
598591
}
599-
let c = Some(Token {
600-
kind: token::Whitespace,
601-
span: self.mk_sp(start_bpos, self.pos),
602-
});
592+
let c = Some(Token::new(token::Whitespace, self.mk_sp(start_bpos, self.pos)));
603593
debug!("scanning whitespace: {:?}", c);
604594
c
605595
}
@@ -658,10 +648,7 @@ impl<'a> StringReader<'a> {
658648
token::Comment
659649
};
660650

661-
Some(Token {
662-
kind,
663-
span: self.mk_sp(start_bpos, self.pos),
664-
})
651+
Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
665652
})
666653
}
667654

@@ -1588,21 +1575,21 @@ mod tests {
15881575
assert_eq!(string_reader.next_token(), token::Comment);
15891576
assert_eq!(string_reader.next_token(), token::Whitespace);
15901577
let tok1 = string_reader.next_token();
1591-
let tok2 = Token {
1592-
kind: token::Ident(id, false),
1593-
span: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
1594-
};
1578+
let tok2 = Token::new(
1579+
token::Ident(id, false),
1580+
Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
1581+
);
15951582
assert_eq!(tok1.kind, tok2.kind);
15961583
assert_eq!(tok1.span, tok2.span);
15971584
assert_eq!(string_reader.next_token(), token::Whitespace);
15981585
// the 'main' id is already read:
15991586
assert_eq!(string_reader.pos.clone(), BytePos(28));
16001587
// read another token:
16011588
let tok3 = string_reader.next_token();
1602-
let tok4 = Token {
1603-
kind: mk_ident("main"),
1604-
span: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
1605-
};
1589+
let tok4 = Token::new(
1590+
mk_ident("main"),
1591+
Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
1592+
);
16061593
assert_eq!(tok3.kind, tok4.kind);
16071594
assert_eq!(tok3.span, tok4.span);
16081595
// the lparen is already read:

src/libsyntax/parse/lexer/tokentrees.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ impl<'a> StringReader<'a> {
1010
crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
1111
let mut tt_reader = TokenTreesReader {
1212
string_reader: self,
13-
token: token::Token { kind: token::Eof, span: syntax_pos::DUMMY_SP },
13+
token: Token::dummy(),
1414
open_braces: Vec::new(),
1515
unmatched_braces: Vec::new(),
1616
matching_delim_spans: Vec::new(),
@@ -202,7 +202,7 @@ impl<'a> TokenTreesReader<'a> {
202202
Err(err)
203203
},
204204
_ => {
205-
let tt = TokenTree::Token(self.token.clone());
205+
let tt = TokenTree::Token(self.token.take());
206206
// Note that testing for joint-ness here is done via the raw
207207
// source span as the joint-ness is a property of the raw source
208208
// rather than wanting to take `override_span` into account.

src/libsyntax/parse/literal.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -277,7 +277,7 @@ impl<'a> Parser<'a> {
277277
if self.span.hi() == next_span.lo() {
278278
let s = String::from("0.") + &symbol.as_str();
279279
let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
280-
return Some(Token { kind, span: self.span.to(next_span) });
280+
return Some(Token::new(kind, self.span.to(next_span)));
281281
}
282282
}
283283
None

src/libsyntax/parse/parser.rs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -318,7 +318,7 @@ impl TokenCursor {
318318
self.frame = frame;
319319
continue
320320
} else {
321-
return Token { kind: token::Eof, span: DUMMY_SP }
321+
return Token::new(token::Eof, DUMMY_SP);
322322
};
323323

324324
match self.frame.last_token {
@@ -477,7 +477,7 @@ impl<'a> Parser<'a> {
477477
) -> Self {
478478
let mut parser = Parser {
479479
sess,
480-
token: Token { kind: token::Whitespace, span: DUMMY_SP },
480+
token: Token::dummy(),
481481
prev_span: DUMMY_SP,
482482
meta_var_span: None,
483483
prev_token_kind: PrevTokenKind::Other,
@@ -1042,12 +1042,12 @@ impl<'a> Parser<'a> {
10421042
// fortunately for tokens currently using `bump_with`, the
10431043
// prev_token_kind will be of no use anyway.
10441044
self.prev_token_kind = PrevTokenKind::Other;
1045-
self.token = Token { kind: next, span };
1045+
self.token = Token::new(next, span);
10461046
self.expected_tokens.clear();
10471047
}
10481048

10491049
pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
1050-
F: FnOnce(&token::Token) -> R,
1050+
F: FnOnce(&Token) -> R,
10511051
{
10521052
if dist == 0 {
10531053
// FIXME: Avoid cloning here.
@@ -1058,9 +1058,9 @@ impl<'a> Parser<'a> {
10581058
f(&match frame.tree_cursor.look_ahead(dist - 1) {
10591059
Some(tree) => match tree {
10601060
TokenTree::Token(token) => token,
1061-
TokenTree::Delimited(dspan, delim, _) => Token { kind: token::OpenDelim(delim), span: dspan.open },
1061+
TokenTree::Delimited(dspan, delim, _) => Token::new(token::OpenDelim(delim), dspan.open),
10621062
}
1063-
None => Token { kind: token::CloseDelim(frame.delim), span: frame.span.close }
1063+
None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
10641064
})
10651065
}
10661066

@@ -2651,8 +2651,8 @@ impl<'a> Parser<'a> {
26512651
// Interpolated identifier and lifetime tokens are replaced with usual identifier
26522652
// and lifetime tokens, so the former are never encountered during normal parsing.
26532653
match **nt {
2654-
token::NtIdent(ident, is_raw) => Token { kind: token::Ident(ident, is_raw), span: ident.span },
2655-
token::NtLifetime(ident) => Token { kind: token::Lifetime(ident), span: ident.span },
2654+
token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span),
2655+
token::NtLifetime(ident) => Token::new(token::Lifetime(ident), ident.span),
26562656
_ => return,
26572657
}
26582658
}
@@ -2676,7 +2676,7 @@ impl<'a> Parser<'a> {
26762676
},
26772677
token::CloseDelim(_) | token::Eof => unreachable!(),
26782678
_ => {
2679-
let token = mem::replace(&mut self.token, Token { kind: token::Whitespace, span: DUMMY_SP });
2679+
let token = self.token.take();
26802680
self.bump();
26812681
TokenTree::Token(token)
26822682
}
@@ -2763,7 +2763,7 @@ impl<'a> Parser<'a> {
27632763
// `not` is just an ordinary identifier in Rust-the-language,
27642764
// but as `rustc`-the-compiler, we can issue clever diagnostics
27652765
// for confused users who really want to say `!`
2766-
let token_cannot_continue_expr = |t: &token::Token| match t.kind {
2766+
let token_cannot_continue_expr = |t: &Token| match t.kind {
27672767
// These tokens can start an expression after `!`, but
27682768
// can't continue an expression after an ident
27692769
token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),

src/libsyntax/parse/token.rs

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ use crate::syntax::parse::parse_stream_from_source_str;
1313
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
1414

1515
use syntax_pos::symbol::{self, Symbol};
16-
use syntax_pos::{self, Span, FileName};
16+
use syntax_pos::{self, Span, FileName, DUMMY_SP};
1717
use log::info;
1818

1919
use std::fmt;
@@ -609,6 +609,22 @@ impl TokenKind {
609609
}
610610
}
611611

612+
impl Token {
613+
crate fn new(kind: TokenKind, span: Span) -> Self {
614+
Token { kind, span }
615+
}
616+
617+
/// Some token that will be thrown away later.
618+
crate fn dummy() -> Self {
619+
Token::new(TokenKind::Whitespace, DUMMY_SP)
620+
}
621+
622+
/// Return this token by value and leave a dummy token in its place.
623+
crate fn take(&mut self) -> Self {
624+
mem::replace(self, Token::dummy())
625+
}
626+
}
627+
612628
impl PartialEq<TokenKind> for Token {
613629
fn eq(&self, rhs: &TokenKind) -> bool {
614630
self.kind == *rhs

src/libsyntax/tokenstream.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ impl TokenTree {
139139
}
140140

141141
pub fn token(span: Span, kind: TokenKind) -> TokenTree {
142-
TokenTree::Token(Token { kind, span })
142+
TokenTree::Token(Token::new(kind, span))
143143
}
144144

145145
/// Returns the opening delimiter as a token tree.

0 commit comments

Comments (0)