
Commit 99b27d7

syntax: Rename Token into TokenKind
1 parent eac3846 commit 99b27d7

24 files changed: +119 / -119 lines

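For code using the type, the change is mechanical: imports and type annotations switch from `Token` to `TokenKind`, while matches on variants such as `token::Eof` or `token::Interpolated(..)` compile unchanged because they already go through the `token` module. Below is a minimal, self-contained sketch of that call-site pattern, using a toy stand-in enum rather than rustc's real definitions:

// Toy stand-in for the renamed type; rustc's real token::TokenKind has far more
// variants (literals, delimiters, interpolated fragments, ...).
#[derive(Debug)]
enum TokenKind {
    Ident(String, /* is_raw */ bool),
    Comma,
    Eof,
}

// Call sites in this diff only rename the type in imports and signatures
// (`Token` -> `TokenKind`); matching on the variants stays exactly the same.
fn describe(tok: &TokenKind) -> String {
    match tok {
        TokenKind::Ident(name, true) => format!("raw identifier `r#{}`", name),
        TokenKind::Ident(name, false) => format!("identifier `{}`", name),
        TokenKind::Comma => "`,`".to_string(),
        TokenKind::Eof => "end of input".to_string(),
    }
}

fn main() {
    let toks = vec![
        TokenKind::Ident("fn".to_string(), true),
        TokenKind::Comma,
        TokenKind::Eof,
    ];
    for tok in &toks {
        println!("{}", describe(tok));
    }
}
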
src/librustc/hir/lowering.rs

Lines changed: 2 additions & 2 deletions
@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};

@@ -1337,7 +1337,7 @@ impl<'a> LoweringContext<'a> {
         }
     }

-    fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
+    fn lower_token(&mut self, token: TokenKind, span: Span) -> TokenStream {
         match token {
             token::Interpolated(nt) => {
                 let tts = nt.to_tokenstream(&self.sess.parse_sess, span);

src/librustc/hir/map/def_collector.rs

Lines changed: 2 additions & 2 deletions
@@ -6,7 +6,7 @@ use syntax::ast::*;
 use syntax::ext::hygiene::Mark;
 use syntax::visit;
 use syntax::symbol::{kw, sym};
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax_pos::Span;

 /// Creates `DefId`s for nodes in the AST.

@@ -325,7 +325,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
         }
     }

-    fn visit_token(&mut self, t: Token) {
+    fn visit_token(&mut self, t: TokenKind) {
         if let token::Interpolated(nt) = t {
             if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {

src/librustc/ich/impls_syntax.rs

Lines changed: 1 addition & 1 deletion
@@ -307,7 +307,7 @@ impl_stable_hash_for!(struct token::Lit {
 });

 fn hash_token<'a, 'gcx, W: StableHasherResult>(
-    token: &token::Token,
+    token: &token::TokenKind,
     hcx: &mut StableHashingContext<'a>,
     hasher: &mut StableHasher<W>,
 ) {

src/librustc_resolve/build_reduced_graph.rs

Lines changed: 2 additions & 2 deletions
@@ -34,7 +34,7 @@ use syntax::ext::base::Determinacy::Undetermined;
 use syntax::ext::hygiene::Mark;
 use syntax::ext::tt::macro_rules;
 use syntax::feature_gate::is_builtin_attr;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax::span_err;
 use syntax::std_inject::injected_crate_name;
 use syntax::symbol::{kw, sym};

@@ -1052,7 +1052,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
         self.resolver.current_module = parent;
     }

-    fn visit_token(&mut self, t: Token) {
+    fn visit_token(&mut self, t: TokenKind) {
         if let token::Interpolated(nt) = t {
             if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {

src/librustc_save_analysis/span_utils.rs

Lines changed: 2 additions & 2 deletions
@@ -5,7 +5,7 @@ use crate::generated_code;
 use std::cell::Cell;

 use syntax::parse::lexer::{self, StringReader};
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax_pos::*;

 #[derive(Clone)]

@@ -56,7 +56,7 @@ impl<'a> SpanUtils<'a> {
         lexer::StringReader::retokenize(&self.sess.parse_sess, span)
     }

-    pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
+    pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
         let mut toks = self.retokenise_span(span);
         loop {
             let next = toks.real_token();

src/libsyntax/attr/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::parser::Parser;
 use crate::parse::{self, ParseSess, PResult};
-use crate::parse::token::{self, Token};
+use crate::parse::token::{self, TokenKind};
 use crate::ptr::P;
 use crate::symbol::{sym, Symbol};
 use crate::ThinVec;

@@ -468,7 +468,7 @@ impl MetaItem {
                 idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
             }
             idents.push(TokenTree::Token(segment.ident.span,
-                                         Token::from_ast_ident(segment.ident)).into());
+                                         TokenKind::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
         self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);

src/libsyntax/ext/tt/macro_parser.rs

Lines changed: 8 additions & 8 deletions
@@ -78,7 +78,7 @@ use crate::ast::Ident;
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, Token};
+use crate::parse::token::{self, DocComment, Nonterminal, TokenKind};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream};

@@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> {
     seq_op: Option<quoted::KleeneOp>,

     /// The separator if we are in a repetition.
-    sep: Option<Token>,
+    sep: Option<TokenKind>,

     /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
     /// before we enter the sequence.

@@ -273,7 +273,7 @@ pub enum ParseResult<T> {
     Success(T),
     /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
     /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
-    Failure(syntax_pos::Span, Token, &'static str),
+    Failure(syntax_pos::Span, TokenKind, &'static str),
     /// Fatal error (malformed macro?). Abort compilation.
     Error(syntax_pos::Span, String),
 }

@@ -417,7 +417,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(

 /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: Token) -> String {
+pub fn parse_failure_msg(tok: TokenKind) -> String {
     match tok {
         token::Eof => "unexpected end of macro invocation".to_string(),
         _ => format!(

@@ -428,7 +428,7 @@ pub fn parse_failure_msg(tok: Token) -> String {
 }

 /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
-fn token_name_eq(t1: &Token, t2: &Token) -> bool {
+fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool {
     if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
         id1.name == id2.name && is_raw1 == is_raw2
     } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) {

@@ -466,7 +466,7 @@ fn inner_parse_loop<'root, 'tt>(
     next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
     eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    token: &Token,
+    token: &TokenKind,
     span: syntax_pos::Span,
 ) -> ParseResult<()> {
     // Pop items from `cur_items` until it is empty.

@@ -807,7 +807,7 @@ pub fn parse(

 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
+fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> {
     match *token {
         token::Ident(ident, is_raw) if ident.name != kw::Underscore =>
             Some((ident, is_raw)),

@@ -819,7 +819,7 @@ fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
 ///
 /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
 /// token. Be conservative (return true) if not sure.
-fn may_begin_with(name: Symbol, token: &Token) -> bool {
+fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
     /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
     fn may_be_ident(nt: &token::Nonterminal) -> bool {
         match *nt {

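The `token_name_eq` hunk above keeps the same behaviour under the new name: tokens are compared unhygienically, so identifiers and lifetimes are equal when their names (and rawness) match while their spans are ignored. A rough, self-contained sketch of that comparison, with simplified stand-in types rather than rustc's `Ident`, `Span`, and `TokenKind`:

// Simplified stand-ins; rustc's Ident carries an interned Symbol and a real Span.
#[derive(Debug)]
struct Ident {
    name: String,
    span: u32, // placeholder for a Span; deliberately ignored by the comparison
}

#[derive(Debug)]
enum TokenKind {
    Ident(Ident, /* is_raw */ bool),
    Lifetime(Ident),
    Comma,
    Eof,
}

// Unhygienic token equality: where (and in which expansion) the token was
// written does not matter; only its name and rawness do.
fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool {
    match (t1, t2) {
        (TokenKind::Ident(id1, raw1), TokenKind::Ident(id2, raw2)) =>
            id1.name == id2.name && raw1 == raw2,
        (TokenKind::Lifetime(id1), TokenKind::Lifetime(id2)) => id1.name == id2.name,
        (TokenKind::Comma, TokenKind::Comma) | (TokenKind::Eof, TokenKind::Eof) => true,
        _ => false,
    }
}

fn main() {
    let a = TokenKind::Ident(Ident { name: "x".into(), span: 1 }, false);
    let b = TokenKind::Ident(Ident { name: "x".into(), span: 99 }, false);
    assert!(token_name_eq(&a, &b));                              // same name, spans ignored
    assert!(!token_name_eq(&TokenKind::Comma, &TokenKind::Eof)); // different kinds never match
}
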
src/libsyntax/ext/tt/macro_rules.rs

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ use crate::feature_gate::Features;
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
 use crate::parse::token::{self, NtTT};
-use crate::parse::token::Token::*;
+use crate::parse::token::TokenKind::*;
 use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};

src/libsyntax/ext/tt/quoted.rs

Lines changed: 9 additions & 9 deletions
@@ -23,12 +23,12 @@ pub struct Delimited {

 impl Delimited {
     /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> token::Token {
+    pub fn open_token(&self) -> token::TokenKind {
         token::OpenDelim(self.delim)
     }

     /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> token::Token {
+    pub fn close_token(&self) -> token::TokenKind {
         token::CloseDelim(self.delim)
     }

@@ -58,7 +58,7 @@ pub struct SequenceRepetition {
     /// The sequence of token trees
     pub tts: Vec<TokenTree>,
     /// The optional separator
-    pub separator: Option<token::Token>,
+    pub separator: Option<token::TokenKind>,
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `Match`s that appear in the sequence (and subsequences)

@@ -81,7 +81,7 @@ pub enum KleeneOp {
 /// are "first-class" token trees. Useful for parsing macros.
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
-    Token(Span, token::Token),
+    Token(Span, token::TokenKind),
     Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
     Sequence(DelimSpan, Lrc<SequenceRepetition>),

@@ -366,7 +366,7 @@ where

 /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
 /// `None`.
-fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
+fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
     match *token {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),

@@ -383,7 +383,7 @@ fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
 fn parse_kleene_op<I>(
     input: &mut I,
     span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::Token, Span)>, Span>
+) -> Result<Result<(KleeneOp, Span), (token::TokenKind, Span)>, Span>
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {

@@ -422,7 +422,7 @@ fn parse_sep_and_kleene_op<I>(
     attrs: &[ast::Attribute],
     edition: Edition,
     macro_node_id: NodeId,
-) -> (Option<token::Token>, KleeneOp)
+) -> (Option<token::TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {

@@ -447,7 +447,7 @@ fn parse_sep_and_kleene_op_2015<I>(
     _features: &Features,
     _attrs: &[ast::Attribute],
     macro_node_id: NodeId,
-) -> (Option<token::Token>, KleeneOp)
+) -> (Option<token::TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {

@@ -565,7 +565,7 @@ fn parse_sep_and_kleene_op_2018<I>(
     sess: &ParseSess,
     _features: &Features,
     _attrs: &[ast::Attribute],
-) -> (Option<token::Token>, KleeneOp)
+) -> (Option<token::TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {

src/libsyntax/ext/tt/transcribe.rs

Lines changed: 3 additions & 3 deletions
@@ -4,7 +4,7 @@ use crate::ext::expand::Marker;
 use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
 use crate::ext::tt::quoted;
 use crate::mut_visit::noop_visit_tt;
-use crate::parse::token::{self, NtTT, Token};
+use crate::parse::token::{self, NtTT, TokenKind};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};

 use smallvec::{smallvec, SmallVec};

@@ -18,7 +18,7 @@ use std::rc::Rc;
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame {
     Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
-    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
+    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<TokenKind> },
 }

 impl Frame {

@@ -242,7 +242,7 @@ pub fn transcribe(
                     Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                 sp = sp.apply_mark(cx.current_expansion.mark);
                 result.push(TokenTree::Token(sp, token::Dollar).into());
-                result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
+                result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into());
             }
         }

src/libsyntax/mut_visit.rs

Lines changed: 3 additions & 3 deletions
@@ -9,7 +9,7 @@

 use crate::ast::*;
 use crate::source_map::{Spanned, respan};
-use crate::parse::token::{self, Token};
+use crate::parse::token::{self, TokenKind};
 use crate::ptr::P;
 use crate::ThinVec;
 use crate::tokenstream::*;

@@ -262,7 +262,7 @@ pub trait MutVisitor: Sized {
         noop_visit_tts(tts, self);
     }

-    fn visit_token(&mut self, t: &mut Token) {
+    fn visit_token(&mut self, t: &mut TokenKind) {
         noop_visit_token(t, self);
     }

@@ -596,7 +596,7 @@ pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &m
 }

 // apply ident visitor if it's an ident, apply other visits to interpolated nodes
-pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
+pub fn noop_visit_token<T: MutVisitor>(t: &mut TokenKind, vis: &mut T) {
     match t {
         token::Ident(id, _is_raw) => vis.visit_ident(id),
         token::Lifetime(id) => vis.visit_ident(id),

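`noop_visit_token` above is the default behaviour behind `MutVisitor::visit_token`: it recurses into the identifier carried by `Ident` and `Lifetime` tokens so that, for example, an ident-renaming visitor also rewrites names stored inside tokens. A self-contained sketch of that visitor shape, using toy types and a toy trait rather than the real `MutVisitor`:

// Toy versions of the pieces touched above; the real MutVisitor trait and
// TokenKind have far more methods and variants.
#[derive(Debug)]
enum TokenKind {
    Ident(String, /* is_raw */ bool),
    Lifetime(String),
    Semi,
}

trait MutVisitor: Sized {
    // Leaf hook that concrete visitors typically override.
    fn visit_ident(&mut self, _ident: &mut String) {}

    // Default token handling delegates to the noop walker below,
    // mirroring `MutVisitor::visit_token` -> `noop_visit_token`.
    fn visit_token(&mut self, t: &mut TokenKind) {
        noop_visit_token(t, self);
    }
}

// Apply the ident visitor if the token carries an identifier; leave other tokens alone.
fn noop_visit_token<T: MutVisitor>(t: &mut TokenKind, vis: &mut T) {
    match t {
        TokenKind::Ident(id, _is_raw) => vis.visit_ident(id),
        TokenKind::Lifetime(id) => vis.visit_ident(id),
        _ => {}
    }
}

// A visitor that only cares about identifiers; the token plumbing comes for free.
struct Renamer;

impl MutVisitor for Renamer {
    fn visit_ident(&mut self, ident: &mut String) {
        if ident.as_str() == "old_name" {
            *ident = "new_name".to_string();
        }
    }
}

fn main() {
    let mut toks = vec![
        TokenKind::Ident("old_name".to_string(), false),
        TokenKind::Lifetime("'a".to_string()),
        TokenKind::Semi,
    ];
    for tok in &mut toks {
        Renamer.visit_token(tok);
    }
    println!("{:?}", toks); // the Ident token now carries "new_name"
}
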
src/libsyntax/parse/diagnostics.rs

Lines changed: 6 additions & 6 deletions
@@ -229,8 +229,8 @@ impl<'a> Parser<'a> {

     pub fn expected_one_of_not_found(
         &mut self,
-        edible: &[token::Token],
-        inedible: &[token::Token],
+        edible: &[token::TokenKind],
+        inedible: &[token::TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();

@@ -368,7 +368,7 @@ impl<'a> Parser<'a> {

     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
+    crate fn eat_to_tokens(&mut self, kets: &[&token::TokenKind]) {
         let handler = self.diagnostic();

         if let Err(ref mut err) = self.parse_seq_to_before_tokens(

@@ -388,7 +388,7 @@ impl<'a> Parser<'a> {
     /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
     /// ^^ help: remove extra angle brackets
     /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) {
+    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::TokenKind) {
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //

@@ -726,7 +726,7 @@ impl<'a> Parser<'a> {
     /// closing delimiter.
     pub fn unexpected_try_recover(
         &mut self,
-        t: &token::Token,
+        t: &token::TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
         let token_str = pprust::token_to_string(t);
         let this_token_str = self.this_token_descr();

@@ -903,7 +903,7 @@ impl<'a> Parser<'a> {

     crate fn recover_closing_delimiter(
         &mut self,
-        tokens: &[token::Token],
+        tokens: &[token::TokenKind],
         mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, bool> {
         let mut pos = None;
