Skip to content

Commit eac3846

Browse files
committed
Always use token kinds through token module rather than Token type
1 parent daf1ed0 commit eac3846

File tree

15 files changed

+130
-130
lines changed

15 files changed

+130
-130
lines changed

src/librustc/hir/lowering.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
6767
use syntax::std_inject;
6868
use syntax::symbol::{kw, sym, Symbol};
6969
use syntax::tokenstream::{TokenStream, TokenTree};
70-
use syntax::parse::token::Token;
70+
use syntax::parse::token::{self, Token};
7171
use syntax::visit::{self, Visitor};
7272
use syntax_pos::{DUMMY_SP, edition, Span};
7373

@@ -1339,7 +1339,7 @@ impl<'a> LoweringContext<'a> {
13391339

13401340
fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
13411341
match token {
1342-
Token::Interpolated(nt) => {
1342+
token::Interpolated(nt) => {
13431343
let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
13441344
self.lower_token_stream(tts)
13451345
}

src/librustc/hir/map/def_collector.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -326,7 +326,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
326326
}
327327

328328
fn visit_token(&mut self, t: Token) {
329-
if let Token::Interpolated(nt) = t {
329+
if let token::Interpolated(nt) = t {
330330
if let token::NtExpr(ref expr) = *nt {
331331
if let ExprKind::Mac(..) = expr.node {
332332
self.visit_macro_invoc(expr.id);

src/librustc/ich/impls_syntax.rs

Lines changed: 41 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -313,60 +313,60 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
313313
) {
314314
mem::discriminant(token).hash_stable(hcx, hasher);
315315
match *token {
316-
token::Token::Eq |
317-
token::Token::Lt |
318-
token::Token::Le |
319-
token::Token::EqEq |
320-
token::Token::Ne |
321-
token::Token::Ge |
322-
token::Token::Gt |
323-
token::Token::AndAnd |
324-
token::Token::OrOr |
325-
token::Token::Not |
326-
token::Token::Tilde |
327-
token::Token::At |
328-
token::Token::Dot |
329-
token::Token::DotDot |
330-
token::Token::DotDotDot |
331-
token::Token::DotDotEq |
332-
token::Token::Comma |
333-
token::Token::Semi |
334-
token::Token::Colon |
335-
token::Token::ModSep |
336-
token::Token::RArrow |
337-
token::Token::LArrow |
338-
token::Token::FatArrow |
339-
token::Token::Pound |
340-
token::Token::Dollar |
341-
token::Token::Question |
342-
token::Token::SingleQuote |
343-
token::Token::Whitespace |
344-
token::Token::Comment |
345-
token::Token::Eof => {}
346-
347-
token::Token::BinOp(bin_op_token) |
348-
token::Token::BinOpEq(bin_op_token) => {
316+
token::Eq |
317+
token::Lt |
318+
token::Le |
319+
token::EqEq |
320+
token::Ne |
321+
token::Ge |
322+
token::Gt |
323+
token::AndAnd |
324+
token::OrOr |
325+
token::Not |
326+
token::Tilde |
327+
token::At |
328+
token::Dot |
329+
token::DotDot |
330+
token::DotDotDot |
331+
token::DotDotEq |
332+
token::Comma |
333+
token::Semi |
334+
token::Colon |
335+
token::ModSep |
336+
token::RArrow |
337+
token::LArrow |
338+
token::FatArrow |
339+
token::Pound |
340+
token::Dollar |
341+
token::Question |
342+
token::SingleQuote |
343+
token::Whitespace |
344+
token::Comment |
345+
token::Eof => {}
346+
347+
token::BinOp(bin_op_token) |
348+
token::BinOpEq(bin_op_token) => {
349349
std_hash::Hash::hash(&bin_op_token, hasher);
350350
}
351351

352-
token::Token::OpenDelim(delim_token) |
353-
token::Token::CloseDelim(delim_token) => {
352+
token::OpenDelim(delim_token) |
353+
token::CloseDelim(delim_token) => {
354354
std_hash::Hash::hash(&delim_token, hasher);
355355
}
356-
token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
356+
token::Literal(lit) => lit.hash_stable(hcx, hasher),
357357

358-
token::Token::Ident(ident, is_raw) => {
358+
token::Ident(ident, is_raw) => {
359359
ident.name.hash_stable(hcx, hasher);
360360
is_raw.hash_stable(hcx, hasher);
361361
}
362-
token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
362+
token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
363363

364-
token::Token::Interpolated(_) => {
364+
token::Interpolated(_) => {
365365
bug!("interpolated tokens should not be present in the HIR")
366366
}
367367

368-
token::Token::DocComment(val) |
369-
token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
368+
token::DocComment(val) |
369+
token::Shebang(val) => val.hash_stable(hcx, hasher),
370370
}
371371
}
372372

src/librustc_resolve/build_reduced_graph.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1053,7 +1053,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
10531053
}
10541054

10551055
fn visit_token(&mut self, t: Token) {
1056-
if let Token::Interpolated(nt) = t {
1056+
if let token::Interpolated(nt) = t {
10571057
if let token::NtExpr(ref expr) = *nt {
10581058
if let ast::ExprKind::Mac(..) = expr.node {
10591059
self.visit_invoc(expr.id);

src/libsyntax/attr/mod.rs

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -465,7 +465,7 @@ impl MetaItem {
465465
let mod_sep_span = Span::new(last_pos,
466466
segment.ident.span.lo(),
467467
segment.ident.span.ctxt());
468-
idents.push(TokenTree::Token(mod_sep_span, Token::ModSep).into());
468+
idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
469469
}
470470
idents.push(TokenTree::Token(segment.ident.span,
471471
Token::from_ast_ident(segment.ident)).into());
@@ -480,10 +480,10 @@ impl MetaItem {
480480
{
481481
// FIXME: Share code with `parse_path`.
482482
let path = match tokens.next() {
483-
Some(TokenTree::Token(span, token @ Token::Ident(..))) |
484-
Some(TokenTree::Token(span, token @ Token::ModSep)) => 'arm: {
485-
let mut segments = if let Token::Ident(ident, _) = token {
486-
if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
483+
Some(TokenTree::Token(span, token @ token::Ident(..))) |
484+
Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: {
485+
let mut segments = if let token::Ident(ident, _) = token {
486+
if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
487487
tokens.next();
488488
vec![PathSegment::from_ident(ident.with_span_pos(span))]
489489
} else {
@@ -494,12 +494,12 @@ impl MetaItem {
494494
};
495495
loop {
496496
if let Some(TokenTree::Token(span,
497-
Token::Ident(ident, _))) = tokens.next() {
497+
token::Ident(ident, _))) = tokens.next() {
498498
segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
499499
} else {
500500
return None;
501501
}
502-
if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
502+
if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
503503
tokens.next();
504504
} else {
505505
break;
@@ -508,7 +508,7 @@ impl MetaItem {
508508
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
509509
Path { span, segments }
510510
}
511-
Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
511+
Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt {
512512
token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
513513
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
514514
token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -533,15 +533,15 @@ impl MetaItemKind {
533533
match *self {
534534
MetaItemKind::Word => TokenStream::empty(),
535535
MetaItemKind::NameValue(ref lit) => {
536-
let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
536+
let mut vec = vec![TokenTree::Token(span, token::Eq).into()];
537537
lit.tokens().append_to_tree_and_joint_vec(&mut vec);
538538
TokenStream::new(vec)
539539
}
540540
MetaItemKind::List(ref list) => {
541541
let mut tokens = Vec::new();
542542
for (i, item) in list.iter().enumerate() {
543543
if i > 0 {
544-
tokens.push(TokenTree::Token(span, Token::Comma).into());
544+
tokens.push(TokenTree::Token(span, token::Comma).into());
545545
}
546546
item.tokens().append_to_tree_and_joint_vec(&mut tokens);
547547
}
@@ -579,7 +579,7 @@ impl MetaItemKind {
579579
let item = NestedMetaItem::from_tokens(&mut tokens)?;
580580
result.push(item);
581581
match tokens.next() {
582-
None | Some(TokenTree::Token(_, Token::Comma)) => {}
582+
None | Some(TokenTree::Token(_, token::Comma)) => {}
583583
_ => return None,
584584
}
585585
}

src/libsyntax/ext/expand.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ use crate::ext::placeholders::{placeholder, PlaceholderExpander};
1010
use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
1111
use crate::mut_visit::*;
1212
use crate::parse::{DirectoryOwnership, PResult, ParseSess};
13-
use crate::parse::token::{self, Token};
13+
use crate::parse::token;
1414
use crate::parse::parser::Parser;
1515
use crate::ptr::P;
1616
use crate::symbol::Symbol;
@@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
585585
}
586586
AttrProcMacro(ref mac, ..) => {
587587
self.gate_proc_macro_attr_item(attr.span, &item);
588-
let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
588+
let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
589589
Annotatable::Item(item) => token::NtItem(item),
590590
Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
591591
Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),

src/libsyntax/ext/tt/macro_parser.rs

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -835,12 +835,12 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
835835
sym::literal => token.can_begin_literal_or_bool(),
836836
sym::vis => match *token {
837837
// The follow-set of :vis + "priv" keyword + interpolated
838-
Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
838+
token::Comma | token::Ident(..) | token::Interpolated(_) => true,
839839
_ => token.can_begin_type(),
840840
},
841841
sym::block => match *token {
842-
Token::OpenDelim(token::Brace) => true,
843-
Token::Interpolated(ref nt) => match **nt {
842+
token::OpenDelim(token::Brace) => true,
843+
token::Interpolated(ref nt) => match **nt {
844844
token::NtItem(_)
845845
| token::NtPat(_)
846846
| token::NtTy(_)
@@ -853,32 +853,32 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
853853
_ => false,
854854
},
855855
sym::path | sym::meta => match *token {
856-
Token::ModSep | Token::Ident(..) => true,
857-
Token::Interpolated(ref nt) => match **nt {
856+
token::ModSep | token::Ident(..) => true,
857+
token::Interpolated(ref nt) => match **nt {
858858
token::NtPath(_) | token::NtMeta(_) => true,
859859
_ => may_be_ident(&nt),
860860
},
861861
_ => false,
862862
},
863863
sym::pat => match *token {
864-
Token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
865-
Token::OpenDelim(token::Paren) | // tuple pattern
866-
Token::OpenDelim(token::Bracket) | // slice pattern
867-
Token::BinOp(token::And) | // reference
868-
Token::BinOp(token::Minus) | // negative literal
869-
Token::AndAnd | // double reference
870-
Token::Literal(..) | // literal
871-
Token::DotDot | // range pattern (future compat)
872-
Token::DotDotDot | // range pattern (future compat)
873-
Token::ModSep | // path
874-
Token::Lt | // path (UFCS constant)
875-
Token::BinOp(token::Shl) => true, // path (double UFCS)
876-
Token::Interpolated(ref nt) => may_be_ident(nt),
864+
token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
865+
token::OpenDelim(token::Paren) | // tuple pattern
866+
token::OpenDelim(token::Bracket) | // slice pattern
867+
token::BinOp(token::And) | // reference
868+
token::BinOp(token::Minus) | // negative literal
869+
token::AndAnd | // double reference
870+
token::Literal(..) | // literal
871+
token::DotDot | // range pattern (future compat)
872+
token::DotDotDot | // range pattern (future compat)
873+
token::ModSep | // path
874+
token::Lt | // path (UFCS constant)
875+
token::BinOp(token::Shl) => true, // path (double UFCS)
876+
token::Interpolated(ref nt) => may_be_ident(nt),
877877
_ => false,
878878
},
879879
sym::lifetime => match *token {
880-
Token::Lifetime(_) => true,
881-
Token::Interpolated(ref nt) => match **nt {
880+
token::Lifetime(_) => true,
881+
token::Interpolated(ref nt) => match **nt {
882882
token::NtLifetime(_) | token::NtTT(_) => true,
883883
_ => false,
884884
},

src/libsyntax/ext/tt/transcribe.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -225,7 +225,7 @@ pub fn transcribe(
225225
result.push(tt.clone().into());
226226
} else {
227227
sp = sp.apply_mark(cx.current_expansion.mark);
228-
let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
228+
let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
229229
result.push(token.into());
230230
}
231231
} else {

src/libsyntax/parse/diagnostics.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -294,7 +294,7 @@ impl<'a> Parser<'a> {
294294
Applicability::MaybeIncorrect,
295295
);
296296
}
297-
let sp = if self.token == token::Token::Eof {
297+
let sp = if self.token == token::Eof {
298298
// This is EOF, don't want to point at the following char, but rather the last token
299299
self.prev_span
300300
} else {
@@ -732,22 +732,22 @@ impl<'a> Parser<'a> {
732732
let this_token_str = self.this_token_descr();
733733
let (prev_sp, sp) = match (&self.token, self.subparser_name) {
734734
// Point at the end of the macro call when reaching end of macro arguments.
735-
(token::Token::Eof, Some(_)) => {
735+
(token::Eof, Some(_)) => {
736736
let sp = self.sess.source_map().next_point(self.span);
737737
(sp, sp)
738738
}
739739
// We don't want to point at the following span after DUMMY_SP.
740740
// This happens when the parser finds an empty TokenStream.
741741
_ if self.prev_span == DUMMY_SP => (self.span, self.span),
742742
// EOF, don't want to point at the following char, but rather the last token.
743-
(token::Token::Eof, None) => (self.prev_span, self.span),
743+
(token::Eof, None) => (self.prev_span, self.span),
744744
_ => (self.sess.source_map().next_point(self.prev_span), self.span),
745745
};
746746
let msg = format!(
747747
"expected `{}`, found {}",
748748
token_str,
749749
match (&self.token, self.subparser_name) {
750-
(token::Token::Eof, Some(origin)) => format!("end of {}", origin),
750+
(token::Eof, Some(origin)) => format!("end of {}", origin),
751751
_ => this_token_str,
752752
},
753753
);
@@ -1215,7 +1215,7 @@ impl<'a> Parser<'a> {
12151215

12161216
crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
12171217
let (span, msg) = match (&self.token, self.subparser_name) {
1218-
(&token::Token::Eof, Some(origin)) => {
1218+
(&token::Eof, Some(origin)) => {
12191219
let sp = self.sess.source_map().next_point(self.span);
12201220
(sp, format!("expected expression, found end of {}", origin))
12211221
}

src/libsyntax/parse/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -311,7 +311,7 @@ pub fn maybe_file_to_stream(
311311
for unmatched in unmatched_braces {
312312
let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
313313
"incorrect close delimiter: `{}`",
314-
token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
314+
token_to_string(&token::CloseDelim(unmatched.found_delim)),
315315
));
316316
db.span_label(unmatched.found_span, "incorrect close delimiter");
317317
if let Some(sp) = unmatched.candidate_span {

src/libsyntax/parse/parser.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3359,7 +3359,7 @@ impl<'a> Parser<'a> {
33593359
let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
33603360
None)?;
33613361
if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
3362-
if self.token == token::Token::Semi {
3362+
if self.token == token::Semi {
33633363
e.span_suggestion_short(
33643364
match_span,
33653365
"try removing this `match`",
@@ -5920,7 +5920,7 @@ impl<'a> Parser<'a> {
59205920
while !self.eat(&token::CloseDelim(token::Brace)) {
59215921
if let token::DocComment(_) = self.token {
59225922
if self.look_ahead(1,
5923-
|tok| tok == &token::Token::CloseDelim(token::Brace)) {
5923+
|tok| tok == &token::CloseDelim(token::Brace)) {
59245924
let mut err = self.diagnostic().struct_span_err_with_code(
59255925
self.span,
59265926
"found a documentation comment that doesn't document anything",
@@ -6796,7 +6796,7 @@ impl<'a> Parser<'a> {
67966796
let mut replacement = vec![];
67976797
let mut fixed_crate_name = false;
67986798
// Accept `extern crate name-like-this` for better diagnostics
6799-
let dash = token::Token::BinOp(token::BinOpToken::Minus);
6799+
let dash = token::BinOp(token::BinOpToken::Minus);
68006800
if self.token == dash { // Do not include `-` as part of the expected tokens list
68016801
while self.eat(&dash) {
68026802
fixed_crate_name = true;
@@ -7869,7 +7869,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
78697869
for unmatched in unclosed_delims.iter() {
78707870
let mut err = handler.struct_span_err(unmatched.found_span, &format!(
78717871
"incorrect close delimiter: `{}`",
7872-
pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
7872+
pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
78737873
));
78747874
err.span_label(unmatched.found_span, "incorrect close delimiter");
78757875
if let Some(sp) = unmatched.candidate_span {

0 commit comments

Comments (0)