Commit a3425ed

syntax: Rename TokenAndSpan into Token
Parent commit: 99b27d7

8 files changed (+118, -122 lines)
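
The rename flattens the old wrapper into the token itself: instead of a `TokenAndSpan` holding a `tok` and an `sp`, call sites now receive a single `Token` exposing `kind` and `span`. A minimal sketch of the before/after shape, inferred from the field accesses in the hunks below; the actual definitions live in libsyntax and are not part of this commit's visible diff, and the name `TokenKind` for the kind type is an assumption here:

    use syntax_pos::Span;

    // Before this commit (shape inferred from the removed `.tok` / `.sp` accesses).
    pub struct TokenAndSpan {
        pub tok: TokenKind, // the lexed token itself ("TokenKind" is assumed naming)
        pub sp: Span,       // where it occurred in the source
    }

    // After this commit (shape inferred from the added `.kind` / `.span` accesses).
    pub struct Token {
        pub kind: TokenKind,
        pub span: Span,
    }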

src/librustc_save_analysis/span_utils.rs

Lines changed: 9 additions & 9 deletions

@@ -60,11 +60,11 @@ impl<'a> SpanUtils<'a> {
         let mut toks = self.retokenise_span(span);
         loop {
             let next = toks.real_token();
-            if next.tok == token::Eof {
+            if next == token::Eof {
                 return None;
             }
-            if next.tok == tok {
-                return Some(next.sp);
+            if next == tok {
+                return Some(next.span);
             }
         }
     }
@@ -74,12 +74,12 @@ impl<'a> SpanUtils<'a> {
     // let mut toks = self.retokenise_span(span);
     // loop {
     //     let ts = toks.real_token();
-    //     if ts.tok == token::Eof {
+    //     if ts == token::Eof {
     //         return None;
     //     }
-    //     if ts.tok == token::Not {
+    //     if ts == token::Not {
     //         let ts = toks.real_token();
-    //         if ts.tok.is_ident() {
+    //         if ts.kind.is_ident() {
     //             return Some(ts.sp);
     //         } else {
     //             return None;
@@ -93,12 +93,12 @@ impl<'a> SpanUtils<'a> {
     // let mut toks = self.retokenise_span(span);
     // let mut prev = toks.real_token();
     // loop {
-    //     if prev.tok == token::Eof {
+    //     if prev == token::Eof {
     //         return None;
     //     }
     //     let ts = toks.real_token();
-    //     if ts.tok == token::Not {
-    //         if prev.tok.is_ident() {
+    //     if ts == token::Not {
+    //         if prev.kind.is_ident() {
     //             return Some(prev.sp);
     //         } else {
     //             return None;
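
The updated lines above compare a whole `Token` against a kind (`next == token::Eof`, `next == tok`), which only works if that cross-type comparison is defined. The assumed shape of that impl, not shown in this commit's hunks, would be roughly:

    // Assumption: a Token compares equal to a TokenKind when its `kind` does.
    impl PartialEq<TokenKind> for Token {
        fn eq(&self, rhs: &TokenKind) -> bool {
            self.kind == *rhs
        }
    }

This is what lets call sites drop the explicit `.tok` projection in equality checks while still reaching for `.kind` when they need helper methods.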

src/librustdoc/html/highlight.rs

Lines changed: 15 additions & 15 deletions

@@ -12,8 +12,8 @@ use std::io;
 use std::io::prelude::*;
 
 use syntax::source_map::{SourceMap, FilePathMapping};
-use syntax::parse::lexer::{self, TokenAndSpan};
-use syntax::parse::token;
+use syntax::parse::lexer;
+use syntax::parse::token::{self, Token};
 use syntax::parse;
 use syntax::symbol::{kw, sym};
 use syntax_pos::{Span, FileName};
@@ -186,9 +186,9 @@ impl<'a> Classifier<'a> {
     }
 
     /// Gets the next token out of the lexer.
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, HighlightError> {
+    fn try_next_token(&mut self) -> Result<Token, HighlightError> {
         match self.lexer.try_next_token() {
-            Ok(tas) => Ok(tas),
+            Ok(token) => Ok(token),
             Err(_) => Err(HighlightError::LexError),
         }
     }
@@ -205,7 +205,7 @@ impl<'a> Classifier<'a> {
                                    -> Result<(), HighlightError> {
         loop {
             let next = self.try_next_token()?;
-            if next.tok == token::Eof {
+            if next == token::Eof {
                 break;
             }
 
@@ -218,9 +218,9 @@ impl<'a> Classifier<'a> {
     // Handles an individual token from the lexer.
     fn write_token<W: Writer>(&mut self,
                               out: &mut W,
-                              tas: TokenAndSpan)
+                              token: Token)
                               -> Result<(), HighlightError> {
-        let klass = match tas.tok {
+        let klass = match token.kind {
             token::Shebang(s) => {
                 out.string(Escape(&s.as_str()), Class::None)?;
                 return Ok(());
@@ -234,7 +234,7 @@ impl<'a> Classifier<'a> {
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek().tok != token::Whitespace => Class::RefKeyWord,
+                if self.lexer.peek().kind != token::Whitespace => Class::RefKeyWord,
 
             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -257,7 +257,7 @@ impl<'a> Classifier<'a> {
             token::Question => Class::QuestionMark,
 
             token::Dollar => {
-                if self.lexer.peek().tok.is_ident() {
+                if self.lexer.peek().kind.is_ident() {
                     self.in_macro_nonterminal = true;
                     Class::MacroNonTerminal
                 } else {
@@ -280,9 +280,9 @@ impl<'a> Classifier<'a> {
                 // as an attribute.
 
                 // Case 1: #![inner_attribute]
-                if self.lexer.peek().tok == token::Not {
+                if self.lexer.peek() == token::Not {
                     self.try_next_token()?; // NOTE: consumes `!` token!
-                    if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                    if self.lexer.peek() == token::OpenDelim(token::Bracket) {
                         self.in_attribute = true;
                         out.enter_span(Class::Attribute)?;
                     }
@@ -292,7 +292,7 @@ impl<'a> Classifier<'a> {
                 }
 
                 // Case 2: #[outer_attribute]
-                if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                if self.lexer.peek() == token::OpenDelim(token::Bracket) {
                     self.in_attribute = true;
                     out.enter_span(Class::Attribute)?;
                 }
@@ -335,13 +335,13 @@ impl<'a> Classifier<'a> {
                     sym::Option | sym::Result => Class::PreludeTy,
                     sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
 
-                    _ if tas.tok.is_reserved_ident() => Class::KeyWord,
+                    _ if token.kind.is_reserved_ident() => Class::KeyWord,
 
                     _ => {
                         if self.in_macro_nonterminal {
                             self.in_macro_nonterminal = false;
                             Class::MacroNonTerminal
-                        } else if self.lexer.peek().tok == token::Not {
+                        } else if self.lexer.peek() == token::Not {
                             self.in_macro = true;
                             Class::Macro
                         } else {
@@ -359,7 +359,7 @@ impl<'a> Classifier<'a> {
 
         // Anything that didn't return above is the simple case where we the
         // class just spans a single token, so we can use the `string` method.
-        out.string(Escape(&self.snip(tas.sp)), klass)?;
+        out.string(Escape(&self.snip(token.span)), klass)?;
 
         Ok(())
     }
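
In highlight.rs both access styles end up side by side: equality checks compare the peeked `Token` directly against a kind, while helpers such as `is_ident` and `is_reserved_ident` are still reached through `.kind`. A small illustration reusing the import from the hunk above; the two free functions are invented for this sketch and do not exist in the file:

    use syntax::parse::token::{self, Token};

    // Whole-token comparison, relying on the assumed PartialEq<TokenKind> impl.
    fn starts_macro_invocation(peeked: &Token) -> bool {
        *peeked == token::Not
    }

    // Projecting `.kind` to reach helper methods defined on the kind type.
    fn starts_macro_nonterminal(peeked: &Token) -> bool {
        peeked.kind.is_ident()
    }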

src/librustdoc/passes/check_code_block_syntax.rs

Lines changed: 3 additions & 3 deletions

@@ -1,5 +1,5 @@
 use errors::Applicability;
-use syntax::parse::lexer::{TokenAndSpan, StringReader as Lexer};
+use syntax::parse::lexer::{StringReader as Lexer};
 use syntax::parse::{ParseSess, token};
 use syntax::source_map::FilePathMapping;
 use syntax_pos::FileName;
@@ -33,8 +33,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
         );
 
         let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| {
-            while let Ok(TokenAndSpan { tok, .. }) = lexer.try_next_token() {
-                if tok == token::Eof {
+            while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
+                if kind == token::Eof {
                     break;
                 }
             }
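
The new call site drains the lexer with a struct pattern, keeping the kind and discarding the span through `..`. The same shape in dependency-free Rust, with every name invented for illustration; only the `while let Ok(Struct { field, .. })` pattern mirrors the code above:

    #[derive(PartialEq)]
    enum Kind { Word, Eof }

    #[allow(dead_code)] // `span` is deliberately carried but never read here
    struct Tok { kind: Kind, span: (u32, u32) }

    // Pull tokens until Eof, ignoring spans, as the rustdoc pass above does
    // when it only cares about whether the lexer reported errors.
    fn drain(mut next_token: impl FnMut() -> Result<Tok, ()>) {
        while let Ok(Tok { kind, .. }) = next_token() {
            if kind == Kind::Eof {
                break;
            }
        }
    }

    fn main() {
        let mut toks = vec![
            Tok { kind: Kind::Eof, span: (5, 6) },
            Tok { kind: Kind::Word, span: (0, 4) },
        ];
        drain(|| toks.pop().ok_or(()));
    }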
