[PERF-EXPERIMENT] Keep two spans per token #95580

Closed
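For orientation, the core of the experiment is visible in the compiler/rustc_ast/src/token.rs hunk below: every `Token` carries a second `Span` (`span2`), and `Token::new` initializes it to the same value as `span`. A minimal self-contained sketch of that shape, using stand-in types rather than the real rustc definitions (rustc's `Span` is an 8-byte index type, and the real `TokenKind` is asserted to be 16 bytes):

#![allow(dead_code)]

// Stand-ins for rustc_span::Span and rustc_ast::token::TokenKind.
#[derive(Copy, Clone, Debug)]
struct Span(u64);

#[derive(Copy, Clone, Debug)]
enum TokenKind {
    Dollar,
    Eof,
}

#[derive(Copy, Clone, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
    span2: Span, // the experimental second span
}

impl Token {
    // Mirrors the patched `Token::new`: both spans start out identical.
    fn new(kind: TokenKind, span: Span) -> Self {
        Token { kind, span, span2: span }
    }
}

fn main() {
    let tok = Token::new(TokenKind::Dollar, Span(42));
    println!("{tok:?}");
}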
6 changes: 4 additions & 2 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -407,6 +407,7 @@ impl MetaItem {
Some(TokenTree::Token(Token {
kind: kind @ (token::Ident(..) | token::ModSep),
span,
..
})) => 'arm: {
let mut segments = if let token::Ident(name, _) = kind {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek()
@@ -420,8 +421,9 @@ impl MetaItem {
vec![PathSegment::path_root(span)]
};
loop {
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) =
tokens.next().map(TokenTree::uninterpolate)
if let Some(TokenTree::Token(Token {
kind: token::Ident(name, _), span, ..
})) = tokens.next().map(TokenTree::uninterpolate)
{
segments.push(PathSegment::from_ident(Ident::new(name, span)));
} else {
4 changes: 3 additions & 1 deletion compiler/rustc_ast/src/mut_visit.rs
@@ -719,13 +719,14 @@ pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis
// In practice the ident part is not actually used by specific visitors right now,
// but there's a test below checking that it works.
pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
let Token { kind, span } = t;
let Token { kind, span, span2 } = t;
match kind {
token::Ident(name, _) | token::Lifetime(name) => {
let mut ident = Ident::new(*name, *span);
vis.visit_ident(&mut ident);
*name = ident.name;
*span = ident.span;
vis.visit_span(span2);
return; // Avoid visiting the span for the second time.
}
token::Interpolated(nt) => {
@@ -735,6 +736,7 @@ pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
_ => {}
}
vis.visit_span(span);
vis.visit_span(span2);
}

// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
3 changes: 2 additions & 1 deletion compiler/rustc_ast/src/token.rs
@@ -248,6 +248,7 @@ rustc_data_structures::static_assert_size!(TokenKind, 16);
pub struct Token {
pub kind: TokenKind,
pub span: Span,
pub span2: Span,
}

impl TokenKind {
@@ -306,7 +307,7 @@ impl TokenKind {

impl Token {
pub fn new(kind: TokenKind, span: Span) -> Self {
Token { kind, span }
Token { kind, span, span2: span }
}

/// Some token that will be thrown away later.
2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -95,7 +95,7 @@ type NamedMatchVec = SmallVec<[NamedMatch; 1]>;

// This type is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(NamedMatchVec, 48);
rustc_data_structures::static_assert_size!(NamedMatchVec, 56);

#[derive(Clone)]
enum MatcherKind<'tt> {
14 changes: 9 additions & 5 deletions compiler/rustc_expand/src/mbe/quoted.rs
@@ -57,7 +57,9 @@ pub(super) fn parse(
match tree {
TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
let span = match trees.next() {
Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
Some(tokenstream::TokenTree::Token(Token {
kind: token::Colon, span, ..
})) => {
match trees.next() {
Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
Some((frag, _)) => {
@@ -149,7 +151,7 @@ fn parse_tree(
// Depending on what `tree` is, we could be parsing different parts of a macro
match tree {
// `tree` is a `$` token. Look at the next token in `trees`
tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
tokenstream::TokenTree::Token(Token { kind: token::Dollar, span, .. }) => {
// FIXME: Handle `None`-delimited groups in a more systematic way
// during parsing.
let mut next = outer_trees.next();
@@ -168,7 +170,7 @@
if delim != token::Paren {
span_dollar_dollar_or_metavar_in_the_lhs_err(
sess,
&Token { kind: token::OpenDelim(delim), span: delim_span.entire() },
&Token::new(token::OpenDelim(delim), delim_span.entire()),
);
}
} else {
@@ -237,11 +239,13 @@ fn parse_tree(
}

// `tree` is followed by another `$`. This is an escaped `$`.
Some(tokenstream::TokenTree::Token(Token { kind: token::Dollar, span })) => {
Some(tokenstream::TokenTree::Token(Token {
kind: token::Dollar, span, ..
})) => {
if parsing_patterns {
span_dollar_dollar_or_metavar_in_the_lhs_err(
sess,
&Token { kind: token::Dollar, span },
&Token::new(token::Dollar, span),
);
} else {
maybe_emit_macro_metavar_expr_feature(features, sess, span);
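A note on the recurring design choice in this file (and in the rustfmt changes further down): struct-literal constructions such as `Token { kind: token::Dollar, span }` are rewritten to `Token::new(token::Dollar, span)`, so call sites never need to name the new `span2` field; the constructor fills it in from the single span they already have. Pattern matches on `Token`, by contrast, simply gain a `..` to ignore the extra field.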
2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/proc_macro_server.rs
@@ -59,7 +59,7 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_, '_>)>
use rustc_ast::token::*;

let joint = spacing == Joint;
let Token { kind, span } = match tree {
let Token { kind, span, .. } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false });
6 changes: 3 additions & 3 deletions compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -96,7 +96,7 @@ struct LazyTokenStreamImpl {
}

#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 152);

impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
@@ -414,10 +414,10 @@ fn make_token_stream(
let mut token_and_spacing = iter.next();
while let Some((token, spacing)) = token_and_spacing {
match token {
FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span, .. }) => {
stack.push(FrameData { open: span, open_delim: delim, inner: vec![] });
}
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span, .. }) => {
// HACK: If we encounter a mismatched `None` delimiter at the top
// level, just ignore it.
if matches!(delim, DelimToken::NoDelim)
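The two bumped `static_assert_size!` values above (`NamedMatchVec` 48 -> 56, `LazyTokenStreamImpl` 144 -> 152) each grow by exactly 8 bytes, which matches the cost of one extra 8-byte `Span` inside an embedded `Token` on a 64-bit target. A hedged illustration of that arithmetic with stand-in types (not the rustc ones):

#![allow(dead_code)]
use std::mem::size_of;

#[derive(Copy, Clone)]
struct Span(u64); // stand-in: rustc's Span is also 8 bytes on x86_64

#[derive(Copy, Clone)]
enum TokenKind { Dollar, Colon, Eof } // stand-in; the real TokenKind is 16 bytes

struct TokenBefore { kind: TokenKind, span: Span }
struct TokenAfter { kind: TokenKind, span: Span, span2: Span }

// Anything that stores a Token inline grows by the same 8 bytes -- presumably why
// NamedMatchVec (48 -> 56) and LazyTokenStreamImpl (144 -> 152) both moved by 8.
struct HolderBefore { token: TokenBefore, extra: u64 }
struct HolderAfter { token: TokenAfter, extra: u64 }

fn main() {
    assert_eq!(size_of::<TokenAfter>() - size_of::<TokenBefore>(), 8);
    assert_eq!(size_of::<HolderAfter>() - size_of::<HolderBefore>(), 8);
    println!(
        "token: {} -> {} bytes, holder: {} -> {} bytes",
        size_of::<TokenBefore>(),
        size_of::<TokenAfter>(),
        size_of::<HolderBefore>(),
        size_of::<HolderAfter>()
    );
}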
2 changes: 1 addition & 1 deletion compiler/rustc_parse/src/parser/mod.rs
@@ -299,7 +299,7 @@ impl TokenCursor {
#[inline(always)]
fn inlined_next_desugared(&mut self) -> (Token, Spacing) {
let (data, attr_style, sp) = match self.inlined_next() {
(Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
(Token { kind: token::DocComment(_, attr_style, data), span, .. }, _) => {
(data, attr_style, span)
}
tok => return tok,
24 changes: 8 additions & 16 deletions src/tools/rustfmt/src/macros.rs
@@ -683,10 +683,9 @@ struct MacroArgParser {
fn last_tok(tt: &TokenTree) -> Token {
match *tt {
TokenTree::Token(ref t) => t.clone(),
TokenTree::Delimited(delim_span, delim, _) => Token {
kind: TokenKind::CloseDelim(delim),
span: delim_span.close,
},
TokenTree::Delimited(delim_span, delim, _) => {
Token::new(TokenKind::CloseDelim(delim), delim_span.close)
}
}
}

@@ -695,14 +694,8 @@ impl MacroArgParser {
MacroArgParser {
buf: String::new(),
is_meta_var: false,
last_tok: Token {
kind: TokenKind::Eof,
span: DUMMY_SP,
},
start_tok: Token {
kind: TokenKind::Eof,
span: DUMMY_SP,
},
last_tok: Token::new(TokenKind::Eof, DUMMY_SP),
start_tok: Token::new(TokenKind::Eof, DUMMY_SP),
result: vec![],
}
}
@@ -862,6 +855,7 @@ impl MacroArgParser {
TokenTree::Token(Token {
kind: TokenKind::Dollar,
span,
..
}) => {
// We always want to add a separator before meta variables.
if !self.buf.is_empty() {
@@ -870,10 +864,7 @@

// Start keeping the name of this metavariable in the buffer.
self.is_meta_var = true;
self.start_tok = Token {
kind: TokenKind::Dollar,
span,
};
self.start_tok = Token::new(TokenKind::Dollar, span);
}
TokenTree::Token(Token {
kind: TokenKind::Colon,
@@ -1150,6 +1141,7 @@ impl MacroParser {
if let Some(TokenTree::Token(Token {
kind: TokenKind::Semi,
span,
..
})) = self.toks.look_ahead(0)
{
hi = span.hi();