@@ -404,7 +404,8 @@ impl<'a> Parser<'a> {
             subparser_name,
         };
 
-        parser.token = parser.next_tok();
+        // Make parser point to the first token.
+        parser.bump();
 
         if let Some(directory) = directory {
             parser.directory = directory;
@@ -418,7 +419,6 @@ impl<'a> Parser<'a> {
             }
         }
 
-        parser.process_potential_macro_variable();
         parser
     }
 
@@ -430,15 +430,15 @@ impl<'a> Parser<'a> {
         self.unnormalized_prev_token.as_ref().unwrap_or(&self.prev_token)
     }
 
-    fn next_tok(&mut self) -> Token {
+    fn next_tok(&mut self, fallback_span: Span) -> Token {
         let mut next = if self.desugar_doc_comments {
             self.token_cursor.next_desugared()
         } else {
             self.token_cursor.next()
         };
         if next.span.is_dummy() {
             // Tweak the location for better diagnostics, but keep syntactic context intact.
-            next.span = self.unnormalized_token().span.with_ctxt(next.span.ctxt());
+            next.span = fallback_span.with_ctxt(next.span.ctxt());
         }
         next
     }
@@ -896,6 +896,24 @@ impl<'a> Parser<'a> {
         self.parse_delim_comma_seq(token::Paren, f)
     }
 
+    // Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`)
+    // tokens are replaced with usual identifier and lifetime tokens,
+    // so the former are never encountered during normal parsing.
+    fn normalize_token(token: &Token) -> Option<Token> {
+        match &token.kind {
+            token::Interpolated(nt) => match **nt {
+                token::NtIdent(ident, is_raw) => {
+                    Some(Token::new(token::Ident(ident.name, is_raw), ident.span))
+                }
+                token::NtLifetime(ident) => {
+                    Some(Token::new(token::Lifetime(ident.name), ident.span))
+                }
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
     /// Advance the parser by one token.
     pub fn bump(&mut self) {
         if self.prev_token.kind == TokenKind::Eof {
@@ -905,16 +923,17 @@ impl<'a> Parser<'a> {
         }
 
         // Update the current and previous tokens.
-        let next_token = self.next_tok();
-        self.prev_token = mem::replace(&mut self.token, next_token);
+        self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
+        self.token = self.next_tok(self.unnormalized_prev_token().span);
+        if let Some(normalized_token) = Self::normalize_token(&self.token) {
+            self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
+        }
 
         // Update fields derived from the previous token.
         self.prev_span = self.unnormalized_prev_token().span;
 
         self.expected_tokens.clear();
-        // Check after each token.
-        self.process_potential_macro_variable();
     }
 
     /// Advances the parser using provided token as a next one. Use this when
@@ -924,9 +943,12 @@ impl<'a> Parser<'a> {
     /// Correct token kinds and spans need to be calculated instead.
     fn bump_with(&mut self, next: TokenKind, span: Span) {
         // Update the current and previous tokens.
-        let next_token = Token::new(next, span);
-        self.prev_token = mem::replace(&mut self.token, next_token);
+        self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
+        self.token = Token::new(next, span);
+        if let Some(normalized_token) = Self::normalize_token(&self.token) {
+            self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
+        }
 
         // Update fields derived from the previous token.
         self.prev_span = self.unnormalized_prev_token().span.with_hi(span.lo());
@@ -1066,39 +1088,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub fn process_potential_macro_variable(&mut self) {
-        let normalized_token = match self.token.kind {
-            token::Dollar
-                if self.token.span.from_expansion() && self.look_ahead(1, |t| t.is_ident()) =>
-            {
-                self.bump();
-                let name = match self.token.kind {
-                    token::Ident(name, _) => name,
-                    _ => unreachable!(),
-                };
-                let span = self.prev_span.to(self.token.span);
-                self.struct_span_err(span, &format!("unknown macro variable `{}`", name))
-                    .span_label(span, "unknown macro variable")
-                    .emit();
-                self.bump();
-                return;
-            }
-            token::Interpolated(ref nt) => {
-                // Interpolated identifier and lifetime tokens are replaced with usual identifier
-                // and lifetime tokens, so the former are never encountered during normal parsing.
-                match **nt {
-                    token::NtIdent(ident, is_raw) => {
-                        Token::new(token::Ident(ident.name, is_raw), ident.span)
-                    }
-                    token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
-                    _ => return,
-                }
-            }
-            _ => return,
-        };
-        self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
-    }
-
     /// Parses a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
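
For illustration, the shape of the change is: `bump` reads the next token, normalizes it in place via `normalize_token`, and stashes the raw token in `unnormalized_token`, replacing the separate `process_potential_macro_variable` pass. Below is a minimal, self-contained sketch of that normalize-on-bump pattern using invented stand-in types (not rustc's real `Token`, `Nonterminal`, or `Parser` definitions):

```rust
// Standalone sketch: invented minimal types, not rustc's actual parser API.
use std::mem;

#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Ident(String),
    // Stand-in for `token::Interpolated(NtIdent(..))`: an identifier captured by a macro.
    InterpolatedIdent(String),
    Eof,
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
}

struct Parser {
    stream: Vec<Token>,
    pos: usize,
    /// The current, already-normalized token.
    token: Token,
    /// The raw token, kept only when `token` was produced by normalization.
    unnormalized_token: Option<Token>,
}

impl Parser {
    fn new(stream: Vec<Token>) -> Parser {
        let mut parser = Parser {
            stream,
            pos: 0,
            token: Token { kind: TokenKind::Eof },
            unnormalized_token: None,
        };
        // Make parser point to the first token.
        parser.bump();
        parser
    }

    // Counterpart of `normalize_token` above: interpolated identifiers become
    // plain identifier tokens; everything else is left alone.
    fn normalize_token(token: &Token) -> Option<Token> {
        match &token.kind {
            TokenKind::InterpolatedIdent(name) => {
                Some(Token { kind: TokenKind::Ident(name.clone()) })
            }
            _ => None,
        }
    }

    fn next_tok(&mut self) -> Token {
        let tok = self.stream.get(self.pos).cloned().unwrap_or(Token { kind: TokenKind::Eof });
        self.pos += 1;
        tok
    }

    // Counterpart of `bump`: read the next token, normalize it in place,
    // and remember the unnormalized original.
    fn bump(&mut self) {
        self.token = self.next_tok();
        self.unnormalized_token = None;
        if let Some(normalized_token) = Self::normalize_token(&self.token) {
            self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
        }
    }
}

fn main() {
    let mut p = Parser::new(vec![Token { kind: TokenKind::InterpolatedIdent("x".into()) }]);
    // The grammar-facing token is a plain identifier; the raw token is still available.
    assert_eq!(p.token.kind, TokenKind::Ident("x".into()));
    assert!(p.unnormalized_token.is_some());
    p.bump();
    assert_eq!(p.token.kind, TokenKind::Eof);
}
```

Keeping both the normalized token and its raw form mirrors what the diff does with `unnormalized_token`/`unnormalized_prev_token`: the grammar only ever matches plain identifier and lifetime tokens, while span-sensitive bookkeeping (e.g. `prev_span`) can still use the unnormalized token.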