Skip to content

Commit d02150f

Browse files
committed
Fix lifetimes in StringReader.
Two different lifetimes are conflated. This doesn't matter right now, but needs to be fixed for the next commit to work. And the more descriptive lifetime names make the code easier to read.
1 parent fbe68bc commit d02150f

File tree

3 files changed

+27
-23
lines changed

3 files changed

+27
-23
lines changed

compiler/rustc_parse/src/lexer/mod.rs

Lines changed: 12 additions & 12 deletions
Original file line number · Diff line number · Diff line change
@@ -42,9 +42,9 @@ pub struct UnmatchedDelim {
4242
pub candidate_span: Option<Span>,
4343
}
4444

45-
pub(crate) fn parse_token_trees<'a>(
46-
sess: &'a ParseSess,
47-
mut src: &'a str,
45+
pub(crate) fn parse_token_trees<'sess, 'src>(
46+
sess: &'sess ParseSess,
47+
mut src: &'src str,
4848
mut start_pos: BytePos,
4949
override_span: Option<Span>,
5050
) -> Result<TokenStream, Vec<Diagnostic>> {
@@ -90,25 +90,25 @@ pub(crate) fn parse_token_trees<'a>(
9090
}
9191
}
9292

93-
struct StringReader<'a> {
94-
sess: &'a ParseSess,
93+
struct StringReader<'sess, 'src> {
94+
sess: &'sess ParseSess,
9595
/// Initial position, read-only.
9696
start_pos: BytePos,
9797
/// The absolute offset within the source_map of the current character.
9898
pos: BytePos,
9999
/// Source text to tokenize.
100-
src: &'a str,
100+
src: &'src str,
101101
/// Cursor for getting lexer tokens.
102-
cursor: Cursor<'a>,
102+
cursor: Cursor<'src>,
103103
override_span: Option<Span>,
104104
/// When an "unknown start of token: \u{a0}" has already been emitted earlier
105105
/// in this file, it's safe to treat further occurrences of the non-breaking
106106
/// space character as whitespace.
107107
nbsp_is_whitespace: bool,
108108
}
109109

110-
impl<'a> StringReader<'a> {
111-
pub fn dcx(&self) -> &'a DiagCtxt {
110+
impl<'sess, 'src> StringReader<'sess, 'src> {
111+
pub fn dcx(&self) -> &'sess DiagCtxt {
112112
&self.sess.dcx
113113
}
114114

@@ -526,7 +526,7 @@ impl<'a> StringReader<'a> {
526526

527527
/// Slice of the source text from `start` up to but excluding `self.pos`,
528528
/// meaning the slice does not include the character `self.ch`.
529-
fn str_from(&self, start: BytePos) -> &'a str {
529+
fn str_from(&self, start: BytePos) -> &'src str {
530530
self.str_from_to(start, self.pos)
531531
}
532532

@@ -537,12 +537,12 @@ impl<'a> StringReader<'a> {
537537
}
538538

539539
/// Slice of the source text spanning from `start` up to but excluding `end`.
540-
fn str_from_to(&self, start: BytePos, end: BytePos) -> &'a str {
540+
fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
541541
&self.src[self.src_index(start)..self.src_index(end)]
542542
}
543543

544544
/// Slice of the source text spanning from `start` until the end.
545-
fn str_from_to_end(&self, start: BytePos) -> &'a str {
545+
fn str_from_to_end(&self, start: BytePos) -> &'src str {
546546
&self.src[self.src_index(start)..]
547547
}
548548

compiler/rustc_parse/src/lexer/tokentrees.rs

Lines changed: 14 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -8,18 +8,18 @@ use rustc_ast_pretty::pprust::token_to_string;
88
use rustc_errors::{Applicability, PErr};
99
use rustc_span::symbol::kw;
1010

11-
pub(super) struct TokenTreesReader<'a> {
12-
string_reader: StringReader<'a>,
11+
pub(super) struct TokenTreesReader<'sess, 'src> {
12+
string_reader: StringReader<'sess, 'src>,
1313
/// The "next" token, which has been obtained from the `StringReader` but
1414
/// not yet handled by the `TokenTreesReader`.
1515
token: Token,
1616
diag_info: TokenTreeDiagInfo,
1717
}
1818

19-
impl<'a> TokenTreesReader<'a> {
19+
impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
2020
pub(super) fn parse_all_token_trees(
21-
string_reader: StringReader<'a>,
22-
) -> (TokenStream, Result<(), Vec<PErr<'a>>>, Vec<UnmatchedDelim>) {
21+
string_reader: StringReader<'sess, 'src>,
22+
) -> (TokenStream, Result<(), Vec<PErr<'sess>>>, Vec<UnmatchedDelim>) {
2323
let mut tt_reader = TokenTreesReader {
2424
string_reader,
2525
token: Token::dummy(),
@@ -35,7 +35,7 @@ impl<'a> TokenTreesReader<'a> {
3535
fn parse_token_trees(
3636
&mut self,
3737
is_delimited: bool,
38-
) -> (Spacing, TokenStream, Result<(), Vec<PErr<'a>>>) {
38+
) -> (Spacing, TokenStream, Result<(), Vec<PErr<'sess>>>) {
3939
// Move past the opening delimiter.
4040
let (_, open_spacing) = self.bump(false);
4141

@@ -71,7 +71,7 @@ impl<'a> TokenTreesReader<'a> {
7171
}
7272
}
7373

74-
fn eof_err(&mut self) -> PErr<'a> {
74+
fn eof_err(&mut self) -> PErr<'sess> {
7575
let msg = "this file contains an unclosed delimiter";
7676
let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
7777
for &(_, sp) in &self.diag_info.open_braces {
@@ -99,7 +99,7 @@ impl<'a> TokenTreesReader<'a> {
9999
fn parse_token_tree_open_delim(
100100
&mut self,
101101
open_delim: Delimiter,
102-
) -> Result<TokenTree, Vec<PErr<'a>>> {
102+
) -> Result<TokenTree, Vec<PErr<'sess>>> {
103103
// The span for beginning of the delimited section
104104
let pre_span = self.token.span;
105105

@@ -229,7 +229,11 @@ impl<'a> TokenTreesReader<'a> {
229229
(this_tok, this_spacing)
230230
}
231231

232-
fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
232+
fn unclosed_delim_err(
233+
&mut self,
234+
tts: TokenStream,
235+
mut errs: Vec<PErr<'sess>>,
236+
) -> Vec<PErr<'sess>> {
233237
// If there are unclosed delims, see if there are diff markers and if so, point them
234238
// out instead of complaining about the unclosed delims.
235239
let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
@@ -285,7 +289,7 @@ impl<'a> TokenTreesReader<'a> {
285289
return errs;
286290
}
287291

288-
fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
292+
fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'sess> {
289293
// An unexpected closing delimiter (i.e., there is no
290294
// matching opening delimiter).
291295
let token_str = token_to_string(&self.token);

compiler/rustc_parse/src/lexer/unicode_chars.rs

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -337,7 +337,7 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
337337
];
338338

339339
pub(super) fn check_for_substitution(
340-
reader: &StringReader<'_>,
340+
reader: &StringReader<'_, '_>,
341341
pos: BytePos,
342342
ch: char,
343343
count: usize,

0 commit comments

Comments (0)