
Commit 256df83

remove peek_span_src_raw from StringReader
1 parent e9dc95c commit 256df83

File tree

3 files changed: +46 −69 lines changed

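In short, the commit removes the cached `peek_span_src_raw` field from `StringReader` and instead returns the raw span alongside each token from the private token-producing methods; the one consumer that needs it, `TokenTreesReader`, now keeps its own copy. A minimal standalone sketch of that pattern, using toy types rather than the rustc API:

    // Sketch: the raw span travels with the token instead of living in a
    // `peek_span_src_raw`-style field on the lexer.
    #[derive(Clone, Copy, Debug)]
    struct Span { lo: usize, hi: usize }

    struct Lexer { pos: usize }

    impl Lexer {
        // Shaped like the new `real_token() -> (Token, Span)`; the token here
        // is just a placeholder char.
        fn next_token(&mut self) -> (char, Span) {
            let span = Span { lo: self.pos, hi: self.pos + 1 };
            self.pos += 1;
            ('x', span)
        }
    }

    fn main() {
        let mut lexer = Lexer { pos: 0 };
        let (token, raw_span) = lexer.next_token();
        println!("token {:?} with raw span {}..{}", token, raw_span.lo, raw_span.hi);
    }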

src/librustc_save_analysis/span_utils.rs

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@ impl<'a> SpanUtils<'a> {
     pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
         let mut toks = self.retokenise_span(span);
         loop {
-            let next = toks.real_token();
+            let next = toks.next_token();
             if next == token::Eof {
                 return None;
             }

src/libsyntax/parse/lexer/mod.rs

Lines changed: 37 additions & 61 deletions
@@ -38,8 +38,6 @@ pub struct StringReader<'a> {
     crate source_file: Lrc<syntax_pos::SourceFile>,
     /// Stop reading src at this index.
     crate end_src_index: usize,
-    // cached:
-    peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time.
@@ -59,7 +57,7 @@ impl<'a> StringReader<'a> {
         (real, raw)
     }

-    fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
+    fn unwrap_or_abort<T>(&mut self, res: Result<T, ()>) -> T {
         match res {
             Ok(tok) => tok,
             Err(_) => {
@@ -69,36 +67,52 @@ impl<'a> StringReader<'a> {
         }
     }

-    fn next_token(&mut self) -> Token where Self: Sized {
-        let res = self.try_next_token();
-        self.unwrap_or_abort(res)
-    }
-
     /// Returns the next token. EFFECT: advances the string_reader.
     pub fn try_next_token(&mut self) -> Result<Token, ()> {
-        assert!(self.fatal_errs.is_empty());
-        self.advance_token()
+        let (token, _raw_span) = self.try_next_token_with_raw_span()?;
+        Ok(token)
     }

-    fn try_real_token(&mut self) -> Result<Token, ()> {
-        let mut t = self.try_next_token()?;
+    pub fn next_token(&mut self) -> Token {
+        let res = self.try_next_token();
+        self.unwrap_or_abort(res)
+    }
+
+    fn try_real_token(&mut self) -> Result<(Token, Span), ()> {
         loop {
-            match t.kind {
-                token::Whitespace | token::Comment | token::Shebang(_) => {
-                    t = self.try_next_token()?;
-                }
-                _ => break,
+            let t = self.try_next_token_with_raw_span()?;
+            match t.0.kind {
+                token::Whitespace | token::Comment | token::Shebang(_) => continue,
+                _ => return Ok(t),
             }
         }
-
-        Ok(t)
     }

-    pub fn real_token(&mut self) -> Token {
+    fn real_token(&mut self) -> (Token, Span) {
         let res = self.try_real_token();
         self.unwrap_or_abort(res)
     }

+    fn try_next_token_with_raw_span(&mut self) -> Result<(Token, Span), ()> {
+        assert!(self.fatal_errs.is_empty());
+        match self.scan_whitespace_or_comment() {
+            Some(comment) => {
+                let raw_span = comment.span;
+                Ok((comment, raw_span))
+            }
+            None => {
+                let (kind, start_pos, end_pos) = if self.is_eof() {
+                    (token::Eof, self.source_file.end_pos, self.source_file.end_pos)
+                } else {
+                    let start_pos = self.pos;
+                    (self.next_token_inner()?, start_pos, self.pos)
+                };
+                let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
+                Ok((Token::new(kind, real), raw))
+            }
+        }
+    }
+
     #[inline]
     fn is_eof(&self) -> bool {
         self.ch.is_none()
@@ -141,7 +155,6 @@ impl<'a> StringReader<'a> {
                override_span: Option<Span>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
         sr.bump();
-
         sr
     }

@@ -162,7 +175,6 @@ impl<'a> StringReader<'a> {
             ch: Some('\n'),
             source_file,
             end_src_index: src.len(),
-            peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
             override_span,
@@ -172,12 +184,8 @@ impl<'a> StringReader<'a> {
     pub fn new_or_buffered_errs(sess: &'a ParseSess,
                                 source_file: Lrc<syntax_pos::SourceFile>,
                                 override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
-        let mut sr = StringReader::new_raw(sess, source_file, override_span);
-        if sr.advance_token().is_err() {
-            Err(sr.buffer_fatal_errors())
-        } else {
-            Ok(sr)
-        }
+        let sr = StringReader::new_raw(sess, source_file, override_span);
+        Ok(sr)
     }

     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
@@ -197,11 +205,6 @@ impl<'a> StringReader<'a> {

         sr.bump();

-        if sr.advance_token().is_err() {
-            sr.emit_fatal_errors();
-            FatalError.raise();
-        }
-
         sr
     }

@@ -257,28 +260,6 @@ impl<'a> StringReader<'a> {
         self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
     }

-    /// Advance peek_token to refer to the next token, and
-    /// possibly update the interner.
-    fn advance_token(&mut self) -> Result<Token, ()> {
-        match self.scan_whitespace_or_comment() {
-            Some(comment) => {
-                self.peek_span_src_raw = comment.span;
-                Ok(comment)
-            }
-            None => {
-                let (kind, start_pos, end_pos) = if self.is_eof() {
-                    (token::Eof, self.source_file.end_pos, self.source_file.end_pos)
-                } else {
-                    let start_pos = self.pos;
-                    (self.next_token_inner()?, start_pos, self.pos)
-                };
-                let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
-                self.peek_span_src_raw = raw;
-                Ok(Token::new(kind, real))
-            }
-        }
-    }
-
     #[inline]
     fn src_index(&self, pos: BytePos) -> usize {
         (pos - self.source_file.start_pos).to_usize()
@@ -1447,12 +1428,7 @@ mod tests {
                  teststr: String)
                  -> StringReader<'a> {
         let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        let mut sr = StringReader::new_raw(sess, sf, None);
-        if sr.advance_token().is_err() {
-            sr.emit_fatal_errors();
-            FatalError.raise();
-        }
-        sr
+        StringReader::new_raw(sess, sf, None)
     }

     #[test]
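One supporting change above is that `unwrap_or_abort` becomes generic over its payload type, since it now unwraps both `Result<Token, ()>` (from `try_next_token`) and `Result<(Token, Span), ()>` (from `try_real_token`). A self-contained sketch of that shape, where the error arm is only a stand-in for the real diagnostics path:

    // Sketch: one generic helper unwraps any payload type, so the same code
    // path serves `Token` and `(Token, Span)` results. The panic stands in
    // for emitting buffered fatal errors and raising `FatalError`.
    fn unwrap_or_abort<T>(res: Result<T, ()>) -> T {
        match res {
            Ok(v) => v,
            Err(()) => panic!("aborting due to previous lexer errors"),
        }
    }

    fn main() {
        let token: Result<&str, ()> = Ok("ident");
        let token_with_span: Result<(&str, (usize, usize)), ()> = Ok(("ident", (0, 5)));
        println!("{}", unwrap_or_abort(token));
        println!("{:?}", unwrap_or_abort(token_with_span));
    }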

src/libsyntax/parse/lexer/tokentrees.rs

Lines changed: 8 additions & 7 deletions
@@ -1,4 +1,4 @@
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};

 use crate::print::pprust::token_to_string;
 use crate::parse::lexer::{StringReader, UnmatchedBrace};
@@ -11,6 +11,7 @@ impl<'a> StringReader<'a> {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
             token: Token::dummy(),
+            raw_span: DUMMY_SP,
             open_braces: Vec::new(),
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
@@ -24,6 +25,7 @@
 struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
     token: Token,
+    raw_span: Span,
     /// Stack of open delimiters and their spans. Used for error message.
     open_braces: Vec<(token::DelimToken, Span)>,
     unmatched_braces: Vec<UnmatchedBrace>,
@@ -206,18 +208,17 @@ impl<'a> TokenTreesReader<'a> {
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
-                // Additionally, we actually check if the *next* pair of tokens
-                // is joint, but this is equivalent to checking the current pair.
-                let raw = self.string_reader.peek_span_src_raw;
+                let raw_span = self.raw_span;
                 self.real_token();
-                let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
-                    && self.token.is_op();
+                let is_joint = raw_span.hi() == self.raw_span.lo() && self.token.is_op();
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }

     fn real_token(&mut self) {
-        self.token = self.string_reader.real_token();
+        let (token, raw_span) = self.string_reader.real_token();
+        self.token = token;
+        self.raw_span = raw_span;
     }
 }
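The joint-ness check above compares the raw spans of neighbouring tokens: they are joint exactly when the first span ends where the second begins, i.e. there is no gap between them in the source. A toy, self-contained illustration of that comparison (not the rustc lexer; single characters stand in for tokens, so `>>` yields a joint pair while `> >` does not):

    #[derive(Clone, Copy)]
    struct Span { lo: usize, hi: usize }

    // Split a string into single-char "tokens" with byte spans, skipping spaces.
    fn lex(src: &str) -> Vec<(char, Span)> {
        src.char_indices()
            .filter(|&(_, c)| !c.is_whitespace())
            .map(|(i, c)| (c, Span { lo: i, hi: i + 1 }))
            .collect()
    }

    fn main() {
        for &src in ["x >> y", "x > > y"].iter() {
            let toks = lex(src);
            // Same comparison as `raw_span.hi() == self.raw_span.lo()` above.
            let joint: Vec<bool> = toks.windows(2).map(|w| w[0].1.hi == w[1].1.lo).collect();
            println!("{:?}: joint pairs = {:?}", src, joint);
        }
    }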
