@@ -39,7 +39,6 @@ pub struct StringReader<'a> {
     /// Stop reading src at this index.
     crate end_src_index: usize,
     // cached:
-    peek_token: Token,
     peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
@@ -78,9 +77,7 @@ impl<'a> StringReader<'a> {
     /// Returns the next token. EFFECT: advances the string_reader.
     pub fn try_next_token(&mut self) -> Result<Token, ()> {
         assert!(self.fatal_errs.is_empty());
-        let ret_val = self.peek_token.take();
-        self.advance_token()?;
-        Ok(ret_val)
+        self.advance_token()
     }

     fn try_real_token(&mut self) -> Result<Token, ()> {
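A minimal sketch of the pattern being deleted here, with invented names (CachedReader, Tok) rather than the real rustc types, and assuming `Token::take` behaves roughly like a `mem::replace` with a dummy value. The old `try_next_token` had to swap the cached token out of the reader and then refill the cache; once `advance_token` hands the token back directly, that take-then-refill dance collapses into a single forwarding call.

use std::mem;

#[derive(Debug, PartialEq)]
enum Tok {
    Dummy,
    Word(&'static str),
    Eof,
}

struct CachedReader {
    peek: Tok,      // stand-in for peek_token
    rest: Vec<Tok>, // pretend token source, consumed back to front
}

impl CachedReader {
    // Old shape of advance_token: refill the cache as a side effect.
    fn advance(&mut self) -> Result<(), ()> {
        self.peek = self.rest.pop().unwrap_or(Tok::Eof);
        Ok(())
    }

    // Old shape of try_next_token: swap the cached value out, then refill.
    fn try_next(&mut self) -> Result<Tok, ()> {
        let ret = mem::replace(&mut self.peek, Tok::Dummy);
        self.advance()?;
        Ok(ret)
    }
}

fn main() {
    let mut r = CachedReader { peek: Tok::Word("fn"), rest: vec![Tok::Word("main")] };
    assert_eq!(r.try_next(), Ok(Tok::Word("fn")));
    assert_eq!(r.try_next(), Ok(Tok::Word("main")));
    assert_eq!(r.try_next(), Ok(Tok::Eof));
}

Removing the cache is also why the `peek_token: Token::dummy()` initializer disappears from the constructor in a later hunk.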
@@ -120,10 +117,6 @@ impl<'a> StringReader<'a> {
         FatalError.raise();
     }

-    fn fatal(&self, m: &str) -> FatalError {
-        self.fatal_span(self.peek_token.span, m)
-    }
-
     crate fn emit_fatal_errors(&mut self) {
         for err in &mut self.fatal_errs {
             err.emit();
@@ -169,7 +162,6 @@ impl<'a> StringReader<'a> {
             ch: Some('\n'),
             source_file,
             end_src_index: src.len(),
-            peek_token: Token::dummy(),
             peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
@@ -267,11 +259,11 @@ impl<'a> StringReader<'a> {

     /// Advance peek_token to refer to the next token, and
     /// possibly update the interner.
-    fn advance_token(&mut self) -> Result<(), ()> {
+    fn advance_token(&mut self) -> Result<Token, ()> {
         match self.scan_whitespace_or_comment() {
             Some(comment) => {
                 self.peek_span_src_raw = comment.span;
-                self.peek_token = comment;
+                Ok(comment)
             }
             None => {
                 let (kind, start_pos, end_pos) = if self.is_eof() {
@@ -281,12 +273,10 @@ impl<'a> StringReader<'a> {
                     (self.next_token_inner()?, start_pos, self.pos)
                 };
                 let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
-                self.peek_token = Token::new(kind, real);
                 self.peek_span_src_raw = raw;
+                Ok(Token::new(kind, real))
             }
         }
-
-        Ok(())
     }

     #[inline]
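For contrast, a sketch of the new shape under the same caveats (ToyToken and ToyReader are invented and the scanning logic is deliberately simplistic): `advance_token` builds the token and returns it, so `try_next_token` just forwards the `Result` and no per-reader token cache is needed.

#[derive(Debug, PartialEq)]
enum ToyToken {
    Word(String),
    Whitespace,
    Eof,
}

struct ToyReader {
    src: Vec<char>,
    pos: usize,
}

impl ToyReader {
    // New shape of advance_token: return the freshly built token.
    fn advance_token(&mut self) -> Result<ToyToken, ()> {
        match self.src.get(self.pos).copied() {
            None => Ok(ToyToken::Eof),
            Some(c) if c.is_whitespace() => {
                while matches!(self.src.get(self.pos), Some(c) if c.is_whitespace()) {
                    self.pos += 1;
                }
                Ok(ToyToken::Whitespace)
            }
            Some(_) => {
                let start = self.pos;
                while matches!(self.src.get(self.pos), Some(c) if !c.is_whitespace()) {
                    self.pos += 1;
                }
                Ok(ToyToken::Word(self.src[start..self.pos].iter().collect()))
            }
        }
    }

    // New shape of try_next_token: forward the result unchanged.
    fn try_next_token(&mut self) -> Result<ToyToken, ()> {
        self.advance_token()
    }
}

fn main() {
    let mut r = ToyReader { src: "fn main".chars().collect(), pos: 0 };
    assert_eq!(r.try_next_token(), Ok(ToyToken::Word("fn".to_string())));
    assert_eq!(r.try_next_token(), Ok(ToyToken::Whitespace));
    assert_eq!(r.try_next_token(), Ok(ToyToken::Word("main".to_string())));
    assert_eq!(r.try_next_token(), Ok(ToyToken::Eof));
}

Note that in the real change `peek_span_src_raw` is still cached on the reader; only the token itself stops being stored.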
@@ -1484,17 +1474,17 @@ mod tests {
             assert_eq!(tok1.kind, tok2.kind);
             assert_eq!(tok1.span, tok2.span);
             assert_eq!(string_reader.next_token(), token::Whitespace);
-            // the 'main' id is already read:
-            assert_eq!(string_reader.pos.clone(), BytePos(28));
             // read another token:
             let tok3 = string_reader.next_token();
+            assert_eq!(string_reader.pos.clone(), BytePos(28));
             let tok4 = Token::new(
                 mk_ident("main"),
                 Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
             );
             assert_eq!(tok3.kind, tok4.kind);
             assert_eq!(tok3.span, tok4.span);
-            // the lparen is already read:
+
+            assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
             assert_eq!(string_reader.pos.clone(), BytePos(29))
         })
     }
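The test reshuffle follows from the same change: with no token prefetched, `string_reader.pos` only advances when `next_token()` actually consumes something, so the `BytePos` checks move to after the corresponding reads and the `(` now has to be read explicitly. A toy illustration of that position behaviour, with invented names and byte offsets (the real test lexes a longer source, hence BytePos(28) and BytePos(29)):

struct PosReader {
    src: Vec<char>,
    pos: usize,
}

impl PosReader {
    // Greedily consume one run of alphanumerics, one run of whitespace, or a
    // single other character, advancing `pos` past what was read.
    fn next_token(&mut self) -> String {
        let start = self.pos;
        let c = self.src[self.pos];
        self.pos += 1;
        if c.is_alphanumeric() || c.is_whitespace() {
            while self.pos < self.src.len()
                && self.src[self.pos].is_alphanumeric() == c.is_alphanumeric()
                && self.src[self.pos].is_whitespace() == c.is_whitespace()
            {
                self.pos += 1;
            }
        }
        self.src[start..self.pos].iter().collect()
    }
}

fn main() {
    let mut r = PosReader { src: "fn main(".chars().collect(), pos: 0 };
    assert_eq!(r.next_token(), "fn");
    assert_eq!(r.next_token(), " ");
    assert_eq!(r.next_token(), "main");
    assert_eq!(r.pos, 7); // the position reflects the token just read...
    assert_eq!(r.next_token(), "(");
    assert_eq!(r.pos, 8); // ...and the paren must be read explicitly to reach it.
}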