Commit 601bad8: cleanup lexer constructors

Parent: 256df83

This commit removes `StringReader::new_or_buffered_errs` (which wrapped construction in a `Result` but always returned `Ok`), renames `new_raw` / `new_raw_internal` to `new` / `new_internal`, makes `new` public, and updates the rustdoc and libsyntax callers to construct the lexer directly.

5 files changed: 21 additions and 28 deletions
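
At a glance, the change for callers: the fallible wrapper constructor goes away and the plain constructor is used instead. A condensed before/after in Rust, taken from the src/libsyntax/parse/mod.rs hunk below:

    // before: construction wrapped in a Result that was always Ok
    let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;

    // after: the constructor is infallible and returns the lexer directly
    let srdr = lexer::StringReader::new(sess, source_file, override_span);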

src/librustdoc/html/highlight.rs

Lines changed: 11 additions & 11 deletions

@@ -38,17 +38,17 @@ pub fn render_with_highlighting(
         FileName::Custom(String::from("rustdoc-highlighting")),
         src.to_owned(),
     );
-    let highlight_result =
-        lexer::StringReader::new_or_buffered_errs(&sess, fm, None).and_then(|lexer| {
-            let mut classifier = Classifier::new(lexer, sess.source_map());
-
-            let mut highlighted_source = vec![];
-            if classifier.write_source(&mut highlighted_source).is_err() {
-                Err(classifier.lexer.buffer_fatal_errors())
-            } else {
-                Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
-            }
-        });
+    let highlight_result = {
+        let lexer = lexer::StringReader::new(&sess, fm, None);
+        let mut classifier = Classifier::new(lexer, sess.source_map());
+
+        let mut highlighted_source = vec![];
+        if classifier.write_source(&mut highlighted_source).is_err() {
+            Err(classifier.lexer.buffer_fatal_errors())
+        } else {
+            Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
+        }
+    };
 
     match highlight_result {
         Ok(highlighted_source) => {

src/librustdoc/passes/check_code_block_syntax.rs

Lines changed: 3 additions & 2 deletions

@@ -32,7 +32,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
             dox[code_block.code].to_owned(),
         );
 
-        let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| {
+        let errors = {
+            let mut lexer = Lexer::new(&sess, source_file, None);
             while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
                 if kind == token::Eof {
                     break;
@@ -46,7 +47,7 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
             } else {
                 Ok(())
             }
-        });
+        };
 
         if let Err(errors) = errors {
            let mut diag = if let Some(sp) =

src/libsyntax/parse/lexer/comments.rs

Lines changed: 1 addition & 1 deletion

@@ -346,7 +346,7 @@ pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) ->
     srdr.read_to_string(&mut src).unwrap();
     let cm = SourceMap::new(sess.source_map().path_mapping().clone());
     let source_file = cm.new_source_file(path, src);
-    let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
+    let mut rdr = lexer::StringReader::new(sess, source_file, None);
 
     let mut comments: Vec<Comment> = Vec::new();
     let mut code_to_the_left = false; // Only code

src/libsyntax/parse/lexer/mod.rs

Lines changed: 5 additions & 13 deletions

@@ -149,16 +149,15 @@ impl<'a> StringReader<'a> {
         buffer
     }
 
-    /// For comments.rs, which hackily pokes into next_pos and ch
-    fn new_raw(sess: &'a ParseSess,
+    pub fn new(sess: &'a ParseSess,
                source_file: Lrc<syntax_pos::SourceFile>,
                override_span: Option<Span>) -> Self {
-        let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
+        let mut sr = StringReader::new_internal(sess, source_file, override_span);
         sr.bump();
         sr
     }
 
-    fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
+    fn new_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
                         override_span: Option<Span>) -> Self
     {
         if source_file.src.is_none() {
@@ -181,13 +180,6 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn new_or_buffered_errs(sess: &'a ParseSess,
-                                source_file: Lrc<syntax_pos::SourceFile>,
-                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
-        let sr = StringReader::new_raw(sess, source_file, override_span);
-        Ok(sr)
-    }
-
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
@@ -197,7 +189,7 @@ impl<'a> StringReader<'a> {
             span = span.shrink_to_lo();
         }
 
-        let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
+        let mut sr = StringReader::new_internal(sess, begin.sf, None);
 
         // Seek the lexer to the right byte range.
         sr.next_pos = span.lo();
@@ -1428,7 +1420,7 @@ mod tests {
                    teststr: String)
                    -> StringReader<'a> {
         let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        StringReader::new_raw(sess, sf, None)
+        StringReader::new(sess, sf, None)
     }
 
     #[test]
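
After this commit, StringReader::new is the lexer's single public constructor and it cannot fail, so error handling happens only when the lexer is actually driven. Below is a minimal sketch of the resulting calling pattern, pieced together from the rustdoc changes above; the use paths and the final error-buffering step are assumptions for illustration, not verbatim code from the tree:

    // Sketch: drive the lexer to EOF and surface buffered fatal errors.
    // The paths below are assumed (libsyntax-era crate layout); only
    // StringReader::new, try_next_token, and buffer_fatal_errors appear
    // in the diffs above.
    use rustc_data_structures::sync::Lrc;
    use syntax::parse::lexer::StringReader;
    use syntax::parse::{token, ParseSess};
    use syntax_pos::SourceFile;
    use errors::Diagnostic;

    fn lex_to_eof(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Result<(), Vec<Diagnostic>> {
        // Construction no longer returns a Result.
        let mut lexer = StringReader::new(sess, source_file, None);

        // Pull tokens until EOF, as the rustdoc syntax checker does.
        while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
            if kind == token::Eof {
                break;
            }
        }

        // Fatal lexer errors are still reported, via explicit buffering.
        let errors = lexer.buffer_fatal_errors();
        if errors.is_empty() { Ok(()) } else { Err(errors) }
    }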

src/libsyntax/parse/mod.rs

Lines changed: 1 addition & 1 deletion

@@ -305,7 +305,7 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    let srdr = lexer::StringReader::new(sess, source_file, override_span);
     let (token_trees, unmatched_braces) = srdr.into_token_trees();
 
     match token_trees {
