Commit 43ffca1

Clean up unused imports
1 parent 33e9523 commit 43ffca1

6 files changed: +8 -13 lines

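Beyond deleting dead `use` items, several hunks below replace bare `tok.feed(...)` statements with `let _ = tok.feed(...)`. The sketch below illustrates that idiom; the `Tokenizer` stub and `TokenizerResult` enum are hypothetical stand-ins, on the assumption that the real `feed` returns a result type the compiler warns about when it is silently dropped:

// Hypothetical stand-in for html5ever's Tokenizer: only the warn-on-drop
// return type matters for this illustration.
#[must_use]
#[allow(dead_code)]
enum TokenizerResult {
    Done,   // all buffered input was consumed
    Script, // the real tokenizer can pause here to hand back a script
}

struct Tokenizer;

impl Tokenizer {
    fn feed(&mut self) -> TokenizerResult {
        TokenizerResult::Done
    }
}

fn main() {
    let mut tok = Tokenizer;
    // A bare `tok.feed();` statement would trip the unused_must_use lint;
    // binding the result to `_` discards it explicitly and keeps the build quiet.
    let _ = tok.feed();
}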

benches/tokenizer.rs

Lines changed: 1 addition & 2 deletions
@@ -13,7 +13,6 @@ extern crate html5ever;
 
 use std::{fs, env, cmp};
 use std::path::PathBuf;
-use std::io::Read;
 use std::default::Default;
 
 use test::{black_box, Bencher, TestDesc, TestDescAndFn};
@@ -101,7 +100,7 @@ impl TDynBenchFn for Bench {
             buffer.push_back(buf);
             let _ = tok.feed(&mut buffer);
         }
-        tok.feed(&mut buffer);
+        let _ = tok.feed(&mut buffer);
         tok.end();
     }
 });

examples/noop-tokenize.rs

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ fn main() {
     input.push_back(chunk.try_reinterpret().unwrap());
 
     let mut tok = Tokenizer::new(Sink(Vec::new()), Default::default());
-    tok.feed(&mut input);
+    let _ = tok.feed(&mut input);
     assert!(input.is_empty());
     tok.end();
 }

examples/tokenize.rs

Lines changed: 1 addition & 1 deletion
@@ -93,7 +93,7 @@ fn main() {
        profile: true,
        .. Default::default()
    });
-   tok.feed(&mut input);
+   let _ = tok.feed(&mut input);
    assert!(input.is_empty());
    tok.end();
    sink.is_char(false);

src/tokenizer/buffer_queue.rs

Lines changed: 1 addition & 2 deletions
@@ -9,7 +9,6 @@
 
 use util::smallcharset::SmallCharSet;
 
-use std::ascii::AsciiExt;
 use std::collections::VecDeque;
 
 use tendril::StrTendril;
@@ -166,7 +165,7 @@ impl BufferQueue {
 #[allow(non_snake_case)]
 mod test {
     use std::ascii::AsciiExt;
-    use tendril::{StrTendril, SliceExt};
+    use tendril::SliceExt;
     use super::{BufferQueue, FromSet, NotFromSet};
 
     #[test]

src/tokenizer/mod.rs

Lines changed: 2 additions & 5 deletions
@@ -9,14 +9,11 @@
 
 //! The HTML5 tokenizer.
 
-#![allow(unused_imports)]
-
 pub use self::interface::{Doctype, Attribute, TagKind, StartTag, EndTag, Tag};
 pub use self::interface::{Token, DoctypeToken, TagToken, CommentToken};
 pub use self::interface::{CharacterTokens, NullCharacterToken, EOFToken, ParseError};
 pub use self::interface::{TokenSink, TokenSinkResult};
 
-use self::states::{RawLessThanSign, RawEndTagOpen, RawEndTagName};
 use self::states::{Rcdata, Rawtext, ScriptData, ScriptDataEscaped};
 use self::states::{Escaped, DoubleEscaped};
 use self::states::{Unquoted, SingleQuoted, DoubleQuoted};
@@ -1415,9 +1412,9 @@ mod test {
 
     use super::{TokenSink, Tokenizer, TokenizerOpts, TokenSinkResult};
 
-    use super::interface::{Token, DoctypeToken, TagToken, CommentToken};
+    use super::interface::{Token, TagToken};
     use super::interface::{CharacterTokens, NullCharacterToken, EOFToken, ParseError};
-    use super::interface::{Doctype, Attribute, TagKind, StartTag, EndTag, Tag};
+    use super::interface::{TagKind, StartTag, EndTag, Tag};
 
     use super::buffer_queue::{BufferQueue};
     use std::mem::replace;
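The notable change in this file is the removal of `#![allow(unused_imports)]`: with the blanket allow gone, rustc's `unused_imports` lint applies to the module again, which is what surfaces the dead `use` items deleted throughout this commit. A small self-contained illustration (hypothetical file, not from the repository):

// Not from the commit: a minimal crate showing the lint the removed
// attribute had been suppressing. With the allow gone, every dead `use`
// is reported again (escalated to an error here for emphasis).
#![deny(unused_imports)]

use std::collections::VecDeque; // referenced below, so accepted
// use std::io::Read;           // uncommenting this would now fail the build

fn main() {
    let mut q: VecDeque<u32> = VecDeque::new();
    q.push_back(1);
    assert_eq!(q.pop_front(), Some(1));
}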

tests/tokenizer.rs

Lines changed: 2 additions & 2 deletions
@@ -99,7 +99,7 @@ impl TokenLogger {
 impl TokenSink for TokenLogger {
     type Handle = ();
 
-    fn process_token(&mut self, token: Token, line_number: u64) -> TokenSinkResult<()> {
+    fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
         match token {
             CharacterTokens(b) => {
                 self.current_str.push_slice(&b);
@@ -143,7 +143,7 @@ fn tokenize(input: Vec<StrTendril>, opts: TokenizerOpts) -> Vec<Token> {
        buffer.push_back(chunk);
        let _ = tok.feed(&mut buffer);
    }
-   tok.feed(&mut buffer);
+   let _ = tok.feed(&mut buffer);
    tok.end();
    tok.unwrap().get_tokens()
}
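The first hunk here is a variation on the same cleanup: the `TokenSink` trait fixes the signature of `process_token`, so the unused `line_number` parameter is renamed to `_line_number` rather than removed. A minimal sketch of the idiom, with a hypothetical `Sink` trait standing in for the real one:

// Hypothetical trait mirroring the shape of TokenSink::process_token.
trait Sink {
    fn process(&mut self, token: char, line_number: u64);
}

struct Logger;

impl Sink for Logger {
    // The trait fixes the signature, but this impl never reads the line
    // number; the `_` prefix marks it intentionally unused and silences
    // the unused_variables warning without changing the API.
    fn process(&mut self, token: char, _line_number: u64) {
        println!("{}", token);
    }
}

fn main() {
    let mut logger = Logger;
    logger.process('a', 1);
}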
