Skip to content

Commit 2341b39

Browse files
committed
Remove incorrect unsafe impl Send for Token { }
Token contains StrTendril, which is not Send because it contains a shared reference to a `Cell<usize>` for reference-counting. Refactor test harness to not need Send.
1 parent 00c3c41 commit 2341b39

File tree

2 files changed

+4
-7
lines changed

2 files changed

+4
-7
lines changed

src/tokenizer/interface.rs

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,9 +94,6 @@ pub enum Token {
     ParseError(Cow<'static, str>),
 }

-// FIXME: rust-lang/rust#22629
-unsafe impl Send for Token { }
-
 #[derive(Debug, PartialEq)]
 #[must_use]
 pub enum TokenSinkResult<Handle> {

tests/tokenizer.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -313,7 +313,7 @@ fn unescape_json(js: &Json) -> Json {
     }
 }

-fn mk_test(desc: String, input: String, expect: Vec<Token>, opts: TokenizerOpts)
+fn mk_test(desc: String, input: String, expect: Json, opts: TokenizerOpts)
         -> TestDescAndFn {
     TestDescAndFn {
         desc: TestDesc {
@@ -330,7 +330,8 @@ fn mk_test(desc: String, input: String, expect: Vec<Token>, opts: TokenizerOpts)
     // result but the compiler doesn't catch it!
     // Possibly mozilla/rust#12223.
     let output = tokenize(input.clone(), opts.clone());
-    if output != expect {
+    let expect_toks = json_to_tokens(&expect, opts.exact_errors);
+    if output != expect_toks {
         panic!("\ninput: {:?}\ngot: {:?}\nexpected: {:?}",
             input, output, expect);
     }
@@ -384,8 +385,7 @@ fn mk_tests(tests: &mut Vec<TestDescAndFn>, filename: &str, js: &Json) {
             newdesc = format!("{} (exact errors)", newdesc);
         }

-        let expect_toks = json_to_tokens(&expect, exact_errors);
-        tests.push(mk_test(newdesc, input.clone(), expect_toks, TokenizerOpts {
+        tests.push(mk_test(newdesc, input.clone(), expect.clone(), TokenizerOpts {
             exact_errors: exact_errors,
             initial_state: state,
             last_start_tag_name: start_tag.clone(),

0 commit comments

Comments (0)