@@ -250,12 +250,12 @@ impl<'a> TokenStream<'a> {
                )
            )
        } else {
-            let mut prev_char = cur_char;
            let mut nchars = 1;
+            let mut escaped = false;
            for (idx, cur_char) in iter {
                nchars += 1;
                match cur_char {
-                    '"' if prev_char == '\\' => {}
+                    '"' if escaped => {}
                    '"' => {
                        self.position.column += nchars;
                        self.off += idx+1;
@@ -268,11 +268,14 @@ impl<'a> TokenStream<'a> {
                            )
                        );
                    }
+
                    _ => {

                    }
                }
-                prev_char = cur_char;
+
+                // if we aren't escaped and the current char is a \, we are now escaped
+                escaped = !escaped && cur_char == '\\';
            }
            Err(
                Error::unexpected_message(
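
The change above replaces the `prev_char == '\\'` check with a boolean `escaped` flag. Tracking only the previous character mis-handles an escaped backslash: in `"hello\\"` the second backslash is itself escaped, so the closing quote that follows is not escaped, yet `prev_char == '\\'` would still treat it as such. The toggle `escaped = !escaped && cur_char == '\\'` gets this right, because an odd-length run of backslashes leaves the flag set while an even-length run clears it. Below is a minimal standalone sketch of the same idea, not the crate's actual code; `string_body_terminates` is a name invented for illustration. It scans the characters after the opening quote and reports whether an unescaped closing quote is found.

// Minimal sketch of the escape tracking used in the diff above (hypothetical helper).
fn string_body_terminates(rest: &str) -> bool {
    let mut escaped = false;
    for c in rest.chars() {
        match c {
            '"' if escaped => {}   // escaped quote: still inside the string
            '"' => return true,    // unescaped quote: the string terminates here
            _ => {}
        }
        // if we aren't escaped and the current char is a \, we are now escaped
        escaped = !escaped && c == '\\';
    }
    false // input ended before an unescaped closing quote
}

fn main() {
    assert!(string_body_terminates(r#"hello""#));     // "hello"
    assert!(string_body_terminates(r#"hello\\""#));   // "hello\\"  (escaped backslash)
    assert!(!string_body_terminates(r#"hello\""#));   // "hello\"   (escaped quote, unterminated)
    assert!(!string_body_terminates(r#"hello\\\""#)); // "hello\\\" (still unterminated)
}

The same toggle is what the new tokenizer tests in the next hunk exercise.
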
@@ -482,12 +485,17 @@ mod test {
    #[test] #[should_panic] fn letters_float2() { tok_str("0.bbc"); }
    #[test] #[should_panic] fn letters_float3() { tok_str("0.bbce0"); }
    #[test] #[should_panic] fn no_exp_sign_float() { tok_str("0e0"); }
+    #[test] #[should_panic] fn unterminated_string() { tok_str(r#""hello\""#); }
+    #[test] #[should_panic] fn extra_unterminated_string() { tok_str(r#""hello\\\""#); }

    #[test]
    fn string() {
        assert_eq!(tok_str(r#""""#), [r#""""#]);
        assert_eq!(tok_typ(r#""""#), [StringValue]);
        assert_eq!(tok_str(r#""hello""#), [r#""hello""#]);
+        assert_eq!(tok_str(r#""hello\\""#), [r#""hello\\""#]);
+        assert_eq!(tok_str(r#""hello\\\\""#), [r#""hello\\\\""#]);
+        assert_eq!(tok_str(r#""he\\llo""#), [r#""he\\llo""#]);
        assert_eq!(tok_typ(r#""hello""#), [StringValue]);
        assert_eq!(tok_str(r#""my\"quote""#), [r#""my\"quote""#]);
        assert_eq!(tok_typ(r#""my\"quote""#), [StringValue]);
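
Read together with the tokenizer change, the new cases pin down the escape semantics: `unterminated_string` and `extra_unterminated_string` feed input whose final quote is preceded by an odd number of backslashes, so the quote is escaped and tokenizing panics, while `"hello\\"`, `"hello\\\\"`, and `"he\\llo"` contain only even runs of backslashes and tokenize as single StringValue tokens.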