1 file changed: +19 −2 lines changed

@@ -1284,10 +1284,17 @@ impl<'a> Tokenizer<'a> {
                chars.next(); // consume the dot

                match chars.peek() {
-                    Some('_') => {
-                        // Handle "._" case as a period (special token) followed by identifier
+                    // Handle "._" case as a period followed by identifier
+                    // if the last token was a word
+                    Some('_') if matches!(prev_token, Some(Token::Word(_))) => {
                        Ok(Some(Token::Period))
                    }
+                    Some('_') => {
+                        self.tokenizer_error(
+                            chars.location(),
+                            "Unexpected underscore here".to_string(),
+                        )
+                    }
                    Some(ch)
                        // Hive and mysql dialects allow numeric prefixes for identifiers
                        if ch.is_ascii_digit()
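
Behaviorally, the tokenizer now treats "._" as a Period followed by an identifier only when the previous token was a word; a bare "._" is rejected. A minimal sketch (not part of the diff) of that difference through the public API, assuming sqlparser's GenericDialect (the concrete dialect used in the diff's tests is not shown here):

    use sqlparser::dialect::GenericDialect;
    use sqlparser::tokenizer::{Token, Tokenizer};

    fn main() {
        let dialect = GenericDialect {};

        // "._" directly after a word: the dot is emitted as Token::Period and
        // "_b" is picked up as an identifier on the next pass.
        let tokens = Tokenizer::new(&dialect, "a._b").tokenize().unwrap();
        assert!(tokens.contains(&Token::Period));

        // "._" without a preceding word now hits the new error arm.
        assert!(Tokenizer::new(&dialect, "SELECT ._abc").tokenize().is_err());
    }
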
@@ -2498,6 +2505,16 @@ mod tests {
        ];

        compare(expected, tokens);
+
+        let sql = String::from("SELECT ._123");
+        if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() {
+            panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
+        }
+
+        let sql = String::from("SELECT ._abc");
+        if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() {
+            panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
+        }
    }

    #[test]
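
The added tests exercise only the rejection path. A companion positive-path check, sketched below and not part of the diff, assuming the same `dialect` binding and imports as the surrounding test:

    // Hypothetical companion check: "._" directly after a word should still
    // tokenize, with the dot emitted as Token::Period.
    let sql = String::from("SELECT a._123");
    let tokens = Tokenizer::new(&dialect, &sql)
        .tokenize()
        .expect("'._' after a word should tokenize");
    assert!(tokens.contains(&Token::Period));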