@@ -482,18 +482,19 @@ impl<'a> Tokenizer<'a> {
                 // TODO: deal with nested comments
                 loop {
                     match chars.next() {
-                        Some(ch) if maybe_closing_comment && ch == '/' => {
-                            break Ok(Some(Token::Whitespace(Whitespace::MultiLineComment(s))));
-                        }
-                        Some(ch) if maybe_closing_comment && ch != '/' => {
-                            maybe_closing_comment = false;
-                            s.push('*');
-                            s.push(ch);
-                        }
-                        Some(ch) if !maybe_closing_comment && ch == '*' => {
-                            maybe_closing_comment = true;
+                        Some(ch) => {
+                            if maybe_closing_comment {
+                                if ch == '/' {
+                                    break Ok(Some(Token::Whitespace(Whitespace::MultiLineComment(s))));
+                                } else {
+                                    s.push('*');
+                                }
+                            }
+                            maybe_closing_comment = ch == '*';
+                            if !maybe_closing_comment {
+                                s.push(ch);
+                            }
                         }
-                        Some(ch) => s.push(ch),
                         None => {
                             break Err(TokenizerError(
                                 "Unexpected EOF while in a multi-line comment".to_string(),
@@ -727,6 +728,21 @@ mod tests {
         compare(expected, tokens);
     }
 
+    #[test]
+    fn tokenize_multiline_comment_with_even_asterisks() {
+        let sql = String::from("\n/** Comment **/\n");
+
+        let dialect = GenericSqlDialect {};
+        let mut tokenizer = Tokenizer::new(&dialect, &sql);
+        let tokens = tokenizer.tokenize().unwrap();
+        let expected = vec![
+            Token::Whitespace(Whitespace::Newline),
+            Token::Whitespace(Whitespace::MultiLineComment("* Comment *".to_string())),
+            Token::Whitespace(Whitespace::Newline),
+        ];
+        compare(expected, tokens);
+    }
+
     fn compare(expected: Vec<Token>, actual: Vec<Token>) {
         //println!("------------------------------");
         //println!("tokens = {:?}", actual);