@@ -40,13 +40,13 @@ use serde::{Deserialize, Serialize};
 #[cfg(feature = "visitor")]
 use sqlparser_derive::{Visit, VisitMut};
 
+use crate::ast::DollarQuotedString;
 use crate::dialect::Dialect;
 use crate::dialect::{
     BigQueryDialect, DuckDbDialect, GenericDialect, MySqlDialect, PostgreSqlDialect,
     SnowflakeDialect,
 };
 use crate::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX};
-use crate::{ast::DollarQuotedString, dialect::HiveDialect};
 
 /// SQL Token enumeration
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
@@ -1372,8 +1372,7 @@ impl<'a> Tokenizer<'a> {
             }
             '{' => self.consume_and_return(chars, Token::LBrace),
             '}' => self.consume_and_return(chars, Token::RBrace),
-            '#' if dialect_of!(self is SnowflakeDialect | BigQueryDialect | MySqlDialect | HiveDialect) =>
-            {
+            '#' if dialect_of!(self is SnowflakeDialect | BigQueryDialect | MySqlDialect) => {
                 chars.next(); // consume the '#', starting a snowflake single-line comment
                 let comment = self.tokenize_single_line_comment(chars);
                 Ok(Some(Token::Whitespace(Whitespace::SingleLineComment {
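
Illustrative sketch (not part of the diff): after this change, '#' starts a single-line comment only under the Snowflake, BigQuery, and MySQL dialects; HiveDialect no longer takes this path. The snippet below shows how the behavior can be exercised through sqlparser's public Tokenizer API; the query string and the assertion are made up for illustration.

    // Minimal sketch: under MySqlDialect, '#' still begins a single-line comment,
    // which the tokenizer emits as a Whitespace::SingleLineComment token.
    use sqlparser::dialect::MySqlDialect;
    use sqlparser::tokenizer::{Token, Tokenizer, Whitespace};

    fn main() {
        let dialect = MySqlDialect {};
        let sql = "SELECT 1 # trailing comment";
        let mut tokenizer = Tokenizer::new(&dialect, sql);
        let tokens = tokenizer.tokenize().unwrap();
        // Expect at least one whitespace token carrying the comment text.
        assert!(tokens.iter().any(|t| matches!(
            t,
            Token::Whitespace(Whitespace::SingleLineComment { .. })
        )));
    }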