diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index f8e6793fe..7a1813544 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -35,7 +35,9 @@ use serde::{Deserialize, Serialize};
 use sqlparser_derive::{Visit, VisitMut};
 use crate::ast::DollarQuotedString;
-use crate::dialect::{BigQueryDialect, DuckDbDialect, GenericDialect, SnowflakeDialect};
+use crate::dialect::{
+    BigQueryDialect, DuckDbDialect, GenericDialect, HiveDialect, SnowflakeDialect,
+};
 use crate::dialect::{Dialect, MySqlDialect};
 use crate::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX};
@@ -495,9 +497,32 @@ impl<'a> Tokenizer<'a> {
         Ok(tokens)
     }
+    fn tokenize_identifier_or_keyword(
+        &self,
+        ch: String,
+        chars: &mut State,
+    ) -> Result