@@ -1049,18 +1049,18 @@ impl<'a> Parser<'a> {
             | Keyword::CURRENT_USER
             | Keyword::SESSION_USER
             | Keyword::USER
-                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
-            {
-                Ok(Some(Expr::Function(Function {
-                    name: ObjectName(vec![w.to_ident(w_span)]),
-                    parameters: FunctionArguments::None,
-                    args: FunctionArguments::None,
-                    null_treatment: None,
-                    filter: None,
-                    over: None,
-                    within_group: vec![],
-                })))
-            }
+                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
+            {
+                Ok(Some(Expr::Function(Function {
+                    name: ObjectName(vec![w.to_ident(w_span)]),
+                    parameters: FunctionArguments::None,
+                    args: FunctionArguments::None,
+                    null_treatment: None,
+                    filter: None,
+                    over: None,
+                    within_group: vec![],
+                })))
+            }
             Keyword::CURRENT_TIMESTAMP
             | Keyword::CURRENT_TIME
             | Keyword::CURRENT_DATE
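
A minimal usage sketch for this arm, assuming the crate's public `Parser::parse_sql` entry point: in the PostgreSQL (and Generic) dialect, `CURRENT_USER`, `SESSION_USER` and `USER` parse as zero-argument functions rather than plain identifiers.

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // CURRENT_USER parses as a no-argument function expression
    // (Expr::Function with FunctionArguments::None), not as an identifier.
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, "SELECT CURRENT_USER").unwrap();
    println!("{statements:?}");
}
```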
@@ -1075,18 +1075,18 @@ impl<'a> Parser<'a> {
             Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
             Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
             Keyword::EXISTS
-                // Databricks has a function named `exists`, so only treat EXISTS as a subquery operator when followed by SELECT or WITH.
-                if !dialect_of!(self is DatabricksDialect)
-                    || matches!(
+                // Databricks has a function named `exists`, so only treat EXISTS as a subquery operator when followed by SELECT or WITH.
+                if !dialect_of!(self is DatabricksDialect)
+                    || matches!(
                         self.peek_nth_token(1).token,
                         Token::Word(Word {
                             keyword: Keyword::SELECT | Keyword::WITH,
                             ..
                         })
                     ) =>
-            {
-                Ok(Some(self.parse_exists_expr(false)?))
-            }
+            {
+                Ok(Some(self.parse_exists_expr(false)?))
+            }
             Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
             Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
             Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
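
The guard above exists because Databricks also ships a higher-order `exists(array, lambda)` function. A hedged sketch, assuming the Databricks dialect's lambda (`->`) syntax support and a hypothetical `numbers` column: `exists(...)` not followed by SELECT or WITH falls through to ordinary function-call parsing, while `EXISTS (SELECT ...)` still parses as the subquery operator.

```rust
use sqlparser::dialect::DatabricksDialect;
use sqlparser::parser::Parser;

fn main() {
    // `exists(numbers, x -> x > 3)` becomes an ordinary function call, while
    // `EXISTS (SELECT 1)` still becomes the subquery operator (Expr::Exists).
    let sql = "SELECT exists(numbers, x -> x > 3), EXISTS (SELECT 1)";
    let statements = Parser::parse_sql(&DatabricksDialect {}, sql).unwrap();
    println!("{statements:?}");
}
```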
@@ -1103,22 +1103,22 @@ impl<'a> Parser<'a> {
                 Ok(Some(self.parse_array_expr(true)?))
             }
             Keyword::ARRAY
-                if self.peek_token() == Token::LParen
-                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
-            {
-                self.expect_token(&Token::LParen)?;
-                let query = self.parse_query()?;
-                self.expect_token(&Token::RParen)?;
-                Ok(Some(Expr::Function(Function {
-                    name: ObjectName(vec![w.to_ident(w_span)]),
-                    parameters: FunctionArguments::None,
-                    args: FunctionArguments::Subquery(query),
-                    filter: None,
-                    null_treatment: None,
-                    over: None,
-                    within_group: vec![],
-                })))
-            }
+                if self.peek_token() == Token::LParen
+                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
+            {
+                self.expect_token(&Token::LParen)?;
+                let query = self.parse_query()?;
+                self.expect_token(&Token::RParen)?;
+                Ok(Some(Expr::Function(Function {
+                    name: ObjectName(vec![w.to_ident(w_span)]),
+                    parameters: FunctionArguments::None,
+                    args: FunctionArguments::Subquery(query),
+                    filter: None,
+                    null_treatment: None,
+                    over: None,
+                    within_group: vec![],
+                })))
+            }
             Keyword::NOT => Ok(Some(self.parse_not()?)),
             Keyword::MATCH if dialect_of!(self is MySqlDialect | GenericDialect) => {
                 Ok(Some(self.parse_match_against()?))
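
The ARRAY arm above only fires when ARRAY is followed by a parenthesized subquery and the dialect is not ClickHouse or Databricks, where `array(...)` is an ordinary function. A minimal sketch of the accepted form, with `id` and `t` as hypothetical names:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // ARRAY(<subquery>) parses as a Function whose args are FunctionArguments::Subquery.
    let sql = "SELECT ARRAY(SELECT id FROM t)";
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    println!("{statements:?}");
}
```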
@@ -5022,7 +5022,7 @@ impl<'a> Parser<'a> {
                         return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
                     }
                 }
-            },
+            }
         };
         Ok(owner)
     }
@@ -7979,6 +7979,27 @@ impl<'a> Parser<'a> {
         }
     }
 
+    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumValue>, ParserError> {
+        self.expect_token(&Token::LParen)?;
+        let values = self.parse_comma_separated(Parser::parse_enum_value)?;
+        self.expect_token(&Token::RParen)?;
+        Ok(values)
+    }
+
+    pub fn parse_enum_value(&mut self) -> Result<EnumValue, ParserError> {
+        let str = self.parse_literal_string()?;
+        let value = match self.peek_token().token {
+            Token::Eq => {
+                // Consume the `=` token
+                self.next_token();
+                let value = self.parse_number_value()?;
+                EnumValue::Pair(str, value)
+            }
+            _ => EnumValue::String(str),
+        };
+        Ok(value)
+    }
+
     /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
     pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
         let (ty, trailing_bracket) = self.parse_data_type_helper()?;
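
`parse_enum_values` and `parse_enum_value` are the helpers introduced in this hunk; they accept both bare string members (`'open'`) and explicit `'open' = 1` pairs inside parentheses. A hedged sketch of driving them directly through the public parser API on this branch:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};
    // A mixed list: explicit discriminants plus a bare string member.
    let values = Parser::new(&dialect)
        .try_with_sql("('open' = 1, 'closed' = 2, 'unknown')")?
        .parse_enum_values()?;
    // `values` is a Vec<EnumValue> mixing EnumValue::Pair and EnumValue::String.
    println!("{values:?}");
    Ok(())
}
```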
@@ -8210,7 +8231,9 @@ impl<'a> Parser<'a> {
             Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                 self.parse_exact_number_optional_precision_scale()?,
             )),
-            Keyword::ENUM => Ok(DataType::Enum(self.parse_string_values()?)),
+            Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
+            Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
+            Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
             Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
             Keyword::ARRAY => {
                 if dialect_of!(self is SnowflakeDialect) {
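
With the mapping above, and assuming ENUM8 and ENUM16 are registered as keywords elsewhere in this change, ClickHouse-style sized enums land in `DataType::Enum(values, Some(8))` or `Some(16)`, while plain ENUM keeps `None`. A hedged end-to-end sketch:

```rust
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

fn main() {
    // The `status` column's type should parse as DataType::Enum([...], Some(8)).
    let sql = "CREATE TABLE t (status Enum8('open' = 1, 'closed' = 2))";
    let statements = Parser::parse_sql(&ClickHouseDialect {}, sql).unwrap();
    println!("{statements:?}");
}
```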