diff --git a/Cargo.toml b/Cargo.toml index 9caff2512..aea65b344 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,7 +30,7 @@ include = [ "Cargo.toml", "LICENSE.TXT", ] -edition = "2021" +edition = "2024" [lib] name = "sqlparser" diff --git a/examples/cli.rs b/examples/cli.rs index 0252fca74..2f4b9de3c 100644 --- a/examples/cli.rs +++ b/examples/cli.rs @@ -21,7 +21,7 @@ //! Run with `cargo run --example cli` use std::fs; -use std::io::{stdin, Read}; +use std::io::{Read, stdin}; use simple_logger::SimpleLogger; use sqlparser::dialect::*; diff --git a/src/ast/data_type.rs b/src/ast/data_type.rs index cae8ca8f0..fa1e60c96 100644 --- a/src/ast/data_type.rs +++ b/src/ast/data_type.rs @@ -25,9 +25,9 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use crate::ast::{display_comma_separated, Expr, ObjectName, StructField, UnionField}; +use crate::ast::{Expr, ObjectName, StructField, UnionField, display_comma_separated}; -use super::{value::escape_single_quote_string, ColumnDef}; +use super::{ColumnDef, value::escape_single_quote_string}; #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] diff --git a/src/ast/dcl.rs b/src/ast/dcl.rs index 735ab0cce..1c2d0e304 100644 --- a/src/ast/dcl.rs +++ b/src/ast/dcl.rs @@ -28,8 +28,8 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use super::{display_comma_separated, Expr, Ident, Password}; -use crate::ast::{display_separated, ObjectName}; +use super::{Expr, Ident, Password, display_comma_separated}; +use crate::ast::{ObjectName, display_separated}; /// An option in `ROLE` statement. /// diff --git a/src/ast/ddl.rs b/src/ast/ddl.rs index 1fbc45603..99e3a2869 100644 --- a/src/ast/ddl.rs +++ b/src/ast/ddl.rs @@ -30,10 +30,10 @@ use sqlparser_derive::{Visit, VisitMut}; use crate::ast::value::escape_single_quote_string; use crate::ast::{ - display_comma_separated, display_separated, CommentDef, CreateFunctionBody, - CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull, - FunctionDeterminismSpecifier, FunctionParallel, Ident, MySQLColumnPosition, ObjectName, - OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value, + CommentDef, CreateFunctionBody, CreateFunctionUsing, DataType, Expr, FunctionBehavior, + FunctionCalledOnNull, FunctionDeterminismSpecifier, FunctionParallel, Ident, + MySQLColumnPosition, ObjectName, OperateFunctionArg, OrderByExpr, ProjectionSelect, + SequenceOptions, SqlOption, Tag, Value, display_comma_separated, display_separated, }; use crate::keywords::Keyword; use crate::tokenizer::Token; @@ -1840,11 +1840,7 @@ impl ConstraintCharacteristics { fn enforced_text(&self) -> Option<&'static str> { self.enforced.map( |enforced| { - if enforced { - "ENFORCED" - } else { - "NOT ENFORCED" - } + if enforced { "ENFORCED" } else { "NOT ENFORCED" } }, ) } diff --git a/src/ast/dml.rs b/src/ast/dml.rs index 8cfc67414..3ed1b96c0 100644 --- a/src/ast/dml.rs +++ b/src/ast/dml.rs @@ -32,12 +32,12 @@ use sqlparser_derive::{Visit, VisitMut}; pub use super::ddl::{ColumnDef, TableConstraint}; use super::{ - display_comma_separated, display_separated, query::InputFormatClause, Assignment, ClusteredBy, - CommentDef, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat, HiveIOFormat, - HiveRowFormat, Ident, InsertAliases, MysqlInsertPriority, ObjectName, OnCommit, OnInsert, - OneOrManyWithParens, OrderByExpr, 
Query, RowAccessPolicy, SelectItem, Setting, SqlOption, - SqliteOnConflict, StorageSerializationPolicy, TableEngine, TableObject, TableWithJoins, Tag, - WrappedCollection, + Assignment, ClusteredBy, CommentDef, Expr, FileFormat, FromTable, HiveDistributionStyle, + HiveFormat, HiveIOFormat, HiveRowFormat, Ident, InsertAliases, MysqlInsertPriority, ObjectName, + OnCommit, OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem, + Setting, SqlOption, SqliteOnConflict, StorageSerializationPolicy, TableEngine, TableObject, + TableWithJoins, Tag, WrappedCollection, display_comma_separated, display_separated, + query::InputFormatClause, }; /// CREATE INDEX statement. @@ -225,16 +225,15 @@ impl Display for CreateTable { "CREATE {or_replace}{external}{global}{temporary}{transient}{volatile}{iceberg}TABLE {if_not_exists}{name}", or_replace = if self.or_replace { "OR REPLACE " } else { "" }, external = if self.external { "EXTERNAL " } else { "" }, - global = self.global - .map(|global| { - if global { - "GLOBAL " - } else { - "LOCAL " - } - }) + global = self + .global + .map(|global| { if global { "GLOBAL " } else { "LOCAL " } }) .unwrap_or(""), - if_not_exists = if self.if_not_exists { "IF NOT EXISTS " } else { "" }, + if_not_exists = if self.if_not_exists { + "IF NOT EXISTS " + } else { + "" + }, temporary = if self.temporary { "TEMPORARY " } else { "" }, transient = if self.transient { "TRANSIENT " } else { "" }, volatile = if self.volatile { "VOLATILE " } else { "" }, diff --git a/src/ast/mod.rs b/src/ast/mod.rs index 2b9016d9a..35c8eba66 100644 --- a/src/ast/mod.rs +++ b/src/ast/mod.rs @@ -85,8 +85,8 @@ pub use self::trigger::{ }; pub use self::value::{ - escape_double_quote_string, escape_quoted_string, DateTimeField, DollarQuotedString, - NormalizationForm, TrimWhereField, Value, + DateTimeField, DollarQuotedString, NormalizationForm, TrimWhereField, Value, + escape_double_quote_string, escape_quoted_string, }; use crate::ast::helpers::key_value_options::KeyValueOptions; @@ -3708,7 +3708,7 @@ impl fmt::Display for Statement { local = if *local { " LOCAL" } else { "" }, path = path )?; - if let Some(ref ff) = file_format { + if let Some(ff) = file_format { write!(f, " STORED AS {ff}")? 
} write!(f, " {source}") @@ -3760,7 +3760,7 @@ impl fmt::Display for Statement { } } - if let Some(ref parts) = partitions { + if let Some(parts) = partitions { if !parts.is_empty() { write!(f, " PARTITION ({})", display_comma_separated(parts))?; } @@ -3827,7 +3827,7 @@ impl fmt::Display for Statement { "ANALYZE{}{table_name}", if *has_table_keyword { " TABLE " } else { " " } )?; - if let Some(ref parts) = partitions { + if let Some(parts) = partitions { if !parts.is_empty() { write!(f, " PARTITION ({})", display_comma_separated(parts))?; } @@ -4149,7 +4149,7 @@ impl fmt::Display for Statement { overwrite = if *overwrite { "OVERWRITE " } else { "" }, table_name = table_name, )?; - if let Some(ref parts) = &partitioned { + if let Some(parts) = &partitioned { if !parts.is_empty() { write!(f, " PARTITION ({})", display_comma_separated(parts))?; } @@ -4253,37 +4253,37 @@ impl fmt::Display for Statement { superuser = match *superuser { Some(true) => " SUPERUSER", Some(false) => " NOSUPERUSER", - None => "" + None => "", }, create_db = match *create_db { Some(true) => " CREATEDB", Some(false) => " NOCREATEDB", - None => "" + None => "", }, create_role = match *create_role { Some(true) => " CREATEROLE", Some(false) => " NOCREATEROLE", - None => "" + None => "", }, inherit = match *inherit { Some(true) => " INHERIT", Some(false) => " NOINHERIT", - None => "" + None => "", }, login = match *login { Some(true) => " LOGIN", Some(false) => " NOLOGIN", - None => "" + None => "", }, replication = match *replication { Some(true) => " REPLICATION", Some(false) => " NOREPLICATION", - None => "" + None => "", }, bypassrls = match *bypassrls { Some(true) => " BYPASSRLS", Some(false) => " NOBYPASSRLS", - None => "" + None => "", } )?; if let Some(limit) = connection_limit { diff --git a/src/ast/spans.rs b/src/ast/spans.rs index 3de41902d..f28747988 100644 --- a/src/ast/spans.rs +++ b/src/ast/spans.rs @@ -21,21 +21,21 @@ use core::iter; use crate::tokenizer::Span; use super::{ - dcl::SecondaryRoles, AccessExpr, AlterColumnOperation, AlterIndexOperation, - AlterTableOperation, Array, Assignment, AssignmentTarget, CloseCursor, ClusteredIndex, - ColumnDef, ColumnOption, ColumnOptionDef, ConflictTarget, ConnectBy, ConstraintCharacteristics, - CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte, Delete, DoUpdate, - ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable, Function, - FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList, FunctionArguments, - GroupByExpr, HavingBound, IlikeSelectItem, Insert, Interpolate, InterpolateExpr, Join, - JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView, MatchRecognizePattern, - Measure, NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, - OnConflictAction, OnInsert, OrderBy, OrderByExpr, Partition, PivotValueSource, - ProjectionSelect, Query, ReferentialAction, RenameSelectItem, ReplaceSelectElement, - ReplaceSelectItem, Select, SelectInto, SelectItem, SetExpr, SqlOption, Statement, Subscript, - SymbolDefinition, TableAlias, TableAliasColumnDef, TableConstraint, TableFactor, TableObject, - TableOptionsClustered, TableWithJoins, UpdateTableFromKind, Use, Value, Values, ViewColumnDef, - WildcardAdditionalOptions, With, WithFill, + AccessExpr, AlterColumnOperation, AlterIndexOperation, AlterTableOperation, Array, Assignment, + AssignmentTarget, CloseCursor, ClusteredIndex, ColumnDef, ColumnOption, ColumnOptionDef, + ConflictTarget, ConnectBy, ConstraintCharacteristics, CopySource, 
CreateIndex, CreateTable, + CreateTableOptions, Cte, Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, + ExprWithAlias, Fetch, FromTable, Function, FunctionArg, FunctionArgExpr, + FunctionArgumentClause, FunctionArgumentList, FunctionArguments, GroupByExpr, HavingBound, + IlikeSelectItem, Insert, Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, + JsonPath, JsonPathElem, LateralView, MatchRecognizePattern, Measure, NamedWindowDefinition, + ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, OnInsert, OrderBy, + OrderByExpr, Partition, PivotValueSource, ProjectionSelect, Query, ReferentialAction, + RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem, + SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef, + TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins, + UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WildcardAdditionalOptions, With, + WithFill, dcl::SecondaryRoles, }; /// Given an iterator of spans, return the [Span::union] of all spans. @@ -2267,25 +2267,37 @@ pub mod tests { #[test] pub fn test_cte() { let dialect = &GenericDialect; - let mut test = SpanTest::new(dialect, "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); + let mut test = SpanTest::new( + dialect, + "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner", + ); let query = test.0.parse_query().unwrap(); let select_span = query.span(); - assert_eq!(test.get_source(select_span), "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); + assert_eq!( + test.get_source(select_span), + "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner" + ); } #[test] pub fn test_snowflake_lateral_flatten() { let dialect = &SnowflakeDialect; - let mut test = SpanTest::new(dialect, "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); + let mut test = SpanTest::new( + dialect, + "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED", + ); let query = test.0.parse_select().unwrap(); let select_span = query.span(); - assert_eq!(test.get_source(select_span), "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); + assert_eq!( + test.get_source(select_span), + "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED" + ); } #[test] diff --git a/src/ast/visitor.rs b/src/ast/visitor.rs index 457dbbaed..ca29cc7e7 100644 --- a/src/ast/visitor.rs +++ b/src/ast/visitor.rs @@ -876,7 +876,7 @@ mod tests { "POST: EXPR: EMPID", "POST: QUERY: SELECT * FROM monthly_sales PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ORDER BY EMPID", "POST: STATEMENT: SELECT * FROM monthly_sales PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ORDER BY EMPID", - ] + ], ), ( "SHOW COLUMNS FROM t1", diff --git 
a/src/keywords.rs b/src/keywords.rs index d62a038b8..59ef27637 100644 --- a/src/keywords.rs +++ b/src/keywords.rs @@ -36,7 +36,7 @@ use sqlparser_derive::{Visit, VisitMut}; /// Defines a string constant for a single keyword: `kw_def!(SELECT);` /// expands to `pub const SELECT = "SELECT";` macro_rules! kw_def { - ($ident:ident = $string_keyword:expr) => { + ($ident:ident = $string_keyword:expr_2021) => { pub const $ident: &'static str = $string_keyword; }; ($ident:ident) => { @@ -48,7 +48,7 @@ macro_rules! kw_def { /// and defines an ALL_KEYWORDS array of the defined constants. macro_rules! define_keywords { ($( - $ident:ident $(= $string_keyword:expr)? + $ident:ident $(= $string_keyword:expr_2021)? ),*) => { #[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 69268bc51..168d4735e 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -28,15 +28,15 @@ use helpers::attached_token::AttachedToken; use log::debug; -use recursion::RecursionCounter; use IsLateral::*; use IsOptional::*; +use recursion::RecursionCounter; -use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration}; use crate::ast::Statement::CreatePolicy; +use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration}; use crate::ast::*; use crate::dialect::*; -use crate::keywords::{Keyword, ALL_KEYWORDS}; +use crate::keywords::{ALL_KEYWORDS, Keyword}; use crate::tokenizer::*; mod alter; @@ -50,7 +50,7 @@ pub enum ParserError { // Use `Parser::expected` instead, if possible macro_rules! parser_err { - ($MSG:expr, $loc:expr) => { + ($MSG:expr_2021, $loc:expr_2021) => { Err(ParserError::ParserError(format!("{}{}", $MSG, $loc))) }; } @@ -1427,7 +1427,7 @@ impl<'a> Parser<'a> { return Err(ParserError::ParserError(format!( "Unexpected token in unary operator parsing: {:?}", tok - ))) + ))); } }; Ok(Expr::UnaryOp { @@ -2236,7 +2236,7 @@ impl<'a> Parser<'a> { _ => { return Err(ParserError::ParserError( "Scale field can only be of number type".to_string(), - )) + )); } } } else { @@ -3262,8 +3262,8 @@ impl<'a> Parser<'a> { ) { return parser_err!( format!( - "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}" - ), + "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}" + ), span.start ); }; @@ -3702,20 +3702,22 @@ impl<'a> Parser<'a> { /// See [`Self::peek_token`] for an example. pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] { let mut index = self.index; - core::array::from_fn(|_| loop { - let token = self.tokens.get(index); - index += 1; - if let Some(TokenWithSpan { - token: Token::Whitespace(_), - span: _, - }) = token - { - continue; + core::array::from_fn(|_| { + loop { + let token = self.tokens.get(index); + index += 1; + if let Some(TokenWithSpan { + token: Token::Whitespace(_), + span: _, + }) = token + { + continue; + } + break token.cloned().unwrap_or(TokenWithSpan { + token: Token::EOF, + span: Span::empty(), + }); } - break token.cloned().unwrap_or(TokenWithSpan { - token: Token::EOF, - span: Span::empty(), - }); }) } @@ -3725,17 +3727,19 @@ impl<'a> Parser<'a> { /// See [`Self::peek_tokens`] for an example.
pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] { let mut index = self.index; - core::array::from_fn(|_| loop { - let token = self.tokens.get(index); - index += 1; - if let Some(TokenWithSpan { - token: Token::Whitespace(_), - span: _, - }) = token - { - continue; + core::array::from_fn(|_| { + loop { + let token = self.tokens.get(index); + index += 1; + if let Some(TokenWithSpan { + token: Token::Whitespace(_), + span: _, + }) = token + { + continue; + } + break token.unwrap_or(&EOF_TOKEN); } - break token.unwrap_or(&EOF_TOKEN); }) } @@ -5618,19 +5622,23 @@ impl<'a> Parser<'a> { } pub fn parse_owner(&mut self) -> Result<Owner, ParserError> { - let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) { + let owner = match self.parse_one_of_keywords(&[ + Keyword::CURRENT_USER, + Keyword::CURRENT_ROLE, + Keyword::SESSION_USER, + ]) { Some(Keyword::CURRENT_USER) => Owner::CurrentUser, Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole, Some(Keyword::SESSION_USER) => Owner::SessionUser, Some(_) => unreachable!(), - None => { - match self.parse_identifier() { - Ok(ident) => Owner::Ident(ident), - Err(e) => { - return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}"))) - } + None => match self.parse_identifier() { + Ok(ident) => Owner::Ident(ident), + Err(e) => { + return Err(ParserError::ParserError(format!( + "Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}" + ))); } - } + }, }; Ok(owner) } @@ -9215,14 +9223,14 @@ impl<'a> Parser<'a> { return self.expected( "expected to match USE/IGNORE/FORCE keyword", self.peek_token(), - ) + ); } }; let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) { Some(Keyword::INDEX) => TableIndexType::Index, Some(Keyword::KEY) => TableIndexType::Key, _ => { - return self.expected("expected to match INDEX/KEY keyword", self.peek_token()) + return self.expected("expected to match INDEX/KEY keyword", self.peek_token()); } }; let for_clause = if self.parse_keyword(Keyword::FOR) { @@ -9344,7 +9352,7 @@ impl<'a> Parser<'a> { return parser_err!( "BUG: expected to match GroupBy modifier keyword", self.peek_token().span.start - ) + ); } }); } @@ -9547,12 +9555,12 @@ impl<'a> Parser<'a> { Token::EOF => { return Err(ParserError::ParserError( "Empty input when parsing identifier".to_string(), - ))? + ))?; } token => { return Err(ParserError::ParserError(format!( "Unexpected token in identifier: {token}" - )))? + )))?; } }; @@ -9567,12 +9575,12 @@ impl<'a> Parser<'a> { Token::EOF => { return Err(ParserError::ParserError( "Trailing period in identifier".to_string(), - ))? + ))?; } token => { return Err(ParserError::ParserError(format!( "Unexpected token following period in identifier: {token}" - )))? + )))?; } } } @@ -9580,7 +9588,7 @@ impl<'a> Parser<'a> { token => { return Err(ParserError::ParserError(format!( "Unexpected token in identifier: {token}" - )))?
+ )))?; } } } @@ -11376,7 +11384,7 @@ impl<'a> Parser<'a> { _ => { return Err(ParserError::ParserError(format!( "expected OUTER, SEMI, ANTI or JOIN after {kw:?}" - ))) + ))); } } } @@ -14574,7 +14582,7 @@ impl<'a> Parser<'a> { return self.expected( "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW", self.peek_token(), - ) + ); } } } @@ -14648,7 +14656,7 @@ impl Word { #[cfg(test)] mod tests { - use crate::test_utils::{all_dialects, TestedDialects}; + use crate::test_utils::{TestedDialects, all_dialects}; use super::*; @@ -14727,7 +14735,7 @@ mod tests { use crate::test_utils::TestedDialects; macro_rules! test_parse_data_type { - ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{ + ($dialect:expr_2021, $input:expr_2021, $expected_type:expr_2021 $(,)?) => {{ $dialect.run_parser_method(&*$input, |parser| { let data_type = parser.parse_data_type().unwrap(); assert_eq!($expected_type, data_type); @@ -15045,7 +15053,7 @@ mod tests { fn test_parse_schema_name() { // The expected name should be identical as the input name, that's why I don't receive both macro_rules! test_parse_schema_name { - ($input:expr, $expected_name:expr $(,)?) => {{ + ($input:expr_2021, $expected_name:expr_2021 $(,)?) => {{ all_dialects().run_parser_method(&*$input, |parser| { let schema_name = parser.parse_schema_name().unwrap(); // Validate that the structure is the same as expected @@ -15077,7 +15085,7 @@ mod tests { #[test] fn mysql_parse_index_table_constraint() { macro_rules! test_parse_table_constraint { - ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{ + ($dialect:expr_2021, $input:expr_2021, $expected:expr_2021 $(,)?) => {{ $dialect.run_parser_method(&*$input, |parser| { let constraint = parser.parse_optional_table_constraint().unwrap().unwrap(); // Validate that the structure is the same as expected @@ -15255,7 +15263,7 @@ mod tests { #[test] fn test_parse_multipart_identifier_negative() { macro_rules! test_parse_multipart_identifier_error { - ($input:expr, $expected_err:expr $(,)?) => {{ + ($input:expr_2021, $expected_err:expr_2021 $(,)?) 
=> {{ all_dialects().run_parser_method(&*$input, |parser| { let actual_err = parser.parse_multipart_identifier().unwrap_err(); assert_eq!(actual_err.to_string(), $expected_err); diff --git a/src/test_utils.rs b/src/test_utils.rs index 6270ac42b..b53c496d1 100644 --- a/src/test_utils.rs +++ b/src/test_utils.rs @@ -302,10 +302,11 @@ pub fn assert_eq_vec<T: ToString>(expected: &[&str], actual: &[T]) { pub fn only<T>(v: impl IntoIterator<Item = T>) -> T { let mut iter = v.into_iter(); - if let (Some(item), None) = (iter.next(), iter.next()) { - item - } else { - panic!("only called on collection without exactly one item") + match (iter.next(), iter.next()) { + (Some(item), None) => item, + _ => { + panic!("only called on collection without exactly one item") + } } } diff --git a/src/tokenizer.rs b/src/tokenizer.rs index bc0f0efeb..7baf2e64d 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -45,7 +45,7 @@ use crate::dialect::{ BigQueryDialect, DuckDbDialect, GenericDialect, MySqlDialect, PostgreSqlDialect, SnowflakeDialect, }; -use crate::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX}; +use crate::keywords::{ALL_KEYWORDS, ALL_KEYWORDS_INDEX, Keyword}; use crate::{ast::DollarQuotedString, dialect::HiveDialect}; /// SQL Token enumeration @@ -281,26 +281,26 @@ impl fmt::Display for Token { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Token::EOF => f.write_str("EOF"), - Token::Word(ref w) => write!(f, "{w}"), - Token::Number(ref n, l) => write!(f, "{}{long}", n, long = if *l { "L" } else { "" }), - Token::Char(ref c) => write!(f, "{c}"), - Token::SingleQuotedString(ref s) => write!(f, "'{s}'"), - Token::TripleSingleQuotedString(ref s) => write!(f, "'''{s}'''"), - Token::DoubleQuotedString(ref s) => write!(f, "\"{s}\""), - Token::TripleDoubleQuotedString(ref s) => write!(f, "\"\"\"{s}\"\"\""), - Token::DollarQuotedString(ref s) => write!(f, "{s}"), - Token::NationalStringLiteral(ref s) => write!(f, "N'{s}'"), - Token::EscapedStringLiteral(ref s) => write!(f, "E'{s}'"), - Token::UnicodeStringLiteral(ref s) => write!(f, "U&'{s}'"), - Token::HexStringLiteral(ref s) => write!(f, "X'{s}'"), - Token::SingleQuotedByteStringLiteral(ref s) => write!(f, "B'{s}'"), - Token::TripleSingleQuotedByteStringLiteral(ref s) => write!(f, "B'''{s}'''"), - Token::DoubleQuotedByteStringLiteral(ref s) => write!(f, "B\"{s}\""), - Token::TripleDoubleQuotedByteStringLiteral(ref s) => write!(f, "B\"\"\"{s}\"\"\""), - Token::SingleQuotedRawStringLiteral(ref s) => write!(f, "R'{s}'"), - Token::DoubleQuotedRawStringLiteral(ref s) => write!(f, "R\"{s}\""), - Token::TripleSingleQuotedRawStringLiteral(ref s) => write!(f, "R'''{s}'''"), - Token::TripleDoubleQuotedRawStringLiteral(ref s) => write!(f, "R\"\"\"{s}\"\"\""), + Token::Word(w) => write!(f, "{w}"), + Token::Number(n, l) => write!(f, "{}{long}", n, long = if *l { "L" } else { "" }), + Token::Char(c) => write!(f, "{c}"), + Token::SingleQuotedString(s) => write!(f, "'{s}'"), + Token::TripleSingleQuotedString(s) => write!(f, "'''{s}'''"), + Token::DoubleQuotedString(s) => write!(f, "\"{s}\""), + Token::TripleDoubleQuotedString(s) => write!(f, "\"\"\"{s}\"\"\""), + Token::DollarQuotedString(s) => write!(f, "{s}"), + Token::NationalStringLiteral(s) => write!(f, "N'{s}'"), + Token::EscapedStringLiteral(s) => write!(f, "E'{s}'"), + Token::UnicodeStringLiteral(s) => write!(f, "U&'{s}'"), + Token::HexStringLiteral(s) => write!(f, "X'{s}'"), + Token::SingleQuotedByteStringLiteral(s) => write!(f, "B'{s}'"), + Token::TripleSingleQuotedByteStringLiteral(s) => write!(f, 
"B'''{s}'''"), + Token::DoubleQuotedByteStringLiteral(s) => write!(f, "B\"{s}\""), + Token::TripleDoubleQuotedByteStringLiteral(s) => write!(f, "B\"\"\"{s}\"\"\""), + Token::SingleQuotedRawStringLiteral(s) => write!(f, "R'{s}'"), + Token::DoubleQuotedRawStringLiteral(s) => write!(f, "R\"{s}\""), + Token::TripleSingleQuotedRawStringLiteral(s) => write!(f, "R'''{s}'''"), + Token::TripleDoubleQuotedRawStringLiteral(s) => write!(f, "R\"\"\"{s}\"\"\""), Token::Comma => f.write_str(","), Token::Whitespace(ws) => write!(f, "{ws}"), Token::DoubleEq => f.write_str("=="), @@ -368,7 +368,7 @@ impl fmt::Display for Token { Token::TildeEqual => f.write_str("~="), Token::ShiftLeftVerticalBar => f.write_str("<<|"), Token::VerticalBarShiftRight => f.write_str("|>>"), - Token::Placeholder(ref s) => write!(f, "{s}"), + Token::Placeholder(s) => write!(f, "{s}"), Token::Arrow => write!(f, "->"), Token::LongArrow => write!(f, "->>"), Token::HashArrow => write!(f, "#>"), @@ -2209,11 +2209,7 @@ impl<'a: 'b, 'b> Unescape<'a, 'b> { #[inline] fn check_null(c: char) -> Option { - if c == '\0' { - None - } else { - Some(c) - } + if c == '\0' { None } else { Some(c) } } #[inline] @@ -2223,11 +2219,7 @@ impl<'a: 'b, 'b> Unescape<'a, 'b> { Err(_) => None, Ok(n) => { let n = n & 0xFF; - if n <= 127 { - char::from_u32(n) - } else { - None - } + if n <= 127 { char::from_u32(n) } else { None } } } } @@ -2807,15 +2799,18 @@ mod tests { fn tokenize_dollar_quoted_string_tagged() { let test_cases = vec![ ( - String::from("SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$tag$"), + String::from( + "SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$tag$", + ), vec![ Token::make_keyword("SELECT"), Token::Whitespace(Whitespace::Space), Token::DollarQuotedString(DollarQuotedString { - value: "dollar '$' quoted strings have $tags like this$ or like this $$".into(), + value: "dollar '$' quoted strings have $tags like this$ or like this $$" + .into(), tag: Some("tag".into()), - }) - ] + }), + ], ), ( String::from("SELECT $abc$x$ab$abc$"), @@ -2825,8 +2820,8 @@ mod tests { Token::DollarQuotedString(DollarQuotedString { value: "x$ab".into(), tag: Some("abc".into()), - }) - ] + }), + ], ), ( String::from("SELECT $abc$$abc$"), @@ -2836,8 +2831,8 @@ mod tests { Token::DollarQuotedString(DollarQuotedString { value: "".into(), tag: Some("abc".into()), - }) - ] + }), + ], ), ( String::from("0$abc$$abc$1"), @@ -2848,16 +2843,14 @@ mod tests { tag: Some("abc".into()), }), Token::Number("1".into(), false), - ] + ], ), ( String::from("$function$abc$q$data$q$$function$"), - vec![ - Token::DollarQuotedString(DollarQuotedString { - value: "abc$q$data$q$".into(), - tag: Some("function".into()), - }), - ] + vec![Token::DollarQuotedString(DollarQuotedString { + value: "abc$q$data$q$".into(), + tag: Some("function".into()), + })], ), ]; @@ -2870,7 +2863,9 @@ mod tests { #[test] fn tokenize_dollar_quoted_string_tagged_unterminated() { - let sql = String::from("SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$different tag$"); + let sql = String::from( + "SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$different tag$", + ); let dialect = GenericDialect {}; assert_eq!( Tokenizer::new(&dialect, &sql).tokenize(), diff --git a/tests/sqlparser_clickhouse.rs b/tests/sqlparser_clickhouse.rs index 34f684c69..0343220c2 100644 --- a/tests/sqlparser_clickhouse.rs +++ b/tests/sqlparser_clickhouse.rs @@ -994,7 +994,7 @@ fn parse_select_parametric_function() { 
match &projection[0] { UnnamedExpr(Expr::Function(f)) => { let args = match &f.args { - FunctionArguments::List(ref args) => args, + FunctionArguments::List(args) => args, _ => unreachable!(), }; assert_eq!(args.args.len(), 2); diff --git a/tests/sqlparser_common.rs b/tests/sqlparser_common.rs index 653142dc6..fb81d59f3 100644 --- a/tests/sqlparser_common.rs +++ b/tests/sqlparser_common.rs @@ -35,13 +35,13 @@ use sqlparser::dialect::{ GenericDialect, HiveDialect, MsSqlDialect, MySqlDialect, PostgreSqlDialect, RedshiftSqlDialect, SQLiteDialect, SnowflakeDialect, }; -use sqlparser::keywords::{Keyword, ALL_KEYWORDS}; +use sqlparser::keywords::{ALL_KEYWORDS, Keyword}; use sqlparser::parser::{Parser, ParserError, ParserOptions}; use sqlparser::tokenizer::Tokenizer; use sqlparser::tokenizer::{Location, Span}; use test_utils::{ - all_dialects, all_dialects_where, alter_table_op, assert_eq_vec, call, expr_from_projection, - join, number, only, table, table_alias, table_from_name, TestedDialects, + TestedDialects, all_dialects, all_dialects_where, alter_table_op, assert_eq_vec, call, + expr_from_projection, join, number, only, table, table_alias, table_from_name, }; #[macro_use] @@ -1122,10 +1122,8 @@ fn parse_column_aliases() { let sql = "SELECT a.col + 1 AS newname FROM foo AS a"; let select = verified_only_select(sql); if let SelectItem::ExprWithAlias { - expr: Expr::BinaryOp { - ref op, ref right, .. - }, - ref alias, + expr: Expr::BinaryOp { op, right, .. }, + alias, } = only(&select.projection) { assert_eq!(&BinaryOperator::Plus, op); @@ -3280,16 +3278,17 @@ fn test_double_value() { for (input, expected) in test_cases { for (i, expr) in input.iter().enumerate() { - if let Statement::Query(query) = - dialects.one_statement_parses_to(&format!("SELECT {}", expr), "") - { - if let SetExpr::Select(select) = *query.body { - assert_eq!(expected[i], select.projection[0]); - } else { + match dialects.one_statement_parses_to(&format!("SELECT {}", expr), "") { + Statement::Query(query) => { + if let SetExpr::Select(select) = *query.body { + assert_eq!(expected[i], select.projection[0]); + } else { + panic!("Expected a SELECT statement"); + } + } + _ => { panic!("Expected a SELECT statement"); } - } else { - panic!("Expected a SELECT statement"); } } } @@ -3553,16 +3552,18 @@ fn parse_create_table() { } let res = parse_sql_statements("CREATE TABLE t (a int NOT NULL GARBAGE)"); - assert!(res - .unwrap_err() - .to_string() - .contains("Expected: \',\' or \')\' after column definition, found: GARBAGE")); + assert!( + res.unwrap_err() + .to_string() + .contains("Expected: \',\' or \')\' after column definition, found: GARBAGE") + ); let res = parse_sql_statements("CREATE TABLE t (a int NOT NULL CONSTRAINT foo)"); - assert!(res - .unwrap_err() - .to_string() - .contains("Expected: constraint details after CONSTRAINT ")); + assert!( + res.unwrap_err() + .to_string() + .contains("Expected: constraint details after CONSTRAINT ") + ); } #[test] @@ -3695,28 +3696,31 @@ fn parse_create_table_with_constraint_characteristics() { a int NOT NULL, FOREIGN KEY (a) REFERENCES othertable4(a) ON DELETE CASCADE ON UPDATE SET DEFAULT DEFERRABLE INITIALLY IMMEDIATE NOT DEFERRABLE, \ )"); - assert!(res - .unwrap_err() - .to_string() - .contains("Expected: \',\' or \')\' after column definition, found: NOT")); + assert!( + res.unwrap_err() + .to_string() + .contains("Expected: \',\' or \')\' after column definition, found: NOT") + ); let res = parse_sql_statements("CREATE TABLE t ( a int NOT NULL, FOREIGN KEY (a) REFERENCES 
othertable4(a) ON DELETE CASCADE ON UPDATE SET DEFAULT NOT ENFORCED INITIALLY DEFERRED ENFORCED, \ )"); - assert!(res - .unwrap_err() - .to_string() - .contains("Expected: \',\' or \')\' after column definition, found: ENFORCED")); + assert!( + res.unwrap_err() + .to_string() + .contains("Expected: \',\' or \')\' after column definition, found: ENFORCED") + ); let res = parse_sql_statements("CREATE TABLE t ( a int NOT NULL, FOREIGN KEY (lat) REFERENCES othertable4(lat) ON DELETE CASCADE ON UPDATE SET DEFAULT INITIALLY DEFERRED INITIALLY IMMEDIATE, \ )"); - assert!(res - .unwrap_err() - .to_string() - .contains("Expected: \',\' or \')\' after column definition, found: INITIALLY")); + assert!( + res.unwrap_err() + .to_string() + .contains("Expected: \',\' or \')\' after column definition, found: INITIALLY") + ); } #[test] @@ -3803,10 +3807,11 @@ fn parse_create_table_column_constraint_characteristics() { let res = parse_sql_statements( "CREATE TABLE t (a int NOT NULL UNIQUE DEFERRABLE INITIALLY BADVALUE)", ); - assert!(res - .unwrap_err() - .to_string() - .contains("Expected: one of DEFERRED or IMMEDIATE, found: BADVALUE")); + assert!( + res.unwrap_err() + .to_string() + .contains("Expected: one of DEFERRED or IMMEDIATE, found: BADVALUE") + ); let res = parse_sql_statements( "CREATE TABLE t (a int NOT NULL UNIQUE INITIALLY IMMEDIATE DEFERRABLE INITIALLY DEFERRED)", @@ -6817,7 +6822,7 @@ fn parse_joins_using() { }, global: false, join_operator: f(JoinConstraint::Using(vec![ObjectName::from(vec![ - "c1".into() + "c1".into(), ])])), } } @@ -7071,7 +7076,7 @@ fn parse_ctes() { let sql = &format!("SELECT ({with})"); let select = verified_only_select(sql); match expr_from_projection(only(&select.projection)) { - Expr::Subquery(ref subquery) => { + Expr::Subquery(subquery) => { assert_ctes_in_select(&cte_sqls, subquery.as_ref()); } _ => panic!("Expected: subquery"), @@ -7210,8 +7215,12 @@ fn parse_union_except_intersect_minus() { verified_stmt("SELECT 1 UNION (SELECT 2 ORDER BY 1 LIMIT 1)"); verified_stmt("SELECT 1 UNION SELECT 2 INTERSECT SELECT 3"); // Union[1, Intersect[2,3]] verified_stmt("SELECT foo FROM tab UNION SELECT bar FROM TAB"); - verified_stmt("(SELECT * FROM new EXCEPT SELECT * FROM old) UNION ALL (SELECT * FROM old EXCEPT SELECT * FROM new) ORDER BY 1"); - verified_stmt("(SELECT * FROM new EXCEPT DISTINCT SELECT * FROM old) UNION DISTINCT (SELECT * FROM old EXCEPT DISTINCT SELECT * FROM new) ORDER BY 1"); + verified_stmt( + "(SELECT * FROM new EXCEPT SELECT * FROM old) UNION ALL (SELECT * FROM old EXCEPT SELECT * FROM new) ORDER BY 1", + ); + verified_stmt( + "(SELECT * FROM new EXCEPT DISTINCT SELECT * FROM old) UNION DISTINCT (SELECT * FROM old EXCEPT DISTINCT SELECT * FROM new) ORDER BY 1", + ); verified_stmt("SELECT 1 AS x, 2 AS y EXCEPT BY NAME SELECT 9 AS y, 8 AS x"); verified_stmt("SELECT 1 AS x, 2 AS y EXCEPT ALL BY NAME SELECT 9 AS y, 8 AS x"); verified_stmt("SELECT 1 AS x, 2 AS y EXCEPT DISTINCT BY NAME SELECT 9 AS y, 8 AS x"); @@ -8025,7 +8034,9 @@ fn parse_fetch() { }, _ => panic!("Test broke"), } - let ast = verified_query("SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY) OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY"); + let ast = verified_query( + "SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY) OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY", + ); assert_eq!( ast.offset, Some(Offset { @@ -10511,9 +10522,11 @@ fn parse_non_latin_identifiers() { supported_dialects.verified_stmt("SELECT a.説明 FROM test.public.inter01 AS a"); 
supported_dialects.verified_stmt("SELECT a.説明 FROM inter01 AS a, inter01_transactions AS b WHERE a.説明 = b.取引 GROUP BY a.説明"); supported_dialects.verified_stmt("SELECT 説明, hühnervögel, garçon, Москва, 東京 FROM inter01"); - assert!(supported_dialects - .parse_sql_statements("SELECT 💝 FROM table1") - .is_err()); + assert!( + supported_dialects + .parse_sql_statements("SELECT 💝 FROM table1") + .is_err() + ); } #[test] @@ -12708,7 +12721,9 @@ fn parse_method_select() { let _ = verified_only_select( "SELECT LEFT('abc', 1).value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)') AS T", ); - let _ = verified_only_select("SELECT STUFF((SELECT ',' + name FROM sys.objects FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '') AS T"); + let _ = verified_only_select( + "SELECT STUFF((SELECT ',' + name FROM sys.objects FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '') AS T", + ); let _ = verified_only_select("SELECT CAST(column AS XML).value('.', 'NVARCHAR(MAX)') AS T"); // `CONVERT` support @@ -13661,11 +13676,12 @@ fn parse_select_without_projection() { #[test] fn parse_update_from_before_select() { - verified_stmt("UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name WHERE t1.id = t2.id"); + verified_stmt( + "UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name WHERE t1.id = t2.id", + ); verified_stmt("UPDATE t1 FROM U, (SELECT id FROM V) AS W SET a = b WHERE 1 = 1"); - let query = - "UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name FROM (SELECT name from t2) AS t2"; + let query = "UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name FROM (SELECT name from t2) AS t2"; assert_eq!( ParserError::ParserError("Expected: end of statement, found: FROM".to_string()), parse_sql_statements(query).unwrap_err() @@ -13673,7 +13689,9 @@ fn parse_update_from_before_select() { } #[test] fn parse_overlaps() { - verified_stmt("SELECT (DATE '2016-01-10', DATE '2016-02-01') OVERLAPS (DATE '2016-01-20', DATE '2016-02-10')"); + verified_stmt( + "SELECT (DATE '2016-01-10', DATE '2016-02-01') OVERLAPS (DATE '2016-01-20', DATE '2016-02-10')", + ); } #[test] diff --git a/tests/sqlparser_mssql.rs b/tests/sqlparser_mssql.rs index 7b1277599..9a40d1291 100644 --- a/tests/sqlparser_mssql.rs +++ b/tests/sqlparser_mssql.rs @@ -713,9 +713,10 @@ fn parse_for_clause() { #[test] fn dont_parse_trailing_for() { - assert!(ms() - .run_parser_method("SELECT * FROM foo FOR", |p| p.parse_query()) - .is_err()); + assert!( + ms().run_parser_method("SELECT * FROM foo FOR", |p| p.parse_query()) + .is_err() + ); } #[test] @@ -1342,71 +1343,72 @@ fn parse_create_table_with_valid_options() { value: "ROUND_ROBIN".to_string(), quote_style: None, span: Span::empty(), - }) + }), }, SqlOption::Partition { column_name: "column_a".into(), range_direction: None, - for_values: vec![Expr::Value(test_utils::number("10")), Expr::Value(test_utils::number("11"))] , + for_values: vec![ + Expr::Value(test_utils::number("10")), + Expr::Value(test_utils::number("11")), + ], }, ], ), ( "CREATE TABLE mytable (column_a INT, column_b INT, column_c INT) WITH (PARTITION (column_a RANGE LEFT FOR VALUES (10, 11)))", - vec![ - SqlOption::Partition { - column_name: "column_a".into(), - range_direction: Some(PartitionRangeDirection::Left), - for_values: vec![ - Expr::Value(test_utils::number("10")), - Expr::Value(test_utils::number("11")), - ], - } - ], + vec![SqlOption::Partition { + column_name: "column_a".into(), + range_direction: 
Some(PartitionRangeDirection::Left), + for_values: vec![ + Expr::Value(test_utils::number("10")), + Expr::Value(test_utils::number("11")), + ], + }], ), ( "CREATE TABLE mytable (column_a INT, column_b INT, column_c INT) WITH (CLUSTERED COLUMNSTORE INDEX)", - vec![SqlOption::Clustered(TableOptionsClustered::ColumnstoreIndex)], + vec![SqlOption::Clustered( + TableOptionsClustered::ColumnstoreIndex, + )], ), ( "CREATE TABLE mytable (column_a INT, column_b INT, column_c INT) WITH (CLUSTERED COLUMNSTORE INDEX ORDER (column_a, column_b))", - vec![ - SqlOption::Clustered(TableOptionsClustered::ColumnstoreIndexOrder(vec![ + vec![SqlOption::Clustered( + TableOptionsClustered::ColumnstoreIndexOrder(vec![ "column_a".into(), "column_b".into(), - ])) - ], + ]), + )], ), ( "CREATE TABLE mytable (column_a INT, column_b INT, column_c INT) WITH (CLUSTERED INDEX (column_a ASC, column_b DESC, column_c))", - vec![ - SqlOption::Clustered(TableOptionsClustered::Index(vec![ - ClusteredIndex { - name: Ident { - value: "column_a".to_string(), - quote_style: None, - span: Span::empty(), - }, - asc: Some(true), - }, - ClusteredIndex { - name: Ident { - value: "column_b".to_string(), - quote_style: None, - span: Span::empty(), - }, - asc: Some(false), - }, - ClusteredIndex { - name: Ident { - value: "column_c".to_string(), - quote_style: None, - span: Span::empty(), - }, - asc: None, - }, - ])) - ], + vec![SqlOption::Clustered(TableOptionsClustered::Index(vec![ + ClusteredIndex { + name: Ident { + value: "column_a".to_string(), + quote_style: None, + span: Span::empty(), + }, + asc: Some(true), + }, + ClusteredIndex { + name: Ident { + value: "column_b".to_string(), + quote_style: None, + span: Span::empty(), + }, + asc: Some(false), + }, + ClusteredIndex { + name: Ident { + value: "column_c".to_string(), + quote_style: None, + span: Span::empty(), + }, + asc: None, + }, + ]))], ), ( "CREATE TABLE mytable (column_a INT, column_b INT, column_c INT) WITH (DISTRIBUTION = HASH(column_a, column_b), HEAP)", @@ -1417,59 +1419,43 @@ fn parse_create_table_with_valid_options() { quote_style: None, span: Span::empty(), }, - value: Expr::Function( - Function { - name: ObjectName::from( - vec![ + value: Expr::Function(Function { + name: ObjectName::from(vec![Ident { + value: "HASH".to_string(), + quote_style: None, + span: Span::empty(), + }]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + args: FunctionArguments::List(FunctionArgumentList { + duplicate_treatment: None, + args: vec![ + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier( Ident { - value: "HASH".to_string(), + value: "column_a".to_string(), quote_style: None, span: Span::empty(), }, - ], - ), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::List( - FunctionArgumentList { - duplicate_treatment: None, - args: vec![ - FunctionArg::Unnamed( - FunctionArgExpr::Expr( - Expr::Identifier( - Ident { - value: "column_a".to_string(), - quote_style: None, - span: Span::empty(), - }, - ), - ), - ), - FunctionArg::Unnamed( - FunctionArgExpr::Expr( - Expr::Identifier( - Ident { - value: "column_b".to_string(), - quote_style: None, - span: Span::empty(), - }, - ), - ), - ), - ], - clauses: vec![], - }, - ), - filter: None, - null_treatment: None, - over: None, - within_group: vec![], - }, - ), + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier( + Ident { + value: "column_b".to_string(), + quote_style: None, + span: Span::empty(), + }, + ))), + ], + clauses: vec![], + }), + filter: None, + 
null_treatment: None, + over: None, + within_group: vec![], + }), }, SqlOption::Ident("HEAP".into()), ], - ), + ), ]; for (sql, with_options) in options { @@ -1585,7 +1571,6 @@ fn parse_create_table_with_invalid_options() { "Expected: ), found: INDEX", ), ( - "CREATE TABLE mytable (column_a INT, column_b INT, column_c INT) WITH (PARTITION (RANGE LEFT FOR VALUES (10, 11)))", "Expected: RANGE, found: LEFT", ), diff --git a/tests/sqlparser_mysql.rs b/tests/sqlparser_mysql.rs index 44c8350fa..f82b8e081 100644 --- a/tests/sqlparser_mysql.rs +++ b/tests/sqlparser_mysql.rs @@ -517,22 +517,32 @@ fn parse_show_tables() { #[test] fn parse_show_extended_full() { - assert!(mysql_and_generic() - .parse_sql_statements("SHOW EXTENDED FULL TABLES") - .is_ok()); - assert!(mysql_and_generic() - .parse_sql_statements("SHOW EXTENDED FULL COLUMNS FROM mytable") - .is_ok()); + assert!( + mysql_and_generic() + .parse_sql_statements("SHOW EXTENDED FULL TABLES") + .is_ok() + ); + assert!( + mysql_and_generic() + .parse_sql_statements("SHOW EXTENDED FULL COLUMNS FROM mytable") + .is_ok() + ); // SHOW EXTENDED/FULL can only be used with COLUMNS and TABLES - assert!(mysql_and_generic() - .parse_sql_statements("SHOW EXTENDED FULL CREATE TABLE mytable") - .is_err()); - assert!(mysql_and_generic() - .parse_sql_statements("SHOW EXTENDED FULL COLLATION") - .is_err()); - assert!(mysql_and_generic() - .parse_sql_statements("SHOW EXTENDED FULL VARIABLES") - .is_err()); + assert!( + mysql_and_generic() + .parse_sql_statements("SHOW EXTENDED FULL CREATE TABLE mytable") + .is_err() + ); + assert!( + mysql_and_generic() + .parse_sql_statements("SHOW EXTENDED FULL COLLATION") + .is_err() + ); + assert!( + mysql_and_generic() + .parse_sql_statements("SHOW EXTENDED FULL VARIABLES") + .is_err() + ); } #[test] @@ -3043,10 +3053,12 @@ fn parse_grant() { ); assert!(!with_grant_option); assert!(granted_by.is_none()); - if let [Grantee { - grantee_type: GranteesType::None, - name: Some(GranteeName::UserHost { user, host }), - }] = grantees.as_slice() + if let [ + Grantee { + grantee_type: GranteesType::None, + name: Some(GranteeName::UserHost { user, host }), + }, + ] = grantees.as_slice() { assert_eq!(user.value, "jeffrey"); assert_eq!(user.quote_style, Some('\'')); @@ -3085,10 +3097,12 @@ fn parse_revoke() { "*".into() ])])) ); - if let [Grantee { - grantee_type: GranteesType::None, - name: Some(GranteeName::UserHost { user, host }), - }] = grantees.as_slice() + if let [ + Grantee { + grantee_type: GranteesType::None, + name: Some(GranteeName::UserHost { user, host }), + }, + ] = grantees.as_slice() { assert_eq!(user.value, "jeffrey"); assert_eq!(user.quote_style, Some('\'')); diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs index 312ce1186..6460e9d4e 100644 --- a/tests/sqlparser_postgres.rs +++ b/tests/sqlparser_postgres.rs @@ -1802,9 +1802,9 @@ fn parse_pg_on_conflict() { assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from( - vec!["dname".into()] - )), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + "dname".into() + ])), value: Expr::CompoundIdentifier(vec!["EXCLUDED".into(), "dname".into()]) },], selection: None @@ -1896,9 +1896,9 @@ fn parse_pg_on_conflict() { assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from( - vec!["dname".into()] - )), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + "dname".into() + ])), 
value: Expr::Value(Value::Placeholder("$1".to_string())) },], selection: Some(Expr::BinaryOp { @@ -1939,9 +1939,9 @@ fn parse_pg_on_conflict() { assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from( - vec!["dname".into()] - )), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + "dname".into() + ])), value: Expr::Value(Value::Placeholder("$1".to_string())) },], selection: Some(Expr::BinaryOp { @@ -2963,13 +2963,25 @@ fn test_json() { #[test] fn test_fn_arg_with_value_operator() { match pg().verified_expr("JSON_OBJECT('name' VALUE 'value')") { - Expr::Function(Function { args: FunctionArguments::List(FunctionArgumentList { args, .. }), .. }) => { - assert!(matches!( - &args[..], - &[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }] - ), "Invalid function argument: {:?}", args); + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, .. }), + .. + }) => { + assert!( + matches!( + &args[..], + &[FunctionArg::ExprNamed { + operator: FunctionArgOperator::Value, + .. + }] + ), + "Invalid function argument: {:?}", + args + ); } - other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"), + other => panic!( + "Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}" + ), } } @@ -3365,12 +3377,14 @@ fn parse_create_role() { let sql = "CREATE ROLE abc LOGIN PASSWORD NULL"; match pg().parse_sql_statements(sql).as_deref() { Ok( - [Statement::CreateRole { - names, - login, - password, - .. - }], + [ + Statement::CreateRole { + names, + login, + password, + .. + }, + ], ) => { assert_eq_vec(&["abc"], names); assert_eq!(*login, Some(true)); @@ -3382,12 +3396,14 @@ fn parse_create_role() { let sql = "CREATE ROLE abc WITH LOGIN PASSWORD NULL"; match pg().parse_sql_statements(sql).as_deref() { Ok( - [Statement::CreateRole { - names, - login, - password, - .. - }], + [ + Statement::CreateRole { + names, + login, + password, + .. + }, + ], ) => { assert_eq_vec(&["abc"], names); assert_eq!(*login, Some(true)); @@ -3400,26 +3416,28 @@ fn parse_create_role() { // Roundtrip order of optional parameters is not preserved match pg().parse_sql_statements(sql).as_deref() { Ok( - [Statement::CreateRole { - names, - if_not_exists, - bypassrls, - login, - inherit, - password, - superuser, - create_db, - create_role, - replication, - connection_limit, - valid_until, - in_role, - in_group, - role, - user: _, - admin, - authorization_owner, - }], + [ + Statement::CreateRole { + names, + if_not_exists, + bypassrls, + login, + inherit, + password, + superuser, + create_db, + create_role, + replication, + connection_limit, + valid_until, + in_role, + in_group, + role, + user: _, + admin, + authorization_owner, + }, + ], ) => { assert_eq_vec(&["magician"], names); assert!(!*if_not_exists); @@ -3453,9 +3471,11 @@ fn parse_create_role() { let sql = "CREATE ROLE abc WITH USER foo, bar ROLE baz "; match pg().parse_sql_statements(sql).as_deref() { Ok( - [Statement::CreateRole { - names, user, role, .. - }], + [ + Statement::CreateRole { + names, user, role, .. 
+ }, + ], ) => { assert_eq_vec(&["abc"], names); assert_eq_vec(&["foo", "bar"], user); @@ -3477,7 +3497,9 @@ fn parse_create_role() { for negatable_kw in negatables.iter() { let sql = format!("CREATE ROLE abc {negatable_kw} NO{negatable_kw}"); if pg().parse_sql_statements(&sql).is_ok() { - panic!("Should not be able to parse CREATE ROLE containing both negated and non-negated versions of the same keyword: {negatable_kw}") + panic!( + "Should not be able to parse CREATE ROLE containing both negated and non-negated versions of the same keyword: {negatable_kw}" + ) } } } @@ -4917,16 +4939,16 @@ fn parse_create_trigger_invalid_cases() { let invalid_cases = vec![ ( "CREATE TRIGGER check_update BEFORE UPDATE ON accounts FUNCTION check_account_update", - "Expected: FOR, found: FUNCTION" + "Expected: FOR, found: FUNCTION", ), ( "CREATE TRIGGER check_update TOMORROW UPDATE ON accounts EXECUTE FUNCTION check_account_update", - "Expected: one of BEFORE or AFTER or INSTEAD, found: TOMORROW" + "Expected: one of BEFORE or AFTER or INSTEAD, found: TOMORROW", ), ( "CREATE TRIGGER check_update BEFORE SAVE ON accounts EXECUTE FUNCTION check_account_update", - "Expected: one of INSERT or UPDATE or DELETE or TRUNCATE, found: SAVE" - ) + "Expected: one of INSERT or UPDATE or DELETE or TRUNCATE, found: SAVE", + ), ]; for (sql, expected_error) in invalid_cases { diff --git a/tests/sqlparser_redshift.rs b/tests/sqlparser_redshift.rs index c4b897f01..eeea06b05 100644 --- a/tests/sqlparser_redshift.rs +++ b/tests/sqlparser_redshift.rs @@ -381,7 +381,9 @@ fn test_parse_nested_quoted_identifier() { // trim spaces redshift().one_statement_parses_to(r#"SELECT 1 AS [ " 1 " ]"#, r#"SELECT 1 AS [" 1 "]"#); // invalid query - assert!(redshift() - .parse_sql_statements(r#"SELECT 1 AS ["1]"#) - .is_err()); + assert!( + redshift() + .parse_sql_statements(r#"SELECT 1 AS ["1]"#) + .is_err() + ); } diff --git a/tests/sqlparser_regression.rs b/tests/sqlparser_regression.rs index 55e03c45b..388530e39 100644 --- a/tests/sqlparser_regression.rs +++ b/tests/sqlparser_regression.rs @@ -21,7 +21,7 @@ use sqlparser::dialect::GenericDialect; use sqlparser::parser::Parser; macro_rules! 
tpch_tests { - ($($name:ident: $value:expr,)*) => { + ($($name:ident: $value:expr_2021,)*) => { const QUERIES: &[&str] = &[ $(include_str!(concat!("queries/tpch/", $value, ".sql"))),* ]; diff --git a/tests/sqlparser_snowflake.rs b/tests/sqlparser_snowflake.rs index 12796bb65..b98831f73 100644 --- a/tests/sqlparser_snowflake.rs +++ b/tests/sqlparser_snowflake.rs @@ -925,13 +925,17 @@ fn test_snowflake_create_iceberg_table_without_location() { #[test] fn parse_sf_create_or_replace_view_with_comment_missing_equal() { - assert!(snowflake_and_generic() - .parse_sql_statements("CREATE OR REPLACE VIEW v COMMENT = 'hello, world' AS SELECT 1") - .is_ok()); + assert!( + snowflake_and_generic() + .parse_sql_statements("CREATE OR REPLACE VIEW v COMMENT = 'hello, world' AS SELECT 1") + .is_ok() + ); - assert!(snowflake_and_generic() - .parse_sql_statements("CREATE OR REPLACE VIEW v COMMENT 'hello, world' AS SELECT 1") - .is_err()); + assert!( + snowflake_and_generic() + .parse_sql_statements("CREATE OR REPLACE VIEW v COMMENT 'hello, world' AS SELECT 1") + .is_err() + ); } #[test] @@ -2069,7 +2073,11 @@ fn test_copy_into() { }; assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); - let sql = concat!("COPY INTO 's3://a/b/c/data.parquet' ", "FROM db.sc.tbl ", "PARTITION BY ('date=' || to_varchar(dt, 'YYYY-MM-DD') || '/hour=' || to_varchar(date_part(hour, ts)))"); + let sql = concat!( + "COPY INTO 's3://a/b/c/data.parquet' ", + "FROM db.sc.tbl ", + "PARTITION BY ('date=' || to_varchar(dt, 'YYYY-MM-DD') || '/hour=' || to_varchar(date_part(hour, ts)))" + ); match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { kind, @@ -3024,8 +3032,7 @@ fn view_comment_option_should_be_after_column_list() { "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') COMMENT = 'Comment' AS SELECT a FROM t", "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') WITH (foo = bar) COMMENT = 'Comment' AS SELECT a FROM t", ] { - snowflake_and_generic() - .verified_stmt(sql); + snowflake_and_generic().verified_stmt(sql); } } @@ -3320,9 +3327,11 @@ fn test_sql_keywords_as_select_item_aliases() { "WITH", ]; for kw in reserved_kws { - assert!(snowflake() - .parse_sql_statements(&format!("SELECT 1 {kw}")) - .is_err()); + assert!( + snowflake() + .parse_sql_statements(&format!("SELECT 1 {kw}")) + .is_err() + ); } } diff --git a/tests/test_utils/mod.rs b/tests/test_utils/mod.rs index 4bb0b1151..d7d03df51 100644 --- a/tests/test_utils/mod.rs +++ b/tests/test_utils/mod.rs @@ -30,7 +30,7 @@ pub use sqlparser::test_utils::*; #[macro_export] macro_rules! nest { - ($base:expr $(, $join:expr)*) => { + ($base:expr_2021 $(, $join:expr_2021)*) => { TableFactor::NestedJoin { table_with_joins: Box::new(TableWithJoins { relation: $base, joins: vec![$(join($join)),*]