proto PR for #870 (#894)

Closed · wants to merge 13 commits
2 changes: 2 additions & 0 deletions src/ast/value.rs
```diff
@@ -54,6 +54,7 @@ pub enum Value {
     HexStringLiteral(String),
 
     DoubleQuotedString(String),
+    OriginalString(String),
     /// Boolean value true or false
     Boolean(bool),
     /// `NULL` value
@@ -70,6 +71,7 @@ impl fmt::Display for Value {
             Value::Number(v, l) => write!(f, "{}{long}", v, long = if *l { "L" } else { "" }),
             Value::DoubleQuotedString(v) => write!(f, "\"{v}\""),
             Value::SingleQuotedString(v) => write!(f, "'{}'", escape_single_quote_string(v)),
+            Value::OriginalString(v) => write!(f, "{}", v),
             Value::DollarQuotedString(v) => write!(f, "{v}"),
             Value::EscapedStringLiteral(v) => write!(f, "E'{}'", escape_escaped_string(v)),
             Value::NationalStringLiteral(v) => write!(f, "N'{v}'"),
```
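For context on the new `Display` arm: a minimal sketch (compiling only against this branch) of the intended round-trip, assuming `OriginalString` stores the literal exactly as it appeared in the input, surrounding quotes and escape characters included, which is what the bare `write!(f, "{}", v)` suggests:

```rust
use sqlparser::ast::Value;

fn main() {
    // Existing behavior: Display re-quotes the contents and doubles single quotes.
    let escaped = Value::SingleQuotedString("it's".to_string());
    assert_eq!(escaped.to_string(), "'it''s'");

    // With this PR: the stored text is written back out verbatim
    // (assumption: the tokenizer keeps the original quotes and backslashes).
    let original = Value::OriginalString(r"'it\'s'".to_string());
    assert_eq!(original.to_string(), r"'it\'s'");
}
```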
2 changes: 1 addition & 1 deletion src/ast/visitor.rs
```diff
@@ -632,7 +632,7 @@ mod tests {
 
     fn do_visit(sql: &str) -> Vec<String> {
         let dialect = GenericDialect {};
-        let mut tokenizer = Tokenizer::new(&dialect, sql);
+        let mut tokenizer = Tokenizer::new(&dialect, sql, &TokenizerOptions { no_escape: false });
         let tokens = tokenizer.tokenize().unwrap();
         let s = Parser::new(&dialect)
             .with_tokens(tokens)
```
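The updated test pins down the new three-argument `Tokenizer::new` signature. A hedged sketch of calling it directly, assuming `TokenizerOptions` is exported from `sqlparser::tokenizer` alongside `Tokenizer`; the exact effect of `no_escape: true` (presumably: leave backslash sequences in the token text as written instead of interpreting them) comes from the rest of this PR, not from this hunk:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::tokenizer::{Tokenizer, TokenizerOptions};

fn main() {
    let dialect = GenericDialect {};
    let sql = r"SELECT 'tab\tseparated'";

    // The third argument is new in this PR; no_escape: false should reproduce
    // the previous two-argument behavior.
    let mut tokenizer = Tokenizer::new(&dialect, sql, &TokenizerOptions { no_escape: true });
    let tokens = tokenizer.tokenize().unwrap();
    println!("{tokens:?}");
}
```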
9 changes: 7 additions & 2 deletions src/parser.rs
```diff
@@ -198,6 +198,7 @@ const DEFAULT_REMAINING_DEPTH: usize = 50;
 #[derive(Debug, Default, Clone, PartialEq, Eq)]
 pub struct ParserOptions {
     pub trailing_commas: bool,
+    pub no_escape: bool,
 }
 
 pub struct Parser<'a> {
@@ -207,7 +208,7 @@ pub struct Parser<'a> {
     /// The current dialect to use
     dialect: &'a dyn Dialect,
     /// Additional options that allow you to mix & match behavior otherwise
-    /// constrained to certain dialects (e.g. trailing commas)
+    /// constrained to certain dialects (e.g. trailing commas) and/or tokenization behavior (e.g. no escape)
     options: ParserOptions,
     /// ensure the stack does not overflow by limiting recursion depth
     recursion_counter: RecursionCounter,
@@ -317,7 +318,10 @@
     /// See example on [`Parser::new()`] for an example
     pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
         debug!("Parsing sql '{}'...", sql);
-        let mut tokenizer = Tokenizer::new(self.dialect, sql);
+        let tokenizer_options = TokenizerOptions {
+            no_escape: self.options.no_escape,
+        };
+        let mut tokenizer = Tokenizer::new(self.dialect, sql, &tokenizer_options);
         let tokens = tokenizer.tokenize()?;
         Ok(self.with_tokens(tokens))
     }
@@ -4277,6 +4281,7 @@ impl<'a> Parser<'a> {
             },
             Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
             Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
+            Token::OriginalString(ref s) => Ok(Value::OriginalString(s.to_string())),
             Token::DollarQuotedString(ref s) => Ok(Value::DollarQuotedString(s.clone())),
             Token::SingleQuotedByteStringLiteral(ref s) => {
                 Ok(Value::SingleQuotedByteStringLiteral(s.clone()))
```
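Putting the parser.rs changes together, a sketch of how a caller might opt in. This assumes the existing `with_options` builder is the intended entry point; the PR itself only shows `no_escape` being threaded into `TokenizerOptions` inside `try_with_sql` (the `@@ -317` hunk above), so the end-to-end flow is an inference:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError, ParserOptions};

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};

    // Assumption: with_options is the same builder already used for
    // trailing_commas; no_escape is forwarded to the tokenizer by try_with_sql,
    // so the string literal should come back as written in the input.
    let options = ParserOptions {
        trailing_commas: false,
        no_escape: true,
    };

    let statements = Parser::new(&dialect)
        .with_options(options)
        .try_with_sql(r"SELECT 'tab\tseparated'")?
        .parse_statements()?;

    println!("{statements:?}");
    Ok(())
}
```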