sqlparser/parser/mod.rs

1// Licensed under the Apache License, Version 2.0 (the "License");
2// you may not use this file except in compliance with the License.
3// You may obtain a copy of the License at
4//
5// http://www.apache.org/licenses/LICENSE-2.0
6//
7// Unless required by applicable law or agreed to in writing, software
8// distributed under the License is distributed on an "AS IS" BASIS,
9// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10// See the License for the specific language governing permissions and
11// limitations under the License.
12
13//! SQL Parser
14
15#[cfg(not(feature = "std"))]
16use alloc::{
17    boxed::Box,
18    format,
19    string::{String, ToString},
20    vec,
21    vec::Vec,
22};
23use core::{
24    fmt::{self, Display},
25    str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration};
36use crate::ast::Statement::CreatePolicy;
37use crate::ast::*;
38use crate::dialect::*;
39use crate::keywords::{Keyword, ALL_KEYWORDS};
40use crate::tokenizer::*;
41
42mod alter;
43
44#[derive(Debug, Clone, PartialEq, Eq)]
45pub enum ParserError {
46    TokenizerError(String),
47    ParserError(String),
48    RecursionLimitExceeded,
49}
50
51// Use `Parser::expected` instead, if possible
52macro_rules! parser_err {
53    ($MSG:expr, $loc:expr) => {
54        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
55    };
56}
57
58#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] when std is available
60mod recursion {
61    use std::cell::Cell;
62    use std::rc::Rc;
63
64    use super::ParserError;
65
    /// Tracks remaining recursion depth. This value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches 0,
    /// an error is returned.
    ///
    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker so the automatic [`DepthGuard`] decrement can hold a
    /// reference to the counter.
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
76    pub(crate) struct RecursionCounter {
77        remaining_depth: Rc<Cell<usize>>,
78    }
79
80    impl RecursionCounter {
81        /// Creates a [`RecursionCounter`] with the specified maximum
82        /// depth
83        pub fn new(remaining_depth: usize) -> Self {
84            Self {
85                remaining_depth: Rc::new(remaining_depth.into()),
86            }
87        }
88
        /// Decreases the remaining depth by 1.
        ///
        /// Returns [`Err`] if the remaining depth has already reached 0.
        ///
        /// Otherwise returns a [`DepthGuard`] which adds 1 back to the
        /// remaining depth when it is dropped.
95        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
96            let old_value = self.remaining_depth.get();
97            // ran out of space
98            if old_value == 0 {
99                Err(ParserError::RecursionLimitExceeded)
100            } else {
101                self.remaining_depth.set(old_value - 1);
102                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
103            }
104        }
105    }
106
107    /// Guard that increases the remaining depth by 1 on drop
108    pub struct DepthGuard {
109        remaining_depth: Rc<Cell<usize>>,
110    }
111
112    impl DepthGuard {
113        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
114            Self { remaining_depth }
115        }
116    }
117    impl Drop for DepthGuard {
118        fn drop(&mut self) {
119            let old_value = self.remaining_depth.get();
120            self.remaining_depth.set(old_value + 1);
121        }
122    }
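
    // A minimal usage sketch (illustrative only): hold the returned guard for the
    // duration of a recursive call so the depth is restored when the guard is dropped.
    //
    //     let counter = RecursionCounter::new(2);
    //     let _g1 = counter.try_decrease().unwrap(); // remaining depth: 1
    //     let _g2 = counter.try_decrease().unwrap(); // remaining depth: 0
    //     assert!(counter.try_decrease().is_err());  // RecursionLimitExceeded
    //     drop(_g2);                                 // remaining depth restored to 1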
123}
124
125#[cfg(not(feature = "std"))]
126mod recursion {
    /// Implementation of [`RecursionCounter`] when std is NOT available (does not
    /// guard against stack overflow).
    ///
    /// Has the same API as the std [`RecursionCounter`] implementation
    /// but does not actually limit stack depth.
132    pub(crate) struct RecursionCounter {}
133
134    impl RecursionCounter {
135        pub fn new(_remaining_depth: usize) -> Self {
136            Self {}
137        }
138        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
139            Ok(DepthGuard {})
140        }
141    }
142
143    pub struct DepthGuard {}
144}
145
146#[derive(PartialEq, Eq)]
147pub enum IsOptional {
148    Optional,
149    Mandatory,
150}
151
152pub enum IsLateral {
153    Lateral,
154    NotLateral,
155}
156
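/// An expression that may also be a wildcard. For example (a sketch): `foo + 1`
/// corresponds to `Expr`, `tbl.*` to `QualifiedWildcard`, and `*` to `Wildcard`.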
157pub enum WildcardExpr {
158    Expr(Expr),
159    QualifiedWildcard(ObjectName),
160    Wildcard,
161}
162
163impl From<TokenizerError> for ParserError {
164    fn from(e: TokenizerError) -> Self {
165        ParserError::TokenizerError(e.to_string())
166    }
167}
168
169impl fmt::Display for ParserError {
170    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
171        write!(
172            f,
173            "sql parser error: {}",
174            match self {
175                ParserError::TokenizerError(s) => s,
176                ParserError::ParserError(s) => s,
177                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
178            }
179        )
180    }
181}
182
183#[cfg(feature = "std")]
184impl std::error::Error for ParserError {}
185
// By default, allow expressions up to this depth before erroring
187const DEFAULT_REMAINING_DEPTH: usize = 50;
188
189// A constant EOF token that can be referenced.
190const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
191    token: Token::EOF,
192    span: Span {
193        start: Location { line: 0, column: 0 },
194        end: Location { line: 0, column: 0 },
195    },
196};
197
/// Composite type declarations using the angle-bracket syntax can be arbitrarily
/// nested, such that the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// But the tokenizer recognizes the `>>` as a ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type will not look for
/// its own closing `>`, since that has already been consumed while parsing the
/// child type.
208///
209/// See [Parser::parse_data_type] for details
210struct MatchedTrailingBracket(bool);
211
212impl From<bool> for MatchedTrailingBracket {
213    fn from(value: bool) -> Self {
214        Self(value)
215    }
216}
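
// For example (a sketch, assuming a dialect that accepts angle-bracket types, such as
// BigQuery): parsing the data type in
//     SELECT CAST(NULL AS ARRAY<ARRAY<INT64>>)
// encounters a single `>>` token that closes both the inner and the outer type, so the
// inner call reports the matched trailing bracket and the outer call skips its own `>`.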
217
218/// Options that control how the [`Parser`] parses SQL text
219#[derive(Debug, Clone, PartialEq, Eq)]
220pub struct ParserOptions {
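    /// Controls whether trailing commas are allowed, e.g. `SELECT a, b, FROM t`.
    /// See [`ParserOptions::with_trailing_commas`] for more details.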
221    pub trailing_commas: bool,
222    /// Controls how literal values are unescaped. See
223    /// [`Tokenizer::with_unescape`] for more details.
224    pub unescape: bool,
225}
226
227impl Default for ParserOptions {
228    fn default() -> Self {
229        Self {
230            trailing_commas: false,
231            unescape: true,
232        }
233    }
234}
235
236impl ParserOptions {
237    /// Create a new [`ParserOptions`]
238    pub fn new() -> Self {
239        Default::default()
240    }
241
242    /// Set if trailing commas are allowed.
243    ///
244    /// If this option is `false` (the default), the following SQL will
245    /// not parse. If the option is `true`, the SQL will parse.
246    ///
247    /// ```sql
248    ///  SELECT
249    ///   foo,
250    ///   bar,
251    ///  FROM baz
252    /// ```
253    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
254        self.trailing_commas = trailing_commas;
255        self
256    }
257
258    /// Set if literal values are unescaped. Defaults to true. See
259    /// [`Tokenizer::with_unescape`] for more details.
260    pub fn with_unescape(mut self, unescape: bool) -> Self {
261        self.unescape = unescape;
262        self
263    }
264}
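
// For example (a sketch): with `unescape` set to `false`, a literal such as `'It''s'`
// is kept as the raw text `It''s` in the AST rather than being unescaped to `It's`.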
265
266#[derive(Copy, Clone)]
267enum ParserState {
268    /// The default state of the parser.
269    Normal,
    /// The state when parsing a `CONNECT BY` expression. This allows parsing
    /// `PRIOR` expressions while still allowing `PRIOR` to be used as an
    /// identifier name in other contexts.
273    ConnectBy,
274}
275
276/// A SQL Parser
277///
278/// This struct is the main entry point for parsing SQL queries.
279///
280/// # Functionality:
281/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
282/// * Controlling recursion: See [`Parser::with_recursion_limit`]
283/// * Controlling parser options: See [`Parser::with_options`]
284/// * Providing your own tokens: See [`Parser::with_tokens`]
285///
286/// # Internals
287///
288/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
289/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
290/// being processed. The token vec may contain multiple SQL statements.
291///
292/// * The "current" token is the token at `index - 1`
293/// * The "next" token is the token at `index`
294/// * The "previous" token is the token at `index - 2`
295///
296/// If `index` is equal to the length of the token stream, the 'next' token is
297/// [`Token::EOF`].
298///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into the
/// following tokens:
301/// ```text
302///  [
303///    "SELECT", // token index 0
304///    " ",      // whitespace
305///    "*",
306///    " ",
307///    "FROM",
308///    " ",
309///    "foo"
310///   ]
311/// ```
312///
313///
314pub struct Parser<'a> {
315    /// The tokens
316    tokens: Vec<TokenWithSpan>,
317    /// The index of the first unprocessed token in [`Parser::tokens`].
318    index: usize,
319    /// The current state of the parser.
320    state: ParserState,
321    /// The SQL dialect to use.
322    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or control how the input is parsed (e.g. unescaping).
326    options: ParserOptions,
327    /// Ensures the stack does not overflow by limiting recursion depth.
328    recursion_counter: RecursionCounter,
329}
330
331impl<'a> Parser<'a> {
332    /// Create a parser for a [`Dialect`]
333    ///
334    /// See also [`Parser::parse_sql`]
335    ///
336    /// Example:
337    /// ```
338    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
339    /// # fn main() -> Result<(), ParserError> {
340    /// let dialect = GenericDialect{};
341    /// let statements = Parser::new(&dialect)
342    ///   .try_with_sql("SELECT * FROM foo")?
343    ///   .parse_statements()?;
344    /// # Ok(())
345    /// # }
346    /// ```
347    pub fn new(dialect: &'a dyn Dialect) -> Self {
348        Self {
349            tokens: vec![],
350            index: 0,
351            state: ParserState::Normal,
352            dialect,
353            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
354            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
355        }
356    }
357
358    /// Specify the maximum recursion limit while parsing.
359    ///
360    /// [`Parser`] prevents stack overflows by returning
361    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
362    /// this depth while processing the query.
363    ///
364    /// Example:
365    /// ```
366    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
367    /// # fn main() -> Result<(), ParserError> {
368    /// let dialect = GenericDialect{};
369    /// let result = Parser::new(&dialect)
370    ///   .with_recursion_limit(1)
371    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
372    ///   .parse_statements();
373    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
374    /// # Ok(())
375    /// # }
376    /// ```
377    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
380    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
381        self.recursion_counter = RecursionCounter::new(recursion_limit);
382        self
383    }
384
385    /// Specify additional parser options
386    ///
387    /// [`Parser`] supports additional options ([`ParserOptions`])
388    /// that allow you to mix & match behavior otherwise constrained
389    /// to certain dialects (e.g. trailing commas).
390    ///
391    /// Example:
392    /// ```
393    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
394    /// # fn main() -> Result<(), ParserError> {
395    /// let dialect = GenericDialect{};
396    /// let options = ParserOptions::new()
397    ///    .with_trailing_commas(true)
398    ///    .with_unescape(false);
399    /// let result = Parser::new(&dialect)
400    ///   .with_options(options)
401    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
402    ///   .parse_statements();
403    ///   assert!(matches!(result, Ok(_)));
404    /// # Ok(())
405    /// # }
406    /// ```
407    pub fn with_options(mut self, options: ParserOptions) -> Self {
408        self.options = options;
409        self
410    }
411
412    /// Reset this parser to parse the specified token stream
413    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
414        self.tokens = tokens;
415        self.index = 0;
416        self
417    }
418
419    /// Reset this parser state to parse the specified tokens
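    ///
    /// A minimal sketch (assumes the `Token::make_keyword` / `Token::make_word` helpers):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect, tokenizer::Token};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .with_tokens(vec![
    ///     Token::make_keyword("SELECT"),
    ///     Token::make_word("foo", None),
    ///   ])
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```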
420    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
421        // Put in dummy locations
422        let tokens_with_locations: Vec<TokenWithSpan> = tokens
423            .into_iter()
424            .map(|token| TokenWithSpan {
425                token,
426                span: Span::empty(),
427            })
428            .collect();
429        self.with_tokens_with_locations(tokens_with_locations)
430    }
431
    /// Tokenize the SQL string and set this [`Parser`]'s state to
    /// parse the resulting tokens.
434    ///
435    /// Returns an error if there was an error tokenizing the SQL string.
436    ///
    /// See the example on [`Parser::new()`].
438    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
439        debug!("Parsing sql '{}'...", sql);
440        let tokens = Tokenizer::new(self.dialect, sql)
441            .with_unescape(self.options.unescape)
442            .tokenize_with_location()?;
443        Ok(self.with_tokens_with_locations(tokens))
444    }
445
446    /// Parse potentially multiple statements
447    ///
448    /// Example
449    /// ```
450    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
451    /// # fn main() -> Result<(), ParserError> {
452    /// let dialect = GenericDialect{};
453    /// let statements = Parser::new(&dialect)
454    ///   // Parse a SQL string with 2 separate statements
455    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
456    ///   .parse_statements()?;
457    /// assert_eq!(statements.len(), 2);
458    /// # Ok(())
459    /// # }
460    /// ```
461    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
462        let mut stmts = Vec::new();
463        let mut expecting_statement_delimiter = false;
464        loop {
465            // ignore empty statements (between successive statement delimiters)
466            while self.consume_token(&Token::SemiColon) {
467                expecting_statement_delimiter = false;
468            }
469
470            match self.peek_token().token {
471                Token::EOF => break,
472
473                // end of statement
474                Token::Word(word) => {
475                    if expecting_statement_delimiter && word.keyword == Keyword::END {
476                        break;
477                    }
478                }
479                _ => {}
480            }
481
482            if expecting_statement_delimiter {
483                return self.expected("end of statement", self.peek_token());
484            }
485
486            let statement = self.parse_statement()?;
487            stmts.push(statement);
488            expecting_statement_delimiter = true;
489        }
490        Ok(stmts)
491    }
492
    /// Convenience method to parse a string with one or more SQL
    /// statements into an Abstract Syntax Tree (AST).
495    ///
496    /// Example
497    /// ```
498    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
499    /// # fn main() -> Result<(), ParserError> {
500    /// let dialect = GenericDialect{};
501    /// let statements = Parser::parse_sql(
502    ///   &dialect, "SELECT * FROM foo"
503    /// )?;
504    /// assert_eq!(statements.len(), 1);
505    /// # Ok(())
506    /// # }
507    /// ```
508    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
509        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
510    }
511
512    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
513    /// stopping before the statement separator, if any.
514    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
515        let _guard = self.recursion_counter.try_decrease()?;
516
517        // allow the dialect to override statement parsing
518        if let Some(statement) = self.dialect.parse_statement(self) {
519            return statement;
520        }
521
522        let next_token = self.next_token();
523        match &next_token.token {
524            Token::Word(w) => match w.keyword {
525                Keyword::KILL => self.parse_kill(),
526                Keyword::FLUSH => self.parse_flush(),
527                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
528                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
529                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
530                Keyword::ANALYZE => self.parse_analyze(),
531                Keyword::CASE => {
532                    self.prev_token();
533                    self.parse_case_stmt()
534                }
535                Keyword::IF => {
536                    self.prev_token();
537                    self.parse_if_stmt()
538                }
539                Keyword::WHILE => {
540                    self.prev_token();
541                    self.parse_while()
542                }
543                Keyword::RAISE => {
544                    self.prev_token();
545                    self.parse_raise_stmt()
546                }
547                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
548                    self.prev_token();
549                    self.parse_query().map(Statement::Query)
550                }
551                Keyword::TRUNCATE => self.parse_truncate(),
552                Keyword::ATTACH => {
553                    if dialect_of!(self is DuckDbDialect) {
554                        self.parse_attach_duckdb_database()
555                    } else {
556                        self.parse_attach_database()
557                    }
558                }
559                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
560                    self.parse_detach_duckdb_database()
561                }
562                Keyword::MSCK => self.parse_msck(),
563                Keyword::CREATE => self.parse_create(),
564                Keyword::CACHE => self.parse_cache_table(),
565                Keyword::DROP => self.parse_drop(),
566                Keyword::DISCARD => self.parse_discard(),
567                Keyword::DECLARE => self.parse_declare(),
568                Keyword::FETCH => self.parse_fetch_statement(),
569                Keyword::DELETE => self.parse_delete(),
570                Keyword::INSERT => self.parse_insert(),
571                Keyword::REPLACE => self.parse_replace(),
572                Keyword::UNCACHE => self.parse_uncache_table(),
573                Keyword::UPDATE => self.parse_update(),
574                Keyword::ALTER => self.parse_alter(),
575                Keyword::CALL => self.parse_call(),
576                Keyword::COPY => self.parse_copy(),
577                Keyword::OPEN => {
578                    self.prev_token();
579                    self.parse_open()
580                }
581                Keyword::CLOSE => self.parse_close(),
582                Keyword::SET => self.parse_set(),
583                Keyword::SHOW => self.parse_show(),
584                Keyword::USE => self.parse_use(),
585                Keyword::GRANT => self.parse_grant(),
586                Keyword::DENY => {
587                    self.prev_token();
588                    self.parse_deny()
589                }
590                Keyword::REVOKE => self.parse_revoke(),
591                Keyword::START => self.parse_start_transaction(),
592                Keyword::BEGIN => self.parse_begin(),
593                Keyword::END => self.parse_end(),
594                Keyword::SAVEPOINT => self.parse_savepoint(),
595                Keyword::RELEASE => self.parse_release(),
596                Keyword::COMMIT => self.parse_commit(),
597                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
598                Keyword::ROLLBACK => self.parse_rollback(),
599                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
602                Keyword::DEALLOCATE => self.parse_deallocate(),
603                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
604                Keyword::PREPARE => self.parse_prepare(),
605                Keyword::MERGE => self.parse_merge(),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes. They are used for Postgres asynchronous notifications.
608                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
609                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
610                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
611                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
612                Keyword::PRAGMA => self.parse_pragma(),
613                Keyword::UNLOAD => self.parse_unload(),
614                Keyword::RENAME => self.parse_rename(),
615                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
616                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
617                    self.parse_install()
618                }
619                Keyword::LOAD => self.parse_load(),
620                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
621                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
622                    self.parse_optimize_table()
623                }
624                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
625                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
626                Keyword::PRINT => self.parse_print(),
627                Keyword::RETURN => self.parse_return(),
628                _ => self.expected("an SQL statement", next_token),
629            },
630            Token::LParen => {
631                self.prev_token();
632                self.parse_query().map(Statement::Query)
633            }
634            _ => self.expected("an SQL statement", next_token),
635        }
636    }
637
638    /// Parse a `CASE` statement.
639    ///
640    /// See [Statement::Case]
641    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
642        let case_token = self.expect_keyword(Keyword::CASE)?;
643
644        let match_expr = if self.peek_keyword(Keyword::WHEN) {
645            None
646        } else {
647            Some(self.parse_expr()?)
648        };
649
650        self.expect_keyword_is(Keyword::WHEN)?;
651        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
652            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
653        })?;
654
655        let else_block = if self.parse_keyword(Keyword::ELSE) {
656            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
657        } else {
658            None
659        };
660
661        let mut end_case_token = self.expect_keyword(Keyword::END)?;
662        if self.peek_keyword(Keyword::CASE) {
663            end_case_token = self.expect_keyword(Keyword::CASE)?;
664        }
665
666        Ok(Statement::Case(CaseStatement {
667            case_token: AttachedToken(case_token),
668            match_expr,
669            when_blocks,
670            else_block,
671            end_case_token: AttachedToken(end_case_token),
672        }))
673    }
674
675    /// Parse an `IF` statement.
676    ///
677    /// See [Statement::If]
678    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
679        self.expect_keyword_is(Keyword::IF)?;
680        let if_block = self.parse_conditional_statement_block(&[
681            Keyword::ELSE,
682            Keyword::ELSEIF,
683            Keyword::END,
684        ])?;
685
686        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
687            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
688                parser.parse_conditional_statement_block(&[
689                    Keyword::ELSEIF,
690                    Keyword::ELSE,
691                    Keyword::END,
692                ])
693            })?
694        } else {
695            vec![]
696        };
697
698        let else_block = if self.parse_keyword(Keyword::ELSE) {
699            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
700        } else {
701            None
702        };
703
704        self.expect_keyword_is(Keyword::END)?;
705        let end_token = self.expect_keyword(Keyword::IF)?;
706
707        Ok(Statement::If(IfStatement {
708            if_block,
709            elseif_blocks,
710            else_block,
711            end_token: Some(AttachedToken(end_token)),
712        }))
713    }
714
715    /// Parse a `WHILE` statement.
716    ///
717    /// See [Statement::While]
718    fn parse_while(&mut self) -> Result<Statement, ParserError> {
719        self.expect_keyword_is(Keyword::WHILE)?;
720        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
721
722        Ok(Statement::While(WhileStatement { while_block }))
723    }
724
    /// Parses an expression and an associated list of statements
    /// belonging to a conditional statement such as `IF`, `WHEN`, or `WHILE`.
727    ///
728    /// Example:
729    /// ```sql
730    /// IF condition THEN statement1; statement2;
731    /// ```
732    fn parse_conditional_statement_block(
733        &mut self,
734        terminal_keywords: &[Keyword],
735    ) -> Result<ConditionalStatementBlock, ParserError> {
736        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
737        let mut then_token = None;
738
739        let condition = match &start_token.token {
740            Token::Word(w) if w.keyword == Keyword::ELSE => None,
741            Token::Word(w) if w.keyword == Keyword::WHILE => {
742                let expr = self.parse_expr()?;
743                Some(expr)
744            }
745            _ => {
746                let expr = self.parse_expr()?;
747                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
748                Some(expr)
749            }
750        };
751
752        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
753
754        Ok(ConditionalStatementBlock {
755            start_token: AttachedToken(start_token),
756            condition,
757            then_token,
758            conditional_statements,
759        })
760    }
761
    /// Parse a BEGIN/END block or a sequence of statements.
    /// This could be inside a conditional (IF, CASE, WHILE, etc.) or an object body,
    /// which is optionally delimited by BEGIN/END and contains one or more statements.
764    pub(crate) fn parse_conditional_statements(
765        &mut self,
766        terminal_keywords: &[Keyword],
767    ) -> Result<ConditionalStatements, ParserError> {
768        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
769            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
770            let statements = self.parse_statement_list(terminal_keywords)?;
771            let end_token = self.expect_keyword(Keyword::END)?;
772
773            ConditionalStatements::BeginEnd(BeginEndStatements {
774                begin_token: AttachedToken(begin_token),
775                statements,
776                end_token: AttachedToken(end_token),
777            })
778        } else {
779            ConditionalStatements::Sequence {
780                statements: self.parse_statement_list(terminal_keywords)?,
781            }
782        };
783        Ok(conditional_statements)
784    }
785
786    /// Parse a `RAISE` statement.
787    ///
788    /// See [Statement::Raise]
789    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
790        self.expect_keyword_is(Keyword::RAISE)?;
791
792        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
793            self.expect_token(&Token::Eq)?;
794            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
795        } else {
796            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
797        };
798
799        Ok(Statement::Raise(RaiseStatement { value }))
800    }
801
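    /// Parse a `COMMENT` statement, e.g. (a sketch):
    /// `COMMENT ON TABLE foo IS 'users table'` or `COMMENT ON COLUMN t.c IS NULL`.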
802    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
803        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
804
805        self.expect_keyword_is(Keyword::ON)?;
806        let token = self.next_token();
807
808        let (object_type, object_name) = match token.token {
809            Token::Word(w) if w.keyword == Keyword::COLUMN => {
810                (CommentObject::Column, self.parse_object_name(false)?)
811            }
812            Token::Word(w) if w.keyword == Keyword::TABLE => {
813                (CommentObject::Table, self.parse_object_name(false)?)
814            }
815            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
816                (CommentObject::Extension, self.parse_object_name(false)?)
817            }
818            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
819                (CommentObject::Schema, self.parse_object_name(false)?)
820            }
821            Token::Word(w) if w.keyword == Keyword::DATABASE => {
822                (CommentObject::Database, self.parse_object_name(false)?)
823            }
824            Token::Word(w) if w.keyword == Keyword::USER => {
825                (CommentObject::User, self.parse_object_name(false)?)
826            }
827            Token::Word(w) if w.keyword == Keyword::ROLE => {
828                (CommentObject::Role, self.parse_object_name(false)?)
829            }
830            _ => self.expected("comment object_type", token)?,
831        };
832
833        self.expect_keyword_is(Keyword::IS)?;
834        let comment = if self.parse_keyword(Keyword::NULL) {
835            None
836        } else {
837            Some(self.parse_literal_string()?)
838        };
839        Ok(Statement::Comment {
840            object_type,
841            object_name,
842            comment,
843            if_exists,
844        })
845    }
846
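    /// Parse a MySQL `FLUSH` statement, e.g. (a sketch):
    /// `FLUSH LOGS` or `FLUSH TABLES t1, t2 WITH READ LOCK`.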
847    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
848        let mut channel = None;
849        let mut tables: Vec<ObjectName> = vec![];
850        let mut read_lock = false;
851        let mut export = false;
852
853        if !dialect_of!(self is MySqlDialect | GenericDialect) {
854            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
855        }
856
857        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
858            Some(FlushLocation::NoWriteToBinlog)
859        } else if self.parse_keyword(Keyword::LOCAL) {
860            Some(FlushLocation::Local)
861        } else {
862            None
863        };
864
865        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
866            FlushType::BinaryLogs
867        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
868            FlushType::EngineLogs
869        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
870            FlushType::ErrorLogs
871        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
872            FlushType::GeneralLogs
873        } else if self.parse_keywords(&[Keyword::HOSTS]) {
874            FlushType::Hosts
875        } else if self.parse_keyword(Keyword::PRIVILEGES) {
876            FlushType::Privileges
877        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
878            FlushType::OptimizerCosts
879        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
880            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
881                channel = Some(self.parse_object_name(false).unwrap().to_string());
882            }
883            FlushType::RelayLogs
884        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
885            FlushType::SlowLogs
886        } else if self.parse_keyword(Keyword::STATUS) {
887            FlushType::Status
888        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
889            FlushType::UserResources
890        } else if self.parse_keywords(&[Keyword::LOGS]) {
891            FlushType::Logs
892        } else if self.parse_keywords(&[Keyword::TABLES]) {
893            loop {
894                let next_token = self.next_token();
895                match &next_token.token {
896                    Token::Word(w) => match w.keyword {
897                        Keyword::WITH => {
898                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
899                        }
900                        Keyword::FOR => {
901                            export = self.parse_keyword(Keyword::EXPORT);
902                        }
903                        Keyword::NoKeyword => {
904                            self.prev_token();
905                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
906                        }
907                        _ => {}
908                    },
909                    _ => {
910                        break;
911                    }
912                }
913            }
914
915            FlushType::Tables
916        } else {
917            return self.expected(
918                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
919                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
920                self.peek_token(),
921            );
922        };
923
924        Ok(Statement::Flush {
925            object_type,
926            location,
927            channel,
928            read_lock,
929            export,
930            tables,
931        })
932    }
933
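    /// Parse a Hive `MSCK` statement, e.g. (a sketch):
    /// `MSCK REPAIR TABLE db.t SYNC PARTITIONS`.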
934    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
935        let repair = self.parse_keyword(Keyword::REPAIR);
936        self.expect_keyword_is(Keyword::TABLE)?;
937        let table_name = self.parse_object_name(false)?;
938        let partition_action = self
939            .maybe_parse(|parser| {
940                let pa = match parser.parse_one_of_keywords(&[
941                    Keyword::ADD,
942                    Keyword::DROP,
943                    Keyword::SYNC,
944                ]) {
945                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
946                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
947                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
948                    _ => None,
949                };
950                parser.expect_keyword_is(Keyword::PARTITIONS)?;
951                Ok(pa)
952            })?
953            .unwrap_or_default();
954        Ok(Statement::Msck {
955            repair,
956            table_name,
957            partition_action,
958        })
959    }
960
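    /// Parse a `TRUNCATE` statement, e.g. (a sketch, PostgreSQL syntax):
    /// `TRUNCATE TABLE t1, t2 RESTART IDENTITY CASCADE`.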
961    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
962        let table = self.parse_keyword(Keyword::TABLE);
963
964        let table_names = self
965            .parse_comma_separated(|p| {
966                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
967            })?
968            .into_iter()
969            .map(|(only, name)| TruncateTableTarget { name, only })
970            .collect();
971
972        let mut partitions = None;
973        if self.parse_keyword(Keyword::PARTITION) {
974            self.expect_token(&Token::LParen)?;
975            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
976            self.expect_token(&Token::RParen)?;
977        }
978
979        let mut identity = None;
980        let mut cascade = None;
981
982        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
983            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
984                Some(TruncateIdentityOption::Restart)
985            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
986                Some(TruncateIdentityOption::Continue)
987            } else {
988                None
989            };
990
991            cascade = self.parse_cascade_option();
992        };
993
994        let on_cluster = self.parse_optional_on_cluster()?;
995
996        Ok(Statement::Truncate {
997            table_names,
998            partitions,
999            table,
1000            identity,
1001            cascade,
1002            on_cluster,
1003        })
1004    }
1005
1006    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1007        if self.parse_keyword(Keyword::CASCADE) {
1008            Some(CascadeOption::Cascade)
1009        } else if self.parse_keyword(Keyword::RESTRICT) {
1010            Some(CascadeOption::Restrict)
1011        } else {
1012            None
1013        }
1014    }
1015
1016    pub fn parse_attach_duckdb_database_options(
1017        &mut self,
1018    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
1019        if !self.consume_token(&Token::LParen) {
1020            return Ok(vec![]);
1021        }
1022
1023        let mut options = vec![];
1024        loop {
1025            if self.parse_keyword(Keyword::READ_ONLY) {
1026                let boolean = if self.parse_keyword(Keyword::TRUE) {
1027                    Some(true)
1028                } else if self.parse_keyword(Keyword::FALSE) {
1029                    Some(false)
1030                } else {
1031                    None
1032                };
1033                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
1034            } else if self.parse_keyword(Keyword::TYPE) {
1035                let ident = self.parse_identifier()?;
1036                options.push(AttachDuckDBDatabaseOption::Type(ident));
1037            } else {
1038                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
1039            };
1040
1041            if self.consume_token(&Token::RParen) {
1042                return Ok(options);
1043            } else if self.consume_token(&Token::Comma) {
1044                continue;
1045            } else {
1046                return self.expected("expected one of: ')', ','", self.peek_token());
1047            }
1048        }
1049    }
1050
1051    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1052        let database = self.parse_keyword(Keyword::DATABASE);
1053        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1054        let database_path = self.parse_identifier()?;
1055        let database_alias = if self.parse_keyword(Keyword::AS) {
1056            Some(self.parse_identifier()?)
1057        } else {
1058            None
1059        };
1060
1061        let attach_options = self.parse_attach_duckdb_database_options()?;
1062        Ok(Statement::AttachDuckDBDatabase {
1063            if_not_exists,
1064            database,
1065            database_path,
1066            database_alias,
1067            attach_options,
1068        })
1069    }
1070
1071    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1072        let database = self.parse_keyword(Keyword::DATABASE);
1073        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1074        let database_alias = self.parse_identifier()?;
1075        Ok(Statement::DetachDuckDBDatabase {
1076            if_exists,
1077            database,
1078            database_alias,
1079        })
1080    }
1081
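    /// Parse an SQLite-style `ATTACH` statement, e.g. (a sketch):
    /// `ATTACH DATABASE 'cache.db' AS cache`.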
1082    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1083        let database = self.parse_keyword(Keyword::DATABASE);
1084        let database_file_name = self.parse_expr()?;
1085        self.expect_keyword_is(Keyword::AS)?;
1086        let schema_name = self.parse_identifier()?;
1087        Ok(Statement::AttachDatabase {
1088            database,
1089            schema_name,
1090            database_file_name,
1091        })
1092    }
1093
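    /// Parse an `ANALYZE` statement, e.g. (a sketch, Hive/Spark syntax):
    /// `ANALYZE TABLE t PARTITION (p = 1) COMPUTE STATISTICS NOSCAN`.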
1094    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
1095        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
1096        let table_name = self.parse_object_name(false)?;
1097        let mut for_columns = false;
1098        let mut cache_metadata = false;
1099        let mut noscan = false;
1100        let mut partitions = None;
1101        let mut compute_statistics = false;
1102        let mut columns = vec![];
1103        loop {
1104            match self.parse_one_of_keywords(&[
1105                Keyword::PARTITION,
1106                Keyword::FOR,
1107                Keyword::CACHE,
1108                Keyword::NOSCAN,
1109                Keyword::COMPUTE,
1110            ]) {
1111                Some(Keyword::PARTITION) => {
1112                    self.expect_token(&Token::LParen)?;
1113                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1114                    self.expect_token(&Token::RParen)?;
1115                }
1116                Some(Keyword::NOSCAN) => noscan = true,
1117                Some(Keyword::FOR) => {
1118                    self.expect_keyword_is(Keyword::COLUMNS)?;
1119
1120                    columns = self
1121                        .maybe_parse(|parser| {
1122                            parser.parse_comma_separated(|p| p.parse_identifier())
1123                        })?
1124                        .unwrap_or_default();
1125                    for_columns = true
1126                }
1127                Some(Keyword::CACHE) => {
1128                    self.expect_keyword_is(Keyword::METADATA)?;
1129                    cache_metadata = true
1130                }
1131                Some(Keyword::COMPUTE) => {
1132                    self.expect_keyword_is(Keyword::STATISTICS)?;
1133                    compute_statistics = true
1134                }
1135                _ => break,
1136            }
1137        }
1138
1139        Ok(Statement::Analyze {
1140            has_table_keyword,
1141            table_name,
1142            for_columns,
1143            columns,
1144            partitions,
1145            cache_metadata,
1146            noscan,
1147            compute_statistics,
1148        })
1149    }
1150
1151    /// Parse a new expression including wildcard & qualified wildcard.
1152    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
1153        let index = self.index;
1154
1155        let next_token = self.next_token();
1156        match next_token.token {
1157            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
1158                if self.peek_token().token == Token::Period {
1159                    let mut id_parts: Vec<Ident> = vec![match t {
1160                        Token::Word(w) => w.into_ident(next_token.span),
1161                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1162                        _ => unreachable!(), // We matched above
1163                    }];
1164
1165                    while self.consume_token(&Token::Period) {
1166                        let next_token = self.next_token();
1167                        match next_token.token {
1168                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1169                            Token::SingleQuotedString(s) => {
1170                                // SQLite has single-quoted identifiers
1171                                id_parts.push(Ident::with_quote('\'', s))
1172                            }
1173                            Token::Mul => {
1174                                return Ok(Expr::QualifiedWildcard(
1175                                    ObjectName::from(id_parts),
1176                                    AttachedToken(next_token),
1177                                ));
1178                            }
1179                            _ => {
1180                                return self
1181                                    .expected("an identifier or a '*' after '.'", next_token);
1182                            }
1183                        }
1184                    }
1185                }
1186            }
1187            Token::Mul => {
1188                return Ok(Expr::Wildcard(AttachedToken(next_token)));
1189            }
1190            _ => (),
1191        };
1192
1193        self.index = index;
1194        self.parse_expr()
1195    }
1196
1197    /// Parse a new expression.
1198    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1199        self.parse_subexpr(self.dialect.prec_unknown())
1200    }
1201
1202    pub fn parse_expr_with_alias_and_order_by(
1203        &mut self,
1204    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1205        let expr = self.parse_expr()?;
1206
1207        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1208            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1209        }
1210        let alias = self.parse_optional_alias_inner(None, validator)?;
1211        let order_by = OrderByOptions {
1212            asc: self.parse_asc_desc(),
1213            nulls_first: None,
1214        };
1215        Ok(ExprWithAliasAndOrderBy {
1216            expr: ExprWithAlias { expr, alias },
1217            order_by,
1218        })
1219    }
1220
1221    /// Parse tokens until the precedence changes.
1222    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1223        let _guard = self.recursion_counter.try_decrease()?;
1224        debug!("parsing expr");
1225        let mut expr = self.parse_prefix()?;
1226
1227        expr = self.parse_compound_expr(expr, vec![])?;
1228
1229        debug!("prefix: {:?}", expr);
1230        loop {
1231            let next_precedence = self.get_next_precedence()?;
1232            debug!("next precedence: {:?}", next_precedence);
1233
1234            if precedence >= next_precedence {
1235                break;
1236            }
1237
1238            // The period operator is handled exclusively by the
1239            // compound field access parsing.
1240            if Token::Period == self.peek_token_ref().token {
1241                break;
1242            }
1243
1244            expr = self.parse_infix(expr, next_precedence)?;
1245        }
1246        Ok(expr)
1247    }
1248
1249    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1250        let condition = self.parse_expr()?;
1251        let message = if self.parse_keyword(Keyword::AS) {
1252            Some(self.parse_expr()?)
1253        } else {
1254            None
1255        };
1256
1257        Ok(Statement::Assert { condition, message })
1258    }
1259
1260    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1261        let name = self.parse_identifier()?;
1262        Ok(Statement::Savepoint { name })
1263    }
1264
1265    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1266        let _ = self.parse_keyword(Keyword::SAVEPOINT);
1267        let name = self.parse_identifier()?;
1268
1269        Ok(Statement::ReleaseSavepoint { name })
1270    }
1271
1272    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1273        let channel = self.parse_identifier()?;
1274        Ok(Statement::LISTEN { channel })
1275    }
1276
1277    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1278        let channel = if self.consume_token(&Token::Mul) {
1279            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1280        } else {
1281            match self.parse_identifier() {
1282                Ok(expr) => expr,
1283                _ => {
1284                    self.prev_token();
1285                    return self.expected("wildcard or identifier", self.peek_token());
1286                }
1287            }
1288        };
1289        Ok(Statement::UNLISTEN { channel })
1290    }
1291
1292    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1293        let channel = self.parse_identifier()?;
1294        let payload = if self.consume_token(&Token::Comma) {
1295            Some(self.parse_literal_string()?)
1296        } else {
1297            None
1298        };
1299        Ok(Statement::NOTIFY { channel, payload })
1300    }
1301
1302    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
1303    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1304        if self.peek_keyword(Keyword::TABLE) {
1305            self.expect_keyword(Keyword::TABLE)?;
1306            let rename_tables = self.parse_comma_separated(|parser| {
1307                let old_name = parser.parse_object_name(false)?;
1308                parser.expect_keyword(Keyword::TO)?;
1309                let new_name = parser.parse_object_name(false)?;
1310
1311                Ok(RenameTable { old_name, new_name })
1312            })?;
1313            Ok(Statement::RenameTable(rename_tables))
1314        } else {
1315            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1316        }
1317    }
1318
1319    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
1321    fn parse_expr_prefix_by_reserved_word(
1322        &mut self,
1323        w: &Word,
1324        w_span: Span,
1325    ) -> Result<Option<Expr>, ParserError> {
1326        match w.keyword {
1327            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1328                self.prev_token();
1329                Ok(Some(Expr::Value(self.parse_value()?)))
1330            }
1331            Keyword::NULL => {
1332                self.prev_token();
1333                Ok(Some(Expr::Value(self.parse_value()?)))
1334            }
1335            Keyword::CURRENT_CATALOG
1336            | Keyword::CURRENT_USER
1337            | Keyword::SESSION_USER
1338            | Keyword::USER
1339            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1340                {
1341                    Ok(Some(Expr::Function(Function {
1342                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1343                        uses_odbc_syntax: false,
1344                        parameters: FunctionArguments::None,
1345                        args: FunctionArguments::None,
1346                        null_treatment: None,
1347                        filter: None,
1348                        over: None,
1349                        within_group: vec![],
1350                    })))
1351                }
1352            Keyword::CURRENT_TIMESTAMP
1353            | Keyword::CURRENT_TIME
1354            | Keyword::CURRENT_DATE
1355            | Keyword::LOCALTIME
1356            | Keyword::LOCALTIMESTAMP => {
1357                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1358            }
1359            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1360            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1361            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1362            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1363            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1364            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1365            Keyword::EXISTS
            // Databricks has a function named `exists`; for that dialect, only parse
            // EXISTS as the subquery operator when it is followed by a subquery (SELECT or WITH).
1367            if !dialect_of!(self is DatabricksDialect)
1368                || matches!(
1369                        self.peek_nth_token_ref(1).token,
1370                        Token::Word(Word {
1371                            keyword: Keyword::SELECT | Keyword::WITH,
1372                            ..
1373                        })
1374                    ) =>
1375                {
1376                    Ok(Some(self.parse_exists_expr(false)?))
1377                }
1378            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1379            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1380            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1381            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1382                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1383            }
1384            Keyword::SUBSTR | Keyword::SUBSTRING => {
1385                self.prev_token();
1386                Ok(Some(self.parse_substring()?))
1387            }
1388            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1389            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1390            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // Treat ARRAY[1,2,3] as an array [1,2,3]; otherwise try to parse it as a subquery or a function call
1392            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1393                self.expect_token(&Token::LBracket)?;
1394                Ok(Some(self.parse_array_expr(true)?))
1395            }
1396            Keyword::ARRAY
1397            if self.peek_token() == Token::LParen
1398                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1399                {
1400                    self.expect_token(&Token::LParen)?;
1401                    let query = self.parse_query()?;
1402                    self.expect_token(&Token::RParen)?;
1403                    Ok(Some(Expr::Function(Function {
1404                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1405                        uses_odbc_syntax: false,
1406                        parameters: FunctionArguments::None,
1407                        args: FunctionArguments::Subquery(query),
1408                        filter: None,
1409                        null_treatment: None,
1410                        over: None,
1411                        within_group: vec![],
1412                    })))
1413                }
1414            Keyword::NOT => Ok(Some(self.parse_not()?)),
1415            Keyword::MATCH if self.dialect.supports_match_against() => {
1416                Ok(Some(self.parse_match_against()?))
1417            }
1418            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1419                let struct_expr = self.parse_struct_literal()?;
1420                Ok(Some(struct_expr))
1421            }
1422            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1423                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1424                Ok(Some(Expr::Prior(Box::new(expr))))
1425            }
1426            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1427                Ok(Some(self.parse_duckdb_map_literal()?))
1428            }
1429            _ if self.dialect.supports_geometric_types() => match w.keyword {
1430                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1431                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1432                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1433                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1434                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1435                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1436                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1437                _ => Ok(None),
1438            },
1439            _ => Ok(None),
1440        }
1441    }
1442
1443    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
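    ///
    /// A rough sketch of the cases handled below (illustrative, not exhaustive):
    /// a word followed by `(` is parsed as a function call (e.g. `foo(1, 2)`), a
    /// word starting with `_` followed by a string literal as an introduced string
    /// (e.g. `_utf8mb4'abc'`), a word followed by `->` as a single-parameter lambda
    /// in dialects that support lambdas (e.g. `x -> x + 1`), and anything else as a
    /// plain identifier.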
1444    fn parse_expr_prefix_by_unreserved_word(
1445        &mut self,
1446        w: &Word,
1447        w_span: Span,
1448    ) -> Result<Expr, ParserError> {
1449        match self.peek_token().token {
1450            Token::LParen if !self.peek_outer_join_operator() => {
1451                let id_parts = vec![w.clone().into_ident(w_span)];
1452                self.parse_function(ObjectName::from(id_parts))
1453            }
1454            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
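            // e.g. `_utf8mb4'abc'` (illustrative)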
1455            Token::SingleQuotedString(_)
1456            | Token::DoubleQuotedString(_)
1457            | Token::HexStringLiteral(_)
1458                if w.value.starts_with('_') =>
1459            {
1460                Ok(Expr::Prefixed {
1461                    prefix: w.clone().into_ident(w_span),
1462                    value: self.parse_introduced_string_expr()?.into(),
1463                })
1464            }
1476            Token::Arrow if self.dialect.supports_lambda_functions() => {
1477                self.expect_token(&Token::Arrow)?;
1478                Ok(Expr::Lambda(LambdaFunction {
1479                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1480                    body: Box::new(self.parse_expr()?),
1481                }))
1482            }
1483            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1484        }
1485    }
1486
1487    /// Parse an expression prefix.
1488    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1489        // allow the dialect to override prefix parsing
1490        if let Some(prefix) = self.dialect.parse_prefix(self) {
1491            return prefix;
1492        }
1493
1494        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1495        // string literal represents a literal of that type. Some examples:
1496        //
1497        //      DATE '2020-05-20'
1498        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1499        //      BOOL 'true'
1500        //
1501        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1502        // matters is the fact that INTERVAL string literals may optionally be followed by special
1503        // keywords, e.g.:
1504        //
1505        //      INTERVAL '7' DAY
1506        //
1507        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1508        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1509        // expression that should parse as the column name "date".
1510        let loc = self.peek_token_ref().span.start;
1511        let opt_expr = self.maybe_parse(|parser| {
1512            match parser.parse_data_type()? {
1513                DataType::Interval => parser.parse_interval(),
1514                // PostgreSQL allows almost any identifier to be used as a custom data type name,
1515                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1516                // have a list of globally reserved keywords (since they vary across dialects),
1517                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1518                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1519                // a unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1520                // `type 'string'` syntax for custom data types at all.
1521                DataType::Custom(..) => parser_err!("dummy", loc),
1522                data_type => Ok(Expr::TypedString {
1523                    data_type,
1524                    value: parser.parse_value()?.value,
1525                }),
1526            }
1527        })?;
1528
1529        if let Some(expr) = opt_expr {
1530            return Ok(expr);
1531        }
1532
1533        // Cache some dialect properties to avoid lifetime issues with the
1534        // next_token reference.
1535
1536        let dialect = self.dialect;
1537
1538        self.advance_token();
1539        let next_token_index = self.get_current_index();
1540        let next_token = self.get_current_token();
1541        let span = next_token.span;
1542        let expr = match &next_token.token {
1543            Token::Word(w) => {
1544                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1545                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1546                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1547                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1548                //                         interval expression   identifier
1549                //
1550                // We first try to parse the word and following tokens as a special expression, and if that fails,
1551                // we rollback and try to parse it as an identifier.
1552                let w = w.clone();
1553                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1554                    // This word indicated an expression prefix and parsing was successful
1555                    Ok(Some(expr)) => Ok(expr),
1556
1557                    // No expression prefix associated with this word
1558                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1559
1560                    // If parsing of the word as a special expression failed, we are facing two options:
1561                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1562                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1563                    // We first try to parse the word as an identifier and if that fails
1564                    // we rollback and return the parsing error we got from trying to parse a
1565                    // special expression (to maintain backwards compatibility of parsing errors).
1566                    Err(e) => {
1567                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1568                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1569                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1570                            }) {
1571                                return Ok(expr);
1572                            }
1573                        }
1574                        return Err(e);
1575                    }
1576                }
1577            } // End of Token::Word
1578            // array `[1, 2, 3]`
1579            Token::LBracket => self.parse_array_expr(false),
1580            tok @ Token::Minus | tok @ Token::Plus => {
1581                let op = if *tok == Token::Plus {
1582                    UnaryOperator::Plus
1583                } else {
1584                    UnaryOperator::Minus
1585                };
1586                Ok(Expr::UnaryOp {
1587                    op,
1588                    expr: Box::new(
1589                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1590                    ),
1591                })
1592            }
1593            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1594                op: UnaryOperator::BangNot,
1595                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1596            }),
1597            tok @ Token::DoubleExclamationMark
1598            | tok @ Token::PGSquareRoot
1599            | tok @ Token::PGCubeRoot
1600            | tok @ Token::AtSign
1601            | tok @ Token::Tilde
1602                if dialect_is!(dialect is PostgreSqlDialect) =>
1603            {
1604                let op = match tok {
1605                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1606                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1607                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1608                    Token::AtSign => UnaryOperator::PGAbs,
1609                    Token::Tilde => UnaryOperator::PGBitwiseNot,
1610                    _ => unreachable!(),
1611                };
1612                Ok(Expr::UnaryOp {
1613                    op,
1614                    expr: Box::new(
1615                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1616                    ),
1617                })
1618            }
1619            tok @ Token::Sharp
1620            | tok @ Token::AtDashAt
1621            | tok @ Token::AtAt
1622            | tok @ Token::QuestionMarkDash
1623            | tok @ Token::QuestionPipe
1624                if self.dialect.supports_geometric_types() =>
1625            {
1626                let op = match tok {
1627                    Token::Sharp => UnaryOperator::Hash,
1628                    Token::AtDashAt => UnaryOperator::AtDashAt,
1629                    Token::AtAt => UnaryOperator::DoubleAt,
1630                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1631                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1632                    _ => {
1633                        return Err(ParserError::ParserError(format!(
1634                            "Unexpected token in unary operator parsing: {:?}",
1635                            tok
1636                        )))
1637                    }
1638                };
1639                Ok(Expr::UnaryOp {
1640                    op,
1641                    expr: Box::new(
1642                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1643                    ),
1644                })
1645            }
1646            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1647            {
1648                self.prev_token();
1649                Ok(Expr::Value(self.parse_value()?))
1650            }
1651            Token::UnicodeStringLiteral(_) => {
1652                self.prev_token();
1653                Ok(Expr::Value(self.parse_value()?))
1654            }
1655            Token::Number(_, _)
1656            | Token::SingleQuotedString(_)
1657            | Token::DoubleQuotedString(_)
1658            | Token::TripleSingleQuotedString(_)
1659            | Token::TripleDoubleQuotedString(_)
1660            | Token::DollarQuotedString(_)
1661            | Token::SingleQuotedByteStringLiteral(_)
1662            | Token::DoubleQuotedByteStringLiteral(_)
1663            | Token::TripleSingleQuotedByteStringLiteral(_)
1664            | Token::TripleDoubleQuotedByteStringLiteral(_)
1665            | Token::SingleQuotedRawStringLiteral(_)
1666            | Token::DoubleQuotedRawStringLiteral(_)
1667            | Token::TripleSingleQuotedRawStringLiteral(_)
1668            | Token::TripleDoubleQuotedRawStringLiteral(_)
1669            | Token::NationalStringLiteral(_)
1670            | Token::HexStringLiteral(_) => {
1671                self.prev_token();
1672                Ok(Expr::Value(self.parse_value()?))
1673            }
1674            Token::LParen => {
1675                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
1676                    expr
1677                } else if let Some(lambda) = self.try_parse_lambda()? {
1678                    return Ok(lambda);
1679                } else {
1680                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1681                    match exprs.len() {
1682                        0 => unreachable!(), // parse_comma_separated ensures 1 or more
1683                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1684                        _ => Expr::Tuple(exprs),
1685                    }
1686                };
1687                self.expect_token(&Token::RParen)?;
1688                Ok(expr)
1689            }
1690            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1691                self.prev_token();
1692                Ok(Expr::Value(self.parse_value()?))
1693            }
1694            Token::LBrace => {
1695                self.prev_token();
1696                self.parse_lbrace_expr()
1697            }
1698            _ => self.expected_at("an expression", next_token_index),
1699        }?;
1700
1701        if self.parse_keyword(Keyword::COLLATE) {
1702            Ok(Expr::Collate {
1703                expr: Box::new(expr),
1704                collation: self.parse_object_name(false)?,
1705            })
1706        } else {
1707            Ok(expr)
1708        }
1709    }
1710
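    /// Parses the string following a geometric type keyword into a typed string,
    /// e.g. the PostgreSQL-style literals `point '(1,2)'`, `box '((0,0),(1,1))'`,
    /// or `circle '((0,0),2)'` (illustrative examples).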
1711    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1712        let value: Value = self.parse_value()?.value;
1713        Ok(Expr::TypedString {
1714            data_type: DataType::GeometricType(kind),
1715            value,
1716        })
1717    }
1718
1719    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1720    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1721    /// If only the root exists, return the root.
1722    /// Parses compound expressions which may be delimited by period
1723    /// or bracket notation.
1724    /// For example: `a.b.c`, `a.b[1]`.
1725    pub fn parse_compound_expr(
1726        &mut self,
1727        root: Expr,
1728        mut chain: Vec<AccessExpr>,
1729    ) -> Result<Expr, ParserError> {
1730        let mut ending_wildcard: Option<TokenWithSpan> = None;
1731        loop {
1732            if self.consume_token(&Token::Period) {
1733                let next_token = self.peek_token_ref();
1734                match &next_token.token {
1735                    Token::Mul => {
1736                        // Postgres explicitly allows calls like funcnm(tablenm.*);
1737                        // functions such as array_agg reach this control flow.
1738                        if dialect_of!(self is PostgreSqlDialect) {
1739                            ending_wildcard = Some(self.next_token());
1740                        } else {
1741                            // Put back the consumed `.` tokens before exiting.
1742                            // If this expression is being parsed in the
1743                            // context of a projection, then the `.*` could imply
1744                            // a wildcard expansion. For example:
1745                            // `SELECT STRUCT('foo').* FROM T`
1746                            self.prev_token(); // .
1747                        }
1748
1749                        break;
1750                    }
1751                    Token::SingleQuotedString(s) => {
1752                        let expr =
1753                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1754                        chain.push(AccessExpr::Dot(expr));
1755                        self.advance_token(); // The consumed string
1756                    }
1757                    // Fallback to parsing an arbitrary expression.
1758                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1759                        // If we get back a compound field access or identifier,
1760                        // we flatten the nested expression.
1761                        // For example if the current root is `foo`
1762                        // and we get back a compound identifier expression `bar.baz`
1763                        // The full expression should be `foo.bar.baz` (i.e.
1764                        // a root with an access chain with 2 entries) and not
1765                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1766                        // 1 entry).
1767                        Expr::CompoundFieldAccess { root, access_chain } => {
1768                            chain.push(AccessExpr::Dot(*root));
1769                            chain.extend(access_chain);
1770                        }
1771                        Expr::CompoundIdentifier(parts) => chain
1772                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1773                        expr => {
1774                            chain.push(AccessExpr::Dot(expr));
1775                        }
1776                    },
1777                }
1778            } else if !self.dialect.supports_partiql()
1779                && self.peek_token_ref().token == Token::LBracket
1780            {
1781                self.parse_multi_dim_subscript(&mut chain)?;
1782            } else {
1783                break;
1784            }
1785        }
1786
1787        let tok_index = self.get_current_index();
1788        if let Some(wildcard_token) = ending_wildcard {
1789            if !Self::is_all_ident(&root, &chain) {
1790                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1791            };
1792            Ok(Expr::QualifiedWildcard(
1793                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1794                AttachedToken(wildcard_token),
1795            ))
1796        } else if self.maybe_parse_outer_join_operator() {
1797            if !Self::is_all_ident(&root, &chain) {
1798                return self.expected_at("column identifier before (+)", tok_index);
1799            };
1800            let expr = if chain.is_empty() {
1801                root
1802            } else {
1803                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1804            };
1805            Ok(Expr::OuterJoin(expr.into()))
1806        } else {
1807            Self::build_compound_expr(root, chain)
1808        }
1809    }
1810
1811    /// Combines a root expression and access chain to form
1812    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1813    /// or another special-cased expression like [Expr::CompoundIdentifier]
1814    /// or [Expr::OuterJoin].
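    ///
    /// Illustrative examples of the special cases handled here (assuming the
    /// relevant dialect features are enabled):
    /// - `a.b.c` becomes an [Expr::CompoundIdentifier];
    /// - `a.b.c.foo(1, 2, 3)` becomes a single [Expr::Function] named `a.b.c.foo`;
    /// - `T.foo(+)` becomes an [Expr::OuterJoin] on the column `T.foo`.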
1815    fn build_compound_expr(
1816        root: Expr,
1817        mut access_chain: Vec<AccessExpr>,
1818    ) -> Result<Expr, ParserError> {
1819        if access_chain.is_empty() {
1820            return Ok(root);
1821        }
1822
1823        if Self::is_all_ident(&root, &access_chain) {
1824            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1825                root,
1826                access_chain,
1827            )?));
1828        }
1829
1830        // Flatten qualified function calls.
1831        // For example, the expression `a.b.c.foo(1,2,3)` should
1832        // represent a function called `a.b.c.foo`, rather than
1833        // a composite expression.
1834        if matches!(root, Expr::Identifier(_))
1835            && matches!(
1836                access_chain.last(),
1837                Some(AccessExpr::Dot(Expr::Function(_)))
1838            )
1839            && access_chain
1840                .iter()
1841                .rev()
1842                .skip(1) // All except the Function
1843                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1844        {
1845            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1846                return parser_err!("expected function expression", root.span().start);
1847            };
1848
1849            let compound_func_name = [root]
1850                .into_iter()
1851                .chain(access_chain.into_iter().flat_map(|access| match access {
1852                    AccessExpr::Dot(expr) => Some(expr),
1853                    _ => None,
1854                }))
1855                .flat_map(|expr| match expr {
1856                    Expr::Identifier(ident) => Some(ident),
1857                    _ => None,
1858                })
1859                .map(ObjectNamePart::Identifier)
1860                .chain(func.name.0)
1861                .collect::<Vec<_>>();
1862            func.name = ObjectName(compound_func_name);
1863
1864            return Ok(Expr::Function(func));
1865        }
1866
1867        // Flatten qualified outer join expressions.
1868        // For example, the expression `T.foo(+)` should
1869        // represent an outer join on the column name `T.foo`
1870        // rather than a composite expression.
1871        if access_chain.len() == 1
1872            && matches!(
1873                access_chain.last(),
1874                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1875            )
1876        {
1877            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1878                return parser_err!("expected (+) expression", root.span().start);
1879            };
1880
1881            if !Self::is_all_ident(&root, &[]) {
1882                return parser_err!("column identifier before (+)", root.span().start);
1883            };
1884
1885            let token_start = root.span().start;
1886            let mut idents = Self::exprs_to_idents(root, vec![])?;
1887            match *inner_expr {
1888                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1889                Expr::Identifier(suffix) => idents.push(suffix),
1890                _ => {
1891                    return parser_err!("column identifier before (+)", token_start);
1892                }
1893            }
1894
1895            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1896        }
1897
1898        Ok(Expr::CompoundFieldAccess {
1899            root: Box::new(root),
1900            access_chain,
1901        })
1902    }
1903
1904    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1905        match k {
1906            Keyword::LOCAL => Some(ContextModifier::Local),
1907            Keyword::GLOBAL => Some(ContextModifier::Global),
1908            Keyword::SESSION => Some(ContextModifier::Session),
1909            _ => None,
1910        }
1911    }
1912
1913    /// Check if the root is an identifier and all fields are identifiers.
1914    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1915        if !matches!(root, Expr::Identifier(_)) {
1916            return false;
1917        }
1918        fields
1919            .iter()
1920            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1921    }
1922
1923    /// Convert a root and a list of fields to a list of identifiers.
1924    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1925        let mut idents = vec![];
1926        if let Expr::Identifier(root) = root {
1927            idents.push(root);
1928            for x in fields {
1929                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1930                    idents.push(ident);
1931                } else {
1932                    return parser_err!(
1933                        format!("Expected identifier, found: {}", x),
1934                        x.span().start
1935                    );
1936                }
1937            }
1938            Ok(idents)
1939        } else {
1940            parser_err!(
1941                format!("Expected identifier, found: {}", root),
1942                root.span().start
1943            )
1944        }
1945    }
1946
1947    /// Returns true if the next tokens indicate the outer join operator `(+)`.
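    /// e.g. the `(+)` in an Oracle-style join condition `t1.id = t2.id (+)`.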
1948    fn peek_outer_join_operator(&mut self) -> bool {
1949        if !self.dialect.supports_outer_join_operator() {
1950            return false;
1951        }
1952
1953        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
1954        Token::LParen == maybe_lparen.token
1955            && Token::Plus == maybe_plus.token
1956            && Token::RParen == maybe_rparen.token
1957    }
1958
1959    /// If the next tokens indicate the outer join operator `(+)`, consume
1960    /// the tokens and return true.
1961    fn maybe_parse_outer_join_operator(&mut self) -> bool {
1962        self.dialect.supports_outer_join_operator()
1963            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
1964    }
1965
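    /// Parses a parenthesized, comma-separated list of utility options, where each
    /// option is a name with an optional argument, e.g. (illustrative) the options
    /// list of a PostgreSQL `EXPLAIN (ANALYZE, VERBOSE TRUE, FORMAT JSON) SELECT 1`.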
1966    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1967        self.expect_token(&Token::LParen)?;
1968        let options = self.parse_comma_separated(Self::parse_utility_option)?;
1969        self.expect_token(&Token::RParen)?;
1970
1971        Ok(options)
1972    }
1973
1974    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
1975        let name = self.parse_identifier()?;
1976
1977        let next_token = self.peek_token();
1978        if next_token == Token::Comma || next_token == Token::RParen {
1979            return Ok(UtilityOption { name, arg: None });
1980        }
1981        let arg = self.parse_expr()?;
1982
1983        Ok(UtilityOption {
1984            name,
1985            arg: Some(arg),
1986        })
1987    }
1988
1989    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
1990        if !self.peek_sub_query() {
1991            return Ok(None);
1992        }
1993
1994        Ok(Some(Expr::Subquery(self.parse_query()?)))
1995    }
1996
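    /// Tries to parse a parenthesized lambda parameter list followed by `->` and a
    /// body, assuming the opening `(` has already been consumed by the caller; e.g.
    /// (illustrative) the `(x, acc) -> x + acc` in `REDUCE(col, 0, (x, acc) -> x + acc)`.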
1997    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
1998        if !self.dialect.supports_lambda_functions() {
1999            return Ok(None);
2000        }
2001        self.maybe_parse(|p| {
2002            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2003            p.expect_token(&Token::RParen)?;
2004            p.expect_token(&Token::Arrow)?;
2005            let expr = p.parse_expr()?;
2006            Ok(Expr::Lambda(LambdaFunction {
2007                params: OneOrManyWithParens::Many(params),
2008                body: Box::new(expr),
2009            }))
2010        })
2011    }
2012
2013    /// Tries to parse the body of an [ODBC function] call.
2014    /// i.e. without the enclosing braces
2015    ///
2016    /// ```sql
2017    /// fn myfunc(1,2,3)
2018    /// ```
2019    ///
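    /// The full escape sequence, whose enclosing braces are handled by the caller,
    /// looks like `{fn myfunc(1,2,3)}` (illustrative).
    ///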
2020    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2021    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2022        self.maybe_parse(|p| {
2023            p.expect_keyword(Keyword::FN)?;
2024            let fn_name = p.parse_object_name(false)?;
2025            let mut fn_call = p.parse_function_call(fn_name)?;
2026            fn_call.uses_odbc_syntax = true;
2027            Ok(Expr::Function(fn_call))
2028        })
2029    }
2030
2031    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2032        self.parse_function_call(name).map(Expr::Function)
2033    }
2034
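    /// Parses a function call after its name has been consumed, including the
    /// optional trailing clauses handled below; a sketch of the shapes accepted
    /// (illustrative, dialect-dependent):
    ///
    /// ```sql
    /// COUNT(DISTINCT x) FILTER (WHERE x > 0) OVER (PARTITION BY y)
    /// PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x)
    /// LAG(x) IGNORE NULLS OVER (ORDER BY ts)
    /// ```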
2035    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2036        self.expect_token(&Token::LParen)?;
2037
2038        // Snowflake permits a subquery to be passed as an argument without
2039        // an enclosing set of parens if it's the only argument.
2040        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2041            let subquery = self.parse_query()?;
2042            self.expect_token(&Token::RParen)?;
2043            return Ok(Function {
2044                name,
2045                uses_odbc_syntax: false,
2046                parameters: FunctionArguments::None,
2047                args: FunctionArguments::Subquery(subquery),
2048                filter: None,
2049                null_treatment: None,
2050                over: None,
2051                within_group: vec![],
2052            });
2053        }
2054
2055        let mut args = self.parse_function_argument_list()?;
2056        let mut parameters = FunctionArguments::None;
2057        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2058        // where `(0.5, 0.6)` is the parameter list of the function.
2059        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2060            && self.consume_token(&Token::LParen)
2061        {
2062            parameters = FunctionArguments::List(args);
2063            args = self.parse_function_argument_list()?;
2064        }
2065
2066        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2067            self.expect_token(&Token::LParen)?;
2068            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2069            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2070            self.expect_token(&Token::RParen)?;
2071            order_by
2072        } else {
2073            vec![]
2074        };
2075
2076        let filter = if self.dialect.supports_filter_during_aggregation()
2077            && self.parse_keyword(Keyword::FILTER)
2078            && self.consume_token(&Token::LParen)
2079            && self.parse_keyword(Keyword::WHERE)
2080        {
2081            let filter = Some(Box::new(self.parse_expr()?));
2082            self.expect_token(&Token::RParen)?;
2083            filter
2084        } else {
2085            None
2086        };
2087
2088        // Syntax for null treatment shows up either in the args list
2089        // or after the function call, but not both.
2090        let null_treatment = if args
2091            .clauses
2092            .iter()
2093            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2094        {
2095            self.parse_null_treatment()?
2096        } else {
2097            None
2098        };
2099
2100        let over = if self.parse_keyword(Keyword::OVER) {
2101            if self.consume_token(&Token::LParen) {
2102                let window_spec = self.parse_window_spec()?;
2103                Some(WindowType::WindowSpec(window_spec))
2104            } else {
2105                Some(WindowType::NamedWindow(self.parse_identifier()?))
2106            }
2107        } else {
2108            None
2109        };
2110
2111        Ok(Function {
2112            name,
2113            uses_odbc_syntax: false,
2114            parameters,
2115            args: FunctionArguments::List(args),
2116            null_treatment,
2117            filter,
2118            over,
2119            within_group,
2120        })
2121    }
2122
2123    /// Optionally parses a null treatment clause.
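    /// e.g. the `IGNORE NULLS` in `LAST_VALUE(x) IGNORE NULLS OVER (ORDER BY ts)`
    /// (illustrative; placement varies by dialect).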
2124    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2125        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2126            Some(keyword) => {
2127                self.expect_keyword_is(Keyword::NULLS)?;
2128
2129                Ok(match keyword {
2130                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2131                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2132                    _ => None,
2133                })
2134            }
2135            None => Ok(None),
2136        }
2137    }
2138
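    /// Parses the optional argument list of a datetime value function such as
    /// `CURRENT_TIMESTAMP`, which may appear with or without parentheses, e.g.
    /// `SELECT CURRENT_DATE, LOCALTIME, CURRENT_TIMESTAMP(3)` (illustrative).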
2139    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2140        let args = if self.consume_token(&Token::LParen) {
2141            FunctionArguments::List(self.parse_function_argument_list()?)
2142        } else {
2143            FunctionArguments::None
2144        };
2145        Ok(Expr::Function(Function {
2146            name,
2147            uses_odbc_syntax: false,
2148            parameters: FunctionArguments::None,
2149            args,
2150            filter: None,
2151            over: None,
2152            null_treatment: None,
2153            within_group: vec![],
2154        }))
2155    }
2156
2157    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2158        let next_token = self.next_token();
2159        match &next_token.token {
2160            Token::Word(w) => match w.keyword {
2161                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2162                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2163                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2164                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2165            },
2166            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2167        }
2168    }
2169
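    /// Parses a window frame clause, starting at the frame units keyword
    /// (`ROWS`, `RANGE`, or `GROUPS`), e.g. (illustrative):
    ///
    /// ```sql
    /// ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
    /// RANGE UNBOUNDED PRECEDING
    /// ```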
2170    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2171        let units = self.parse_window_frame_units()?;
2172        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2173            let start_bound = self.parse_window_frame_bound()?;
2174            self.expect_keyword_is(Keyword::AND)?;
2175            let end_bound = Some(self.parse_window_frame_bound()?);
2176            (start_bound, end_bound)
2177        } else {
2178            (self.parse_window_frame_bound()?, None)
2179        };
2180        Ok(WindowFrame {
2181            units,
2182            start_bound,
2183            end_bound,
2184        })
2185    }
2186
2187    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
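    /// e.g. `UNBOUNDED PRECEDING`, `CURRENT ROW`, or `3 FOLLOWING` (illustrative).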
2188    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2189        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2190            Ok(WindowFrameBound::CurrentRow)
2191        } else {
2192            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2193                None
2194            } else {
2195                Some(Box::new(match self.peek_token().token {
2196                    Token::SingleQuotedString(_) => self.parse_interval()?,
2197                    _ => self.parse_expr()?,
2198                }))
2199            };
2200            if self.parse_keyword(Keyword::PRECEDING) {
2201                Ok(WindowFrameBound::Preceding(rows))
2202            } else if self.parse_keyword(Keyword::FOLLOWING) {
2203                Ok(WindowFrameBound::Following(rows))
2204            } else {
2205                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2206            }
2207        }
2208    }
2209
2210    /// Parse a GROUP BY expression: grouping sets, ROLLUP, CUBE, or a simple expression.
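    /// For example (illustrative):
    ///
    /// ```sql
    /// GROUP BY GROUPING SETS ((a), (b, c))
    /// GROUP BY ROLLUP (a, b)
    /// GROUP BY CUBE (a, b)
    /// GROUP BY (), name -- PostgreSQL allows an empty grouping tuple
    /// ```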
2211    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2212        if self.dialect.supports_group_by_expr() {
2213            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2214                self.expect_token(&Token::LParen)?;
2215                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2216                self.expect_token(&Token::RParen)?;
2217                Ok(Expr::GroupingSets(result))
2218            } else if self.parse_keyword(Keyword::CUBE) {
2219                self.expect_token(&Token::LParen)?;
2220                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2221                self.expect_token(&Token::RParen)?;
2222                Ok(Expr::Cube(result))
2223            } else if self.parse_keyword(Keyword::ROLLUP) {
2224                self.expect_token(&Token::LParen)?;
2225                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2226                self.expect_token(&Token::RParen)?;
2227                Ok(Expr::Rollup(result))
2228            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2229                // PostgreSQL allows an empty tuple as a GROUP BY expression,
2230                // e.g. `GROUP BY (), name`. See the GROUP BY clause section in
2231                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2232                Ok(Expr::Tuple(vec![]))
2233            } else {
2234                self.parse_expr()
2235            }
2236        } else {
2237            // TODO parse rollup for other dialects
2238            self.parse_expr()
2239        }
2240    }
2241
2242    /// Parse a tuple with `(` and `)`.
2243    /// If `lift_singleton` is true, then a single expression without parentheses is accepted and lifted to a tuple of length 1; otherwise the parentheses are required.
2244    /// If `allow_empty` is true, then an empty tuple is allowed.
2245    fn parse_tuple(
2246        &mut self,
2247        lift_singleton: bool,
2248        allow_empty: bool,
2249    ) -> Result<Vec<Expr>, ParserError> {
2250        if lift_singleton {
2251            if self.consume_token(&Token::LParen) {
2252                let result = if allow_empty && self.consume_token(&Token::RParen) {
2253                    vec![]
2254                } else {
2255                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2256                    self.expect_token(&Token::RParen)?;
2257                    result
2258                };
2259                Ok(result)
2260            } else {
2261                Ok(vec![self.parse_expr()?])
2262            }
2263        } else {
2264            self.expect_token(&Token::LParen)?;
2265            let result = if allow_empty && self.consume_token(&Token::RParen) {
2266                vec![]
2267            } else {
2268                let result = self.parse_comma_separated(Parser::parse_expr)?;
2269                self.expect_token(&Token::RParen)?;
2270                result
2271            };
2272            Ok(result)
2273        }
2274    }
2275
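    /// Parses a `CASE` expression after the `CASE` keyword has been consumed,
    /// covering both the searched and the simple form, e.g. (illustrative):
    ///
    /// ```sql
    /// CASE WHEN a > 0 THEN 'pos' ELSE 'neg' END
    /// CASE x WHEN 1 THEN 'one' WHEN 2 THEN 'two' END
    /// ```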
2276    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2277        let case_token = AttachedToken(self.get_current_token().clone());
2278        let mut operand = None;
2279        if !self.parse_keyword(Keyword::WHEN) {
2280            operand = Some(Box::new(self.parse_expr()?));
2281            self.expect_keyword_is(Keyword::WHEN)?;
2282        }
2283        let mut conditions = vec![];
2284        loop {
2285            let condition = self.parse_expr()?;
2286            self.expect_keyword_is(Keyword::THEN)?;
2287            let result = self.parse_expr()?;
2288            conditions.push(CaseWhen { condition, result });
2289            if !self.parse_keyword(Keyword::WHEN) {
2290                break;
2291            }
2292        }
2293        let else_result = if self.parse_keyword(Keyword::ELSE) {
2294            Some(Box::new(self.parse_expr()?))
2295        } else {
2296            None
2297        };
2298        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2299        Ok(Expr::Case {
2300            case_token,
2301            end_token,
2302            operand,
2303            conditions,
2304            else_result,
2305        })
2306    }
2307
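    /// Optionally parses a BigQuery-style `FORMAT` clause inside `CAST`, e.g.
    /// (illustrative) `CAST('2020-01-01' AS DATE FORMAT 'YYYY-MM-DD')`, possibly
    /// followed by `AT TIME ZONE`.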
2308    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2309        if self.parse_keyword(Keyword::FORMAT) {
2310            let value = self.parse_value()?.value;
2311            match self.parse_optional_time_zone()? {
2312                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2313                None => Ok(Some(CastFormat::Value(value))),
2314            }
2315        } else {
2316            Ok(None)
2317        }
2318    }
2319
2320    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2321        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2322            self.parse_value().map(|v| Some(v.value))
2323        } else {
2324            Ok(None)
2325        }
2326    }
2327
2328    /// Parses an MSSQL-style CONVERT function, where the target type precedes the value.
2329    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2330        self.expect_token(&Token::LParen)?;
2331        let data_type = self.parse_data_type()?;
2332        self.expect_token(&Token::Comma)?;
2333        let expr = self.parse_expr()?;
2334        let styles = if self.consume_token(&Token::Comma) {
2335            self.parse_comma_separated(Parser::parse_expr)?
2336        } else {
2337            Default::default()
2338        };
2339        self.expect_token(&Token::RParen)?;
2340        Ok(Expr::Convert {
2341            is_try,
2342            expr: Box::new(expr),
2343            data_type: Some(data_type),
2344            charset: None,
2345            target_before_value: true,
2346            styles,
2347        })
2348    }
2349
2350    /// Parse a SQL CONVERT function:
2351    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2352    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2353    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2354    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2355        if self.dialect.convert_type_before_value() {
2356            return self.parse_mssql_convert(is_try);
2357        }
2358        self.expect_token(&Token::LParen)?;
2359        let expr = self.parse_expr()?;
2360        if self.parse_keyword(Keyword::USING) {
2361            let charset = self.parse_object_name(false)?;
2362            self.expect_token(&Token::RParen)?;
2363            return Ok(Expr::Convert {
2364                is_try,
2365                expr: Box::new(expr),
2366                data_type: None,
2367                charset: Some(charset),
2368                target_before_value: false,
2369                styles: vec![],
2370            });
2371        }
2372        self.expect_token(&Token::Comma)?;
2373        let data_type = self.parse_data_type()?;
2374        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2375            Some(self.parse_object_name(false)?)
2376        } else {
2377            None
2378        };
2379        self.expect_token(&Token::RParen)?;
2380        Ok(Expr::Convert {
2381            is_try,
2382            expr: Box::new(expr),
2383            data_type: Some(data_type),
2384            charset,
2385            target_before_value: false,
2386            styles: vec![],
2387        })
2388    }
2389
2390    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2391    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2392        self.expect_token(&Token::LParen)?;
2393        let expr = self.parse_expr()?;
2394        self.expect_keyword_is(Keyword::AS)?;
2395        let data_type = self.parse_data_type()?;
2396        let format = self.parse_optional_cast_format()?;
2397        self.expect_token(&Token::RParen)?;
2398        Ok(Expr::Cast {
2399            kind,
2400            expr: Box::new(expr),
2401            data_type,
2402            format,
2403        })
2404    }
2405
2406    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2407    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2408        self.expect_token(&Token::LParen)?;
2409        let exists_node = Expr::Exists {
2410            negated,
2411            subquery: self.parse_query()?,
2412        };
2413        self.expect_token(&Token::RParen)?;
2414        Ok(exists_node)
2415    }
2416
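    /// Parses the parenthesized part of an `EXTRACT` expression, e.g. (illustrative)
    /// `EXTRACT(YEAR FROM order_date)` or, in Snowflake and the generic dialect, the
    /// comma form `EXTRACT(YEAR, order_date)`.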
2417    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2418        self.expect_token(&Token::LParen)?;
2419        let field = self.parse_date_time_field()?;
2420
2421        let syntax = if self.parse_keyword(Keyword::FROM) {
2422            ExtractSyntax::From
2423        } else if self.consume_token(&Token::Comma)
2424            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2425        {
2426            ExtractSyntax::Comma
2427        } else {
2428            return Err(ParserError::ParserError(
2429                "Expected 'FROM' or ','".to_string(),
2430            ));
2431        };
2432
2433        let expr = self.parse_expr()?;
2434        self.expect_token(&Token::RParen)?;
2435        Ok(Expr::Extract {
2436            field,
2437            expr: Box::new(expr),
2438            syntax,
2439        })
2440    }
2441
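    /// Parses the parenthesized part of a `CEIL` or `FLOOR` expression, e.g.
    /// (illustrative) `CEIL(x)`, `FLOOR(ts TO DAY)`, or the scale form `CEIL(x, 2)`.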
2442    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2443        self.expect_token(&Token::LParen)?;
2444        let expr = self.parse_expr()?;
2445        // Parse `CEIL/FLOOR(expr)`
2446        let field = if self.parse_keyword(Keyword::TO) {
2447            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2448            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2449        } else if self.consume_token(&Token::Comma) {
2450            // Parse `CEIL/FLOOR(expr, scale)`
2451            match self.parse_value()?.value {
2452                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2453                _ => {
2454                    return Err(ParserError::ParserError(
2455                        "Scale field can only be of number type".to_string(),
2456                    ))
2457                }
2458            }
2459        } else {
2460            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2461        };
2462        self.expect_token(&Token::RParen)?;
2463        if is_ceil {
2464            Ok(Expr::Ceil {
2465                expr: Box::new(expr),
2466                field,
2467            })
2468        } else {
2469            Ok(Expr::Floor {
2470                expr: Box::new(expr),
2471                field,
2472            })
2473        }
2474    }
2475
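    /// Parses a `POSITION` expression, e.g. (illustrative) `POSITION('@' IN email)`,
    /// falling back to an ordinary function call such as `POSITION('@', email)` when
    /// the `IN` form does not apply.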
2476    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2477        let between_prec = self.dialect.prec_value(Precedence::Between);
2478        let position_expr = self.maybe_parse(|p| {
2479            // Parse the special form `POSITION('@' IN field)`
2480            p.expect_token(&Token::LParen)?;
2481
2482            // Parse the subexpression up to the IN keyword
2483            let expr = p.parse_subexpr(between_prec)?;
2484            p.expect_keyword_is(Keyword::IN)?;
2485            let from = p.parse_expr()?;
2486            p.expect_token(&Token::RParen)?;
2487            Ok(Expr::Position {
2488                expr: Box::new(expr),
2489                r#in: Box::new(from),
2490            })
2491        })?;
2492        match position_expr {
2493            Some(expr) => Ok(expr),
2494            // Snowflake supports `position` as an ordinary function call
2495            // without the special `IN` syntax.
2496            None => self.parse_function(ObjectName::from(vec![ident])),
2497        }
2498    }
2499
2500    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
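    // e.g. `SUBSTRING('SQL Tutorial' FROM 1 FOR 3)` or `SUBSTR('SQL Tutorial', 1, 3)` (illustrative)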
2501    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2502        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2503            Keyword::SUBSTR => true,
2504            Keyword::SUBSTRING => false,
2505            _ => {
2506                self.prev_token();
2507                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2508            }
2509        };
2510        self.expect_token(&Token::LParen)?;
2511        let expr = self.parse_expr()?;
2512        let mut from_expr = None;
2513        let special = self.consume_token(&Token::Comma);
2514        if special || self.parse_keyword(Keyword::FROM) {
2515            from_expr = Some(self.parse_expr()?);
2516        }
2517
2518        let mut to_expr = None;
2519        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2520            to_expr = Some(self.parse_expr()?);
2521        }
2522        self.expect_token(&Token::RParen)?;
2523
2524        Ok(Expr::Substring {
2525            expr: Box::new(expr),
2526            substring_from: from_expr.map(Box::new),
2527            substring_for: to_expr.map(Box::new),
2528            special,
2529            shorthand,
2530        })
2531    }
2532
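    /// Parses an `OVERLAY` expression, e.g. (illustrative)
    /// `OVERLAY('abcdef' PLACING 'xx' FROM 3 FOR 2)`.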
2533    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2534        // Parse OVERLAY(<expr> PLACING <expr> FROM <expr> [FOR <expr>])
2535        self.expect_token(&Token::LParen)?;
2536        let expr = self.parse_expr()?;
2537        self.expect_keyword_is(Keyword::PLACING)?;
2538        let what_expr = self.parse_expr()?;
2539        self.expect_keyword_is(Keyword::FROM)?;
2540        let from_expr = self.parse_expr()?;
2541        let mut for_expr = None;
2542        if self.parse_keyword(Keyword::FOR) {
2543            for_expr = Some(self.parse_expr()?);
2544        }
2545        self.expect_token(&Token::RParen)?;
2546
2547        Ok(Expr::Overlay {
2548            expr: Box::new(expr),
2549            overlay_what: Box::new(what_expr),
2550            overlay_from: Box::new(from_expr),
2551            overlay_for: for_expr.map(Box::new),
2552        })
2553    }
2554
2555    /// ```sql
2556    /// TRIM ([BOTH | LEADING | TRAILING] ['text' FROM] 'text')
2557    /// TRIM ('text')
2558    /// TRIM(<expr> [, characters]) -- only Snowflake or BigQuery
2559    /// ```
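    ///
    /// e.g. (illustrative) `TRIM(BOTH 'x' FROM 'xxSQLxx')`, `TRIM('  padded  ')`,
    /// or `TRIM(col, 'xy')`.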
2560    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2561        self.expect_token(&Token::LParen)?;
2562        let mut trim_where = None;
2563        if let Token::Word(word) = self.peek_token().token {
2564            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2565                trim_where = Some(self.parse_trim_where()?);
2566            }
2567        }
2568        let expr = self.parse_expr()?;
2569        if self.parse_keyword(Keyword::FROM) {
2570            let trim_what = Box::new(expr);
2571            let expr = self.parse_expr()?;
2572            self.expect_token(&Token::RParen)?;
2573            Ok(Expr::Trim {
2574                expr: Box::new(expr),
2575                trim_where,
2576                trim_what: Some(trim_what),
2577                trim_characters: None,
2578            })
2579        } else if self.consume_token(&Token::Comma)
2580            && dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect)
2581        {
2582            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2583            self.expect_token(&Token::RParen)?;
2584            Ok(Expr::Trim {
2585                expr: Box::new(expr),
2586                trim_where: None,
2587                trim_what: None,
2588                trim_characters: Some(characters),
2589            })
2590        } else {
2591            self.expect_token(&Token::RParen)?;
2592            Ok(Expr::Trim {
2593                expr: Box::new(expr),
2594                trim_where,
2595                trim_what: None,
2596                trim_characters: None,
2597            })
2598        }
2599    }
2600
2601    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2602        let next_token = self.next_token();
2603        match &next_token.token {
2604            Token::Word(w) => match w.keyword {
2605                Keyword::BOTH => Ok(TrimWhereField::Both),
2606                Keyword::LEADING => Ok(TrimWhereField::Leading),
2607                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2608                _ => self.expected("trim_where field", next_token)?,
2609            },
2610            _ => self.expected("trim_where field", next_token),
2611        }
2612    }
2613
2614    /// Parses an array expression `[ex1, ex2, ..]`
2615    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
2616    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2617        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2618        self.expect_token(&Token::RBracket)?;
2619        Ok(Expr::Array(Array { elem: exprs, named }))
2620    }
2621
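    /// Parses the optional `ON OVERFLOW` clause of `LISTAGG`, e.g. (illustrative)
    /// the clause in `LISTAGG(name, ', ' ON OVERFLOW TRUNCATE '...' WITH COUNT)`.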
2622    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2623        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2624            if self.parse_keyword(Keyword::ERROR) {
2625                Ok(Some(ListAggOnOverflow::Error))
2626            } else {
2627                self.expect_keyword_is(Keyword::TRUNCATE)?;
2628                let filler = match self.peek_token().token {
2629                    Token::Word(w)
2630                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2631                    {
2632                        None
2633                    }
2634                    Token::SingleQuotedString(_)
2635                    | Token::EscapedStringLiteral(_)
2636                    | Token::UnicodeStringLiteral(_)
2637                    | Token::NationalStringLiteral(_)
2638                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2639                    _ => self.expected(
2640                        "either filler, WITH, or WITHOUT in LISTAGG",
2641                        self.peek_token(),
2642                    )?,
2643                };
2644                let with_count = self.parse_keyword(Keyword::WITH);
2645                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2646                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2647                }
2648                self.expect_keyword_is(Keyword::COUNT)?;
2649                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2650            }
2651        } else {
2652            Ok(None)
2653        }
2654    }
2655
2656    // This function parses date/time fields for the EXTRACT function-like
2657    // operator, interval qualifiers, and the ceil/floor operations.
2658    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2659    // so this function may need to be split in two.
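    // e.g. the `YEAR` in `EXTRACT(YEAR FROM ts)` or the `DAY` in `INTERVAL '7' DAY` (illustrative).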
2660    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2661        let next_token = self.next_token();
2662        match &next_token.token {
2663            Token::Word(w) => match w.keyword {
2664                Keyword::YEAR => Ok(DateTimeField::Year),
2665                Keyword::YEARS => Ok(DateTimeField::Years),
2666                Keyword::MONTH => Ok(DateTimeField::Month),
2667                Keyword::MONTHS => Ok(DateTimeField::Months),
2668                Keyword::WEEK => {
2669                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2670                        && self.consume_token(&Token::LParen)
2671                    {
2672                        let week_day = self.parse_identifier()?;
2673                        self.expect_token(&Token::RParen)?;
2674                        Some(week_day)
2675                    } else {
2676                        None
2677                    };
2678                    Ok(DateTimeField::Week(week_day))
2679                }
2680                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2681                Keyword::DAY => Ok(DateTimeField::Day),
2682                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2683                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2684                Keyword::DAYS => Ok(DateTimeField::Days),
2685                Keyword::DATE => Ok(DateTimeField::Date),
2686                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2687                Keyword::HOUR => Ok(DateTimeField::Hour),
2688                Keyword::HOURS => Ok(DateTimeField::Hours),
2689                Keyword::MINUTE => Ok(DateTimeField::Minute),
2690                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2691                Keyword::SECOND => Ok(DateTimeField::Second),
2692                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2693                Keyword::CENTURY => Ok(DateTimeField::Century),
2694                Keyword::DECADE => Ok(DateTimeField::Decade),
2695                Keyword::DOY => Ok(DateTimeField::Doy),
2696                Keyword::DOW => Ok(DateTimeField::Dow),
2697                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2698                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2699                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2700                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2701                Keyword::JULIAN => Ok(DateTimeField::Julian),
2702                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2703                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2704                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2705                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2706                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2707                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2708                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2709                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2710                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2711                Keyword::TIME => Ok(DateTimeField::Time),
2712                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2713                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2714                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2715                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2716                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2717                _ if self.dialect.allow_extract_custom() => {
2718                    self.prev_token();
2719                    let custom = self.parse_identifier()?;
2720                    Ok(DateTimeField::Custom(custom))
2721                }
2722                _ => self.expected("date/time field", next_token),
2723            },
2724            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2725                self.prev_token();
2726                let custom = self.parse_identifier()?;
2727                Ok(DateTimeField::Custom(custom))
2728            }
2729            _ => self.expected("date/time field", next_token),
2730        }
2731    }
2732
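    /// Parse the operand of a prefix `NOT`, the `NOT` keyword itself having already
    /// been consumed by the caller. `NOT EXISTS (SELECT ...)` is parsed as a negated
    /// `EXISTS` expression; anything else becomes a `UnaryOperator::Not` expression.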
2733    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2734        match self.peek_token().token {
2735            Token::Word(w) => match w.keyword {
2736                Keyword::EXISTS => {
2737                    let negated = true;
2738                    let _ = self.parse_keyword(Keyword::EXISTS);
2739                    self.parse_exists_expr(negated)
2740                }
2741                _ => Ok(Expr::UnaryOp {
2742                    op: UnaryOperator::Not,
2743                    expr: Box::new(
2744                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2745                    ),
2746                }),
2747            },
2748            _ => Ok(Expr::UnaryOp {
2749                op: UnaryOperator::Not,
2750                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2751            }),
2752        }
2753    }
2754
2755    /// Parse expression types that start with a left brace '{'.
2756    /// Examples:
2757    /// ```sql
2758    /// -- Dictionary expr.
2759    /// {'key1': 'value1', 'key2': 'value2'}
2760    ///
2761    /// -- Function call using the ODBC syntax.
2762    /// { fn CONCAT('foo', 'bar') }
2763    /// ```
2764    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2765        let token = self.expect_token(&Token::LBrace)?;
2766
2767        if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? {
2768            self.expect_token(&Token::RBrace)?;
2769            return Ok(fn_expr);
2770        }
2771
2772        if self.dialect.supports_dictionary_syntax() {
2773            self.prev_token(); // Put back the '{'
2774            return self.parse_duckdb_struct_literal();
2775        }
2776
2777        self.expected("an expression", token)
2778    }
2779
2780    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2781    ///
2782    /// # Errors
2783    /// This method will raise an error if the column list is empty or contains invalid identifiers,
2784    /// if the match expression is not a literal string, or if the search modifier is not valid.
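    ///
    /// For example:
    /// ```sql
    /// MATCH (title, body) AGAINST ('search terms' IN NATURAL LANGUAGE MODE)
    /// ```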
2785    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2786        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2787
2788        self.expect_keyword_is(Keyword::AGAINST)?;
2789
2790        self.expect_token(&Token::LParen)?;
2791
2792        // MySQL is too permissive about the value, IMO we can't validate it perfectly at the syntax level.
2793        let match_value = self.parse_value()?.value;
2794
2795        let in_natural_language_mode_keywords = &[
2796            Keyword::IN,
2797            Keyword::NATURAL,
2798            Keyword::LANGUAGE,
2799            Keyword::MODE,
2800        ];
2801
2802        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2803
2804        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2805
2806        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2807            if self.parse_keywords(with_query_expansion_keywords) {
2808                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2809            } else {
2810                Some(SearchModifier::InNaturalLanguageMode)
2811            }
2812        } else if self.parse_keywords(in_boolean_mode_keywords) {
2813            Some(SearchModifier::InBooleanMode)
2814        } else if self.parse_keywords(with_query_expansion_keywords) {
2815            Some(SearchModifier::WithQueryExpansion)
2816        } else {
2817            None
2818        };
2819
2820        self.expect_token(&Token::RParen)?;
2821
2822        Ok(Expr::MatchAgainst {
2823            columns,
2824            match_value,
2825            opt_search_modifier,
2826        })
2827    }
2828
2829    /// Parse an `INTERVAL` expression.
2830    ///
2831    /// Some syntactically valid intervals:
2832    ///
2833    /// ```sql
2834    ///   1. INTERVAL '1' DAY
2835    ///   2. INTERVAL '1-1' YEAR TO MONTH
2836    ///   3. INTERVAL '1' SECOND
2837    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2838    ///   5. INTERVAL '1.1' SECOND (2, 2)
2839    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2840    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
2841    /// ```
2842    ///
2843    /// Note that we do not currently attempt to parse the quoted value.
2844    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2845        // The SQL standard allows an optional sign before the value string, but
2846        // it is not clear if any implementations support that syntax, so we
2847        // don't currently try to parse it. (The sign can instead be included
2848        // inside the value string.)
2849
2850        // to match the different flavours of INTERVAL syntax, we only allow expressions
2851        // if the dialect requires an interval qualifier,
2852        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
2853        let value = if self.dialect.require_interval_qualifier() {
2854            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
2855            self.parse_expr()?
2856        } else {
2857            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
2858            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
2859            self.parse_prefix()?
2860        };
2861
2862        // Following the string literal is a qualifier which indicates the units
2863        // of the duration specified in the string literal.
2864        //
2865        // Note that PostgreSQL allows omitting the qualifier, so we provide
2866        // this more general implementation.
2867        let leading_field = if self.next_token_is_temporal_unit() {
2868            Some(self.parse_date_time_field()?)
2869        } else if self.dialect.require_interval_qualifier() {
2870            return parser_err!(
2871                "INTERVAL requires a unit after the literal value",
2872                self.peek_token().span.start
2873            );
2874        } else {
2875            None
2876        };
2877
2878        let (leading_precision, last_field, fsec_precision) =
2879            if leading_field == Some(DateTimeField::Second) {
2880                // SQL mandates special syntax for `SECOND TO SECOND` literals.
2881                // Instead of
2882                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
2883                // one must use the special format:
2884                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
2885                let last_field = None;
2886                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2887                (leading_precision, last_field, fsec_precision)
2888            } else {
2889                let leading_precision = self.parse_optional_precision()?;
2890                if self.parse_keyword(Keyword::TO) {
2891                    let last_field = Some(self.parse_date_time_field()?);
2892                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
2893                        self.parse_optional_precision()?
2894                    } else {
2895                        None
2896                    };
2897                    (leading_precision, last_field, fsec_precision)
2898                } else {
2899                    (leading_precision, None, None)
2900                }
2901            };
2902
2903        Ok(Expr::Interval(Interval {
2904            value: Box::new(value),
2905            leading_field,
2906            leading_precision,
2907            last_field,
2908            fractional_seconds_precision: fsec_precision,
2909        }))
2910    }
2911
2912    /// Peek at the next token and determine if it is a temporal unit
2913    /// like `second`.
2914    pub fn next_token_is_temporal_unit(&mut self) -> bool {
2915        if let Token::Word(word) = self.peek_token().token {
2916            matches!(
2917                word.keyword,
2918                Keyword::YEAR
2919                    | Keyword::YEARS
2920                    | Keyword::MONTH
2921                    | Keyword::MONTHS
2922                    | Keyword::WEEK
2923                    | Keyword::WEEKS
2924                    | Keyword::DAY
2925                    | Keyword::DAYS
2926                    | Keyword::HOUR
2927                    | Keyword::HOURS
2928                    | Keyword::MINUTE
2929                    | Keyword::MINUTES
2930                    | Keyword::SECOND
2931                    | Keyword::SECONDS
2932                    | Keyword::CENTURY
2933                    | Keyword::DECADE
2934                    | Keyword::DOW
2935                    | Keyword::DOY
2936                    | Keyword::EPOCH
2937                    | Keyword::ISODOW
2938                    | Keyword::ISOYEAR
2939                    | Keyword::JULIAN
2940                    | Keyword::MICROSECOND
2941                    | Keyword::MICROSECONDS
2942                    | Keyword::MILLENIUM
2943                    | Keyword::MILLENNIUM
2944                    | Keyword::MILLISECOND
2945                    | Keyword::MILLISECONDS
2946                    | Keyword::NANOSECOND
2947                    | Keyword::NANOSECONDS
2948                    | Keyword::QUARTER
2949                    | Keyword::TIMEZONE
2950                    | Keyword::TIMEZONE_HOUR
2951                    | Keyword::TIMEZONE_MINUTE
2952            )
2953        } else {
2954            false
2955        }
2956    }
2957
2958    /// Syntax
2959    /// ```sql
2960    /// -- typed
2961    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
2962    /// -- typeless
2963    /// STRUCT( expr1 [AS field_name] [, ... ])
2964    /// ```
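    ///
    /// For example:
    /// ```sql
    /// -- typed
    /// STRUCT<x INT64, y STRING>(1, 'foo')
    /// -- typeless
    /// STRUCT(1 AS x, 'foo' AS y)
    /// ```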
2965    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
2966        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
2967        self.prev_token();
2968        let (fields, trailing_bracket) =
2969            self.parse_struct_type_def(Self::parse_struct_field_def)?;
2970        if trailing_bracket.0 {
2971            return parser_err!(
2972                "unmatched > in STRUCT literal",
2973                self.peek_token().span.start
2974            );
2975        }
2976
2977        // Parse the struct values `(expr1 [, ... ])`
2978        self.expect_token(&Token::LParen)?;
2979        let values = self
2980            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
2981        self.expect_token(&Token::RParen)?;
2982
2983        Ok(Expr::Struct { values, fields })
2984    }
2985
2986    /// Parse an expression value for a struct literal
2987    /// Syntax
2988    /// ```sql
2989    /// expr [AS name]
2990    /// ```
2991    ///
2992    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
2993    /// is to be parsed as a field expression declared using the typed
2994    /// struct syntax [2], and `false` if using the typeless struct syntax [3].
2995    ///
2996    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
2997    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
2998    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
2999    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3000        let expr = self.parse_expr()?;
3001        if self.parse_keyword(Keyword::AS) {
3002            if typed_syntax {
3003                return parser_err!("Typed syntax does not allow AS", {
3004                    self.prev_token();
3005                    self.peek_token().span.start
3006                });
3007            }
3008            let field_name = self.parse_identifier()?;
3009            Ok(Expr::Named {
3010                expr: expr.into(),
3011                name: field_name,
3012            })
3013        } else {
3014            Ok(expr)
3015        }
3016    }
3017
3018    /// Parse a Struct type definition as a sequence of field-value pairs.
3019    /// The syntax of the Struct elem differs by dialect so it is customised
3020    /// by the `elem_parser` argument.
3021    ///
3022    /// Syntax
3023    /// ```sql
3024    /// Hive:
3025    /// STRUCT<field_name: field_type>
3026    ///
3027    /// BigQuery:
3028    /// STRUCT<[field_name] field_type>
3029    /// ```
3030    fn parse_struct_type_def<F>(
3031        &mut self,
3032        mut elem_parser: F,
3033    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3034    where
3035        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3036    {
3037        let start_token = self.peek_token();
3038        self.expect_keyword_is(Keyword::STRUCT)?;
3039
3040        // Nothing to do if we have no type information.
3041        if Token::Lt != self.peek_token() {
3042            return Ok((Default::default(), false.into()));
3043        }
3044        self.next_token();
3045
3046        let mut field_defs = vec![];
3047        let trailing_bracket = loop {
3048            let (def, trailing_bracket) = elem_parser(self)?;
3049            field_defs.push(def);
3050            if !self.consume_token(&Token::Comma) {
3051                break trailing_bracket;
3052            }
3053
3054            // Angle brackets are balanced so we only expect the trailing `>>` after
3055            // we've matched all field types for the current struct.
3056            // e.g. this is invalid syntax `STRUCT<STRUCT<INT>>>, INT>(NULL)`
3057            if trailing_bracket.0 {
3058                return parser_err!("unmatched > in STRUCT definition", start_token.span.start);
3059            }
3060        };
3061
3062        Ok((
3063            field_defs,
3064            self.expect_closing_angle_bracket(trailing_bracket)?,
3065        ))
3066    }
3067
3068    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
3069    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3070        self.expect_keyword_is(Keyword::STRUCT)?;
3071        self.expect_token(&Token::LParen)?;
3072        let struct_body = self.parse_comma_separated(|parser| {
3073            let field_name = parser.parse_identifier()?;
3074            let field_type = parser.parse_data_type()?;
3075
3076            Ok(StructField {
3077                field_name: Some(field_name),
3078                field_type,
3079            })
3080        });
3081        self.expect_token(&Token::RParen)?;
3082        struct_body
3083    }
3084
3085    /// Parse a field definition in a [struct] or [tuple].
3086    /// Syntax:
3087    ///
3088    /// ```sql
3089    /// [field_name] field_type
3090    /// ```
3091    ///
3092    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3093    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3094    fn parse_struct_field_def(
3095        &mut self,
3096    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3097        // Look beyond the next item to infer whether both field name
3098        // and type are specified.
3099        let is_anonymous_field = !matches!(
3100            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3101            (Token::Word(_), Token::Word(_))
3102        );
3103
3104        let field_name = if is_anonymous_field {
3105            None
3106        } else {
3107            Some(self.parse_identifier()?)
3108        };
3109
3110        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3111
3112        Ok((
3113            StructField {
3114                field_name,
3115                field_type,
3116            },
3117            trailing_bracket,
3118        ))
3119    }
3120
3121    /// DuckDB specific: Parse a [union] type definition as a sequence of field-value pairs.
3122    ///
3123    /// Syntax:
3124    ///
3125    /// ```sql
3126    /// UNION(field_name field_type[,...])
3127    /// ```
3128    ///
3129    /// [union]: https://duckdb.org/docs/sql/data_types/union.html
3130    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3131        self.expect_keyword_is(Keyword::UNION)?;
3132
3133        self.expect_token(&Token::LParen)?;
3134
3135        let fields = self.parse_comma_separated(|p| {
3136            Ok(UnionField {
3137                field_name: p.parse_identifier()?,
3138                field_type: p.parse_data_type()?,
3139            })
3140        })?;
3141
3142        self.expect_token(&Token::RParen)?;
3143
3144        Ok(fields)
3145    }
3146
3147    /// DuckDB specific: Parse a duckdb [dictionary]
3148    ///
3149    /// Syntax:
3150    ///
3151    /// ```sql
3152    /// {'field_name': expr1[, ... ]}
3153    /// ```
3154    ///
3155    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3156    fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
3157        self.expect_token(&Token::LBrace)?;
3158
3159        let fields =
3160            self.parse_comma_separated0(Self::parse_duckdb_dictionary_field, Token::RBrace)?;
3161
3162        self.expect_token(&Token::RBrace)?;
3163
3164        Ok(Expr::Dictionary(fields))
3165    }
3166
3167    /// Parse a field for a duckdb [dictionary]
3168    ///
3169    /// Syntax
3170    ///
3171    /// ```sql
3172    /// 'name': expr
3173    /// ```
3174    ///
3175    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3176    fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3177        let key = self.parse_identifier()?;
3178
3179        self.expect_token(&Token::Colon)?;
3180
3181        let expr = self.parse_expr()?;
3182
3183        Ok(DictionaryField {
3184            key,
3185            value: Box::new(expr),
3186        })
3187    }
3188
3189    /// DuckDB specific: Parse a duckdb [map]
3190    ///
3191    /// Syntax:
3192    ///
3193    /// ```sql
3194    /// Map {key1: value1[, ... ]}
3195    /// ```
3196    ///
3197    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3198    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3199        self.expect_token(&Token::LBrace)?;
3200        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3201        self.expect_token(&Token::RBrace)?;
3202        Ok(Expr::Map(Map { entries: fields }))
3203    }
3204
3205    /// Parse a field for a duckdb [map]
3206    ///
3207    /// Syntax
3208    ///
3209    /// ```sql
3210    /// key: value
3211    /// ```
3212    ///
3213    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3214    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3215        let key = self.parse_expr()?;
3216
3217        self.expect_token(&Token::Colon)?;
3218
3219        let value = self.parse_expr()?;
3220
3221        Ok(MapEntry {
3222            key: Box::new(key),
3223            value: Box::new(value),
3224        })
3225    }
3226
3227    /// Parse clickhouse [map]
3228    ///
3229    /// Syntax
3230    ///
3231    /// ```sql
3232    /// Map(key_data_type, value_data_type)
3233    /// ```
3234    ///
3235    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3236    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3237        self.expect_keyword_is(Keyword::MAP)?;
3238        self.expect_token(&Token::LParen)?;
3239        let key_data_type = self.parse_data_type()?;
3240        self.expect_token(&Token::Comma)?;
3241        let value_data_type = self.parse_data_type()?;
3242        self.expect_token(&Token::RParen)?;
3243
3244        Ok((key_data_type, value_data_type))
3245    }
3246
3247    /// Parse clickhouse [tuple]
3248    ///
3249    /// Syntax
3250    ///
3251    /// ```sql
3252    /// Tuple([field_name] field_type, ...)
3253    /// ```
3254    ///
3255    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3256    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3257        self.expect_keyword_is(Keyword::TUPLE)?;
3258        self.expect_token(&Token::LParen)?;
3259        let mut field_defs = vec![];
3260        loop {
3261            let (def, _) = self.parse_struct_field_def()?;
3262            field_defs.push(def);
3263            if !self.consume_token(&Token::Comma) {
3264                break;
3265            }
3266        }
3267        self.expect_token(&Token::RParen)?;
3268
3269        Ok(field_defs)
3270    }
3271
3272    /// For nested types that use the angle bracket syntax, this matches either
3273    /// `>`, `>>` or nothing depending on which variant is expected (specified by the previously
3274    /// matched `trailing_bracket` argument). It returns whether there is a trailing `>`
3275    /// left to be matched (i.e. whether `>>` was matched).
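    ///
    /// For example, in `STRUCT<a STRUCT<b INT64>>` the closing `>>` is a single
    /// token: the inner call consumes it and reports a trailing `>`, which the
    /// outer call then treats as already matched.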
3276    fn expect_closing_angle_bracket(
3277        &mut self,
3278        trailing_bracket: MatchedTrailingBracket,
3279    ) -> Result<MatchedTrailingBracket, ParserError> {
3280        let trailing_bracket = if !trailing_bracket.0 {
3281            match self.peek_token().token {
3282                Token::Gt => {
3283                    self.next_token();
3284                    false.into()
3285                }
3286                Token::ShiftRight => {
3287                    self.next_token();
3288                    true.into()
3289                }
3290                _ => return self.expected(">", self.peek_token()),
3291            }
3292        } else {
3293            false.into()
3294        };
3295
3296        Ok(trailing_bracket)
3297    }
3298
3299    /// Parse an operator following an expression
3300    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3301        // allow the dialect to override infix parsing
3302        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3303            return infix;
3304        }
3305
3306        let dialect = self.dialect;
3307
3308        self.advance_token();
3309        let tok = self.get_current_token();
3310        let tok_index = self.get_current_index();
3311        let span = tok.span;
3312        let regular_binary_operator = match &tok.token {
3313            Token::Spaceship => Some(BinaryOperator::Spaceship),
3314            Token::DoubleEq => Some(BinaryOperator::Eq),
3315            Token::Assignment => Some(BinaryOperator::Assignment),
3316            Token::Eq => Some(BinaryOperator::Eq),
3317            Token::Neq => Some(BinaryOperator::NotEq),
3318            Token::Gt => Some(BinaryOperator::Gt),
3319            Token::GtEq => Some(BinaryOperator::GtEq),
3320            Token::Lt => Some(BinaryOperator::Lt),
3321            Token::LtEq => Some(BinaryOperator::LtEq),
3322            Token::Plus => Some(BinaryOperator::Plus),
3323            Token::Minus => Some(BinaryOperator::Minus),
3324            Token::Mul => Some(BinaryOperator::Multiply),
3325            Token::Mod => Some(BinaryOperator::Modulo),
3326            Token::StringConcat => Some(BinaryOperator::StringConcat),
3327            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3328            Token::Caret => {
3329                // In PostgreSQL, ^ stands for the exponentiation operation,
3330                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3331                if dialect_is!(dialect is PostgreSqlDialect) {
3332                    Some(BinaryOperator::PGExp)
3333                } else {
3334                    Some(BinaryOperator::BitwiseXor)
3335                }
3336            }
3337            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3338            Token::Div => Some(BinaryOperator::Divide),
3339            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3340                Some(BinaryOperator::DuckIntegerDivide)
3341            }
3342            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3343                Some(BinaryOperator::PGBitwiseShiftLeft)
3344            }
3345            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3346                Some(BinaryOperator::PGBitwiseShiftRight)
3347            }
3348            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3349                Some(BinaryOperator::PGBitwiseXor)
3350            }
3351            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3352                Some(BinaryOperator::PGOverlap)
3353            }
3354            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3355                Some(BinaryOperator::PGOverlap)
3356            }
3357            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3358                Some(BinaryOperator::PGStartsWith)
3359            }
3360            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3361            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3362            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3363            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3364            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3365            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3366            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3367            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3368            Token::Arrow => Some(BinaryOperator::Arrow),
3369            Token::LongArrow => Some(BinaryOperator::LongArrow),
3370            Token::HashArrow => Some(BinaryOperator::HashArrow),
3371            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3372            Token::AtArrow => Some(BinaryOperator::AtArrow),
3373            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3374            Token::HashMinus => Some(BinaryOperator::HashMinus),
3375            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3376            Token::AtAt => Some(BinaryOperator::AtAt),
3377            Token::Question => Some(BinaryOperator::Question),
3378            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3379            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3380            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3381            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3382                Some(BinaryOperator::DoubleHash)
3383            }
3384
3385            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3386                Some(BinaryOperator::AndLt)
3387            }
3388            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3389                Some(BinaryOperator::AndGt)
3390            }
3391            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3392                Some(BinaryOperator::QuestionDash)
3393            }
3394            Token::AmpersandLeftAngleBracketVerticalBar
3395                if self.dialect.supports_geometric_types() =>
3396            {
3397                Some(BinaryOperator::AndLtPipe)
3398            }
3399            Token::VerticalBarAmpersandRightAngleBracket
3400                if self.dialect.supports_geometric_types() =>
3401            {
3402                Some(BinaryOperator::PipeAndGt)
3403            }
3404            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3405                Some(BinaryOperator::LtDashGt)
3406            }
3407            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3408                Some(BinaryOperator::LtCaret)
3409            }
3410            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3411                Some(BinaryOperator::GtCaret)
3412            }
3413            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3414                Some(BinaryOperator::QuestionHash)
3415            }
3416            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3417                Some(BinaryOperator::QuestionDoublePipe)
3418            }
3419            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3420                Some(BinaryOperator::QuestionDashPipe)
3421            }
3422            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3423                Some(BinaryOperator::TildeEq)
3424            }
3425            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3426                Some(BinaryOperator::LtLtPipe)
3427            }
3428            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3429                Some(BinaryOperator::PipeGtGt)
3430            }
3431            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3432
3433            Token::Word(w) => match w.keyword {
3434                Keyword::AND => Some(BinaryOperator::And),
3435                Keyword::OR => Some(BinaryOperator::Or),
3436                Keyword::XOR => Some(BinaryOperator::Xor),
3437                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3438                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3439                    self.expect_token(&Token::LParen)?;
3440                    // there are special rules for operator names in
3441                    // postgres so we can not use 'parse_object'
3442                    // or similar.
3443                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3444                    let mut idents = vec![];
3445                    loop {
3446                        self.advance_token();
3447                        idents.push(self.get_current_token().to_string());
3448                        if !self.consume_token(&Token::Period) {
3449                            break;
3450                        }
3451                    }
3452                    self.expect_token(&Token::RParen)?;
3453                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3454                }
3455                _ => None,
3456            },
3457            _ => None,
3458        };
3459
3460        let tok = self.token_at(tok_index);
3461        if let Some(op) = regular_binary_operator {
3462            if let Some(keyword) =
3463                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3464            {
3465                self.expect_token(&Token::LParen)?;
3466                let right = if self.peek_sub_query() {
3467                    // We have a subquery ahead (SELECT/WITH ...); need to rewind and
3468                    // use the parentheses to parse the subquery as an expression.
3469                    self.prev_token(); // LParen
3470                    self.parse_subexpr(precedence)?
3471                } else {
3472                    // Non-subquery expression
3473                    let right = self.parse_subexpr(precedence)?;
3474                    self.expect_token(&Token::RParen)?;
3475                    right
3476                };
3477
3478                if !matches!(
3479                    op,
3480                    BinaryOperator::Gt
3481                        | BinaryOperator::Lt
3482                        | BinaryOperator::GtEq
3483                        | BinaryOperator::LtEq
3484                        | BinaryOperator::Eq
3485                        | BinaryOperator::NotEq
3486                ) {
3487                    return parser_err!(
3488                        format!(
3489                        "Expected one of [=, >, <, >=, <=, !=] as comparison operator, found: {op}"
3490                    ),
3491                        span.start
3492                    );
3493                };
3494
3495                Ok(match keyword {
3496                    Keyword::ALL => Expr::AllOp {
3497                        left: Box::new(expr),
3498                        compare_op: op,
3499                        right: Box::new(right),
3500                    },
3501                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3502                        left: Box::new(expr),
3503                        compare_op: op,
3504                        right: Box::new(right),
3505                        is_some: keyword == Keyword::SOME,
3506                    },
3507                    _ => unreachable!(),
3508                })
3509            } else {
3510                Ok(Expr::BinaryOp {
3511                    left: Box::new(expr),
3512                    op,
3513                    right: Box::new(self.parse_subexpr(precedence)?),
3514                })
3515            }
3516        } else if let Token::Word(w) = &tok.token {
3517            match w.keyword {
3518                Keyword::IS => {
3519                    if self.parse_keyword(Keyword::NULL) {
3520                        Ok(Expr::IsNull(Box::new(expr)))
3521                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3522                        Ok(Expr::IsNotNull(Box::new(expr)))
3523                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3524                        Ok(Expr::IsTrue(Box::new(expr)))
3525                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3526                        Ok(Expr::IsNotTrue(Box::new(expr)))
3527                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3528                        Ok(Expr::IsFalse(Box::new(expr)))
3529                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3530                        Ok(Expr::IsNotFalse(Box::new(expr)))
3531                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3532                        Ok(Expr::IsUnknown(Box::new(expr)))
3533                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3534                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3535                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3536                        let expr2 = self.parse_expr()?;
3537                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3538                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3539                    {
3540                        let expr2 = self.parse_expr()?;
3541                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3542                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3543                        Ok(is_normalized)
3544                    } else {
3545                        self.expected(
3546                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3547                            self.peek_token(),
3548                        )
3549                    }
3550                }
3551                Keyword::AT => {
3552                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3553                    Ok(Expr::AtTimeZone {
3554                        timestamp: Box::new(expr),
3555                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3556                    })
3557                }
3558                Keyword::NOT
3559                | Keyword::IN
3560                | Keyword::BETWEEN
3561                | Keyword::LIKE
3562                | Keyword::ILIKE
3563                | Keyword::SIMILAR
3564                | Keyword::REGEXP
3565                | Keyword::RLIKE => {
3566                    self.prev_token();
3567                    let negated = self.parse_keyword(Keyword::NOT);
3568                    let regexp = self.parse_keyword(Keyword::REGEXP);
3569                    let rlike = self.parse_keyword(Keyword::RLIKE);
3570                    if regexp || rlike {
3571                        Ok(Expr::RLike {
3572                            negated,
3573                            expr: Box::new(expr),
3574                            pattern: Box::new(
3575                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3576                            ),
3577                            regexp,
3578                        })
3579                    } else if self.parse_keyword(Keyword::IN) {
3580                        self.parse_in(expr, negated)
3581                    } else if self.parse_keyword(Keyword::BETWEEN) {
3582                        self.parse_between(expr, negated)
3583                    } else if self.parse_keyword(Keyword::LIKE) {
3584                        Ok(Expr::Like {
3585                            negated,
3586                            any: self.parse_keyword(Keyword::ANY),
3587                            expr: Box::new(expr),
3588                            pattern: Box::new(
3589                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3590                            ),
3591                            escape_char: self.parse_escape_char()?,
3592                        })
3593                    } else if self.parse_keyword(Keyword::ILIKE) {
3594                        Ok(Expr::ILike {
3595                            negated,
3596                            any: self.parse_keyword(Keyword::ANY),
3597                            expr: Box::new(expr),
3598                            pattern: Box::new(
3599                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3600                            ),
3601                            escape_char: self.parse_escape_char()?,
3602                        })
3603                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3604                        Ok(Expr::SimilarTo {
3605                            negated,
3606                            expr: Box::new(expr),
3607                            pattern: Box::new(
3608                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3609                            ),
3610                            escape_char: self.parse_escape_char()?,
3611                        })
3612                    } else {
3613                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3614                    }
3615                }
3616                // Can only happen if `get_next_precedence` got out of sync with this function
3617                _ => parser_err!(
3618                    format!("No infix parser for token {:?}", tok.token),
3619                    tok.span.start
3620                ),
3621            }
3622        } else if Token::DoubleColon == *tok {
3623            Ok(Expr::Cast {
3624                kind: CastKind::DoubleColon,
3625                expr: Box::new(expr),
3626                data_type: self.parse_data_type()?,
3627                format: None,
3628            })
3629        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3630            Ok(Expr::UnaryOp {
3631                op: UnaryOperator::PGPostfixFactorial,
3632                expr: Box::new(expr),
3633            })
3634        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3635            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3636        {
3637            self.prev_token();
3638            self.parse_json_access(expr)
3639        } else {
3640            // Can only happen if `get_next_precedence` got out of sync with this function
3641            parser_err!(
3642                format!("No infix parser for token {:?}", tok.token),
3643                tok.span.start
3644            )
3645        }
3646    }
3647
3648    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
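    ///
    /// For example, the `ESCAPE '!'` in:
    /// ```sql
    /// name LIKE '50!%' ESCAPE '!'
    /// ```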
3649    pub fn parse_escape_char(&mut self) -> Result<Option<String>, ParserError> {
3650        if self.parse_keyword(Keyword::ESCAPE) {
3651            Ok(Some(self.parse_literal_string()?))
3652        } else {
3653            Ok(None)
3654        }
3655    }
3656
3657    /// Parses an array subscript like
3658    /// * `[:]`
3659    /// * `[l]`
3660    /// * `[l:]`
3661    /// * `[:u]`
3662    /// * `[l:u]`
3663    /// * `[l:u:s]`
3664    ///
3665    /// Parser is right after `[`
3666    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3667        // at either `<lower>:(rest)` or `:(rest)]`
3668        let lower_bound = if self.consume_token(&Token::Colon) {
3669            None
3670        } else {
3671            Some(self.parse_expr()?)
3672        };
3673
3674        // check for end
3675        if self.consume_token(&Token::RBracket) {
3676            if let Some(lower_bound) = lower_bound {
3677                return Ok(Subscript::Index { index: lower_bound });
3678            };
3679            return Ok(Subscript::Slice {
3680                lower_bound,
3681                upper_bound: None,
3682                stride: None,
3683            });
3684        }
3685
3686        // consume the `:`
3687        if lower_bound.is_some() {
3688            self.expect_token(&Token::Colon)?;
3689        }
3690
3691        // we are now at either `]` or `<upper>(rest)]`
3692        let upper_bound = if self.consume_token(&Token::RBracket) {
3693            return Ok(Subscript::Slice {
3694                lower_bound,
3695                upper_bound: None,
3696                stride: None,
3697            });
3698        } else {
3699            Some(self.parse_expr()?)
3700        };
3701
3702        // check for end
3703        if self.consume_token(&Token::RBracket) {
3704            return Ok(Subscript::Slice {
3705                lower_bound,
3706                upper_bound,
3707                stride: None,
3708            });
3709        }
3710
3711        // we are now at `:]` or `:stride]`
3712        self.expect_token(&Token::Colon)?;
3713        let stride = if self.consume_token(&Token::RBracket) {
3714            None
3715        } else {
3716            Some(self.parse_expr()?)
3717        };
3718
3719        if stride.is_some() {
3720            self.expect_token(&Token::RBracket)?;
3721        }
3722
3723        Ok(Subscript::Slice {
3724            lower_bound,
3725            upper_bound,
3726            stride,
3727        })
3728    }
3729
3730    /// Parse a multi-dimensional array access like `[1:3][1][1]`
3731    pub fn parse_multi_dim_subscript(
3732        &mut self,
3733        chain: &mut Vec<AccessExpr>,
3734    ) -> Result<(), ParserError> {
3735        while self.consume_token(&Token::LBracket) {
3736            self.parse_subscript(chain)?;
3737        }
3738        Ok(())
3739    }
3740
3741    /// Parses an array subscript like `[1:3]`
3742    ///
3743    /// Parser is right after `[`
3744    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3745        let subscript = self.parse_subscript_inner()?;
3746        chain.push(AccessExpr::Subscript(subscript));
3747        Ok(())
3748    }
3749
3750    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3751        let token = self.next_token();
3752        match token.token {
3753            Token::Word(Word {
3754                value,
3755                // path segments in SF dot notation can be unquoted or double-quoted
3756                quote_style: quote_style @ (Some('"') | None),
3757                // some experimentation suggests that snowflake permits
3758                // any keyword here unquoted.
3759                keyword: _,
3760            }) => Ok(JsonPathElem::Dot {
3761                key: value,
3762                quoted: quote_style.is_some(),
3763            }),
3764
3765            // This token should never be generated on snowflake or generic
3766            // dialects, but we handle it just in case this is used on future
3767            // dialects.
3768            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3769
3770            _ => self.expected("variant object key name", token),
3771        }
3772    }
3773
3774    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3775        let path = self.parse_json_path()?;
3776        Ok(Expr::JsonAccess {
3777            value: Box::new(expr),
3778            path,
3779        })
3780    }
3781
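    /// Parse a JSON path following a value, such as the `:names[0].first` in
    /// `SELECT src:names[0].first FROM t` (Snowflake-style colon notation).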
3782    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3783        let mut path = Vec::new();
3784        loop {
3785            match self.next_token().token {
3786                Token::Colon if path.is_empty() => {
3787                    path.push(self.parse_json_path_object_key()?);
3788                }
3789                Token::Period if !path.is_empty() => {
3790                    path.push(self.parse_json_path_object_key()?);
3791                }
3792                Token::LBracket => {
3793                    let key = self.parse_expr()?;
3794                    self.expect_token(&Token::RBracket)?;
3795
3796                    path.push(JsonPathElem::Bracket { key });
3797                }
3798                _ => {
3799                    self.prev_token();
3800                    break;
3801                }
3802            };
3803        }
3804
3805        debug_assert!(!path.is_empty());
3806        Ok(JsonPath { path })
3807    }
3808
3809    /// Parses the parens following the `[ NOT ] IN` operator.
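    ///
    /// For example, this parses the right-hand side of:
    /// ```sql
    /// x IN (1, 2, 3)
    /// x NOT IN (SELECT y FROM t)
    /// x IN UNNEST(array_col)
    /// ```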
3810    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3811        // BigQuery allows `IN UNNEST(array_expression)`
3812        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3813        if self.parse_keyword(Keyword::UNNEST) {
3814            self.expect_token(&Token::LParen)?;
3815            let array_expr = self.parse_expr()?;
3816            self.expect_token(&Token::RParen)?;
3817            return Ok(Expr::InUnnest {
3818                expr: Box::new(expr),
3819                array_expr: Box::new(array_expr),
3820                negated,
3821            });
3822        }
3823        self.expect_token(&Token::LParen)?;
3824        let in_op = match self.maybe_parse(|p| p.parse_query_body(p.dialect.prec_unknown()))? {
3825            Some(subquery) => Expr::InSubquery {
3826                expr: Box::new(expr),
3827                subquery,
3828                negated,
3829            },
3830            None => Expr::InList {
3831                expr: Box::new(expr),
3832                list: if self.dialect.supports_in_empty_list() {
3833                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
3834                } else {
3835                    self.parse_comma_separated(Parser::parse_expr)?
3836                },
3837                negated,
3838            },
3839        };
3840        self.expect_token(&Token::RParen)?;
3841        Ok(in_op)
3842    }
3843
3844    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
3845    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3846        // Stop parsing subexpressions for <low> and <high> on tokens with
3847        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
3848        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3849        self.expect_keyword_is(Keyword::AND)?;
3850        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3851        Ok(Expr::Between {
3852            expr: Box::new(expr),
3853            negated,
3854            low: Box::new(low),
3855            high: Box::new(high),
3856        })
3857    }
3858
3859    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
3860    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3861        Ok(Expr::Cast {
3862            kind: CastKind::DoubleColon,
3863            expr: Box::new(expr),
3864            data_type: self.parse_data_type()?,
3865            format: None,
3866        })
3867    }
3868
3869    /// Get the precedence of the next token
3870    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
3871        self.dialect.get_next_precedence_default(self)
3872    }
3873
3874    /// Return the token at the given location, or EOF if the index is beyond
3875    /// the length of the current set of tokens.
3876    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
3877        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
3878    }
3879
3880    /// Return the first non-whitespace token that has not yet been processed
3881    /// or Token::EOF
3882    ///
3883    /// See [`Self::peek_token_ref`] to avoid the copy.
3884    pub fn peek_token(&self) -> TokenWithSpan {
3885        self.peek_nth_token(0)
3886    }
3887
3888    /// Return a reference to the first non-whitespace token that has not yet
3889    /// been processed or Token::EOF
3890    pub fn peek_token_ref(&self) -> &TokenWithSpan {
3891        self.peek_nth_token_ref(0)
3892    }
3893
3894    /// Returns the `N` next non-whitespace tokens that have not yet been
3895    /// processed.
3896    ///
3897    /// Example:
3898    /// ```rust
3899    /// # use sqlparser::dialect::GenericDialect;
3900    /// # use sqlparser::parser::Parser;
3901    /// # use sqlparser::keywords::Keyword;
3902    /// # use sqlparser::tokenizer::{Token, Word};
3903    /// let dialect = GenericDialect {};
3904    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
3905    ///
3906    /// // Note that Rust infers the number of tokens to peek based on the
3907    /// // length of the slice pattern!
3908    /// assert!(matches!(
3909    ///     parser.peek_tokens(),
3910    ///     [
3911    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
3912    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
3913    ///     ]
3914    /// ));
3915    /// ```
3916    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
3917        self.peek_tokens_with_location()
3918            .map(|with_loc| with_loc.token)
3919    }
3920
3921    /// Returns the `N` next non-whitespace tokens with locations that have not
3922    /// yet been processed.
3923    ///
3924    /// See [`Self::peek_tokens`] for an example.
3925    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
3926        let mut index = self.index;
3927        core::array::from_fn(|_| loop {
3928            let token = self.tokens.get(index);
3929            index += 1;
3930            if let Some(TokenWithSpan {
3931                token: Token::Whitespace(_),
3932                span: _,
3933            }) = token
3934            {
3935                continue;
3936            }
3937            break token.cloned().unwrap_or(TokenWithSpan {
3938                token: Token::EOF,
3939                span: Span::empty(),
3940            });
3941        })
3942    }
3943
3944    /// Returns references to the `N` next non-whitespace tokens
3945    /// that have not yet been processed.
3946    ///
3947    /// See [`Self::peek_tokens`] for an example.
3948    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
3949        let mut index = self.index;
3950        core::array::from_fn(|_| loop {
3951            let token = self.tokens.get(index);
3952            index += 1;
3953            if let Some(TokenWithSpan {
3954                token: Token::Whitespace(_),
3955                span: _,
3956            }) = token
3957            {
3958                continue;
3959            }
3960            break token.unwrap_or(&EOF_TOKEN);
3961        })
3962    }
3963
3964    /// Return nth non-whitespace token that has not yet been processed
3965    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
3966        self.peek_nth_token_ref(n).clone()
3967    }
3968
3969    /// Return nth non-whitespace token that has not yet been processed
3970    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
3971        let mut index = self.index;
3972        loop {
3973            index += 1;
3974            match self.tokens.get(index - 1) {
3975                Some(TokenWithSpan {
3976                    token: Token::Whitespace(_),
3977                    span: _,
3978                }) => continue,
3979                non_whitespace => {
3980                    if n == 0 {
3981                        return non_whitespace.unwrap_or(&EOF_TOKEN);
3982                    }
3983                    n -= 1;
3984                }
3985            }
3986        }
3987    }
3988
3989    /// Return the first token, possibly whitespace, that has not yet been processed
3990    /// (or a `Token::EOF` token if the end of input has been reached).
3991    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
3992        self.peek_nth_token_no_skip(0)
3993    }
3994
3995    /// Return the nth token, possibly whitespace, that has not yet been processed.
3996    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
3997        self.tokens
3998            .get(self.index + n)
3999            .cloned()
4000            .unwrap_or(TokenWithSpan {
4001                token: Token::EOF,
4002                span: Span::empty(),
4003            })
4004    }
4005
4006    /// Return true if the next tokens exactly match `expected`
4007    ///
4008    /// Does not advance the current token.
4009    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4010        let index = self.index;
4011        let matched = self.parse_keywords(expected);
4012        self.index = index;
4013        matched
4014    }
4015
4016    /// Advances to the next non-whitespace token and returns a copy.
4017    ///
4018    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4019    /// avoid the copy.
4020    pub fn next_token(&mut self) -> TokenWithSpan {
4021        self.advance_token();
4022        self.get_current_token().clone()
4023    }
4024
4025    /// Returns the index of the current token
4026    ///
4027    /// This can be used with APIs that expect an index, such as
4028    /// [`Self::token_at`]
4029    pub fn get_current_index(&self) -> usize {
4030        self.index.saturating_sub(1)
4031    }
4032
4033    /// Return the next unprocessed token, possibly whitespace.
4034    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4035        self.index += 1;
4036        self.tokens.get(self.index - 1)
4037    }
4038
4039    /// Advances the current token to the next non-whitespace token
4040    ///
4041    /// See [`Self::get_current_token`] to get the current token after advancing
4042    pub fn advance_token(&mut self) {
4043        loop {
4044            self.index += 1;
4045            match self.tokens.get(self.index - 1) {
4046                Some(TokenWithSpan {
4047                    token: Token::Whitespace(_),
4048                    span: _,
4049                }) => continue,
4050                _ => break,
4051            }
4052        }
4053    }
4054
4055    /// Returns a reference to the current token
4056    ///
4057    /// Does not advance the current token.
4058    pub fn get_current_token(&self) -> &TokenWithSpan {
4059        self.token_at(self.index.saturating_sub(1))
4060    }
4061
4062    /// Returns a reference to the previous token
4063    ///
4064    /// Does not advance the current token.
4065    pub fn get_previous_token(&self) -> &TokenWithSpan {
4066        self.token_at(self.index.saturating_sub(2))
4067    }
4068
4069    /// Returns a reference to the next token
4070    ///
4071    /// Does not advance the current token.
4072    pub fn get_next_token(&self) -> &TokenWithSpan {
4073        self.token_at(self.index)
4074    }
4075
4076    /// Seek back to the last non-whitespace token.
4077    ///
4078    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4079    /// to call it after `next_token()` has returned an EOF token.
4080    ///
4081    // TODO rename to backup_token and deprecate prev_token?
4082    pub fn prev_token(&mut self) {
4083        loop {
4084            assert!(self.index > 0);
4085            self.index -= 1;
4086            if let Some(TokenWithSpan {
4087                token: Token::Whitespace(_),
4088                span: _,
4089            }) = self.tokens.get(self.index)
4090            {
4091                continue;
4092            }
4093            return;
4094        }
4095    }
4096
4097    /// Report `found` was encountered instead of `expected`
4098    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4099        parser_err!(
4100            format!("Expected: {expected}, found: {found}"),
4101            found.span.start
4102        )
4103    }
4104
4105    /// Report `found` was encountered instead of `expected`
4106    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4107        parser_err!(
4108            format!("Expected: {expected}, found: {found}"),
4109            found.span.start
4110        )
4111    }
4112
4113    /// Report that the token at `index` was found instead of `expected`.
4114    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4115        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4116        parser_err!(
4117            format!("Expected: {expected}, found: {found}"),
4118            found.span.start
4119        )
4120    }
4121
4122    /// If the current token is the `expected` keyword, consume it and return
4123    /// `true`. Otherwise, no tokens are consumed and `false` is returned.
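    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// assert!(parser.parse_keyword(Keyword::ORDER));  // consumes `ORDER`
    /// assert!(!parser.parse_keyword(Keyword::ORDER)); // next token is `BY`; nothing is consumed
    /// ```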
4124    #[must_use]
4125    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4126        if self.peek_keyword(expected) {
4127            self.advance_token();
4128            true
4129        } else {
4130            false
4131        }
4132    }
4133
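    /// Returns `true` if the next non-whitespace token is the `expected`
    /// keyword. Does not advance the current token.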
4134    #[must_use]
4135    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4136        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4137    }
4138
4139    /// If the current token is the `expected` keyword followed by the
4140    /// specified tokens, consume them and return `true`.
4141    /// Otherwise, no tokens are consumed and `false` is returned.
4142    ///
4143    /// Note that if `tokens` is long, this function is not efficient: it
4144    /// peeks ahead with `peek_nth_token` once for every element of `tokens`
4145    /// before consuming anything.
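    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP (x)").unwrap();
    /// // `GROUP` is followed by `(`, so both tokens are consumed.
    /// assert!(parser.parse_keyword_with_tokens(Keyword::GROUP, &[Token::LParen]));
    /// ```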
4146    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4147        match &self.peek_token_ref().token {
4148            Token::Word(w) if expected == w.keyword => {
4149                for (idx, token) in tokens.iter().enumerate() {
4150                    if self.peek_nth_token_ref(idx + 1).token != *token {
4151                        return false;
4152                    }
4153                }
4154                // consume all tokens
4155                for _ in 0..(tokens.len() + 1) {
4156                    self.advance_token();
4157                }
4158                true
4159            }
4160            _ => false,
4161        }
4162    }
4163
4164    /// If the current and subsequent tokens exactly match the `keywords`
4165    /// sequence, consume them and return `true`. Otherwise, no tokens are
4166    /// consumed and `false` is returned.
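    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // The whole sequence must match, otherwise the parser is left untouched.
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::LIMIT]));
    /// assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// ```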
4167    #[must_use]
4168    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4169        let index = self.index;
4170        for &keyword in keywords {
4171            if !self.parse_keyword(keyword) {
4172                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4173                // reset index and return immediately
4174                self.index = index;
4175                return false;
4176            }
4177        }
4178        true
4179    }
4180
4181    /// If the current token is one of the given `keywords`, returns the keyword
4182    /// that matches, without consuming the token. Otherwise, returns [`None`].
4183    #[must_use]
4184    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4185        for keyword in keywords {
4186            if self.peek_keyword(*keyword) {
4187                return Some(*keyword);
4188            }
4189        }
4190        None
4191    }
4192
4193    /// If the current token is one of the given `keywords`, consume the token
4194    /// and return the keyword that matches. Otherwise, no tokens are consumed
4195    /// and [`None`] is returned.
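    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// assert_eq!(
    ///     parser.parse_one_of_keywords(&[Keyword::GROUP, Keyword::ORDER]),
    ///     Some(Keyword::ORDER)
    /// );
    /// ```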
4196    #[must_use]
4197    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4198        match &self.peek_token_ref().token {
4199            Token::Word(w) => {
4200                keywords
4201                    .iter()
4202                    .find(|keyword| **keyword == w.keyword)
4203                    .map(|keyword| {
4204                        self.advance_token();
4205                        *keyword
4206                    })
4207            }
4208            _ => None,
4209        }
4210    }
4211
4212    /// If the current token is one of the expected keywords, consume the token
4213    /// and return the keyword that matches. Otherwise, return an error.
4214    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4215        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4216            Ok(keyword)
4217        } else {
4218            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4219            self.expected_ref(
4220                &format!("one of {}", keywords.join(" or ")),
4221                self.peek_token_ref(),
4222            )
4223        }
4224    }
4225
4226    /// If the current token is the `expected` keyword, consume the token.
4227    /// Otherwise, return an error.
4228    ///
4229    // TODO: deprecate in favor of `expect_keyword_is`
4230    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4231        if self.parse_keyword(expected) {
4232            Ok(self.get_current_token().clone())
4233        } else {
4234            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4235        }
4236    }
4237
4238    /// If the current token is the `expected` keyword, consume the token.
4239    /// Otherwise, return an error.
4240    ///
4241    /// This differs from expect_keyword only in that the matched keyword
4242    /// token is not returned.
4243    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4244        if self.parse_keyword(expected) {
4245            Ok(())
4246        } else {
4247            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4248        }
4249    }
4250
4251    /// If the current and subsequent tokens exactly match the `keywords`
4252    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
4253    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4254        for &kw in expected {
4255            self.expect_keyword_is(kw)?;
4256        }
4257        Ok(())
4258    }
4259
4260    /// Consume the next token if it matches the expected token, otherwise return false
4261    ///
4262    /// See [Self::advance_token] to consume the token unconditionally
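    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1)").unwrap();
    /// assert!(parser.consume_token(&Token::LParen));  // consumes `(`
    /// assert!(!parser.consume_token(&Token::RParen)); // next token is `1`; nothing is consumed
    /// ```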
4263    #[must_use]
4264    pub fn consume_token(&mut self, expected: &Token) -> bool {
4265        if self.peek_token_ref() == expected {
4266            self.advance_token();
4267            true
4268        } else {
4269            false
4270        }
4271    }
4272
4273    /// If the current and subsequent tokens exactly match the `tokens`
4274    /// sequence, consume them and return `true`. Otherwise, no tokens are
4275    /// consumed and `false` is returned.
4276    #[must_use]
4277    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4278        let index = self.index;
4279        for token in tokens {
4280            if !self.consume_token(token) {
4281                self.index = index;
4282                return false;
4283            }
4284        }
4285        true
4286    }
4287
4288    /// Bail out if the current token is not the `expected` token, or consume it if it is
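    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1)").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// assert!(parser.expect_token(&Token::LParen).is_err()); // `1` is not `(`
    /// ```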
4289    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4290        if self.peek_token_ref() == expected {
4291            Ok(self.next_token())
4292        } else {
4293            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4294        }
4295    }
4296
4297    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4298    where
4299        <T as FromStr>::Err: Display,
4300    {
4301        s.parse::<T>().map_err(|e| {
4302            ParserError::ParserError(format!(
4303                "Could not parse '{s}' as {}: {e}{loc}",
4304                core::any::type_name::<T>()
4305            ))
4306        })
4307    }
4308
4309    /// Parse a comma-separated list of 1+ SelectItem
4310    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4311        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4312        // e.g. `SELECT 1, 2, FROM t`
4313        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4314        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4315
4316        let trailing_commas =
4317            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4318
4319        self.parse_comma_separated_with_trailing_commas(
4320            |p| p.parse_select_item(),
4321            trailing_commas,
4322            Self::is_reserved_for_column_alias,
4323        )
4324    }
4325
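    /// Parse a comma-separated list of 1+ privileges for a `GRANT` statement.
    /// When trailing commas are enabled in the parser options, a trailing comma
    /// before `ON` or a closing delimiter is accepted.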
4326    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4327        let mut values = vec![];
4328        loop {
4329            values.push(self.parse_grant_permission()?);
4330            if !self.consume_token(&Token::Comma) {
4331                break;
4332            } else if self.options.trailing_commas {
4333                match self.peek_token().token {
4334                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4335                        break;
4336                    }
4337                    Token::RParen
4338                    | Token::SemiColon
4339                    | Token::EOF
4340                    | Token::RBracket
4341                    | Token::RBrace => break,
4342                    _ => continue,
4343                }
4344            }
4345        }
4346        Ok(values)
4347    }
4348
4349    /// Parse a list of [TableWithJoins]
4350    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4351        let trailing_commas = self.dialect.supports_from_trailing_commas();
4352
4353        self.parse_comma_separated_with_trailing_commas(
4354            Parser::parse_table_and_joins,
4355            trailing_commas,
4356            |kw, _parser| {
4357                self.dialect
4358                    .get_reserved_keywords_for_table_factor()
4359                    .contains(kw)
4360            },
4361        )
4362    }
4363
4364    /// Parse the comma separating elements of a comma-separated list.
4365    /// `R` is a predicate that should return true if the next
4366    /// keyword is a reserved keyword.
4367    /// Allows for control over trailing commas.
4368    ///
4369    /// Returns `true` if the end of the list has been reached (no further element follows).
4370    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4371        &mut self,
4372        trailing_commas: bool,
4373        is_reserved_keyword: &R,
4374    ) -> bool
4375    where
4376        R: Fn(&Keyword, &mut Parser) -> bool,
4377    {
4378        if !self.consume_token(&Token::Comma) {
4379            true
4380        } else if trailing_commas {
4381            let token = self.next_token().token;
4382            let is_end = match token {
4383                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4384                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4385                    true
4386                }
4387                _ => false,
4388            };
4389            self.prev_token();
4390
4391            is_end
4392        } else {
4393            false
4394        }
4395    }
4396
4397    /// Parse the comma separating elements of a comma-separated list.
4398    /// Returns `true` if the end of the list has been reached (no further element follows).
4399    fn is_parse_comma_separated_end(&mut self) -> bool {
4400        self.is_parse_comma_separated_end_with_trailing_commas(
4401            self.options.trailing_commas,
4402            &Self::is_reserved_for_column_alias,
4403        )
4404    }
4405
4406    /// Parse a comma-separated list of 1+ items accepted by `F`
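    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo, bar, baz").unwrap();
    /// let idents = parser.parse_comma_separated(|p| p.parse_identifier()).unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```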
4407    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4408    where
4409        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4410    {
4411        self.parse_comma_separated_with_trailing_commas(
4412            f,
4413            self.options.trailing_commas,
4414            Self::is_reserved_for_column_alias,
4415        )
4416    }
4417
4418    /// Parse a comma-separated list of 1+ items accepted by `F`.
4419    /// `R` is a predicate that should return true if the next
4420    /// keyword is a reserved keyword.
4421    /// Allows for control over trailing commas.
4422    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4423        &mut self,
4424        mut f: F,
4425        trailing_commas: bool,
4426        is_reserved_keyword: R,
4427    ) -> Result<Vec<T>, ParserError>
4428    where
4429        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4430        R: Fn(&Keyword, &mut Parser) -> bool,
4431    {
4432        let mut values = vec![];
4433        loop {
4434            values.push(f(self)?);
4435            if self.is_parse_comma_separated_end_with_trailing_commas(
4436                trailing_commas,
4437                &is_reserved_keyword,
4438            ) {
4439                break;
4440            }
4441        }
4442        Ok(values)
4443    }
4444
4445    /// Parse a period-separated list of 1+ items accepted by `F`
4446    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4447    where
4448        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4449    {
4450        let mut values = vec![];
4451        loop {
4452            values.push(f(self)?);
4453            if !self.consume_token(&Token::Period) {
4454                break;
4455            }
4456        }
4457        Ok(values)
4458    }
4459
4460    /// Parse a keyword-separated list of 1+ items accepted by `F`
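    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo AND bar AND baz").unwrap();
    /// let idents = parser
    ///     .parse_keyword_separated(Keyword::AND, |p| p.parse_identifier())
    ///     .unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```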
4461    pub fn parse_keyword_separated<T, F>(
4462        &mut self,
4463        keyword: Keyword,
4464        mut f: F,
4465    ) -> Result<Vec<T>, ParserError>
4466    where
4467        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4468    {
4469        let mut values = vec![];
4470        loop {
4471            values.push(f(self)?);
4472            if !self.parse_keyword(keyword) {
4473                break;
4474            }
4475        }
4476        Ok(values)
4477    }
4478
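    /// Parse a parenthesized item accepted by `F`: expects `(`, runs `f`,
    /// then expects `)`.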
4479    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4480    where
4481        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4482    {
4483        self.expect_token(&Token::LParen)?;
4484        let res = f(self)?;
4485        self.expect_token(&Token::RParen)?;
4486        Ok(res)
4487    }
4488
4489    /// Parse a comma-separated list of 0+ items accepted by `F`
4490    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
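    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// // An empty list: the next token is already the end token.
    /// let mut parser = Parser::new(&dialect).try_with_sql(")").unwrap();
    /// let items = parser
    ///     .parse_comma_separated0(|p| p.parse_expr(), Token::RParen)
    ///     .unwrap();
    /// assert!(items.is_empty());
    /// ```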
4491    pub fn parse_comma_separated0<T, F>(
4492        &mut self,
4493        f: F,
4494        end_token: Token,
4495    ) -> Result<Vec<T>, ParserError>
4496    where
4497        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4498    {
4499        if self.peek_token().token == end_token {
4500            return Ok(vec![]);
4501        }
4502
4503        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4504            let _ = self.consume_token(&Token::Comma);
4505            return Ok(vec![]);
4506        }
4507
4508        self.parse_comma_separated(f)
4509    }
4510
4511    /// Parses 0 or more statements, each followed by a semicolon.
4512    /// If the next token is any of `terminal_keywords` then no more
4513    /// statements will be parsed.
4514    pub(crate) fn parse_statement_list(
4515        &mut self,
4516        terminal_keywords: &[Keyword],
4517    ) -> Result<Vec<Statement>, ParserError> {
4518        let mut values = vec![];
4519        loop {
4520            match &self.peek_nth_token_ref(0).token {
4521                Token::EOF => break,
4522                Token::Word(w) => {
4523                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4524                        break;
4525                    }
4526                }
4527                _ => {}
4528            }
4529
4530            values.push(self.parse_statement()?);
4531            self.expect_token(&Token::SemiColon)?;
4532        }
4533        Ok(values)
4534    }
4535
4536    /// Default implementation of a predicate that returns true if
4537    /// the specified keyword is reserved and cannot be used as a column alias.
4538    /// See [Dialect::is_column_alias]
4539    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4540        !parser.dialect.is_column_alias(kw, parser)
4541    }
4542
4543    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4544    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4545    /// Returns `Ok(None)` if `f` returns any other error.
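    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // The closure fails (`ORDER` is not `GROUP`), so the position is restored
    /// // and `Ok(None)` is returned.
    /// let group_by = parser
    ///     .maybe_parse(|p| {
    ///         p.expect_keyword_is(Keyword::GROUP)?;
    ///         p.expect_keyword_is(Keyword::BY)
    ///     })
    ///     .unwrap();
    /// assert!(group_by.is_none());
    /// ```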
4546    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4547    where
4548        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4549    {
4550        match self.try_parse(f) {
4551            Ok(t) => Ok(Some(t)),
4552            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4553            _ => Ok(None),
4554        }
4555    }
4556
4557    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4558    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4559    where
4560        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4561    {
4562        let index = self.index;
4563        match f(self) {
4564            Ok(t) => Ok(t),
4565            Err(e) => {
4566                // Reset the parser back to its starting position on any error
4567                self.index = index;
4568                Err(e)
4569            }
4570        }
4571    }
4572
4573    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed,
4574    /// and returns a [`ParserError`] if both `ALL` and `DISTINCT` are found.
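    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```rust
    /// # use sqlparser::ast::Distinct;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT foo").unwrap();
    /// assert!(matches!(
    ///     parser.parse_all_or_distinct().unwrap(),
    ///     Some(Distinct::Distinct)
    /// ));
    /// ```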
4575    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4576        let loc = self.peek_token().span.start;
4577        let all = self.parse_keyword(Keyword::ALL);
4578        let distinct = self.parse_keyword(Keyword::DISTINCT);
4579        if !distinct {
4580            return Ok(None);
4581        }
4582        if all {
4583            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4584        }
4585        let on = self.parse_keyword(Keyword::ON);
4586        if !on {
4587            return Ok(Some(Distinct::Distinct));
4588        }
4589
4590        self.expect_token(&Token::LParen)?;
4591        let col_names = if self.consume_token(&Token::RParen) {
4592            self.prev_token();
4593            Vec::new()
4594        } else {
4595            self.parse_comma_separated(Parser::parse_expr)?
4596        };
4597        self.expect_token(&Token::RParen)?;
4598        Ok(Some(Distinct::On(col_names)))
4599    }
4600
4601    /// Parse a SQL CREATE statement
4602    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4603        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4604        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4605        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4606        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4607        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4608        let global: Option<bool> = if global {
4609            Some(true)
4610        } else if local {
4611            Some(false)
4612        } else {
4613            None
4614        };
4615        let temporary = self
4616            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4617            .is_some();
4618        let persistent = dialect_of!(self is DuckDbDialect)
4619            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4620        let create_view_params = self.parse_create_view_params()?;
4621        if self.parse_keyword(Keyword::TABLE) {
4622            self.parse_create_table(or_replace, temporary, global, transient)
4623        } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
4624            self.prev_token();
4625            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4626        } else if self.parse_keyword(Keyword::POLICY) {
4627            self.parse_create_policy()
4628        } else if self.parse_keyword(Keyword::EXTERNAL) {
4629            self.parse_create_external_table(or_replace)
4630        } else if self.parse_keyword(Keyword::FUNCTION) {
4631            self.parse_create_function(or_alter, or_replace, temporary)
4632        } else if self.parse_keyword(Keyword::DOMAIN) {
4633            self.parse_create_domain()
4634        } else if self.parse_keyword(Keyword::TRIGGER) {
4635            self.parse_create_trigger(or_alter, or_replace, false)
4636        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4637            self.parse_create_trigger(or_alter, or_replace, true)
4638        } else if self.parse_keyword(Keyword::MACRO) {
4639            self.parse_create_macro(or_replace, temporary)
4640        } else if self.parse_keyword(Keyword::SECRET) {
4641            self.parse_create_secret(or_replace, temporary, persistent)
4642        } else if or_replace {
4643            self.expected(
4644                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4645                self.peek_token(),
4646            )
4647        } else if self.parse_keyword(Keyword::EXTENSION) {
4648            self.parse_create_extension()
4649        } else if self.parse_keyword(Keyword::INDEX) {
4650            self.parse_create_index(false)
4651        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4652            self.parse_create_index(true)
4653        } else if self.parse_keyword(Keyword::VIRTUAL) {
4654            self.parse_create_virtual_table()
4655        } else if self.parse_keyword(Keyword::SCHEMA) {
4656            self.parse_create_schema()
4657        } else if self.parse_keyword(Keyword::DATABASE) {
4658            self.parse_create_database()
4659        } else if self.parse_keyword(Keyword::ROLE) {
4660            self.parse_create_role()
4661        } else if self.parse_keyword(Keyword::SEQUENCE) {
4662            self.parse_create_sequence(temporary)
4663        } else if self.parse_keyword(Keyword::TYPE) {
4664            self.parse_create_type()
4665        } else if self.parse_keyword(Keyword::PROCEDURE) {
4666            self.parse_create_procedure(or_alter)
4667        } else if self.parse_keyword(Keyword::CONNECTOR) {
4668            self.parse_create_connector()
4669        } else {
4670            self.expected("an object type after CREATE", self.peek_token())
4671        }
4672    }
4673
4674    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
4675    pub fn parse_create_secret(
4676        &mut self,
4677        or_replace: bool,
4678        temporary: bool,
4679        persistent: bool,
4680    ) -> Result<Statement, ParserError> {
4681        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4682
4683        let mut storage_specifier = None;
4684        let mut name = None;
4685        if self.peek_token() != Token::LParen {
4686            if self.parse_keyword(Keyword::IN) {
4687                storage_specifier = self.parse_identifier().ok()
4688            } else {
4689                name = self.parse_identifier().ok();
4690            }
4691
4692            // Storage specifier may follow the name
4693            if storage_specifier.is_none()
4694                && self.peek_token() != Token::LParen
4695                && self.parse_keyword(Keyword::IN)
4696            {
4697                storage_specifier = self.parse_identifier().ok();
4698            }
4699        }
4700
4701        self.expect_token(&Token::LParen)?;
4702        self.expect_keyword_is(Keyword::TYPE)?;
4703        let secret_type = self.parse_identifier()?;
4704
4705        let mut options = Vec::new();
4706        if self.consume_token(&Token::Comma) {
4707            options.append(&mut self.parse_comma_separated(|p| {
4708                let key = p.parse_identifier()?;
4709                let value = p.parse_identifier()?;
4710                Ok(SecretOption { key, value })
4711            })?);
4712        }
4713        self.expect_token(&Token::RParen)?;
4714
4715        let temp = match (temporary, persistent) {
4716            (true, false) => Some(true),
4717            (false, true) => Some(false),
4718            (false, false) => None,
4719            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4720        };
4721
4722        Ok(Statement::CreateSecret {
4723            or_replace,
4724            temporary: temp,
4725            if_not_exists,
4726            name,
4727            storage_specifier,
4728            secret_type,
4729            options,
4730        })
4731    }
4732
4733    /// Parse a CACHE TABLE statement
4734    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4735        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4736        if self.parse_keyword(Keyword::TABLE) {
4737            let table_name = self.parse_object_name(false)?;
4738            if self.peek_token().token != Token::EOF {
4739                if let Token::Word(word) = self.peek_token().token {
4740                    if word.keyword == Keyword::OPTIONS {
4741                        options = self.parse_options(Keyword::OPTIONS)?
4742                    }
4743                };
4744
4745                if self.peek_token().token != Token::EOF {
4746                    let (a, q) = self.parse_as_query()?;
4747                    has_as = a;
4748                    query = Some(q);
4749                }
4750
4751                Ok(Statement::Cache {
4752                    table_flag,
4753                    table_name,
4754                    has_as,
4755                    options,
4756                    query,
4757                })
4758            } else {
4759                Ok(Statement::Cache {
4760                    table_flag,
4761                    table_name,
4762                    has_as,
4763                    options,
4764                    query,
4765                })
4766            }
4767        } else {
4768            table_flag = Some(self.parse_object_name(false)?);
4769            if self.parse_keyword(Keyword::TABLE) {
4770                let table_name = self.parse_object_name(false)?;
4771                if self.peek_token() != Token::EOF {
4772                    if let Token::Word(word) = self.peek_token().token {
4773                        if word.keyword == Keyword::OPTIONS {
4774                            options = self.parse_options(Keyword::OPTIONS)?
4775                        }
4776                    };
4777
4778                    if self.peek_token() != Token::EOF {
4779                        let (a, q) = self.parse_as_query()?;
4780                        has_as = a;
4781                        query = Some(q);
4782                    }
4783
4784                    Ok(Statement::Cache {
4785                        table_flag,
4786                        table_name,
4787                        has_as,
4788                        options,
4789                        query,
4790                    })
4791                } else {
4792                    Ok(Statement::Cache {
4793                        table_flag,
4794                        table_name,
4795                        has_as,
4796                        options,
4797                        query,
4798                    })
4799                }
4800            } else {
4801                if self.peek_token() == Token::EOF {
4802                    self.prev_token();
4803                }
4804                self.expected("a `TABLE` keyword", self.peek_token())
4805            }
4806        }
4807    }
4808
4809    /// Parse an optional `AS` before a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE xxx AS SELECT XXX`.
4810    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4811        match self.peek_token().token {
4812            Token::Word(word) => match word.keyword {
4813                Keyword::AS => {
4814                    self.next_token();
4815                    Ok((true, self.parse_query()?))
4816                }
4817                _ => Ok((false, self.parse_query()?)),
4818            },
4819            _ => self.expected("a QUERY statement", self.peek_token()),
4820        }
4821    }
4822
4823    /// Parse an UNCACHE TABLE statement
4824    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4825        self.expect_keyword_is(Keyword::TABLE)?;
4826        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4827        let table_name = self.parse_object_name(false)?;
4828        Ok(Statement::UNCache {
4829            table_name,
4830            if_exists,
4831        })
4832    }
4833
4834    /// SQLite-specific `CREATE VIRTUAL TABLE`
4835    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
4836        self.expect_keyword_is(Keyword::TABLE)?;
4837        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4838        let table_name = self.parse_object_name(false)?;
4839        self.expect_keyword_is(Keyword::USING)?;
4840        let module_name = self.parse_identifier()?;
4841        // SQLite docs note that module "arguments syntax is sufficiently
4842        // general that the arguments can be made to appear as column
4843        // definitions in a traditional CREATE TABLE statement", but
4844        // we don't implement that.
4845        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
4846        Ok(Statement::CreateVirtualTable {
4847            name: table_name,
4848            if_not_exists,
4849            module_name,
4850            module_args,
4851        })
4852    }
4853
4854    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
4855        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4856
4857        let schema_name = self.parse_schema_name()?;
4858
4859        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
4860            Some(self.parse_expr()?)
4861        } else {
4862            None
4863        };
4864
4865        let with = if self.peek_keyword(Keyword::WITH) {
4866            Some(self.parse_options(Keyword::WITH)?)
4867        } else {
4868            None
4869        };
4870
4871        let options = if self.peek_keyword(Keyword::OPTIONS) {
4872            Some(self.parse_options(Keyword::OPTIONS)?)
4873        } else {
4874            None
4875        };
4876
4877        Ok(Statement::CreateSchema {
4878            schema_name,
4879            if_not_exists,
4880            with,
4881            options,
4882            default_collate_spec,
4883        })
4884    }
4885
4886    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
4887        if self.parse_keyword(Keyword::AUTHORIZATION) {
4888            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
4889        } else {
4890            let name = self.parse_object_name(false)?;
4891
4892            if self.parse_keyword(Keyword::AUTHORIZATION) {
4893                Ok(SchemaName::NamedAuthorization(
4894                    name,
4895                    self.parse_identifier()?,
4896                ))
4897            } else {
4898                Ok(SchemaName::Simple(name))
4899            }
4900        }
4901    }
4902
4903    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
4904        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4905        let db_name = self.parse_object_name(false)?;
4906        let mut location = None;
4907        let mut managed_location = None;
4908        loop {
4909            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
4910                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
4911                Some(Keyword::MANAGEDLOCATION) => {
4912                    managed_location = Some(self.parse_literal_string()?)
4913                }
4914                _ => break,
4915            }
4916        }
4917        Ok(Statement::CreateDatabase {
4918            db_name,
4919            if_not_exists: ine,
4920            location,
4921            managed_location,
4922        })
4923    }
4924
4925    pub fn parse_optional_create_function_using(
4926        &mut self,
4927    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
4928        if !self.parse_keyword(Keyword::USING) {
4929            return Ok(None);
4930        };
4931        let keyword =
4932            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
4933
4934        let uri = self.parse_literal_string()?;
4935
4936        match keyword {
4937            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
4938            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
4939            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
4940            _ => self.expected(
4941                "JAR, FILE or ARCHIVE, got {:?}",
4942                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
4943            ),
4944        }
4945    }
4946
4947    pub fn parse_create_function(
4948        &mut self,
4949        or_alter: bool,
4950        or_replace: bool,
4951        temporary: bool,
4952    ) -> Result<Statement, ParserError> {
4953        if dialect_of!(self is HiveDialect) {
4954            self.parse_hive_create_function(or_replace, temporary)
4955        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
4956            self.parse_postgres_create_function(or_replace, temporary)
4957        } else if dialect_of!(self is DuckDbDialect) {
4958            self.parse_create_macro(or_replace, temporary)
4959        } else if dialect_of!(self is BigQueryDialect) {
4960            self.parse_bigquery_create_function(or_replace, temporary)
4961        } else if dialect_of!(self is MsSqlDialect) {
4962            self.parse_mssql_create_function(or_alter, or_replace, temporary)
4963        } else {
4964            self.prev_token();
4965            self.expected("an object type after CREATE", self.peek_token())
4966        }
4967    }
4968
4969    /// Parse `CREATE FUNCTION` for [PostgreSQL]
4970    ///
4971    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
4972    fn parse_postgres_create_function(
4973        &mut self,
4974        or_replace: bool,
4975        temporary: bool,
4976    ) -> Result<Statement, ParserError> {
4977        let name = self.parse_object_name(false)?;
4978
4979        self.expect_token(&Token::LParen)?;
4980        let args = if Token::RParen != self.peek_token_ref().token {
4981            self.parse_comma_separated(Parser::parse_function_arg)?
4982        } else {
4983            vec![]
4984        };
4985        self.expect_token(&Token::RParen)?;
4986
4987        let return_type = if self.parse_keyword(Keyword::RETURNS) {
4988            Some(self.parse_data_type()?)
4989        } else {
4990            None
4991        };
4992
4993        #[derive(Default)]
4994        struct Body {
4995            language: Option<Ident>,
4996            behavior: Option<FunctionBehavior>,
4997            function_body: Option<CreateFunctionBody>,
4998            called_on_null: Option<FunctionCalledOnNull>,
4999            parallel: Option<FunctionParallel>,
5000        }
5001        let mut body = Body::default();
5002        loop {
5003            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5004                if field.is_some() {
5005                    return Err(ParserError::ParserError(format!(
5006                        "{name} specified more than once",
5007                    )));
5008                }
5009                Ok(())
5010            }
5011            if self.parse_keyword(Keyword::AS) {
5012                ensure_not_set(&body.function_body, "AS")?;
5013                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
5014                    self.parse_create_function_body_string()?,
5015                ));
5016            } else if self.parse_keyword(Keyword::LANGUAGE) {
5017                ensure_not_set(&body.language, "LANGUAGE")?;
5018                body.language = Some(self.parse_identifier()?);
5019            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5020                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5021                body.behavior = Some(FunctionBehavior::Immutable);
5022            } else if self.parse_keyword(Keyword::STABLE) {
5023                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5024                body.behavior = Some(FunctionBehavior::Stable);
5025            } else if self.parse_keyword(Keyword::VOLATILE) {
5026                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5027                body.behavior = Some(FunctionBehavior::Volatile);
5028            } else if self.parse_keywords(&[
5029                Keyword::CALLED,
5030                Keyword::ON,
5031                Keyword::NULL,
5032                Keyword::INPUT,
5033            ]) {
5034                ensure_not_set(
5035                    &body.called_on_null,
5036                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5037                )?;
5038                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5039            } else if self.parse_keywords(&[
5040                Keyword::RETURNS,
5041                Keyword::NULL,
5042                Keyword::ON,
5043                Keyword::NULL,
5044                Keyword::INPUT,
5045            ]) {
5046                ensure_not_set(
5047                    &body.called_on_null,
5048                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5049                )?;
5050                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5051            } else if self.parse_keyword(Keyword::STRICT) {
5052                ensure_not_set(
5053                    &body.called_on_null,
5054                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5055                )?;
5056                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5057            } else if self.parse_keyword(Keyword::PARALLEL) {
5058                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5059                if self.parse_keyword(Keyword::UNSAFE) {
5060                    body.parallel = Some(FunctionParallel::Unsafe);
5061                } else if self.parse_keyword(Keyword::RESTRICTED) {
5062                    body.parallel = Some(FunctionParallel::Restricted);
5063                } else if self.parse_keyword(Keyword::SAFE) {
5064                    body.parallel = Some(FunctionParallel::Safe);
5065                } else {
5066                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5067                }
5068            } else if self.parse_keyword(Keyword::RETURN) {
5069                ensure_not_set(&body.function_body, "RETURN")?;
5070                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5071            } else {
5072                break;
5073            }
5074        }
5075
5076        Ok(Statement::CreateFunction(CreateFunction {
5077            or_alter: false,
5078            or_replace,
5079            temporary,
5080            name,
5081            args: Some(args),
5082            return_type,
5083            behavior: body.behavior,
5084            called_on_null: body.called_on_null,
5085            parallel: body.parallel,
5086            language: body.language,
5087            function_body: body.function_body,
5088            if_not_exists: false,
5089            using: None,
5090            determinism_specifier: None,
5091            options: None,
5092            remote_connection: None,
5093        }))
5094    }
5095
5096    /// Parse `CREATE FUNCTION` for [Hive]
5097    ///
5098    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
5099    fn parse_hive_create_function(
5100        &mut self,
5101        or_replace: bool,
5102        temporary: bool,
5103    ) -> Result<Statement, ParserError> {
5104        let name = self.parse_object_name(false)?;
5105        self.expect_keyword_is(Keyword::AS)?;
5106
5107        let as_ = self.parse_create_function_body_string()?;
5108        let using = self.parse_optional_create_function_using()?;
5109
5110        Ok(Statement::CreateFunction(CreateFunction {
5111            or_alter: false,
5112            or_replace,
5113            temporary,
5114            name,
5115            function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
5116            using,
5117            if_not_exists: false,
5118            args: None,
5119            return_type: None,
5120            behavior: None,
5121            called_on_null: None,
5122            parallel: None,
5123            language: None,
5124            determinism_specifier: None,
5125            options: None,
5126            remote_connection: None,
5127        }))
5128    }
5129
5130    /// Parse `CREATE FUNCTION` for [BigQuery]
5131    ///
5132    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
5133    fn parse_bigquery_create_function(
5134        &mut self,
5135        or_replace: bool,
5136        temporary: bool,
5137    ) -> Result<Statement, ParserError> {
5138        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5139        let (name, args) = self.parse_create_function_name_and_params()?;
5140
5141        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5142            Some(self.parse_data_type()?)
5143        } else {
5144            None
5145        };
5146
5147        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5148            Some(FunctionDeterminismSpecifier::Deterministic)
5149        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5150            Some(FunctionDeterminismSpecifier::NotDeterministic)
5151        } else {
5152            None
5153        };
5154
5155        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5156            Some(self.parse_identifier()?)
5157        } else {
5158            None
5159        };
5160
5161        let remote_connection =
5162            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5163                Some(self.parse_object_name(false)?)
5164            } else {
5165                None
5166            };
5167
5168        // `OPTIONS` may come before or after the function body but
5169        // may be specified at most once.
5170        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5171
5172        let function_body = if remote_connection.is_none() {
5173            self.expect_keyword_is(Keyword::AS)?;
5174            let expr = self.parse_expr()?;
5175            if options.is_none() {
5176                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5177                Some(CreateFunctionBody::AsBeforeOptions(expr))
5178            } else {
5179                Some(CreateFunctionBody::AsAfterOptions(expr))
5180            }
5181        } else {
5182            None
5183        };
5184
5185        Ok(Statement::CreateFunction(CreateFunction {
5186            or_alter: false,
5187            or_replace,
5188            temporary,
5189            if_not_exists,
5190            name,
5191            args: Some(args),
5192            return_type,
5193            function_body,
5194            language,
5195            determinism_specifier,
5196            options,
5197            remote_connection,
5198            using: None,
5199            behavior: None,
5200            called_on_null: None,
5201            parallel: None,
5202        }))
5203    }
5204
5205    /// Parse `CREATE FUNCTION` for [MsSql]
5206    ///
5207    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
5208    fn parse_mssql_create_function(
5209        &mut self,
5210        or_alter: bool,
5211        or_replace: bool,
5212        temporary: bool,
5213    ) -> Result<Statement, ParserError> {
5214        let (name, args) = self.parse_create_function_name_and_params()?;
5215
5216        self.expect_keyword(Keyword::RETURNS)?;
5217
5218        let return_table = self.maybe_parse(|p| {
5219            let return_table_name = p.parse_identifier()?;
5220
5221            p.expect_keyword_is(Keyword::TABLE)?;
5222            p.prev_token();
5223
5224            let table_column_defs = match p.parse_data_type()? {
5225                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5226                    table_column_defs
5227                }
5228                _ => parser_err!(
5229                    "Expected table column definitions after TABLE keyword",
5230                    p.peek_token().span.start
5231                )?,
5232            };
5233
5234            Ok(DataType::NamedTable {
5235                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5236                columns: table_column_defs,
5237            })
5238        })?;
5239
5240        let return_type = if return_table.is_some() {
5241            return_table
5242        } else {
5243            Some(self.parse_data_type()?)
5244        };
5245
5246        let _ = self.parse_keyword(Keyword::AS);
5247
5248        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5249            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5250            let statements = self.parse_statement_list(&[Keyword::END])?;
5251            let end_token = self.expect_keyword(Keyword::END)?;
5252
5253            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5254                begin_token: AttachedToken(begin_token),
5255                statements,
5256                end_token: AttachedToken(end_token),
5257            }))
5258        } else if self.parse_keyword(Keyword::RETURN) {
5259            if self.peek_token() == Token::LParen {
5260                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5261            } else if self.peek_keyword(Keyword::SELECT) {
5262                let select = self.parse_select()?;
5263                Some(CreateFunctionBody::AsReturnSelect(select))
5264            } else {
5265                parser_err!(
5266                    "Expected a subquery (or bare SELECT statement) after RETURN",
5267                    self.peek_token().span.start
5268                )?
5269            }
5270        } else {
5271            parser_err!("Unparsable function body", self.peek_token().span.start)?
5272        };
5273
5274        Ok(Statement::CreateFunction(CreateFunction {
5275            or_alter,
5276            or_replace,
5277            temporary,
5278            if_not_exists: false,
5279            name,
5280            args: Some(args),
5281            return_type,
5282            function_body,
5283            language: None,
5284            determinism_specifier: None,
5285            options: None,
5286            remote_connection: None,
5287            using: None,
5288            behavior: None,
5289            called_on_null: None,
5290            parallel: None,
5291        }))
5292    }
5293
5294    fn parse_create_function_name_and_params(
5295        &mut self,
5296    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5297        let name = self.parse_object_name(false)?;
5298        let parse_function_param =
5299            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5300                let name = parser.parse_identifier()?;
5301                let data_type = parser.parse_data_type()?;
5302                let default_expr = if parser.consume_token(&Token::Eq) {
5303                    Some(parser.parse_expr()?)
5304                } else {
5305                    None
5306                };
5307
5308                Ok(OperateFunctionArg {
5309                    mode: None,
5310                    name: Some(name),
5311                    data_type,
5312                    default_expr,
5313                })
5314            };
5315        self.expect_token(&Token::LParen)?;
5316        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5317        self.expect_token(&Token::RParen)?;
5318        Ok((name, args))
5319    }
5320
5321    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5322        let mode = if self.parse_keyword(Keyword::IN) {
5323            Some(ArgMode::In)
5324        } else if self.parse_keyword(Keyword::OUT) {
5325            Some(ArgMode::Out)
5326        } else if self.parse_keyword(Keyword::INOUT) {
5327            Some(ArgMode::InOut)
5328        } else {
5329            None
5330        };
5331
5332        // parse: [ argname ] argtype
5333        let mut name = None;
5334        let mut data_type = self.parse_data_type()?;
5335
5336        // To check whether the first token is a name or a type, we peek ahead:
5337        // if another data type can be parsed next, then the first token was the
5338        // argument name and the newly parsed one is its type.
5339        let data_type_idx = self.get_current_index();
5340        if let Some(next_data_type) = self.maybe_parse(|parser| parser.parse_data_type())? {
5341            let token = self.token_at(data_type_idx);
5342
5343            // We ensure that the token is a `Word` token, and not other special tokens.
5344            if !matches!(token.token, Token::Word(_)) {
5345                return self.expected("a name or type", token.clone());
5346            }
5347
5348            name = Some(Ident::new(token.to_string()));
5349            data_type = next_data_type;
5350        }
5351
5352        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5353        {
5354            Some(self.parse_expr()?)
5355        } else {
5356            None
5357        };
5358        Ok(OperateFunctionArg {
5359            mode,
5360            name,
5361            data_type,
5362            default_expr,
5363        })
5364    }
5365
5366    /// Parse statements of the DropTrigger type such as:
5367    ///
5368    /// ```sql
5369    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5370    /// ```
5371    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5372        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5373            self.prev_token();
5374            return self.expected("an object type after DROP", self.peek_token());
5375        }
5376        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5377        let trigger_name = self.parse_object_name(false)?;
5378        let table_name = if self.parse_keyword(Keyword::ON) {
5379            Some(self.parse_object_name(false)?)
5380        } else {
5381            None
5382        };
5383        let option = self
5384            .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5385            .map(|keyword| match keyword {
5386                Keyword::CASCADE => ReferentialAction::Cascade,
5387                Keyword::RESTRICT => ReferentialAction::Restrict,
5388                _ => unreachable!(),
5389            });
5390        Ok(Statement::DropTrigger {
5391            if_exists,
5392            trigger_name,
5393            table_name,
5394            option,
5395        })
5396    }
5397
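    /// Parse a `CREATE TRIGGER` statement. The leading
    /// `CREATE [ OR ALTER | OR REPLACE ] [ CONSTRAINT ] TRIGGER` keywords are
    /// expected to have been consumed by the caller. A sketch of a
    /// PostgreSQL-style statement this aims to accept (identifiers are made up):
    ///
    /// ```sql
    /// CREATE TRIGGER check_update
    /// BEFORE UPDATE ON accounts
    /// FOR EACH ROW
    /// EXECUTE FUNCTION check_account_update()
    /// ```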
5398    pub fn parse_create_trigger(
5399        &mut self,
5400        or_alter: bool,
5401        or_replace: bool,
5402        is_constraint: bool,
5403    ) -> Result<Statement, ParserError> {
5404        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5405            self.prev_token();
5406            return self.expected("an object type after CREATE", self.peek_token());
5407        }
5408
5409        let name = self.parse_object_name(false)?;
5410        let period = self.parse_trigger_period()?;
5411
5412        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5413        self.expect_keyword_is(Keyword::ON)?;
5414        let table_name = self.parse_object_name(false)?;
5415
5416        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5417            self.parse_object_name(true).ok()
5418        } else {
5419            None
5420        };
5421
5422        let characteristics = self.parse_constraint_characteristics()?;
5423
5424        let mut referencing = vec![];
5425        if self.parse_keyword(Keyword::REFERENCING) {
5426            while let Some(refer) = self.parse_trigger_referencing()? {
5427                referencing.push(refer);
5428            }
5429        }
5430
5431        self.expect_keyword_is(Keyword::FOR)?;
5432        let include_each = self.parse_keyword(Keyword::EACH);
5433        let trigger_object =
5434            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5435                Keyword::ROW => TriggerObject::Row,
5436                Keyword::STATEMENT => TriggerObject::Statement,
5437                _ => unreachable!(),
5438            };
5439
5440        let condition = self
5441            .parse_keyword(Keyword::WHEN)
5442            .then(|| self.parse_expr())
5443            .transpose()?;
5444
5445        self.expect_keyword_is(Keyword::EXECUTE)?;
5446
5447        let exec_body = self.parse_trigger_exec_body()?;
5448
5449        Ok(Statement::CreateTrigger {
5450            or_alter,
5451            or_replace,
5452            is_constraint,
5453            name,
5454            period,
5455            events,
5456            table_name,
5457            referenced_table_name,
5458            referencing,
5459            trigger_object,
5460            include_each,
5461            condition,
5462            exec_body: Some(exec_body),
5463            statements: None,
5464            characteristics,
5465        })
5466    }
5467
5468    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5469        Ok(
5470            match self.expect_one_of_keywords(&[
5471                Keyword::FOR,
5472                Keyword::BEFORE,
5473                Keyword::AFTER,
5474                Keyword::INSTEAD,
5475            ])? {
5476                Keyword::FOR => TriggerPeriod::For,
5477                Keyword::BEFORE => TriggerPeriod::Before,
5478                Keyword::AFTER => TriggerPeriod::After,
5479                Keyword::INSTEAD => self
5480                    .expect_keyword_is(Keyword::OF)
5481                    .map(|_| TriggerPeriod::InsteadOf)?,
5482                _ => unreachable!(),
5483            },
5484        )
5485    }
5486
5487    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5488        Ok(
5489            match self.expect_one_of_keywords(&[
5490                Keyword::INSERT,
5491                Keyword::UPDATE,
5492                Keyword::DELETE,
5493                Keyword::TRUNCATE,
5494            ])? {
5495                Keyword::INSERT => TriggerEvent::Insert,
5496                Keyword::UPDATE => {
5497                    if self.parse_keyword(Keyword::OF) {
5498                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5499                        TriggerEvent::Update(cols)
5500                    } else {
5501                        TriggerEvent::Update(vec![])
5502                    }
5503                }
5504                Keyword::DELETE => TriggerEvent::Delete,
5505                Keyword::TRUNCATE => TriggerEvent::Truncate,
5506                _ => unreachable!(),
5507            },
5508        )
5509    }
5510
5511    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5512        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5513            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5514                TriggerReferencingType::OldTable
5515            }
5516            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5517                TriggerReferencingType::NewTable
5518            }
5519            _ => {
5520                return Ok(None);
5521            }
5522        };
5523
5524        let is_as = self.parse_keyword(Keyword::AS);
5525        let transition_relation_name = self.parse_object_name(false)?;
5526        Ok(Some(TriggerReferencing {
5527            refer_type,
5528            is_as,
5529            transition_relation_name,
5530        }))
5531    }
5532
5533    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5534        Ok(TriggerExecBody {
5535            exec_type: match self
5536                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5537            {
5538                Keyword::FUNCTION => TriggerExecBodyType::Function,
5539                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5540                _ => unreachable!(),
5541            },
5542            func_desc: self.parse_function_desc()?,
5543        })
5544    }
5545
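    /// Parse a DuckDB-style `CREATE MACRO` statement; the leading
    /// `CREATE [ OR REPLACE ] [ TEMPORARY ] MACRO` keywords are assumed to have
    /// been consumed by the caller. An illustrative definition (names and the
    /// default value are made up):
    ///
    /// ```sql
    /// CREATE MACRO add_default(a, b := 5) AS a + b
    /// ```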
5546    pub fn parse_create_macro(
5547        &mut self,
5548        or_replace: bool,
5549        temporary: bool,
5550    ) -> Result<Statement, ParserError> {
5551        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5552            let name = self.parse_object_name(false)?;
5553            self.expect_token(&Token::LParen)?;
5554            let args = if self.consume_token(&Token::RParen) {
5555                self.prev_token();
5556                None
5557            } else {
5558                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5559            };
5560
5561            self.expect_token(&Token::RParen)?;
5562            self.expect_keyword_is(Keyword::AS)?;
5563
5564            Ok(Statement::CreateMacro {
5565                or_replace,
5566                temporary,
5567                name,
5568                args,
5569                definition: if self.parse_keyword(Keyword::TABLE) {
5570                    MacroDefinition::Table(self.parse_query()?)
5571                } else {
5572                    MacroDefinition::Expr(self.parse_expr()?)
5573                },
5574            })
5575        } else {
5576            self.prev_token();
5577            self.expected("an object type after CREATE", self.peek_token())
5578        }
5579    }
5580
5581    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5582        let name = self.parse_identifier()?;
5583
5584        let default_expr =
5585            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5586                Some(self.parse_expr()?)
5587            } else {
5588                None
5589            };
5590        Ok(MacroArg { name, default_expr })
5591    }
5592
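    /// Parse a Hive-style `CREATE EXTERNAL TABLE` statement; `CREATE EXTERNAL` is
    /// assumed to have been consumed by the caller. An illustrative statement
    /// (table, columns, and path are made up):
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE log_events (id INT, msg STRING)
    /// STORED AS TEXTFILE
    /// LOCATION '/data/logs'
    /// ```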
5593    pub fn parse_create_external_table(
5594        &mut self,
5595        or_replace: bool,
5596    ) -> Result<Statement, ParserError> {
5597        self.expect_keyword_is(Keyword::TABLE)?;
5598        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5599        let table_name = self.parse_object_name(false)?;
5600        let (columns, constraints) = self.parse_columns()?;
5601
5602        let hive_distribution = self.parse_hive_distribution()?;
5603        let hive_formats = self.parse_hive_formats()?;
5604
5605        let file_format = if let Some(ff) = &hive_formats.storage {
5606            match ff {
5607                HiveIOFormat::FileFormat { format } => Some(*format),
5608                _ => None,
5609            }
5610        } else {
5611            None
5612        };
5613        let location = hive_formats.location.clone();
5614        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5615        let table_options = if !table_properties.is_empty() {
5616            CreateTableOptions::TableProperties(table_properties)
5617        } else {
5618            CreateTableOptions::None
5619        };
5620        Ok(CreateTableBuilder::new(table_name)
5621            .columns(columns)
5622            .constraints(constraints)
5623            .hive_distribution(hive_distribution)
5624            .hive_formats(Some(hive_formats))
5625            .table_options(table_options)
5626            .or_replace(or_replace)
5627            .if_not_exists(if_not_exists)
5628            .external(true)
5629            .file_format(file_format)
5630            .location(location)
5631            .build())
5632    }
5633
5634    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5635        let next_token = self.next_token();
5636        match &next_token.token {
5637            Token::Word(w) => match w.keyword {
5638                Keyword::AVRO => Ok(FileFormat::AVRO),
5639                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5640                Keyword::ORC => Ok(FileFormat::ORC),
5641                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5642                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5643                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5644                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5645                _ => self.expected("a file format", next_token),
5646            },
5647            _ => self.expected("a file format", next_token),
5648        }
5649    }
5650
5651    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5652        let next_token = self.next_token();
5653        match &next_token.token {
5654            Token::Word(w) => match w.keyword {
5655                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5656                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5657                Keyword::JSON => Ok(AnalyzeFormat::JSON),
5658                _ => self.expected("an analyze format", next_token),
5659            },
5660            _ => self.expected("an analyze format", next_token),
5661        }
5662    }
5663
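    /// Parse a `CREATE [ MATERIALIZED ] VIEW` statement; the leading
    /// `CREATE [ OR ALTER | OR REPLACE ] [ TEMPORARY ]` keywords and any
    /// MySQL-style view parameters are handled by the caller. An illustrative
    /// statement (names are made up):
    ///
    /// ```sql
    /// CREATE OR REPLACE VIEW active_users (id, name) AS
    /// SELECT id, name FROM users WHERE active
    /// ```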
5664    pub fn parse_create_view(
5665        &mut self,
5666        or_alter: bool,
5667        or_replace: bool,
5668        temporary: bool,
5669        create_view_params: Option<CreateViewParams>,
5670    ) -> Result<Statement, ParserError> {
5671        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
5672        self.expect_keyword_is(Keyword::VIEW)?;
5673        let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect)
5674            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5675        // `OR ALTER` right after `CREATE` is handled by the caller and passed in via `or_alter`.
5676        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it yet.
5677        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
5678        let name = self.parse_object_name(allow_unquoted_hyphen)?;
5679        let columns = self.parse_view_columns()?;
5680        let mut options = CreateTableOptions::None;
5681        let with_options = self.parse_options(Keyword::WITH)?;
5682        if !with_options.is_empty() {
5683            options = CreateTableOptions::With(with_options);
5684        }
5685
5686        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
5687            self.expect_keyword_is(Keyword::BY)?;
5688            self.parse_parenthesized_column_list(Optional, false)?
5689        } else {
5690            vec![]
5691        };
5692
5693        if dialect_of!(self is BigQueryDialect | GenericDialect) {
5694            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
5695                if !opts.is_empty() {
5696                    options = CreateTableOptions::Options(opts);
5697                }
5698            };
5699        }
5700
5701        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
5702            && self.parse_keyword(Keyword::TO)
5703        {
5704            Some(self.parse_object_name(false)?)
5705        } else {
5706            None
5707        };
5708
5709        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
5710            && self.parse_keyword(Keyword::COMMENT)
5711        {
5712            self.expect_token(&Token::Eq)?;
5713            Some(self.parse_comment_value()?)
5714        } else {
5715            None
5716        };
5717
5718        self.expect_keyword_is(Keyword::AS)?;
5719        let query = self.parse_query()?;
5720        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here, but we don't parse it yet.
5721
5722        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
5723            && self.parse_keywords(&[
5724                Keyword::WITH,
5725                Keyword::NO,
5726                Keyword::SCHEMA,
5727                Keyword::BINDING,
5728            ]);
5729
5730        Ok(Statement::CreateView {
5731            or_alter,
5732            name,
5733            columns,
5734            query,
5735            materialized,
5736            or_replace,
5737            options,
5738            cluster_by,
5739            comment,
5740            with_no_schema_binding,
5741            if_not_exists,
5742            temporary,
5743            to,
5744            params: create_view_params,
5745        })
5746    }
5747
5748    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
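    ///
    /// For example, in a hypothetical statement such as
    ///
    /// ```sql
    /// CREATE ALGORITHM = MERGE SQL SECURITY INVOKER VIEW v AS SELECT 1
    /// ```
    ///
    /// this routine handles the `ALGORITHM = MERGE SQL SECURITY INVOKER` portion.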
5749    ///
5750    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
5751    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
5752        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
5753            self.expect_token(&Token::Eq)?;
5754            Some(
5755                match self.expect_one_of_keywords(&[
5756                    Keyword::UNDEFINED,
5757                    Keyword::MERGE,
5758                    Keyword::TEMPTABLE,
5759                ])? {
5760                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
5761                    Keyword::MERGE => CreateViewAlgorithm::Merge,
5762                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
5763                    _ => {
5764                        self.prev_token();
5765                        let found = self.next_token();
5766                        return self
5767                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
5768                    }
5769                },
5770            )
5771        } else {
5772            None
5773        };
5774        let definer = if self.parse_keyword(Keyword::DEFINER) {
5775            self.expect_token(&Token::Eq)?;
5776            Some(self.parse_grantee_name()?)
5777        } else {
5778            None
5779        };
5780        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
5781            Some(
5782                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
5783                    Keyword::DEFINER => CreateViewSecurity::Definer,
5784                    Keyword::INVOKER => CreateViewSecurity::Invoker,
5785                    _ => {
5786                        self.prev_token();
5787                        let found = self.next_token();
5788                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
5789                    }
5790                },
5791            )
5792        } else {
5793            None
5794        };
5795        if algorithm.is_some() || definer.is_some() || security.is_some() {
5796            Ok(Some(CreateViewParams {
5797                algorithm,
5798                definer,
5799                security,
5800            }))
5801        } else {
5802            Ok(None)
5803        }
5804    }
5805
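    /// Parse a `CREATE ROLE` statement; `CREATE ROLE` itself is assumed to have
    /// been consumed by the caller. Most options are dialect specific (MSSQL
    /// accepts `AUTHORIZATION`, PostgreSQL a longer list). An illustrative
    /// Postgres-style statement (names and values are made up):
    ///
    /// ```sql
    /// CREATE ROLE reporting WITH LOGIN PASSWORD 'secret' CONNECTION LIMIT 10
    /// ```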
5806    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
5807        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5808        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
5809
5810        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
5811
5812        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
5813            vec![Keyword::AUTHORIZATION]
5814        } else if dialect_of!(self is PostgreSqlDialect) {
5815            vec![
5816                Keyword::LOGIN,
5817                Keyword::NOLOGIN,
5818                Keyword::INHERIT,
5819                Keyword::NOINHERIT,
5820                Keyword::BYPASSRLS,
5821                Keyword::NOBYPASSRLS,
5822                Keyword::PASSWORD,
5823                Keyword::CREATEDB,
5824                Keyword::NOCREATEDB,
5825                Keyword::CREATEROLE,
5826                Keyword::NOCREATEROLE,
5827                Keyword::SUPERUSER,
5828                Keyword::NOSUPERUSER,
5829                Keyword::REPLICATION,
5830                Keyword::NOREPLICATION,
5831                Keyword::CONNECTION,
5832                Keyword::VALID,
5833                Keyword::IN,
5834                Keyword::ROLE,
5835                Keyword::ADMIN,
5836                Keyword::USER,
5837            ]
5838        } else {
5839            vec![]
5840        };
5841
5842        // MSSQL
5843        let mut authorization_owner = None;
5844        // Postgres
5845        let mut login = None;
5846        let mut inherit = None;
5847        let mut bypassrls = None;
5848        let mut password = None;
5849        let mut create_db = None;
5850        let mut create_role = None;
5851        let mut superuser = None;
5852        let mut replication = None;
5853        let mut connection_limit = None;
5854        let mut valid_until = None;
5855        let mut in_role = vec![];
5856        let mut in_group = vec![];
5857        let mut role = vec![];
5858        let mut user = vec![];
5859        let mut admin = vec![];
5860
5861        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
5862            let loc = self
5863                .tokens
5864                .get(self.index - 1)
5865                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
5866            match keyword {
5867                Keyword::AUTHORIZATION => {
5868                    if authorization_owner.is_some() {
5869                        parser_err!("Found multiple AUTHORIZATION", loc)
5870                    } else {
5871                        authorization_owner = Some(self.parse_object_name(false)?);
5872                        Ok(())
5873                    }
5874                }
5875                Keyword::LOGIN | Keyword::NOLOGIN => {
5876                    if login.is_some() {
5877                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
5878                    } else {
5879                        login = Some(keyword == Keyword::LOGIN);
5880                        Ok(())
5881                    }
5882                }
5883                Keyword::INHERIT | Keyword::NOINHERIT => {
5884                    if inherit.is_some() {
5885                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
5886                    } else {
5887                        inherit = Some(keyword == Keyword::INHERIT);
5888                        Ok(())
5889                    }
5890                }
5891                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
5892                    if bypassrls.is_some() {
5893                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
5894                    } else {
5895                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
5896                        Ok(())
5897                    }
5898                }
5899                Keyword::CREATEDB | Keyword::NOCREATEDB => {
5900                    if create_db.is_some() {
5901                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
5902                    } else {
5903                        create_db = Some(keyword == Keyword::CREATEDB);
5904                        Ok(())
5905                    }
5906                }
5907                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
5908                    if create_role.is_some() {
5909                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
5910                    } else {
5911                        create_role = Some(keyword == Keyword::CREATEROLE);
5912                        Ok(())
5913                    }
5914                }
5915                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
5916                    if superuser.is_some() {
5917                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
5918                    } else {
5919                        superuser = Some(keyword == Keyword::SUPERUSER);
5920                        Ok(())
5921                    }
5922                }
5923                Keyword::REPLICATION | Keyword::NOREPLICATION => {
5924                    if replication.is_some() {
5925                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
5926                    } else {
5927                        replication = Some(keyword == Keyword::REPLICATION);
5928                        Ok(())
5929                    }
5930                }
5931                Keyword::PASSWORD => {
5932                    if password.is_some() {
5933                        parser_err!("Found multiple PASSWORD", loc)
5934                    } else {
5935                        password = if self.parse_keyword(Keyword::NULL) {
5936                            Some(Password::NullPassword)
5937                        } else {
5938                            Some(Password::Password(Expr::Value(self.parse_value()?)))
5939                        };
5940                        Ok(())
5941                    }
5942                }
5943                Keyword::CONNECTION => {
5944                    self.expect_keyword_is(Keyword::LIMIT)?;
5945                    if connection_limit.is_some() {
5946                        parser_err!("Found multiple CONNECTION LIMIT", loc)
5947                    } else {
5948                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
5949                        Ok(())
5950                    }
5951                }
5952                Keyword::VALID => {
5953                    self.expect_keyword_is(Keyword::UNTIL)?;
5954                    if valid_until.is_some() {
5955                        parser_err!("Found multiple VALID UNTIL", loc)
5956                    } else {
5957                        valid_until = Some(Expr::Value(self.parse_value()?));
5958                        Ok(())
5959                    }
5960                }
5961                Keyword::IN => {
5962                    if self.parse_keyword(Keyword::ROLE) {
5963                        if !in_role.is_empty() {
5964                            parser_err!("Found multiple IN ROLE", loc)
5965                        } else {
5966                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
5967                            Ok(())
5968                        }
5969                    } else if self.parse_keyword(Keyword::GROUP) {
5970                        if !in_group.is_empty() {
5971                            parser_err!("Found multiple IN GROUP", loc)
5972                        } else {
5973                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
5974                            Ok(())
5975                        }
5976                    } else {
5977                        self.expected("ROLE or GROUP after IN", self.peek_token())
5978                    }
5979                }
5980                Keyword::ROLE => {
5981                    if !role.is_empty() {
5982                        parser_err!("Found multiple ROLE", loc)
5983                    } else {
5984                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
5985                        Ok(())
5986                    }
5987                }
5988                Keyword::USER => {
5989                    if !user.is_empty() {
5990                        parser_err!("Found multiple USER", loc)
5991                    } else {
5992                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
5993                        Ok(())
5994                    }
5995                }
5996                Keyword::ADMIN => {
5997                    if !admin.is_empty() {
5998                        parser_err!("Found multiple ADMIN", loc)
5999                    } else {
6000                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6001                        Ok(())
6002                    }
6003                }
6004                _ => break,
6005            }?
6006        }
6007
6008        Ok(Statement::CreateRole {
6009            names,
6010            if_not_exists,
6011            login,
6012            inherit,
6013            bypassrls,
6014            password,
6015            create_db,
6016            create_role,
6017            replication,
6018            superuser,
6019            connection_limit,
6020            valid_until,
6021            in_role,
6022            in_group,
6023            role,
6024            user,
6025            admin,
6026            authorization_owner,
6027        })
6028    }
6029
6030    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6031        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6032            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6033            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6034            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6035            Some(_) => unreachable!(),
6036            None => {
6037                match self.parse_identifier() {
6038                    Ok(ident) => Owner::Ident(ident),
6039                    Err(e) => {
6040                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6041                    }
6042                }
6043            }
6044        };
6045        Ok(owner)
6046    }
6047
6048    /// Parses a [Statement::CreateDomain] statement.
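    ///
    /// An illustrative PostgreSQL-style statement (names are made up):
    ///
    /// ```sql
    /// CREATE DOMAIN positive_int AS INTEGER CHECK (VALUE > 0)
    /// ```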
6049    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6050        let name = self.parse_object_name(false)?;
6051        self.expect_keyword_is(Keyword::AS)?;
6052        let data_type = self.parse_data_type()?;
6053        let collation = if self.parse_keyword(Keyword::COLLATE) {
6054            Some(self.parse_identifier()?)
6055        } else {
6056            None
6057        };
6058        let default = if self.parse_keyword(Keyword::DEFAULT) {
6059            Some(self.parse_expr()?)
6060        } else {
6061            None
6062        };
6063        let mut constraints = Vec::new();
6064        while let Some(constraint) = self.parse_optional_table_constraint()? {
6065            constraints.push(constraint);
6066        }
6067
6068        Ok(Statement::CreateDomain(CreateDomain {
6069            name,
6070            data_type,
6071            collation,
6072            default,
6073            constraints,
6074        }))
6075    }
6076
6077    /// ```sql
6078    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6079    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6080    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6081    ///     [ USING ( using_expression ) ]
6082    ///     [ WITH CHECK ( with_check_expression ) ]
6083    /// ```
6084    ///
6085    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
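    ///
    /// An illustrative statement of this form (identifiers are made up):
    ///
    /// ```sql
    /// CREATE POLICY my_policy ON my_table
    /// AS PERMISSIVE FOR SELECT TO CURRENT_USER
    /// USING (c0 = 1) WITH CHECK (true)
    /// ```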
6086    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6087        let name = self.parse_identifier()?;
6088        self.expect_keyword_is(Keyword::ON)?;
6089        let table_name = self.parse_object_name(false)?;
6090
6091        let policy_type = if self.parse_keyword(Keyword::AS) {
6092            let keyword =
6093                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6094            Some(match keyword {
6095                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6096                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6097                _ => unreachable!(),
6098            })
6099        } else {
6100            None
6101        };
6102
6103        let command = if self.parse_keyword(Keyword::FOR) {
6104            let keyword = self.expect_one_of_keywords(&[
6105                Keyword::ALL,
6106                Keyword::SELECT,
6107                Keyword::INSERT,
6108                Keyword::UPDATE,
6109                Keyword::DELETE,
6110            ])?;
6111            Some(match keyword {
6112                Keyword::ALL => CreatePolicyCommand::All,
6113                Keyword::SELECT => CreatePolicyCommand::Select,
6114                Keyword::INSERT => CreatePolicyCommand::Insert,
6115                Keyword::UPDATE => CreatePolicyCommand::Update,
6116                Keyword::DELETE => CreatePolicyCommand::Delete,
6117                _ => unreachable!(),
6118            })
6119        } else {
6120            None
6121        };
6122
6123        let to = if self.parse_keyword(Keyword::TO) {
6124            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6125        } else {
6126            None
6127        };
6128
6129        let using = if self.parse_keyword(Keyword::USING) {
6130            self.expect_token(&Token::LParen)?;
6131            let expr = self.parse_expr()?;
6132            self.expect_token(&Token::RParen)?;
6133            Some(expr)
6134        } else {
6135            None
6136        };
6137
6138        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6139            self.expect_token(&Token::LParen)?;
6140            let expr = self.parse_expr()?;
6141            self.expect_token(&Token::RParen)?;
6142            Some(expr)
6143        } else {
6144            None
6145        };
6146
6147        Ok(CreatePolicy {
6148            name,
6149            table_name,
6150            policy_type,
6151            command,
6152            to,
6153            using,
6154            with_check,
6155        })
6156    }
6157
6158    /// ```sql
6159    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6160    /// [TYPE datasource_type]
6161    /// [URL datasource_url]
6162    /// [COMMENT connector_comment]
6163    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6164    /// ```
6165    ///
6166    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
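    ///
    /// An illustrative statement of this form (names and values are made up):
    ///
    /// ```sql
    /// CREATE CONNECTOR IF NOT EXISTS mysql_connector
    /// TYPE 'mysql'
    /// URL 'jdbc:mysql://localhost:3306'
    /// COMMENT 'marketing data source'
    /// ```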
6167    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6168        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6169        let name = self.parse_identifier()?;
6170
6171        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6172            Some(self.parse_literal_string()?)
6173        } else {
6174            None
6175        };
6176
6177        let url = if self.parse_keyword(Keyword::URL) {
6178            Some(self.parse_literal_string()?)
6179        } else {
6180            None
6181        };
6182
6183        let comment = self.parse_optional_inline_comment()?;
6184
6185        let with_dcproperties =
6186            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6187                properties if !properties.is_empty() => Some(properties),
6188                _ => None,
6189            };
6190
6191        Ok(Statement::CreateConnector(CreateConnector {
6192            name,
6193            if_not_exists,
6194            connector_type,
6195            url,
6196            comment,
6197            with_dcproperties,
6198        }))
6199    }
6200
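    /// Parse a `DROP` statement; `DROP` itself is assumed to have been consumed
    /// by the caller. An illustrative statement (table names are made up):
    ///
    /// ```sql
    /// DROP TABLE IF EXISTS t1, t2 CASCADE
    /// ```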
6201    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6202        // MySQL dialect supports `TEMPORARY`
6203        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6204            && self.parse_keyword(Keyword::TEMPORARY);
6205        let persistent = dialect_of!(self is DuckDbDialect)
6206            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6207
6208        let object_type = if self.parse_keyword(Keyword::TABLE) {
6209            ObjectType::Table
6210        } else if self.parse_keyword(Keyword::VIEW) {
6211            ObjectType::View
6212        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6213            ObjectType::MaterializedView
6214        } else if self.parse_keyword(Keyword::INDEX) {
6215            ObjectType::Index
6216        } else if self.parse_keyword(Keyword::ROLE) {
6217            ObjectType::Role
6218        } else if self.parse_keyword(Keyword::SCHEMA) {
6219            ObjectType::Schema
6220        } else if self.parse_keyword(Keyword::DATABASE) {
6221            ObjectType::Database
6222        } else if self.parse_keyword(Keyword::SEQUENCE) {
6223            ObjectType::Sequence
6224        } else if self.parse_keyword(Keyword::STAGE) {
6225            ObjectType::Stage
6226        } else if self.parse_keyword(Keyword::TYPE) {
6227            ObjectType::Type
6228        } else if self.parse_keyword(Keyword::FUNCTION) {
6229            return self.parse_drop_function();
6230        } else if self.parse_keyword(Keyword::POLICY) {
6231            return self.parse_drop_policy();
6232        } else if self.parse_keyword(Keyword::CONNECTOR) {
6233            return self.parse_drop_connector();
6234        } else if self.parse_keyword(Keyword::DOMAIN) {
6235            return self.parse_drop_domain();
6236        } else if self.parse_keyword(Keyword::PROCEDURE) {
6237            return self.parse_drop_procedure();
6238        } else if self.parse_keyword(Keyword::SECRET) {
6239            return self.parse_drop_secret(temporary, persistent);
6240        } else if self.parse_keyword(Keyword::TRIGGER) {
6241            return self.parse_drop_trigger();
6242        } else if self.parse_keyword(Keyword::EXTENSION) {
6243            return self.parse_drop_extension();
6244        } else {
6245            return self.expected(
6246                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, or MATERIALIZED VIEW after DROP",
6247                self.peek_token(),
6248            );
6249        };
6250        // Many dialects support the non-standard `IF EXISTS` clause and allow
6251        // specifying multiple objects to delete in a single statement
6252        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6253        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6254
6255        let loc = self.peek_token().span.start;
6256        let cascade = self.parse_keyword(Keyword::CASCADE);
6257        let restrict = self.parse_keyword(Keyword::RESTRICT);
6258        let purge = self.parse_keyword(Keyword::PURGE);
6259        if cascade && restrict {
6260            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6261        }
6262        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6263            return parser_err!(
6264                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6265                loc
6266            );
6267        }
6268        let table = if self.parse_keyword(Keyword::ON) {
6269            Some(self.parse_object_name(false)?)
6270        } else {
6271            None
6272        };
6273        Ok(Statement::Drop {
6274            object_type,
6275            if_exists,
6276            names,
6277            cascade,
6278            restrict,
6279            purge,
6280            temporary,
6281            table,
6282        })
6283    }
6284
6285    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6286        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6287            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6288            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6289            _ => None,
6290        }
6291    }
6292
6293    /// ```sql
6294    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6295    /// [ CASCADE | RESTRICT ]
6296    /// ```
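    ///
    /// An illustrative statement of this form (the function name is made up):
    ///
    /// ```sql
    /// DROP FUNCTION IF EXISTS sqrt(INTEGER) CASCADE
    /// ```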
6297    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6298        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6299        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6300        let drop_behavior = self.parse_optional_drop_behavior();
6301        Ok(Statement::DropFunction {
6302            if_exists,
6303            func_desc,
6304            drop_behavior,
6305        })
6306    }
6307
6308    /// ```sql
6309    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6310    /// ```
6311    ///
6312    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
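    ///
    /// An illustrative statement of this form (identifiers are made up):
    ///
    /// ```sql
    /// DROP POLICY IF EXISTS my_policy ON my_table RESTRICT
    /// ```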
6313    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6314        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6315        let name = self.parse_identifier()?;
6316        self.expect_keyword_is(Keyword::ON)?;
6317        let table_name = self.parse_object_name(false)?;
6318        let drop_behavior = self.parse_optional_drop_behavior();
6319        Ok(Statement::DropPolicy {
6320            if_exists,
6321            name,
6322            table_name,
6323            drop_behavior,
6324        })
6325    }
6326    /// ```sql
6327    /// DROP CONNECTOR [IF EXISTS] name
6328    /// ```
6329    ///
6330    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
6331    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6332        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6333        let name = self.parse_identifier()?;
6334        Ok(Statement::DropConnector { if_exists, name })
6335    }
6336
6337    /// ```sql
6338    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
6339    /// ```
6340    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6341        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6342        let name = self.parse_object_name(false)?;
6343        let drop_behavior = self.parse_optional_drop_behavior();
6344        Ok(Statement::DropDomain(DropDomain {
6345            if_exists,
6346            name,
6347            drop_behavior,
6348        }))
6349    }
6350
6351    /// ```sql
6352    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6353    /// [ CASCADE | RESTRICT ]
6354    /// ```
6355    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6356        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6357        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6358        let drop_behavior = self.parse_optional_drop_behavior();
6359        Ok(Statement::DropProcedure {
6360            if_exists,
6361            proc_desc,
6362            drop_behavior,
6363        })
6364    }
6365
6366    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6367        let name = self.parse_object_name(false)?;
6368
6369        let args = if self.consume_token(&Token::LParen) {
6370            if self.consume_token(&Token::RParen) {
6371                None
6372            } else {
6373                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6374                self.expect_token(&Token::RParen)?;
6375                Some(args)
6376            }
6377        } else {
6378            None
6379        };
6380
6381        Ok(FunctionDesc { name, args })
6382    }
6383
6384    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
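    ///
    /// An illustrative DuckDB-style statement (the secret name is made up;
    /// `TEMPORARY` or `PERSISTENT`, if present, are consumed by `parse_drop`):
    ///
    /// ```sql
    /// DROP SECRET IF EXISTS my_secret
    /// ```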
6385    fn parse_drop_secret(
6386        &mut self,
6387        temporary: bool,
6388        persistent: bool,
6389    ) -> Result<Statement, ParserError> {
6390        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6391        let name = self.parse_identifier()?;
6392        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
6393            self.parse_identifier().ok()
6394        } else {
6395            None
6396        };
6397        let temp = match (temporary, persistent) {
6398            (true, false) => Some(true),
6399            (false, true) => Some(false),
6400            (false, false) => None,
6401            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
6402        };
6403
6404        Ok(Statement::DropSecret {
6405            if_exists,
6406            temporary: temp,
6407            name,
6408            storage_specifier,
6409        })
6410    }
6411
6412    /// Parse a `DECLARE` statement.
6413    ///
6414    /// ```sql
6415    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
6416    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
6417    /// ```
6418    ///
6419    /// The syntax can vary significantly between warehouses. In such cases, see
6420    /// the grammar documented on the warehouse-specific parse functions below.
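    ///
    /// An illustrative statement for the default (PostgreSQL-like) grammar above
    /// (cursor and table names are made up):
    ///
    /// ```sql
    /// DECLARE my_cursor CURSOR WITH HOLD FOR SELECT * FROM films
    /// ```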
6421    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
6422        if dialect_of!(self is BigQueryDialect) {
6423            return self.parse_big_query_declare();
6424        }
6425        if dialect_of!(self is SnowflakeDialect) {
6426            return self.parse_snowflake_declare();
6427        }
6428        if dialect_of!(self is MsSqlDialect) {
6429            return self.parse_mssql_declare();
6430        }
6431
6432        let name = self.parse_identifier()?;
6433
6434        let binary = Some(self.parse_keyword(Keyword::BINARY));
6435        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
6436            Some(true)
6437        } else if self.parse_keyword(Keyword::ASENSITIVE) {
6438            Some(false)
6439        } else {
6440            None
6441        };
6442        let scroll = if self.parse_keyword(Keyword::SCROLL) {
6443            Some(true)
6444        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
6445            Some(false)
6446        } else {
6447            None
6448        };
6449
6450        self.expect_keyword_is(Keyword::CURSOR)?;
6451        let declare_type = Some(DeclareType::Cursor);
6452
6453        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
6454            Some(keyword) => {
6455                self.expect_keyword_is(Keyword::HOLD)?;
6456
6457                match keyword {
6458                    Keyword::WITH => Some(true),
6459                    Keyword::WITHOUT => Some(false),
6460                    _ => unreachable!(),
6461                }
6462            }
6463            None => None,
6464        };
6465
6466        self.expect_keyword_is(Keyword::FOR)?;
6467
6468        let query = Some(self.parse_query()?);
6469
6470        Ok(Statement::Declare {
6471            stmts: vec![Declare {
6472                names: vec![name],
6473                data_type: None,
6474                assignment: None,
6475                declare_type,
6476                binary,
6477                sensitive,
6478                scroll,
6479                hold,
6480                for_query: query,
6481            }],
6482        })
6483    }
6484
6485    /// Parse a [BigQuery] `DECLARE` statement.
6486    ///
6487    /// Syntax:
6488    /// ```text
6489    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
6490    /// ```
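    ///
    /// An illustrative statement of this form (variable names are made up):
    ///
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 42
    /// ```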
6491    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
6492    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
6493        let names = self.parse_comma_separated(Parser::parse_identifier)?;
6494
6495        let data_type = match self.peek_token().token {
6496            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
6497            _ => Some(self.parse_data_type()?),
6498        };
6499
6500        let expr = if data_type.is_some() {
6501            if self.parse_keyword(Keyword::DEFAULT) {
6502                Some(self.parse_expr()?)
6503            } else {
6504                None
6505            }
6506        } else {
6507            // If no variable type is given, a default expression must be specified, per the BQ docs,
6508            // i.e. `DECLARE foo;` is invalid.
6509            self.expect_keyword_is(Keyword::DEFAULT)?;
6510            Some(self.parse_expr()?)
6511        };
6512
6513        Ok(Statement::Declare {
6514            stmts: vec![Declare {
6515                names,
6516                data_type,
6517                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
6518                declare_type: None,
6519                binary: None,
6520                sensitive: None,
6521                scroll: None,
6522                hold: None,
6523                for_query: None,
6524            }],
6525        })
6526    }
6527
6528    /// Parse a [Snowflake] `DECLARE` statement.
6529    ///
6530    /// Syntax:
6531    /// ```text
6532    /// DECLARE
6533    ///   [{ <variable_declaration>
6534    ///      | <cursor_declaration>
6535    ///      | <resultset_declaration>
6536    ///      | <exception_declaration> }; ... ]
6537    ///
6538    /// <variable_declaration>
6539    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
6540    ///
6541    /// <cursor_declaration>
6542    /// <cursor_name> CURSOR FOR <query>
6543    ///
6544    /// <resultset_declaration>
6545    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
6546    ///
6547    /// <exception_declaration>
6548    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
6549    /// ```
6550    ///
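    /// An illustrative block of declarations (identifiers are made up):
    ///
    /// ```sql
    /// DECLARE
    ///   profit NUMBER(38, 2) DEFAULT 0.0;
    ///   c1 CURSOR FOR SELECT price FROM invoices;
    /// ```
    ///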
6551    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
6552    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
6553        let mut stmts = vec![];
6554        loop {
6555            let name = self.parse_identifier()?;
6556            let (declare_type, for_query, assigned_expr, data_type) =
6557                if self.parse_keyword(Keyword::CURSOR) {
6558                    self.expect_keyword_is(Keyword::FOR)?;
6559                    match self.peek_token().token {
6560                        Token::Word(w) if w.keyword == Keyword::SELECT => (
6561                            Some(DeclareType::Cursor),
6562                            Some(self.parse_query()?),
6563                            None,
6564                            None,
6565                        ),
6566                        _ => (
6567                            Some(DeclareType::Cursor),
6568                            None,
6569                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
6570                            None,
6571                        ),
6572                    }
6573                } else if self.parse_keyword(Keyword::RESULTSET) {
6574                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
6575                        self.parse_snowflake_variable_declaration_expression()?
6576                    } else {
6577                        // Nothing more to do. The statement has no further parameters.
6578                        None
6579                    };
6580
6581                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
6582                } else if self.parse_keyword(Keyword::EXCEPTION) {
6583                    let assigned_expr = if self.peek_token().token == Token::LParen {
6584                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
6585                    } else {
6586                        // Nothing more to do. The statement has no further parameters.
6587                        None
6588                    };
6589
6590                    (Some(DeclareType::Exception), None, assigned_expr, None)
6591                } else {
6592                    // Without an explicit keyword, the only valid option is variable declaration.
6593                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
6594                        self.parse_snowflake_variable_declaration_expression()?
6595                    {
6596                        (Some(assigned_expr), None)
6597                    } else if let Token::Word(_) = self.peek_token().token {
6598                        let data_type = self.parse_data_type()?;
6599                        (
6600                            self.parse_snowflake_variable_declaration_expression()?,
6601                            Some(data_type),
6602                        )
6603                    } else {
6604                        (None, None)
6605                    };
6606                    (None, None, assigned_expr, data_type)
6607                };
6608            let stmt = Declare {
6609                names: vec![name],
6610                data_type,
6611                assignment: assigned_expr,
6612                declare_type,
6613                binary: None,
6614                sensitive: None,
6615                scroll: None,
6616                hold: None,
6617                for_query,
6618            };
6619
6620            stmts.push(stmt);
6621            if self.consume_token(&Token::SemiColon) {
6622                match self.peek_token().token {
6623                    Token::Word(w)
6624                        if ALL_KEYWORDS
6625                            .binary_search(&w.value.to_uppercase().as_str())
6626                            .is_err() =>
6627                    {
6628                        // Not a keyword - start of a new declaration.
6629                        continue;
6630                    }
6631                    _ => {
6632                        // Put back the semicolon, this is the end of the DECLARE statement.
6633                        self.prev_token();
6634                    }
6635                }
6636            }
6637
6638            break;
6639        }
6640
6641        Ok(Statement::Declare { stmts })
6642    }
6643
6644    /// Parse a [MsSql] `DECLARE` statement.
6645    ///
6646    /// Syntax:
6647    /// ```text
6648    /// DECLARE
6649    /// {
6650    ///   { @local_variable [AS] data_type [ = value ] }
6651    ///   | { @cursor_variable_name CURSOR [ FOR ] }
6652    /// } [ ,...n ]
6653    /// ```
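    ///
    /// An illustrative statement of this form (variable names and the value are made up):
    ///
    /// ```sql
    /// DECLARE @foo INT = 5, @bar CURSOR
    /// ```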
6654    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
6655    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
6656        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
6657
6658        Ok(Statement::Declare { stmts })
6659    }
6660
6661    /// Parse the body of a [MsSql] `DECLARE` statement.
6662    ///
6663    /// Syntax:
6664    /// ```text
6665    /// {
6666    ///   { @local_variable [AS] data_type [ = value ] }
6667    ///   | { @cursor_variable_name CURSOR [ FOR ] }
6668    /// } [ ,...n ]
6669    /// ```
6670    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
6671    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
6672        let name = {
6673            let ident = self.parse_identifier()?;
6674            if !ident.value.starts_with('@')
6675                && !matches!(
6676                    self.peek_token().token,
6677                    Token::Word(w) if w.keyword == Keyword::CURSOR
6678                )
6679            {
6680                Err(ParserError::TokenizerError(
6681                    "Invalid MsSql variable declaration.".to_string(),
6682                ))
6683            } else {
6684                Ok(ident)
6685            }
6686        }?;
6687
6688        let (declare_type, data_type) = match self.peek_token().token {
6689            Token::Word(w) => match w.keyword {
6690                Keyword::CURSOR => {
6691                    self.next_token();
6692                    (Some(DeclareType::Cursor), None)
6693                }
6694                Keyword::AS => {
6695                    self.next_token();
6696                    (None, Some(self.parse_data_type()?))
6697                }
6698                _ => (None, Some(self.parse_data_type()?)),
6699            },
6700            _ => (None, Some(self.parse_data_type()?)),
6701        };
6702
6703        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
6704            self.next_token();
6705            let query = Some(self.parse_query()?);
6706            (query, None)
6707        } else {
6708            let assignment = self.parse_mssql_variable_declaration_expression()?;
6709            (None, assignment)
6710        };
6711
6712        Ok(Declare {
6713            names: vec![name],
6714            data_type,
6715            assignment,
6716            declare_type,
6717            binary: None,
6718            sensitive: None,
6719            scroll: None,
6720            hold: None,
6721            for_query,
6722        })
6723    }
6724
6725    /// Parses the assigned expression in a variable declaration.
6726    ///
6727    /// Syntax:
6728    /// ```text
6729    /// [ { DEFAULT | := } <expression>]
6730    /// ```
6731    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
6732    pub fn parse_snowflake_variable_declaration_expression(
6733        &mut self,
6734    ) -> Result<Option<DeclareAssignment>, ParserError> {
6735        Ok(match self.peek_token().token {
6736            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
6737                self.next_token(); // Skip `DEFAULT`
6738                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
6739            }
6740            Token::Assignment => {
6741                self.next_token(); // Skip `:=`
6742                Some(DeclareAssignment::DuckAssignment(Box::new(
6743                    self.parse_expr()?,
6744                )))
6745            }
6746            _ => None,
6747        })
6748    }
6749
6750    /// Parses the assigned expression in a variable declaration.
6751    ///
6752    /// Syntax:
6753    /// ```text
6754    /// [ = <expression>]
6755    /// ```
6756    pub fn parse_mssql_variable_declaration_expression(
6757        &mut self,
6758    ) -> Result<Option<DeclareAssignment>, ParserError> {
6759        Ok(match self.peek_token().token {
6760            Token::Eq => {
6761                self.next_token(); // Skip `=`
6762                Some(DeclareAssignment::MsSqlAssignment(Box::new(
6763                    self.parse_expr()?,
6764                )))
6765            }
6766            _ => None,
6767        })
6768    }
6769
6770    // FETCH [ direction { FROM | IN } ] cursor [ INTO target ]
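    // An illustrative statement (cursor and target names are made up):
    //   FETCH NEXT FROM my_cursor INTO my_table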
6771    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
6772        let direction = if self.parse_keyword(Keyword::NEXT) {
6773            FetchDirection::Next
6774        } else if self.parse_keyword(Keyword::PRIOR) {
6775            FetchDirection::Prior
6776        } else if self.parse_keyword(Keyword::FIRST) {
6777            FetchDirection::First
6778        } else if self.parse_keyword(Keyword::LAST) {
6779            FetchDirection::Last
6780        } else if self.parse_keyword(Keyword::ABSOLUTE) {
6781            FetchDirection::Absolute {
6782                limit: self.parse_number_value()?.value,
6783            }
6784        } else if self.parse_keyword(Keyword::RELATIVE) {
6785            FetchDirection::Relative {
6786                limit: self.parse_number_value()?.value,
6787            }
6788        } else if self.parse_keyword(Keyword::FORWARD) {
6789            if self.parse_keyword(Keyword::ALL) {
6790                FetchDirection::ForwardAll
6791            } else {
6792                FetchDirection::Forward {
6793                    // TODO: Support an optional limit
6794                    limit: Some(self.parse_number_value()?.value),
6795                }
6796            }
6797        } else if self.parse_keyword(Keyword::BACKWARD) {
6798            if self.parse_keyword(Keyword::ALL) {
6799                FetchDirection::BackwardAll
6800            } else {
6801                FetchDirection::Backward {
6802                    // TODO: Support an optional limit
6803                    limit: Some(self.parse_number_value()?.value),
6804                }
6805            }
6806        } else if self.parse_keyword(Keyword::ALL) {
6807            FetchDirection::All
6808        } else {
6809            FetchDirection::Count {
6810                limit: self.parse_number_value()?.value,
6811            }
6812        };
6813
6814        let position = if self.peek_keyword(Keyword::FROM) {
6815            self.expect_keyword(Keyword::FROM)?;
6816            FetchPosition::From
6817        } else if self.peek_keyword(Keyword::IN) {
6818            self.expect_keyword(Keyword::IN)?;
6819            FetchPosition::In
6820        } else {
6821            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
6822        };
6823
6824        let name = self.parse_identifier()?;
6825
6826        let into = if self.parse_keyword(Keyword::INTO) {
6827            Some(self.parse_object_name(false)?)
6828        } else {
6829            None
6830        };
6831
6832        Ok(Statement::Fetch {
6833            name,
6834            direction,
6835            position,
6836            into,
6837        })
6838    }
6839
6840    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
6841        let object_type = if self.parse_keyword(Keyword::ALL) {
6842            DiscardObject::ALL
6843        } else if self.parse_keyword(Keyword::PLANS) {
6844            DiscardObject::PLANS
6845        } else if self.parse_keyword(Keyword::SEQUENCES) {
6846            DiscardObject::SEQUENCES
6847        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
6848            DiscardObject::TEMP
6849        } else {
6850            return self.expected(
6851                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
6852                self.peek_token(),
6853            );
6854        };
6855        Ok(Statement::Discard { object_type })
6856    }
6857
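    /// Parses the body of a `CREATE [UNIQUE] INDEX` statement (the leading
    /// keywords are expected to have been consumed by the caller, which passes
    /// `unique` accordingly).
    ///
    /// A rough end-to-end sketch (illustrative; the SQL string and
    /// `GenericDialect` are arbitrary examples):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE UNIQUE INDEX title_idx ON films(title)";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```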
6858    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
6859        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
6860        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6861        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
6862            let index_name = self.parse_object_name(false)?;
6863            self.expect_keyword_is(Keyword::ON)?;
6864            Some(index_name)
6865        } else {
6866            None
6867        };
6868        let table_name = self.parse_object_name(false)?;
6869        let using = if self.parse_keyword(Keyword::USING) {
6870            Some(self.parse_index_type()?)
6871        } else {
6872            None
6873        };
6874
6875        self.expect_token(&Token::LParen)?;
6876        let columns = self.parse_comma_separated(Parser::parse_create_index_expr)?;
6877        self.expect_token(&Token::RParen)?;
6878
6879        let include = if self.parse_keyword(Keyword::INCLUDE) {
6880            self.expect_token(&Token::LParen)?;
6881            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
6882            self.expect_token(&Token::RParen)?;
6883            columns
6884        } else {
6885            vec![]
6886        };
6887
6888        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
6889            let not = self.parse_keyword(Keyword::NOT);
6890            self.expect_keyword_is(Keyword::DISTINCT)?;
6891            Some(!not)
6892        } else {
6893            None
6894        };
6895
6896        let with = if self.dialect.supports_create_index_with_clause()
6897            && self.parse_keyword(Keyword::WITH)
6898        {
6899            self.expect_token(&Token::LParen)?;
6900            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
6901            self.expect_token(&Token::RParen)?;
6902            with_params
6903        } else {
6904            Vec::new()
6905        };
6906
6907        let predicate = if self.parse_keyword(Keyword::WHERE) {
6908            Some(self.parse_expr()?)
6909        } else {
6910            None
6911        };
6912
6913        Ok(Statement::CreateIndex(CreateIndex {
6914            name: index_name,
6915            table_name,
6916            using,
6917            columns,
6918            unique,
6919            concurrently,
6920            if_not_exists,
6921            include,
6922            nulls_distinct,
6923            with,
6924            predicate,
6925        }))
6926    }
6927
6928    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
6929        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6930        let name = self.parse_identifier()?;
6931
6932        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
6933            let schema = if self.parse_keyword(Keyword::SCHEMA) {
6934                Some(self.parse_identifier()?)
6935            } else {
6936                None
6937            };
6938
6939            let version = if self.parse_keyword(Keyword::VERSION) {
6940                Some(self.parse_identifier()?)
6941            } else {
6942                None
6943            };
6944
6945            let cascade = self.parse_keyword(Keyword::CASCADE);
6946
6947            (schema, version, cascade)
6948        } else {
6949            (None, None, false)
6950        };
6951
6952        Ok(Statement::CreateExtension {
6953            name,
6954            if_not_exists,
6955            schema,
6956            version,
6957            cascade,
6958        })
6959    }
6960
6961    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
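    ///
    /// A minimal usage sketch (illustrative only; the extension name is an
    /// arbitrary example):
    ///
    /// ```
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = PostgreSqlDialect {};
    /// // Input as it appears after the `DROP EXTENSION` keywords.
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("IF EXISTS hstore CASCADE")
    ///     .unwrap();
    /// let _statement = parser.parse_drop_extension().unwrap();
    /// ```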
6962    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
6963        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6964        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
6965        let cascade_or_restrict =
6966            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
6967        Ok(Statement::DropExtension {
6968            names,
6969            if_exists,
6970            cascade_or_restrict: cascade_or_restrict
6971                .map(|k| match k {
6972                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
6973                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
6974                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
6975                })
6976                .transpose()?,
6977        })
6978    }
6979
6980    // TODO: Implement parsing for Skewed
6981    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
6982        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
6983            self.expect_token(&Token::LParen)?;
6984            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
6985            self.expect_token(&Token::RParen)?;
6986            Ok(HiveDistributionStyle::PARTITIONED { columns })
6987        } else {
6988            Ok(HiveDistributionStyle::NONE)
6989        }
6990    }
6991
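    /// Illustrative sketch (not part of the original source): a Hive `CREATE TABLE`
    /// whose storage and location clauses are collected by this helper when the
    /// statement is parsed end to end; the table name and path are arbitrary examples.
    ///
    /// ```
    /// # use sqlparser::dialect::HiveDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE db.tbl (a INT) STORED AS ORC LOCATION 's3://bucket/path'";
    /// let statements = Parser::parse_sql(&HiveDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```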
6992    pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
6993        let mut hive_format = HiveFormat::default();
6994        loop {
6995            match self.parse_one_of_keywords(&[
6996                Keyword::ROW,
6997                Keyword::STORED,
6998                Keyword::LOCATION,
6999                Keyword::WITH,
7000            ]) {
7001                Some(Keyword::ROW) => {
7002                    hive_format.row_format = Some(self.parse_row_format()?);
7003                }
7004                Some(Keyword::STORED) => {
7005                    self.expect_keyword_is(Keyword::AS)?;
7006                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7007                        let input_format = self.parse_expr()?;
7008                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7009                        let output_format = self.parse_expr()?;
7010                        hive_format.storage = Some(HiveIOFormat::IOF {
7011                            input_format,
7012                            output_format,
7013                        });
7014                    } else {
7015                        let format = self.parse_file_format()?;
7016                        hive_format.storage = Some(HiveIOFormat::FileFormat { format });
7017                    }
7018                }
7019                Some(Keyword::LOCATION) => {
7020                    hive_format.location = Some(self.parse_literal_string()?);
7021                }
7022                Some(Keyword::WITH) => {
7023                    self.prev_token();
7024                    let properties = self
7025                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7026                    if !properties.is_empty() {
7027                        hive_format.serde_properties = Some(properties);
7028                    } else {
7029                        break;
7030                    }
7031                }
7032                None => break,
7033                _ => break,
7034            }
7035        }
7036
7037        Ok(hive_format)
7038    }
7039
7040    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7041        self.expect_keyword_is(Keyword::FORMAT)?;
7042        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7043            Some(Keyword::SERDE) => {
7044                let class = self.parse_literal_string()?;
7045                Ok(HiveRowFormat::SERDE { class })
7046            }
7047            _ => {
7048                let mut row_delimiters = vec![];
7049
7050                loop {
7051                    match self.parse_one_of_keywords(&[
7052                        Keyword::FIELDS,
7053                        Keyword::COLLECTION,
7054                        Keyword::MAP,
7055                        Keyword::LINES,
7056                        Keyword::NULL,
7057                    ]) {
7058                        Some(Keyword::FIELDS) => {
7059                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7060                                row_delimiters.push(HiveRowDelimiter {
7061                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7062                                    char: self.parse_identifier()?,
7063                                });
7064
7065                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7066                                    row_delimiters.push(HiveRowDelimiter {
7067                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7068                                        char: self.parse_identifier()?,
7069                                    });
7070                                }
7071                            } else {
7072                                break;
7073                            }
7074                        }
7075                        Some(Keyword::COLLECTION) => {
7076                            if self.parse_keywords(&[
7077                                Keyword::ITEMS,
7078                                Keyword::TERMINATED,
7079                                Keyword::BY,
7080                            ]) {
7081                                row_delimiters.push(HiveRowDelimiter {
7082                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7083                                    char: self.parse_identifier()?,
7084                                });
7085                            } else {
7086                                break;
7087                            }
7088                        }
7089                        Some(Keyword::MAP) => {
7090                            if self.parse_keywords(&[
7091                                Keyword::KEYS,
7092                                Keyword::TERMINATED,
7093                                Keyword::BY,
7094                            ]) {
7095                                row_delimiters.push(HiveRowDelimiter {
7096                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7097                                    char: self.parse_identifier()?,
7098                                });
7099                            } else {
7100                                break;
7101                            }
7102                        }
7103                        Some(Keyword::LINES) => {
7104                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7105                                row_delimiters.push(HiveRowDelimiter {
7106                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7107                                    char: self.parse_identifier()?,
7108                                });
7109                            } else {
7110                                break;
7111                            }
7112                        }
7113                        Some(Keyword::NULL) => {
7114                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7115                                row_delimiters.push(HiveRowDelimiter {
7116                                    delimiter: HiveDelimiter::NullDefinedAs,
7117                                    char: self.parse_identifier()?,
7118                                });
7119                            } else {
7120                                break;
7121                            }
7122                        }
7123                        _ => {
7124                            break;
7125                        }
7126                    }
7127                }
7128
7129                Ok(HiveRowFormat::DELIMITED {
7130                    delimiters: row_delimiters,
7131                })
7132            }
7133        }
7134    }
7135
7136    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7137        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7138            Ok(Some(self.parse_identifier()?))
7139        } else {
7140            Ok(None)
7141        }
7142    }
7143
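    /// Parses the body of a `CREATE TABLE` statement; the leading `CREATE`
    /// modifiers (`OR REPLACE`, `TEMPORARY`, `GLOBAL`/`LOCAL`, `TRANSIENT`) are
    /// expected to have been consumed by the caller and passed in as flags.
    ///
    /// A rough end-to-end sketch (illustrative; the SQL string and
    /// `GenericDialect` are arbitrary examples):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE IF NOT EXISTS films (id INT PRIMARY KEY, title VARCHAR(100) NOT NULL)";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```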
7144    pub fn parse_create_table(
7145        &mut self,
7146        or_replace: bool,
7147        temporary: bool,
7148        global: Option<bool>,
7149        transient: bool,
7150    ) -> Result<Statement, ParserError> {
7151        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7152        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7153        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7154
7155        // ClickHouse has `ON CLUSTER 'cluster'` syntax for DDLs
7156        let on_cluster = self.parse_optional_on_cluster()?;
7157
7158        let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
7159            self.parse_object_name(allow_unquoted_hyphen).ok()
7160        } else {
7161            None
7162        };
7163
7164        let clone = if self.parse_keyword(Keyword::CLONE) {
7165            self.parse_object_name(allow_unquoted_hyphen).ok()
7166        } else {
7167            None
7168        };
7169
7170        // parse optional column list (schema)
7171        let (columns, constraints) = self.parse_columns()?;
7172        let comment_after_column_def =
7173            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7174                let next_token = self.next_token();
7175                match next_token.token {
7176                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7177                    _ => self.expected("comment", next_token)?,
7178                }
7179            } else {
7180                None
7181            };
7182
7183        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7184        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7185
7186        let hive_distribution = self.parse_hive_distribution()?;
7187        let clustered_by = self.parse_optional_clustered_by()?;
7188        let hive_formats = self.parse_hive_formats()?;
7189
7190        let create_table_config = self.parse_optional_create_table_config()?;
7191
7192        // ClickHouse supports `PRIMARY KEY` before `ORDER BY`
7193        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7194        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7195            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7196        {
7197            Some(Box::new(self.parse_expr()?))
7198        } else {
7199            None
7200        };
7201
7202        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7203            if self.consume_token(&Token::LParen) {
7204                let columns = if self.peek_token() != Token::RParen {
7205                    self.parse_comma_separated(|p| p.parse_expr())?
7206                } else {
7207                    vec![]
7208                };
7209                self.expect_token(&Token::RParen)?;
7210                Some(OneOrManyWithParens::Many(columns))
7211            } else {
7212                Some(OneOrManyWithParens::One(self.parse_expr()?))
7213            }
7214        } else {
7215            None
7216        };
7217
7218        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
7219            Some(self.parse_create_table_on_commit()?)
7220        } else {
7221            None
7222        };
7223
7224        let strict = self.parse_keyword(Keyword::STRICT);
7225
7226        // Parse optional `AS ( query )`
7227        let query = if self.parse_keyword(Keyword::AS) {
7228            Some(self.parse_query()?)
7229        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
7230        {
7231            // rewind the SELECT keyword
7232            self.prev_token();
7233            Some(self.parse_query()?)
7234        } else {
7235            None
7236        };
7237
7238        Ok(CreateTableBuilder::new(table_name)
7239            .temporary(temporary)
7240            .columns(columns)
7241            .constraints(constraints)
7242            .or_replace(or_replace)
7243            .if_not_exists(if_not_exists)
7244            .transient(transient)
7245            .hive_distribution(hive_distribution)
7246            .hive_formats(Some(hive_formats))
7247            .global(global)
7248            .query(query)
7249            .without_rowid(without_rowid)
7250            .like(like)
7251            .clone_clause(clone)
7252            .comment_after_column_def(comment_after_column_def)
7253            .order_by(order_by)
7254            .on_commit(on_commit)
7255            .on_cluster(on_cluster)
7256            .clustered_by(clustered_by)
7257            .partition_by(create_table_config.partition_by)
7258            .cluster_by(create_table_config.cluster_by)
7259            .inherits(create_table_config.inherits)
7260            .table_options(create_table_config.table_options)
7261            .primary_key(primary_key)
7262            .strict(strict)
7263            .build())
7264    }
7265
7266    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
7267        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
7268            Ok(OnCommit::DeleteRows)
7269        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
7270            Ok(OnCommit::PreserveRows)
7271        } else if self.parse_keywords(&[Keyword::DROP]) {
7272            Ok(OnCommit::Drop)
7273        } else {
7274            parser_err!(
7275                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
7276                self.peek_token()
7277            )
7278        }
7279    }
7280
7281    /// Parses table-creation configuration such as inheritance, partitioning, and clustering options.
7282    ///
7283    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
7284    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
7285    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
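    ///
    /// Illustrative sketch (not from the original source): a statement whose
    /// `PARTITION BY` and `CLUSTER BY` clauses end up in this configuration when
    /// parsed end to end; the SQL string and dialect are arbitrary examples.
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE t (a INT) PARTITION BY a CLUSTER BY a";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```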
7286    fn parse_optional_create_table_config(
7287        &mut self,
7288    ) -> Result<CreateTableConfiguration, ParserError> {
7289        let mut table_options = CreateTableOptions::None;
7290
7291        let inherits = if self.parse_keyword(Keyword::INHERITS) {
7292            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
7293        } else {
7294            None
7295        };
7296
7297        // PostgreSQL supports `WITH ( options )` before `AS`
7298        let with_options = self.parse_options(Keyword::WITH)?;
7299        if !with_options.is_empty() {
7300            table_options = CreateTableOptions::With(with_options)
7301        }
7302
7303        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
7304        if !table_properties.is_empty() {
7305            table_options = CreateTableOptions::TableProperties(table_properties);
7306        }
7307        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
7308            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
7309        {
7310            Some(Box::new(self.parse_expr()?))
7311        } else {
7312            None
7313        };
7314
7315        let mut cluster_by = None;
7316        if dialect_of!(self is BigQueryDialect | GenericDialect) {
7317            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
7318                cluster_by = Some(WrappedCollection::NoWrapping(
7319                    self.parse_comma_separated(|p| p.parse_expr())?,
7320                ));
7321            };
7322
7323            if let Token::Word(word) = self.peek_token().token {
7324                if word.keyword == Keyword::OPTIONS {
7325                    table_options =
7326                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
7327                }
7328            };
7329        }
7330
7331        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
7332            let plain_options = self.parse_plain_options()?;
7333            if !plain_options.is_empty() {
7334                table_options = CreateTableOptions::Plain(plain_options)
7335            }
7336        };
7337
7338        Ok(CreateTableConfiguration {
7339            partition_by,
7340            cluster_by,
7341            inherits,
7342            table_options,
7343        })
7344    }
7345
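    /// Illustrative sketch (not part of the original source): MySQL-style table
    /// options that this helper recognizes when a `CREATE TABLE` statement is
    /// parsed end to end; the option values are arbitrary examples.
    ///
    /// ```
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE t (id INT) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='demo'";
    /// let statements = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```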
7346    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
7347        // Single parameter option
7348        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7349        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
7350            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
7351        }
7352
7353        // Custom option
7354        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7355        if self.parse_keywords(&[Keyword::COMMENT]) {
7356            let has_eq = self.consume_token(&Token::Eq);
7357            let value = self.next_token();
7358
7359            let comment = match (has_eq, value.token) {
7360                (true, Token::SingleQuotedString(s)) => {
7361                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
7362                }
7363                (false, Token::SingleQuotedString(s)) => {
7364                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
7365                }
7366                (_, token) => {
7367                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
7368                }
7369            };
7370            return comment;
7371        }
7372
7373        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7374        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
7375        if self.parse_keywords(&[Keyword::ENGINE]) {
7376            let _ = self.consume_token(&Token::Eq);
7377            let value = self.next_token();
7378
7379            let engine = match value.token {
7380                Token::Word(w) => {
7381                    let parameters = if self.peek_token() == Token::LParen {
7382                        self.parse_parenthesized_identifiers()?
7383                    } else {
7384                        vec![]
7385                    };
7386
7387                    Ok(Some(SqlOption::NamedParenthesizedList(
7388                        NamedParenthesizedList {
7389                            key: Ident::new("ENGINE"),
7390                            name: Some(Ident::new(w.value)),
7391                            values: parameters,
7392                        },
7393                    )))
7394                }
7395                _ => {
7396                    return self.expected("Token::Word", value)?;
7397                }
7398            };
7399
7400            return engine;
7401        }
7402
7403        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7404        if self.parse_keywords(&[Keyword::TABLESPACE]) {
7405            let _ = self.consume_token(&Token::Eq);
7406            let value = self.next_token();
7407
7408            let tablespace = match value.token {
7409                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
7410                    let storage = match self.parse_keyword(Keyword::STORAGE) {
7411                        true => {
7412                            let _ = self.consume_token(&Token::Eq);
7413                            let storage_token = self.next_token();
7414                            match &storage_token.token {
7415                                Token::Word(w) => match w.value.to_uppercase().as_str() {
7416                                    "DISK" => Some(StorageType::Disk),
7417                                    "MEMORY" => Some(StorageType::Memory),
7418                                    _ => self
7419                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
7420                                },
7421                                _ => self.expected("Token::Word", storage_token)?,
7422                            }
7423                        }
7424                        false => None,
7425                    };
7426
7427                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
7428                        name,
7429                        storage,
7430                    })))
7431                }
7432                _ => {
7433                    return self.expected("Token::Word", value)?;
7434                }
7435            };
7436
7437            return tablespace;
7438        }
7439
7440        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7441        if self.parse_keyword(Keyword::UNION) {
7442            let _ = self.consume_token(&Token::Eq);
7443            let value = self.next_token();
7444
7445            match value.token {
7446                Token::LParen => {
7447                    let tables: Vec<Ident> =
7448                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
7449                    self.expect_token(&Token::RParen)?;
7450
7451                    return Ok(Some(SqlOption::NamedParenthesizedList(
7452                        NamedParenthesizedList {
7453                            key: Ident::new("UNION"),
7454                            name: None,
7455                            values: tables,
7456                        },
7457                    )));
7458                }
7459                _ => {
7460                    return self.expected("Token::LParen", value)?;
7461                }
7462            }
7463        }
7464
7465        // Key/Value parameter option
7466        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
7467            Ident::new("DEFAULT CHARSET")
7468        } else if self.parse_keyword(Keyword::CHARSET) {
7469            Ident::new("CHARSET")
7470        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
7471            Ident::new("DEFAULT CHARACTER SET")
7472        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7473            Ident::new("CHARACTER SET")
7474        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
7475            Ident::new("DEFAULT COLLATE")
7476        } else if self.parse_keyword(Keyword::COLLATE) {
7477            Ident::new("COLLATE")
7478        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
7479            Ident::new("DATA DIRECTORY")
7480        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
7481            Ident::new("INDEX DIRECTORY")
7482        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
7483            Ident::new("KEY_BLOCK_SIZE")
7484        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
7485            Ident::new("ROW_FORMAT")
7486        } else if self.parse_keyword(Keyword::PACK_KEYS) {
7487            Ident::new("PACK_KEYS")
7488        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
7489            Ident::new("STATS_AUTO_RECALC")
7490        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
7491            Ident::new("STATS_PERSISTENT")
7492        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
7493            Ident::new("STATS_SAMPLE_PAGES")
7494        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
7495            Ident::new("DELAY_KEY_WRITE")
7496        } else if self.parse_keyword(Keyword::COMPRESSION) {
7497            Ident::new("COMPRESSION")
7498        } else if self.parse_keyword(Keyword::ENCRYPTION) {
7499            Ident::new("ENCRYPTION")
7500        } else if self.parse_keyword(Keyword::MAX_ROWS) {
7501            Ident::new("MAX_ROWS")
7502        } else if self.parse_keyword(Keyword::MIN_ROWS) {
7503            Ident::new("MIN_ROWS")
7504        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
7505            Ident::new("AUTOEXTEND_SIZE")
7506        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
7507            Ident::new("AVG_ROW_LENGTH")
7508        } else if self.parse_keyword(Keyword::CHECKSUM) {
7509            Ident::new("CHECKSUM")
7510        } else if self.parse_keyword(Keyword::CONNECTION) {
7511            Ident::new("CONNECTION")
7512        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
7513            Ident::new("ENGINE_ATTRIBUTE")
7514        } else if self.parse_keyword(Keyword::PASSWORD) {
7515            Ident::new("PASSWORD")
7516        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
7517            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
7518        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
7519            Ident::new("INSERT_METHOD")
7520        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
7521            Ident::new("AUTO_INCREMENT")
7522        } else {
7523            return Ok(None);
7524        };
7525
7526        let _ = self.consume_token(&Token::Eq);
7527
7528        let value = match self
7529            .maybe_parse(|parser| parser.parse_value())?
7530            .map(Expr::Value)
7531        {
7532            Some(expr) => expr,
7533            None => Expr::Identifier(self.parse_identifier()?),
7534        };
7535
7536        Ok(Some(SqlOption::KeyValue { key, value }))
7537    }
7538
7539    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
7540        let mut options = Vec::new();
7541
7542        while let Some(option) = self.parse_plain_option()? {
7543            options.push(option);
7544        }
7545
7546        Ok(options)
7547    }
7548
7549    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
7550        let comment = if self.parse_keyword(Keyword::COMMENT) {
7551            let has_eq = self.consume_token(&Token::Eq);
7552            let comment = self.parse_comment_value()?;
7553            Some(if has_eq {
7554                CommentDef::WithEq(comment)
7555            } else {
7556                CommentDef::WithoutEq(comment)
7557            })
7558        } else {
7559            None
7560        };
7561        Ok(comment)
7562    }
7563
7564    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
7565        let next_token = self.next_token();
7566        let value = match next_token.token {
7567            Token::SingleQuotedString(str) => str,
7568            Token::DollarQuotedString(str) => str.value,
7569            _ => self.expected("string literal", next_token)?,
7570        };
7571        Ok(value)
7572    }
7573
7574    pub fn parse_optional_procedure_parameters(
7575        &mut self,
7576    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
7577        let mut params = vec![];
7578        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
7579            return Ok(Some(params));
7580        }
7581        loop {
7582            if let Token::Word(_) = self.peek_token().token {
7583                params.push(self.parse_procedure_param()?)
7584            }
7585            let comma = self.consume_token(&Token::Comma);
7586            if self.consume_token(&Token::RParen) {
7587                // allow a trailing comma, even though it's not in the standard
7588                break;
7589            } else if !comma {
7590                return self.expected("',' or ')' after parameter definition", self.peek_token());
7591            }
7592        }
7593        Ok(Some(params))
7594    }
7595
7596    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
7597        let mut columns = vec![];
7598        let mut constraints = vec![];
7599        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
7600            return Ok((columns, constraints));
7601        }
7602
7603        loop {
7604            if let Some(constraint) = self.parse_optional_table_constraint()? {
7605                constraints.push(constraint);
7606            } else if let Token::Word(_) = self.peek_token().token {
7607                columns.push(self.parse_column_def()?);
7608            } else {
7609                return self.expected("column name or constraint definition", self.peek_token());
7610            }
7611
7612            let comma = self.consume_token(&Token::Comma);
7613            let rparen = self.peek_token().token == Token::RParen;
7614
7615            if !comma && !rparen {
7616                return self.expected("',' or ')' after column definition", self.peek_token());
7617            };
7618
7619            if rparen
7620                && (!comma
7621                    || self.dialect.supports_column_definition_trailing_commas()
7622                    || self.options.trailing_commas)
7623            {
7624                let _ = self.consume_token(&Token::RParen);
7625                break;
7626            }
7627        }
7628
7629        Ok((columns, constraints))
7630    }
7631
7632    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
7633        let name = self.parse_identifier()?;
7634        let data_type = self.parse_data_type()?;
7635        Ok(ProcedureParam { name, data_type })
7636    }
7637
7638    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
7639        let name = self.parse_identifier()?;
7640        let data_type = if self.is_column_type_sqlite_unspecified() {
7641            DataType::Unspecified
7642        } else {
7643            self.parse_data_type()?
7644        };
7645        let mut options = vec![];
7646        loop {
7647            if self.parse_keyword(Keyword::CONSTRAINT) {
7648                let name = Some(self.parse_identifier()?);
7649                if let Some(option) = self.parse_optional_column_option()? {
7650                    options.push(ColumnOptionDef { name, option });
7651                } else {
7652                    return self.expected(
7653                        "constraint details after CONSTRAINT <name>",
7654                        self.peek_token(),
7655                    );
7656                }
7657            } else if let Some(option) = self.parse_optional_column_option()? {
7658                options.push(ColumnOptionDef { name: None, option });
7659            } else {
7660                break;
7661            };
7662        }
7663        Ok(ColumnDef {
7664            name,
7665            data_type,
7666            options,
7667        })
7668    }
7669
7670    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
7671        if dialect_of!(self is SQLiteDialect) {
7672            match self.peek_token().token {
7673                Token::Word(word) => matches!(
7674                    word.keyword,
7675                    Keyword::CONSTRAINT
7676                        | Keyword::PRIMARY
7677                        | Keyword::NOT
7678                        | Keyword::UNIQUE
7679                        | Keyword::CHECK
7680                        | Keyword::DEFAULT
7681                        | Keyword::COLLATE
7682                        | Keyword::REFERENCES
7683                        | Keyword::GENERATED
7684                        | Keyword::AS
7685                ),
7686                _ => true, // e.g. comma immediately after column name
7687            }
7688        } else {
7689            false
7690        }
7691    }
7692
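    /// A rough end-to-end sketch (illustrative; the SQL string and
    /// `GenericDialect` are arbitrary examples) of column options handled here:
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE users (id INT NOT NULL UNIQUE, name VARCHAR(50) DEFAULT 'anon')";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```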
7693    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7694        if let Some(option) = self.dialect.parse_column_option(self)? {
7695            return option;
7696        }
7697
7698        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7699            Ok(Some(ColumnOption::CharacterSet(
7700                self.parse_object_name(false)?,
7701            )))
7702        } else if self.parse_keywords(&[Keyword::COLLATE]) {
7703            Ok(Some(ColumnOption::Collation(
7704                self.parse_object_name(false)?,
7705            )))
7706        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
7707            Ok(Some(ColumnOption::NotNull))
7708        } else if self.parse_keywords(&[Keyword::COMMENT]) {
7709            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
7710        } else if self.parse_keyword(Keyword::NULL) {
7711            Ok(Some(ColumnOption::Null))
7712        } else if self.parse_keyword(Keyword::DEFAULT) {
7713            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
7714        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
7715            && self.parse_keyword(Keyword::MATERIALIZED)
7716        {
7717            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
7718        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
7719            && self.parse_keyword(Keyword::ALIAS)
7720        {
7721            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
7722        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
7723            && self.parse_keyword(Keyword::EPHEMERAL)
7724        {
7725            // The expression is optional for the EPHEMERAL syntax, so we need to check
7726            // if the column definition has remaining tokens before parsing the expression.
7727            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
7728                Ok(Some(ColumnOption::Ephemeral(None)))
7729            } else {
7730                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
7731            }
7732        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
7733            let characteristics = self.parse_constraint_characteristics()?;
7734            Ok(Some(ColumnOption::Unique {
7735                is_primary: true,
7736                characteristics,
7737            }))
7738        } else if self.parse_keyword(Keyword::UNIQUE) {
7739            let characteristics = self.parse_constraint_characteristics()?;
7740            Ok(Some(ColumnOption::Unique {
7741                is_primary: false,
7742                characteristics,
7743            }))
7744        } else if self.parse_keyword(Keyword::REFERENCES) {
7745            let foreign_table = self.parse_object_name(false)?;
7746            // PostgreSQL allows omitting the column list and
7747            // uses the primary key column of the foreign table by default
7748            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
7749            let mut on_delete = None;
7750            let mut on_update = None;
7751            loop {
7752                if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
7753                    on_delete = Some(self.parse_referential_action()?);
7754                } else if on_update.is_none()
7755                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7756                {
7757                    on_update = Some(self.parse_referential_action()?);
7758                } else {
7759                    break;
7760                }
7761            }
7762            let characteristics = self.parse_constraint_characteristics()?;
7763
7764            Ok(Some(ColumnOption::ForeignKey {
7765                foreign_table,
7766                referred_columns,
7767                on_delete,
7768                on_update,
7769                characteristics,
7770            }))
7771        } else if self.parse_keyword(Keyword::CHECK) {
7772            self.expect_token(&Token::LParen)?;
7773            let expr = self.parse_expr()?;
7774            self.expect_token(&Token::RParen)?;
7775            Ok(Some(ColumnOption::Check(expr)))
7776        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
7777            && dialect_of!(self is MySqlDialect | GenericDialect)
7778        {
7779            // Support AUTO_INCREMENT for MySQL
7780            Ok(Some(ColumnOption::DialectSpecific(vec![
7781                Token::make_keyword("AUTO_INCREMENT"),
7782            ])))
7783        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
7784            && dialect_of!(self is SQLiteDialect |  GenericDialect)
7785        {
7786            // Support AUTOINCREMENT for SQLite
7787            Ok(Some(ColumnOption::DialectSpecific(vec![
7788                Token::make_keyword("AUTOINCREMENT"),
7789            ])))
7790        } else if self.parse_keyword(Keyword::ASC)
7791            && self.dialect.supports_asc_desc_in_column_definition()
7792        {
7793            // Support ASC for SQLite
7794            Ok(Some(ColumnOption::DialectSpecific(vec![
7795                Token::make_keyword("ASC"),
7796            ])))
7797        } else if self.parse_keyword(Keyword::DESC)
7798            && self.dialect.supports_asc_desc_in_column_definition()
7799        {
7800            // Support DESC for SQLite
7801            Ok(Some(ColumnOption::DialectSpecific(vec![
7802                Token::make_keyword("DESC"),
7803            ])))
7804        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7805            && dialect_of!(self is MySqlDialect | GenericDialect)
7806        {
7807            let expr = self.parse_expr()?;
7808            Ok(Some(ColumnOption::OnUpdate(expr)))
7809        } else if self.parse_keyword(Keyword::GENERATED) {
7810            self.parse_optional_column_option_generated()
7811        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
7812            && self.parse_keyword(Keyword::OPTIONS)
7813        {
7814            self.prev_token();
7815            Ok(Some(ColumnOption::Options(
7816                self.parse_options(Keyword::OPTIONS)?,
7817            )))
7818        } else if self.parse_keyword(Keyword::AS)
7819            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
7820        {
7821            self.parse_optional_column_option_as()
7822        } else if self.parse_keyword(Keyword::SRID)
7823            && dialect_of!(self is MySqlDialect | GenericDialect)
7824        {
7825            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
7826        } else if self.parse_keyword(Keyword::IDENTITY)
7827            && dialect_of!(self is MsSqlDialect | GenericDialect)
7828        {
7829            let parameters = if self.consume_token(&Token::LParen) {
7830                let seed = self.parse_number()?;
7831                self.expect_token(&Token::Comma)?;
7832                let increment = self.parse_number()?;
7833                self.expect_token(&Token::RParen)?;
7834
7835                Some(IdentityPropertyFormatKind::FunctionCall(
7836                    IdentityParameters { seed, increment },
7837                ))
7838            } else {
7839                None
7840            };
7841            Ok(Some(ColumnOption::Identity(
7842                IdentityPropertyKind::Identity(IdentityProperty {
7843                    parameters,
7844                    order: None,
7845                }),
7846            )))
7847        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
7848            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
7849        {
7850            // Support ON CONFLICT for SQLite
7851            Ok(Some(ColumnOption::OnConflict(
7852                self.expect_one_of_keywords(&[
7853                    Keyword::ROLLBACK,
7854                    Keyword::ABORT,
7855                    Keyword::FAIL,
7856                    Keyword::IGNORE,
7857                    Keyword::REPLACE,
7858                ])?,
7859            )))
7860        } else {
7861            Ok(None)
7862        }
7863    }
7864
7865    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
7866        let name = self.parse_identifier()?;
7867        self.expect_token(&Token::Eq)?;
7868        let value = self.parse_literal_string()?;
7869
7870        Ok(Tag::new(name, value))
7871    }
7872
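    /// Illustrative sketch (not part of the original source): a generated column
    /// whose `GENERATED ALWAYS AS (...) STORED` clause is handled by this helper;
    /// the expression is an arbitrary example.
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE t (a INT, b INT GENERATED ALWAYS AS (a * 2) STORED)";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```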
7873    fn parse_optional_column_option_generated(
7874        &mut self,
7875    ) -> Result<Option<ColumnOption>, ParserError> {
7876        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
7877            let mut sequence_options = vec![];
7878            if self.expect_token(&Token::LParen).is_ok() {
7879                sequence_options = self.parse_create_sequence_options()?;
7880                self.expect_token(&Token::RParen)?;
7881            }
7882            Ok(Some(ColumnOption::Generated {
7883                generated_as: GeneratedAs::Always,
7884                sequence_options: Some(sequence_options),
7885                generation_expr: None,
7886                generation_expr_mode: None,
7887                generated_keyword: true,
7888            }))
7889        } else if self.parse_keywords(&[
7890            Keyword::BY,
7891            Keyword::DEFAULT,
7892            Keyword::AS,
7893            Keyword::IDENTITY,
7894        ]) {
7895            let mut sequence_options = vec![];
7896            if self.expect_token(&Token::LParen).is_ok() {
7897                sequence_options = self.parse_create_sequence_options()?;
7898                self.expect_token(&Token::RParen)?;
7899            }
7900            Ok(Some(ColumnOption::Generated {
7901                generated_as: GeneratedAs::ByDefault,
7902                sequence_options: Some(sequence_options),
7903                generation_expr: None,
7904                generation_expr_mode: None,
7905                generated_keyword: true,
7906            }))
7907        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
7908            if self.expect_token(&Token::LParen).is_ok() {
7909                let expr = self.parse_expr()?;
7910                self.expect_token(&Token::RParen)?;
7911                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7912                    Ok((
7913                        GeneratedAs::ExpStored,
7914                        Some(GeneratedExpressionMode::Stored),
7915                    ))
7916                } else if dialect_of!(self is PostgreSqlDialect) {
7917                    // Postgres' `AS IDENTITY` branches are handled above; this generated-expression form requires STORED
7918                    self.expected("STORED", self.peek_token())
7919                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7920                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
7921                } else {
7922                    Ok((GeneratedAs::Always, None))
7923                }?;
7924
7925                Ok(Some(ColumnOption::Generated {
7926                    generated_as: gen_as,
7927                    sequence_options: None,
7928                    generation_expr: Some(expr),
7929                    generation_expr_mode: expr_mode,
7930                    generated_keyword: true,
7931                }))
7932            } else {
7933                Ok(None)
7934            }
7935        } else {
7936            Ok(None)
7937        }
7938    }
7939
7940    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7941        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
7942        self.expect_token(&Token::LParen)?;
7943        let expr = self.parse_expr()?;
7944        self.expect_token(&Token::RParen)?;
7945
7946        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7947            (
7948                GeneratedAs::ExpStored,
7949                Some(GeneratedExpressionMode::Stored),
7950            )
7951        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7952            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
7953        } else {
7954            (GeneratedAs::Always, None)
7955        };
7956
7957        Ok(Some(ColumnOption::Generated {
7958            generated_as: gen_as,
7959            sequence_options: None,
7960            generation_expr: Some(expr),
7961            generation_expr_mode: expr_mode,
7962            generated_keyword: false,
7963        }))
7964    }
7965
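    /// Illustrative sketch (not part of the original source): a Hive-style
    /// `CLUSTERED BY ... SORTED BY ... INTO n BUCKETS` clause; the column names
    /// and bucket count are arbitrary examples.
    ///
    /// ```
    /// # use sqlparser::dialect::HiveDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE t (a INT, b INT) CLUSTERED BY (a) SORTED BY (b ASC) INTO 4 BUCKETS STORED AS ORC";
    /// let statements = Parser::parse_sql(&HiveDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```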
7966    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
7967        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
7968            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
7969        {
7970            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7971
7972            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
7973                self.expect_token(&Token::LParen)?;
7974                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
7975                self.expect_token(&Token::RParen)?;
7976                Some(sorted_by_columns)
7977            } else {
7978                None
7979            };
7980
7981            self.expect_keyword_is(Keyword::INTO)?;
7982            let num_buckets = self.parse_number_value()?.value;
7983            self.expect_keyword_is(Keyword::BUCKETS)?;
7984            Some(ClusteredBy {
7985                columns,
7986                sorted_by,
7987                num_buckets,
7988            })
7989        } else {
7990            None
7991        };
7992        Ok(clustered_by)
7993    }
7994
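    /// A rough end-to-end sketch (illustrative; the table and column names are
    /// arbitrary examples) of a referential action parsed by this helper:
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE orders (customer_id INT REFERENCES customers(id) ON DELETE CASCADE)";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```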
7995    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
7996        if self.parse_keyword(Keyword::RESTRICT) {
7997            Ok(ReferentialAction::Restrict)
7998        } else if self.parse_keyword(Keyword::CASCADE) {
7999            Ok(ReferentialAction::Cascade)
8000        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8001            Ok(ReferentialAction::SetNull)
8002        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8003            Ok(ReferentialAction::NoAction)
8004        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8005            Ok(ReferentialAction::SetDefault)
8006        } else {
8007            self.expected(
8008                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8009                self.peek_token(),
8010            )
8011        }
8012    }
8013
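    /// Illustrative sketch (not part of the original source): constraint
    /// characteristics such as `DEFERRABLE INITIALLY DEFERRED` attached to a
    /// column constraint; the SQL string is an arbitrary example.
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE t (a INT UNIQUE DEFERRABLE INITIALLY DEFERRED)";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```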
8014    pub fn parse_constraint_characteristics(
8015        &mut self,
8016    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
8017        let mut cc = ConstraintCharacteristics::default();
8018
8019        loop {
8020            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
8021            {
8022                cc.deferrable = Some(false);
8023            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
8024                cc.deferrable = Some(true);
8025            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
8026                if self.parse_keyword(Keyword::DEFERRED) {
8027                    cc.initially = Some(DeferrableInitial::Deferred);
8028                } else if self.parse_keyword(Keyword::IMMEDIATE) {
8029                    cc.initially = Some(DeferrableInitial::Immediate);
8030                } else {
8031                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
8032                }
8033            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
8034                cc.enforced = Some(true);
8035            } else if cc.enforced.is_none()
8036                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
8037            {
8038                cc.enforced = Some(false);
8039            } else {
8040                break;
8041            }
8042        }
8043
8044        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
8045            Ok(Some(cc))
8046        } else {
8047            Ok(None)
8048        }
8049    }
8050
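    /// A rough end-to-end sketch (illustrative; the SQL string and
    /// `GenericDialect` are arbitrary examples) of table-level constraints
    /// handled by this function:
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE t (a INT, b INT, PRIMARY KEY (a), FOREIGN KEY (b) REFERENCES other(b))";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```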
8051    pub fn parse_optional_table_constraint(
8052        &mut self,
8053    ) -> Result<Option<TableConstraint>, ParserError> {
8054        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
8055            Some(self.parse_identifier()?)
8056        } else {
8057            None
8058        };
8059
8060        let next_token = self.next_token();
8061        match next_token.token {
8062            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
8063                let index_type_display = self.parse_index_type_display();
8064                if !dialect_of!(self is GenericDialect | MySqlDialect)
8065                    && index_type_display.is_some()
8066                {
8067                    return self
8068                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
8069                }
8070
8071                let nulls_distinct = self.parse_optional_nulls_distinct()?;
8072
8073                // optional index name
8074                let index_name = self.parse_optional_ident()?;
8075                let index_type = self.parse_optional_using_then_index_type()?;
8076
8077                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8078                let index_options = self.parse_index_options()?;
8079                let characteristics = self.parse_constraint_characteristics()?;
8080                Ok(Some(TableConstraint::Unique {
8081                    name,
8082                    index_name,
8083                    index_type_display,
8084                    index_type,
8085                    columns,
8086                    index_options,
8087                    characteristics,
8088                    nulls_distinct,
8089                }))
8090            }
8091            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
8092                // `KEY` must always follow `PRIMARY`
8093                self.expect_keyword_is(Keyword::KEY)?;
8094
8095                // optional index name
8096                let index_name = self.parse_optional_ident()?;
8097                let index_type = self.parse_optional_using_then_index_type()?;
8098
8099                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8100                let index_options = self.parse_index_options()?;
8101                let characteristics = self.parse_constraint_characteristics()?;
8102                Ok(Some(TableConstraint::PrimaryKey {
8103                    name,
8104                    index_name,
8105                    index_type,
8106                    columns,
8107                    index_options,
8108                    characteristics,
8109                }))
8110            }
8111            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
8112                self.expect_keyword_is(Keyword::KEY)?;
8113                let index_name = self.parse_optional_ident()?;
8114                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8115                self.expect_keyword_is(Keyword::REFERENCES)?;
8116                let foreign_table = self.parse_object_name(false)?;
8117                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8118                let mut on_delete = None;
8119                let mut on_update = None;
8120                loop {
8121                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
8122                        on_delete = Some(self.parse_referential_action()?);
8123                    } else if on_update.is_none()
8124                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8125                    {
8126                        on_update = Some(self.parse_referential_action()?);
8127                    } else {
8128                        break;
8129                    }
8130                }
8131
8132                let characteristics = self.parse_constraint_characteristics()?;
8133
8134                Ok(Some(TableConstraint::ForeignKey {
8135                    name,
8136                    index_name,
8137                    columns,
8138                    foreign_table,
8139                    referred_columns,
8140                    on_delete,
8141                    on_update,
8142                    characteristics,
8143                }))
8144            }
8145            Token::Word(w) if w.keyword == Keyword::CHECK => {
8146                self.expect_token(&Token::LParen)?;
8147                let expr = Box::new(self.parse_expr()?);
8148                self.expect_token(&Token::RParen)?;
8149
8150                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
8151                    Some(true)
8152                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
8153                    Some(false)
8154                } else {
8155                    None
8156                };
8157
8158                Ok(Some(TableConstraint::Check {
8159                    name,
8160                    expr,
8161                    enforced,
8162                }))
8163            }
8164            Token::Word(w)
8165                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
8166                    && dialect_of!(self is GenericDialect | MySqlDialect)
8167                    && name.is_none() =>
8168            {
8169                let display_as_key = w.keyword == Keyword::KEY;
8170
8171                let name = match self.peek_token().token {
8172                    Token::Word(word) if word.keyword == Keyword::USING => None,
8173                    _ => self.parse_optional_ident()?,
8174                };
8175
8176                let index_type = self.parse_optional_using_then_index_type()?;
8177                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8178
8179                Ok(Some(TableConstraint::Index {
8180                    display_as_key,
8181                    name,
8182                    index_type,
8183                    columns,
8184                }))
8185            }
8186            Token::Word(w)
8187                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
8188                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
8189            {
8190                if let Some(name) = name {
8191                    return self.expected(
8192                        "FULLTEXT or SPATIAL option without constraint name",
8193                        TokenWithSpan {
8194                            token: Token::make_keyword(&name.to_string()),
8195                            span: next_token.span,
8196                        },
8197                    );
8198                }
8199
8200                let fulltext = w.keyword == Keyword::FULLTEXT;
8201
8202                let index_type_display = self.parse_index_type_display();
8203
8204                let opt_index_name = self.parse_optional_ident()?;
8205
8206                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8207
8208                Ok(Some(TableConstraint::FulltextOrSpatial {
8209                    fulltext,
8210                    index_type_display,
8211                    opt_index_name,
8212                    columns,
8213                }))
8214            }
8215            _ => {
8216                if name.is_some() {
8217                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
8218                } else {
8219                    self.prev_token();
8220                    Ok(None)
8221                }
8222            }
8223        }
8224    }
8225
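    /// Parse an optional `NULLS [NOT] DISTINCT` clause, as in a PostgreSQL-style
    /// unique constraint such as (illustrative):
    /// ```sql
    /// UNIQUE NULLS NOT DISTINCT (email)
    /// ```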
8226    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
8227        Ok(if self.parse_keyword(Keyword::NULLS) {
8228            let not = self.parse_keyword(Keyword::NOT);
8229            self.expect_keyword_is(Keyword::DISTINCT)?;
8230            if not {
8231                NullsDistinctOption::NotDistinct
8232            } else {
8233                NullsDistinctOption::Distinct
8234            }
8235        } else {
8236            NullsDistinctOption::None
8237        })
8238    }
8239
8240    pub fn maybe_parse_options(
8241        &mut self,
8242        keyword: Keyword,
8243    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
8244        if let Token::Word(word) = self.peek_token().token {
8245            if word.keyword == keyword {
8246                return Ok(Some(self.parse_options(keyword)?));
8247            }
8248        };
8249        Ok(None)
8250    }
8251
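    /// Parse a parenthesized, comma-separated list of [SqlOption]s introduced by
    /// `keyword`, returning an empty list if the keyword is not present.
    ///
    /// Illustrative example for `keyword = Keyword::WITH` (names and values are made up):
    /// ```sql
    /// WITH (fillfactor = 70, autovacuum_enabled = false)
    /// ```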
8252    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
8253        if self.parse_keyword(keyword) {
8254            self.expect_token(&Token::LParen)?;
8255            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
8256            self.expect_token(&Token::RParen)?;
8257            Ok(options)
8258        } else {
8259            Ok(vec![])
8260        }
8261    }
8262
8263    pub fn parse_options_with_keywords(
8264        &mut self,
8265        keywords: &[Keyword],
8266    ) -> Result<Vec<SqlOption>, ParserError> {
8267        if self.parse_keywords(keywords) {
8268            self.expect_token(&Token::LParen)?;
8269            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8270            self.expect_token(&Token::RParen)?;
8271            Ok(options)
8272        } else {
8273            Ok(vec![])
8274        }
8275    }
8276
8277    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
8278        Ok(if self.parse_keyword(Keyword::BTREE) {
8279            IndexType::BTree
8280        } else if self.parse_keyword(Keyword::HASH) {
8281            IndexType::Hash
8282        } else if self.parse_keyword(Keyword::GIN) {
8283            IndexType::GIN
8284        } else if self.parse_keyword(Keyword::GIST) {
8285            IndexType::GiST
8286        } else if self.parse_keyword(Keyword::SPGIST) {
8287            IndexType::SPGiST
8288        } else if self.parse_keyword(Keyword::BRIN) {
8289            IndexType::BRIN
8290        } else if self.parse_keyword(Keyword::BLOOM) {
8291            IndexType::Bloom
8292        } else {
8293            IndexType::Custom(self.parse_identifier()?)
8294        })
8295    }
8296
8297    /// Optionally parse the `USING` keyword, followed by an [IndexType]
8298    /// Example:
8299    /// ```sql
8300    /// USING BTREE (name, age DESC)
8301    /// ```
8302    pub fn parse_optional_using_then_index_type(
8303        &mut self,
8304    ) -> Result<Option<IndexType>, ParserError> {
8305        if self.parse_keyword(Keyword::USING) {
8306            Ok(Some(self.parse_index_type()?))
8307        } else {
8308            Ok(None)
8309        }
8310    }
8311
8312    /// Parse an optional `ident`, typically a name such as
8313    /// `window_name` or `index_name`.
8314    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
8315        self.maybe_parse(|parser| parser.parse_identifier())
8316    }
8317
8318    #[must_use]
8319    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
8320        if self.parse_keyword(Keyword::KEY) {
8321            KeyOrIndexDisplay::Key
8322        } else if self.parse_keyword(Keyword::INDEX) {
8323            KeyOrIndexDisplay::Index
8324        } else {
8325            KeyOrIndexDisplay::None
8326        }
8327    }
8328
8329    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
8330        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
8331            Ok(Some(IndexOption::Using(index_type)))
8332        } else if self.parse_keyword(Keyword::COMMENT) {
8333            let s = self.parse_literal_string()?;
8334            Ok(Some(IndexOption::Comment(s)))
8335        } else {
8336            Ok(None)
8337        }
8338    }
8339
8340    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
8341        let mut options = Vec::new();
8342
8343        loop {
8344            match self.parse_optional_index_option()? {
8345                Some(index_option) => options.push(index_option),
8346                None => return Ok(options),
8347            }
8348        }
8349    }
8350
8351    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
8352        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
8353
8354        match self.peek_token().token {
8355            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
8356                Ok(SqlOption::Ident(self.parse_identifier()?))
8357            }
8358            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
8359                self.parse_option_partition()
8360            }
8361            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
8362                self.parse_option_clustered()
8363            }
8364            _ => {
8365                let name = self.parse_identifier()?;
8366                self.expect_token(&Token::Eq)?;
8367                let value = self.parse_expr()?;
8368
8369                Ok(SqlOption::KeyValue { key: name, value })
8370            }
8371        }
8372    }
8373
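    /// Parse an MS SQL Server `CLUSTERED` table option.
    ///
    /// Illustrative forms accepted here (column names are made up):
    /// ```sql
    /// CLUSTERED COLUMNSTORE INDEX
    /// CLUSTERED COLUMNSTORE INDEX ORDER (shipped_date)
    /// CLUSTERED INDEX (order_id ASC, shipped_date DESC)
    /// ```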
8374    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
8375        if self.parse_keywords(&[
8376            Keyword::CLUSTERED,
8377            Keyword::COLUMNSTORE,
8378            Keyword::INDEX,
8379            Keyword::ORDER,
8380        ]) {
8381            Ok(SqlOption::Clustered(
8382                TableOptionsClustered::ColumnstoreIndexOrder(
8383                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
8384                ),
8385            ))
8386        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
8387            Ok(SqlOption::Clustered(
8388                TableOptionsClustered::ColumnstoreIndex,
8389            ))
8390        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
8391            self.expect_token(&Token::LParen)?;
8392
8393            let columns = self.parse_comma_separated(|p| {
8394                let name = p.parse_identifier()?;
8395                let asc = p.parse_asc_desc();
8396
8397                Ok(ClusteredIndex { name, asc })
8398            })?;
8399
8400            self.expect_token(&Token::RParen)?;
8401
8402            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
8403        } else {
8404            Err(ParserError::ParserError(
8405                "invalid CLUSTERED sequence".to_string(),
8406            ))
8407        }
8408    }
8409
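    /// Parse an MS SQL Server `PARTITION` table option.
    ///
    /// Illustrative example (column and boundary values are made up):
    /// ```sql
    /// PARTITION (order_date RANGE RIGHT FOR VALUES ('2024-01-01', '2024-07-01'))
    /// ```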
8410    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
8411        self.expect_keyword_is(Keyword::PARTITION)?;
8412        self.expect_token(&Token::LParen)?;
8413        let column_name = self.parse_identifier()?;
8414
8415        self.expect_keyword_is(Keyword::RANGE)?;
8416        let range_direction = if self.parse_keyword(Keyword::LEFT) {
8417            Some(PartitionRangeDirection::Left)
8418        } else if self.parse_keyword(Keyword::RIGHT) {
8419            Some(PartitionRangeDirection::Right)
8420        } else {
8421            None
8422        };
8423
8424        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8425        self.expect_token(&Token::LParen)?;
8426
8427        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
8428
8429        self.expect_token(&Token::RParen)?;
8430        self.expect_token(&Token::RParen)?;
8431
8432        Ok(SqlOption::Partition {
8433            column_name,
8434            range_direction,
8435            for_values,
8436        })
8437    }
8438
8439    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
8440        self.expect_token(&Token::LParen)?;
8441        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8442        self.expect_token(&Token::RParen)?;
8443        Ok(Partition::Partitions(partitions))
8444    }
8445
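    /// Parse the parenthesized `SELECT` body of a projection, i.e.
    /// `( SELECT <projection> [GROUP BY ...] [ORDER BY ...] )`, as used for
    /// ClickHouse-style `ADD PROJECTION` in `ALTER TABLE`.
    ///
    /// Illustrative example (column names are made up):
    /// ```sql
    /// (SELECT user_id, total ORDER BY total)
    /// ```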
8446    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
8447        self.expect_token(&Token::LParen)?;
8448        self.expect_keyword_is(Keyword::SELECT)?;
8449        let projection = self.parse_projection()?;
8450        let group_by = self.parse_optional_group_by()?;
8451        let order_by = self.parse_optional_order_by()?;
8452        self.expect_token(&Token::RParen)?;
8453        Ok(ProjectionSelect {
8454            projection,
8455            group_by,
8456            order_by,
8457        })
8458    }

8459    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
8460        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8461        let name = self.parse_identifier()?;
8462        let query = self.parse_projection_select()?;
8463        Ok(AlterTableOperation::AddProjection {
8464            if_not_exists,
8465            name,
8466            select: query,
8467        })
8468    }
8469
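    /// Parse a single operation of an `ALTER TABLE` statement (several operations
    /// may be given as a comma-separated list).
    ///
    /// A few illustrative operations handled here (the `IF NOT EXISTS` form is
    /// dialect-dependent):
    /// ```sql
    /// ADD COLUMN IF NOT EXISTS created_at TIMESTAMP
    /// RENAME TO new_table_name
    /// DROP CONSTRAINT IF EXISTS chk_price
    /// ```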
8470    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
8471        let operation = if self.parse_keyword(Keyword::ADD) {
8472            if let Some(constraint) = self.parse_optional_table_constraint()? {
8473                AlterTableOperation::AddConstraint(constraint)
8474            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8475                && self.parse_keyword(Keyword::PROJECTION)
8476            {
8477                return self.parse_alter_table_add_projection();
8478            } else {
8479                let if_not_exists =
8480                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8481                let mut new_partitions = vec![];
8482                loop {
8483                    if self.parse_keyword(Keyword::PARTITION) {
8484                        new_partitions.push(self.parse_partition()?);
8485                    } else {
8486                        break;
8487                    }
8488                }
8489                if !new_partitions.is_empty() {
8490                    AlterTableOperation::AddPartitions {
8491                        if_not_exists,
8492                        new_partitions,
8493                    }
8494                } else {
8495                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
8496
8497                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
8498                    {
8499                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
8500                            || if_not_exists
8501                    } else {
8502                        false
8503                    };
8504
8505                    let column_def = self.parse_column_def()?;
8506
8507                    let column_position = self.parse_column_position()?;
8508
8509                    AlterTableOperation::AddColumn {
8510                        column_keyword,
8511                        if_not_exists,
8512                        column_def,
8513                        column_position,
8514                    }
8515                }
8516            }
8517        } else if self.parse_keyword(Keyword::RENAME) {
8518            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
8519                let old_name = self.parse_identifier()?;
8520                self.expect_keyword_is(Keyword::TO)?;
8521                let new_name = self.parse_identifier()?;
8522                AlterTableOperation::RenameConstraint { old_name, new_name }
8523            } else if self.parse_keyword(Keyword::TO) {
8524                let table_name = self.parse_object_name(false)?;
8525                AlterTableOperation::RenameTable { table_name }
8526            } else {
8527                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8528                let old_column_name = self.parse_identifier()?;
8529                self.expect_keyword_is(Keyword::TO)?;
8530                let new_column_name = self.parse_identifier()?;
8531                AlterTableOperation::RenameColumn {
8532                    old_column_name,
8533                    new_column_name,
8534                }
8535            }
8536        } else if self.parse_keyword(Keyword::DISABLE) {
8537            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
8538                AlterTableOperation::DisableRowLevelSecurity {}
8539            } else if self.parse_keyword(Keyword::RULE) {
8540                let name = self.parse_identifier()?;
8541                AlterTableOperation::DisableRule { name }
8542            } else if self.parse_keyword(Keyword::TRIGGER) {
8543                let name = self.parse_identifier()?;
8544                AlterTableOperation::DisableTrigger { name }
8545            } else {
8546                return self.expected(
8547                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
8548                    self.peek_token(),
8549                );
8550            }
8551        } else if self.parse_keyword(Keyword::ENABLE) {
8552            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
8553                let name = self.parse_identifier()?;
8554                AlterTableOperation::EnableAlwaysRule { name }
8555            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
8556                let name = self.parse_identifier()?;
8557                AlterTableOperation::EnableAlwaysTrigger { name }
8558            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
8559                AlterTableOperation::EnableRowLevelSecurity {}
8560            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
8561                let name = self.parse_identifier()?;
8562                AlterTableOperation::EnableReplicaRule { name }
8563            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
8564                let name = self.parse_identifier()?;
8565                AlterTableOperation::EnableReplicaTrigger { name }
8566            } else if self.parse_keyword(Keyword::RULE) {
8567                let name = self.parse_identifier()?;
8568                AlterTableOperation::EnableRule { name }
8569            } else if self.parse_keyword(Keyword::TRIGGER) {
8570                let name = self.parse_identifier()?;
8571                AlterTableOperation::EnableTrigger { name }
8572            } else {
8573                return self.expected(
8574                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
8575                    self.peek_token(),
8576                );
8577            }
8578        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
8579            && dialect_of!(self is ClickHouseDialect|GenericDialect)
8580        {
8581            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8582            let name = self.parse_identifier()?;
8583            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
8584                Some(self.parse_identifier()?)
8585            } else {
8586                None
8587            };
8588            AlterTableOperation::ClearProjection {
8589                if_exists,
8590                name,
8591                partition,
8592            }
8593        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
8594            && dialect_of!(self is ClickHouseDialect|GenericDialect)
8595        {
8596            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8597            let name = self.parse_identifier()?;
8598            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
8599                Some(self.parse_identifier()?)
8600            } else {
8601                None
8602            };
8603            AlterTableOperation::MaterializeProjection {
8604                if_exists,
8605                name,
8606                partition,
8607            }
8608        } else if self.parse_keyword(Keyword::DROP) {
8609            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
8610                self.expect_token(&Token::LParen)?;
8611                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8612                self.expect_token(&Token::RParen)?;
8613                AlterTableOperation::DropPartitions {
8614                    partitions,
8615                    if_exists: true,
8616                }
8617            } else if self.parse_keyword(Keyword::PARTITION) {
8618                self.expect_token(&Token::LParen)?;
8619                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8620                self.expect_token(&Token::RParen)?;
8621                AlterTableOperation::DropPartitions {
8622                    partitions,
8623                    if_exists: false,
8624                }
8625            } else if self.parse_keyword(Keyword::CONSTRAINT) {
8626                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8627                let name = self.parse_identifier()?;
8628                let drop_behavior = self.parse_optional_drop_behavior();
8629                AlterTableOperation::DropConstraint {
8630                    if_exists,
8631                    name,
8632                    drop_behavior,
8633                }
8634            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8635                AlterTableOperation::DropPrimaryKey
8636            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
8637                let name = self.parse_identifier()?;
8638                AlterTableOperation::DropForeignKey { name }
8639            } else if self.parse_keyword(Keyword::INDEX) {
8640                let name = self.parse_identifier()?;
8641                AlterTableOperation::DropIndex { name }
8642            } else if self.parse_keyword(Keyword::PROJECTION)
8643                && dialect_of!(self is ClickHouseDialect|GenericDialect)
8644            {
8645                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8646                let name = self.parse_identifier()?;
8647                AlterTableOperation::DropProjection { if_exists, name }
8648            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
8649                AlterTableOperation::DropClusteringKey
8650            } else {
8651                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8652                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8653                let column_name = self.parse_identifier()?;
8654                let drop_behavior = self.parse_optional_drop_behavior();
8655                AlterTableOperation::DropColumn {
8656                    has_column_keyword,
8657                    column_name,
8658                    if_exists,
8659                    drop_behavior,
8660                }
8661            }
8662        } else if self.parse_keyword(Keyword::PARTITION) {
8663            self.expect_token(&Token::LParen)?;
8664            let before = self.parse_comma_separated(Parser::parse_expr)?;
8665            self.expect_token(&Token::RParen)?;
8666            self.expect_keyword_is(Keyword::RENAME)?;
8667            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
8668            self.expect_token(&Token::LParen)?;
8669            let renames = self.parse_comma_separated(Parser::parse_expr)?;
8670            self.expect_token(&Token::RParen)?;
8671            AlterTableOperation::RenamePartitions {
8672                old_partitions: before,
8673                new_partitions: renames,
8674            }
8675        } else if self.parse_keyword(Keyword::CHANGE) {
8676            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8677            let old_name = self.parse_identifier()?;
8678            let new_name = self.parse_identifier()?;
8679            let data_type = self.parse_data_type()?;
8680            let mut options = vec![];
8681            while let Some(option) = self.parse_optional_column_option()? {
8682                options.push(option);
8683            }
8684
8685            let column_position = self.parse_column_position()?;
8686
8687            AlterTableOperation::ChangeColumn {
8688                old_name,
8689                new_name,
8690                data_type,
8691                options,
8692                column_position,
8693            }
8694        } else if self.parse_keyword(Keyword::MODIFY) {
8695            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8696            let col_name = self.parse_identifier()?;
8697            let data_type = self.parse_data_type()?;
8698            let mut options = vec![];
8699            while let Some(option) = self.parse_optional_column_option()? {
8700                options.push(option);
8701            }
8702
8703            let column_position = self.parse_column_position()?;
8704
8705            AlterTableOperation::ModifyColumn {
8706                col_name,
8707                data_type,
8708                options,
8709                column_position,
8710            }
8711        } else if self.parse_keyword(Keyword::ALTER) {
8712            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8713            let column_name = self.parse_identifier()?;
8714            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
8715
8716            let op: AlterColumnOperation = if self.parse_keywords(&[
8717                Keyword::SET,
8718                Keyword::NOT,
8719                Keyword::NULL,
8720            ]) {
8721                AlterColumnOperation::SetNotNull {}
8722            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
8723                AlterColumnOperation::DropNotNull {}
8724            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8725                AlterColumnOperation::SetDefault {
8726                    value: self.parse_expr()?,
8727                }
8728            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
8729                AlterColumnOperation::DropDefault {}
8730            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
8731                || (is_postgresql && self.parse_keyword(Keyword::TYPE))
8732            {
8733                let data_type = self.parse_data_type()?;
8734                let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
8735                    Some(self.parse_expr()?)
8736                } else {
8737                    None
8738                };
8739                AlterColumnOperation::SetDataType { data_type, using }
8740            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
8741                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
8742                    Some(GeneratedAs::Always)
8743                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
8744                    Some(GeneratedAs::ByDefault)
8745                } else {
8746                    None
8747                };
8748
8749                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
8750
8751                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
8752
8753                if self.peek_token().token == Token::LParen {
8754                    self.expect_token(&Token::LParen)?;
8755                    sequence_options = Some(self.parse_create_sequence_options()?);
8756                    self.expect_token(&Token::RParen)?;
8757                }
8758
8759                AlterColumnOperation::AddGenerated {
8760                    generated_as,
8761                    sequence_options,
8762                }
8763            } else {
8764                let message = if is_postgresql {
8765                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
8766                } else {
8767                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
8768                };
8769
8770                return self.expected(message, self.peek_token());
8771            };
8772            AlterTableOperation::AlterColumn { column_name, op }
8773        } else if self.parse_keyword(Keyword::SWAP) {
8774            self.expect_keyword_is(Keyword::WITH)?;
8775            let table_name = self.parse_object_name(false)?;
8776            AlterTableOperation::SwapWith { table_name }
8777        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
8778            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
8779        {
8780            let new_owner = self.parse_owner()?;
8781            AlterTableOperation::OwnerTo { new_owner }
8782        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8783            && self.parse_keyword(Keyword::ATTACH)
8784        {
8785            AlterTableOperation::AttachPartition {
8786                partition: self.parse_part_or_partition()?,
8787            }
8788        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8789            && self.parse_keyword(Keyword::DETACH)
8790        {
8791            AlterTableOperation::DetachPartition {
8792                partition: self.parse_part_or_partition()?,
8793            }
8794        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8795            && self.parse_keyword(Keyword::FREEZE)
8796        {
8797            let partition = self.parse_part_or_partition()?;
8798            let with_name = if self.parse_keyword(Keyword::WITH) {
8799                self.expect_keyword_is(Keyword::NAME)?;
8800                Some(self.parse_identifier()?)
8801            } else {
8802                None
8803            };
8804            AlterTableOperation::FreezePartition {
8805                partition,
8806                with_name,
8807            }
8808        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8809            && self.parse_keyword(Keyword::UNFREEZE)
8810        {
8811            let partition = self.parse_part_or_partition()?;
8812            let with_name = if self.parse_keyword(Keyword::WITH) {
8813                self.expect_keyword_is(Keyword::NAME)?;
8814                Some(self.parse_identifier()?)
8815            } else {
8816                None
8817            };
8818            AlterTableOperation::UnfreezePartition {
8819                partition,
8820                with_name,
8821            }
8822        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8823            self.expect_token(&Token::LParen)?;
8824            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
8825            self.expect_token(&Token::RParen)?;
8826            AlterTableOperation::ClusterBy { exprs }
8827        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
8828            AlterTableOperation::SuspendRecluster
8829        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
8830            AlterTableOperation::ResumeRecluster
8831        } else if self.parse_keyword(Keyword::LOCK) {
8832            let equals = self.consume_token(&Token::Eq);
8833            let lock = match self.parse_one_of_keywords(&[
8834                Keyword::DEFAULT,
8835                Keyword::EXCLUSIVE,
8836                Keyword::NONE,
8837                Keyword::SHARED,
8838            ]) {
8839                Some(Keyword::DEFAULT) => AlterTableLock::Default,
8840                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
8841                Some(Keyword::NONE) => AlterTableLock::None,
8842                Some(Keyword::SHARED) => AlterTableLock::Shared,
8843                _ => self.expected(
8844                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
8845                    self.peek_token(),
8846                )?,
8847            };
8848            AlterTableOperation::Lock { equals, lock }
8849        } else if self.parse_keyword(Keyword::ALGORITHM) {
8850            let equals = self.consume_token(&Token::Eq);
8851            let algorithm = match self.parse_one_of_keywords(&[
8852                Keyword::DEFAULT,
8853                Keyword::INSTANT,
8854                Keyword::INPLACE,
8855                Keyword::COPY,
8856            ]) {
8857                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
8858                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
8859                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
8860                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
8861                _ => self.expected(
8862                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
8863                    self.peek_token(),
8864                )?,
8865            };
8866            AlterTableOperation::Algorithm { equals, algorithm }
8867        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8868            let equals = self.consume_token(&Token::Eq);
8869            let value = self.parse_number_value()?;
8870            AlterTableOperation::AutoIncrement { equals, value }
8871        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
8872            let identity = if self.parse_keyword(Keyword::NONE) {
8873                ReplicaIdentity::None
8874            } else if self.parse_keyword(Keyword::FULL) {
8875                ReplicaIdentity::Full
8876            } else if self.parse_keyword(Keyword::DEFAULT) {
8877                ReplicaIdentity::Default
8878            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
8879                ReplicaIdentity::Index(self.parse_identifier()?)
8880            } else {
8881                return self.expected(
8882                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
8883                    self.peek_token(),
8884                );
8885            };
8886
8887            AlterTableOperation::ReplicaIdentity { identity }
8888        } else {
8889            let options: Vec<SqlOption> =
8890                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
8891            if !options.is_empty() {
8892                AlterTableOperation::SetTblProperties {
8893                    table_properties: options,
8894                }
8895            } else {
8896                return self.expected(
8897                    "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, or SET TBLPROPERTIES after ALTER TABLE",
8898                    self.peek_token(),
8899                );
8900            }
8901        };
8902        Ok(operation)
8903    }
8904
8905    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
8906        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
8907        match keyword {
8908            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
8909            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
8910            // unreachable because `expect_one_of_keywords` is used above
8911            _ => unreachable!(),
8912        }
8913    }
8914
8915    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
8916        let object_type = self.expect_one_of_keywords(&[
8917            Keyword::VIEW,
8918            Keyword::TYPE,
8919            Keyword::TABLE,
8920            Keyword::INDEX,
8921            Keyword::ROLE,
8922            Keyword::POLICY,
8923            Keyword::CONNECTOR,
8924            Keyword::ICEBERG,
8925        ])?;
8926        match object_type {
8927            Keyword::VIEW => self.parse_alter_view(),
8928            Keyword::TYPE => self.parse_alter_type(),
8929            Keyword::TABLE => self.parse_alter_table(false),
8930            Keyword::ICEBERG => {
8931                self.expect_keyword(Keyword::TABLE)?;
8932                self.parse_alter_table(true)
8933            }
8934            Keyword::INDEX => {
8935                let index_name = self.parse_object_name(false)?;
8936                let operation = if self.parse_keyword(Keyword::RENAME) {
8937                    if self.parse_keyword(Keyword::TO) {
8938                        let index_name = self.parse_object_name(false)?;
8939                        AlterIndexOperation::RenameIndex { index_name }
8940                    } else {
8941                        return self.expected("TO after RENAME", self.peek_token());
8942                    }
8943                } else {
8944                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
8945                };
8946
8947                Ok(Statement::AlterIndex {
8948                    name: index_name,
8949                    operation,
8950                })
8951            }
8952            Keyword::ROLE => self.parse_alter_role(),
8953            Keyword::POLICY => self.parse_alter_policy(),
8954            Keyword::CONNECTOR => self.parse_alter_connector(),
8955            // unreachable because `expect_one_of_keywords` is used above
8956            _ => unreachable!(),
8957        }
8958    }
8959
8960    /// Parse a [Statement::AlterTable]
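    ///
    /// Called once the leading `ALTER TABLE` (or `ALTER ICEBERG TABLE`) keywords have
    /// been consumed. Illustrative statement covered by this path (names are made up):
    /// ```sql
    /// ALTER TABLE IF EXISTS orders ADD COLUMN note TEXT, DROP COLUMN legacy_flag
    /// ```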
8961    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
8962        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8963        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
8964        let table_name = self.parse_object_name(false)?;
8965        let on_cluster = self.parse_optional_on_cluster()?;
8966        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
8967
8968        let mut location = None;
8969        if self.parse_keyword(Keyword::LOCATION) {
8970            location = Some(HiveSetLocation {
8971                has_set: false,
8972                location: self.parse_identifier()?,
8973            });
8974        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
8975            location = Some(HiveSetLocation {
8976                has_set: true,
8977                location: self.parse_identifier()?,
8978            });
8979        }
8980
8981        Ok(Statement::AlterTable {
8982            name: table_name,
8983            if_exists,
8984            only,
8985            operations,
8986            location,
8987            on_cluster,
8988            iceberg,
8989        })
8990    }
8991
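    /// Parse a [Statement::AlterView]; called once `ALTER VIEW` has been consumed.
    ///
    /// Illustrative statement (view and column names are made up):
    /// ```sql
    /// ALTER VIEW active_users AS SELECT id, name FROM users WHERE active
    /// ```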
8992    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
8993        let name = self.parse_object_name(false)?;
8994        let columns = self.parse_parenthesized_column_list(Optional, false)?;
8995
8996        let with_options = self.parse_options(Keyword::WITH)?;
8997
8998        self.expect_keyword_is(Keyword::AS)?;
8999        let query = self.parse_query()?;
9000
9001        Ok(Statement::AlterView {
9002            name,
9003            columns,
9004            query,
9005            with_options,
9006        })
9007    }
9008
9009    /// Parse a [Statement::AlterType]
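    ///
    /// Called once `ALTER TYPE` has been consumed. Illustrative statements covered
    /// by this path (type names and enum values are made up):
    /// ```sql
    /// ALTER TYPE mood RENAME TO feeling
    /// ALTER TYPE mood ADD VALUE IF NOT EXISTS 'meh' BEFORE 'ok'
    /// ALTER TYPE mood RENAME VALUE 'ok' TO 'fine'
    /// ```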
9010    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
9011        let name = self.parse_object_name(false)?;
9012
9013        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
9014            let new_name = self.parse_identifier()?;
9015            Ok(Statement::AlterType(AlterType {
9016                name,
9017                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
9018            }))
9019        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
9020            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9021            let new_enum_value = self.parse_identifier()?;
9022            let position = if self.parse_keyword(Keyword::BEFORE) {
9023                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
9024            } else if self.parse_keyword(Keyword::AFTER) {
9025                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
9026            } else {
9027                None
9028            };
9029
9030            Ok(Statement::AlterType(AlterType {
9031                name,
9032                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
9033                    if_not_exists,
9034                    value: new_enum_value,
9035                    position,
9036                }),
9037            }))
9038        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
9039            let existing_enum_value = self.parse_identifier()?;
9040            self.expect_keyword(Keyword::TO)?;
9041            let new_enum_value = self.parse_identifier()?;
9042
9043            Ok(Statement::AlterType(AlterType {
9044                name,
9045                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
9046                    from: existing_enum_value,
9047                    to: new_enum_value,
9048                }),
9049            }))
9050        } else {
9051            return self.expected_ref(
9052                "{RENAME TO | { RENAME | ADD } VALUE}",
9053                self.peek_token_ref(),
9054            );
9055        }
9056    }
9057
9058    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
9059    /// or `CALL procedure_name` statement
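    ///
    /// Illustrative examples (procedure name and arguments are made up):
    /// ```sql
    /// CALL refresh_reports(2024, 'Q1')
    /// CALL refresh_reports
    /// ```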
9060    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
9061        let object_name = self.parse_object_name(false)?;
9062        if self.peek_token().token == Token::LParen {
9063            match self.parse_function(object_name)? {
9064                Expr::Function(f) => Ok(Statement::Call(f)),
9065                other => parser_err!(
9066                    format!("Expected a simple procedure call but found: {other}"),
9067                    self.peek_token().span.start
9068                ),
9069            }
9070        } else {
9071            Ok(Statement::Call(Function {
9072                name: object_name,
9073                uses_odbc_syntax: false,
9074                parameters: FunctionArguments::None,
9075                args: FunctionArguments::None,
9076                over: None,
9077                filter: None,
9078                null_treatment: None,
9079                within_group: vec![],
9080            }))
9081        }
9082    }
9083
9084    /// Parse a `COPY` statement
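    ///
    /// Illustrative examples (table, column and file names are made up):
    /// ```sql
    /// COPY country (code, name) FROM STDIN
    /// COPY (SELECT code, name FROM country) TO 'countries.csv' WITH (FORMAT CSV, HEADER true)
    /// ```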
9085    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
9086        let source;
9087        if self.consume_token(&Token::LParen) {
9088            source = CopySource::Query(self.parse_query()?);
9089            self.expect_token(&Token::RParen)?;
9090        } else {
9091            let table_name = self.parse_object_name(false)?;
9092            let columns = self.parse_parenthesized_column_list(Optional, false)?;
9093            source = CopySource::Table {
9094                table_name,
9095                columns,
9096            };
9097        }
9098        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
9099            Some(Keyword::FROM) => false,
9100            Some(Keyword::TO) => true,
9101            _ => self.expected("FROM or TO", self.peek_token())?,
9102        };
9103        if !to {
9104            // Use a separate if statement to prevent Rust compiler from complaining about
9105            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
9106            if let CopySource::Query(_) = source {
9107                return Err(ParserError::ParserError(
9108                    "COPY ... FROM does not support query as a source".to_string(),
9109                ));
9110            }
9111        }
9112        let target = if self.parse_keyword(Keyword::STDIN) {
9113            CopyTarget::Stdin
9114        } else if self.parse_keyword(Keyword::STDOUT) {
9115            CopyTarget::Stdout
9116        } else if self.parse_keyword(Keyword::PROGRAM) {
9117            CopyTarget::Program {
9118                command: self.parse_literal_string()?,
9119            }
9120        } else {
9121            CopyTarget::File {
9122                filename: self.parse_literal_string()?,
9123            }
9124        };
9125        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
9126        let mut options = vec![];
9127        if self.consume_token(&Token::LParen) {
9128            options = self.parse_comma_separated(Parser::parse_copy_option)?;
9129            self.expect_token(&Token::RParen)?;
9130        }
9131        let mut legacy_options = vec![];
9132        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
9133            legacy_options.push(opt);
9134        }
9135        let values = if let CopyTarget::Stdin = target {
9136            self.expect_token(&Token::SemiColon)?;
9137            self.parse_tsv()
9138        } else {
9139            vec![]
9140        };
9141        Ok(Statement::Copy {
9142            source,
9143            to,
9144            target,
9145            options,
9146            legacy_options,
9147            values,
9148        })
9149    }
9150
9151    /// Parse [Statement::Open]
9152    fn parse_open(&mut self) -> Result<Statement, ParserError> {
9153        self.expect_keyword(Keyword::OPEN)?;
9154        Ok(Statement::Open(OpenStatement {
9155            cursor_name: self.parse_identifier()?,
9156        }))
9157    }
9158
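    /// Parse a `CLOSE` statement; the `CLOSE` keyword itself has already been
    /// consumed by the caller. Illustrative statements:
    /// ```sql
    /// CLOSE my_cursor
    /// CLOSE ALL
    /// ```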
9159    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
9160        let cursor = if self.parse_keyword(Keyword::ALL) {
9161            CloseCursor::All
9162        } else {
9163            let name = self.parse_identifier()?;
9164
9165            CloseCursor::Specific { name }
9166        };
9167
9168        Ok(Statement::Close { cursor })
9169    }
9170
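    /// Parse a single option from the parenthesized option list of a `COPY`
    /// statement (as opposed to the legacy, unparenthesized options).
    ///
    /// Illustrative options (values are made up):
    /// ```sql
    /// FORMAT CSV
    /// DELIMITER ','
    /// FORCE_NULL (code, name)
    /// ```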
9171    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
9172        let ret = match self.parse_one_of_keywords(&[
9173            Keyword::FORMAT,
9174            Keyword::FREEZE,
9175            Keyword::DELIMITER,
9176            Keyword::NULL,
9177            Keyword::HEADER,
9178            Keyword::QUOTE,
9179            Keyword::ESCAPE,
9180            Keyword::FORCE_QUOTE,
9181            Keyword::FORCE_NOT_NULL,
9182            Keyword::FORCE_NULL,
9183            Keyword::ENCODING,
9184        ]) {
9185            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
9186            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
9187                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9188                Some(Keyword::FALSE)
9189            )),
9190            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
9191            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
9192            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
9193                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9194                Some(Keyword::FALSE)
9195            )),
9196            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
9197            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
9198            Some(Keyword::FORCE_QUOTE) => {
9199                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
9200            }
9201            Some(Keyword::FORCE_NOT_NULL) => {
9202                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9203            }
9204            Some(Keyword::FORCE_NULL) => {
9205                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9206            }
9207            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
9208            _ => self.expected("option", self.peek_token())?,
9209        };
9210        Ok(ret)
9211    }
9212
9213    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
9214        let ret = match self.parse_one_of_keywords(&[
9215            Keyword::BINARY,
9216            Keyword::DELIMITER,
9217            Keyword::NULL,
9218            Keyword::CSV,
9219        ]) {
9220            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
9221            Some(Keyword::DELIMITER) => {
9222                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9223                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
9224            }
9225            Some(Keyword::NULL) => {
9226                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9227                CopyLegacyOption::Null(self.parse_literal_string()?)
9228            }
9229            Some(Keyword::CSV) => CopyLegacyOption::Csv({
9230                let mut opts = vec![];
9231                while let Some(opt) =
9232                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
9233                {
9234                    opts.push(opt);
9235                }
9236                opts
9237            }),
9238            _ => self.expected("option", self.peek_token())?,
9239        };
9240        Ok(ret)
9241    }
9242
9243    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
9244        let ret = match self.parse_one_of_keywords(&[
9245            Keyword::HEADER,
9246            Keyword::QUOTE,
9247            Keyword::ESCAPE,
9248            Keyword::FORCE,
9249        ]) {
9250            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
9251            Some(Keyword::QUOTE) => {
9252                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9253                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
9254            }
9255            Some(Keyword::ESCAPE) => {
9256                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9257                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
9258            }
9259            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
9260                CopyLegacyCsvOption::ForceNotNull(
9261                    self.parse_comma_separated(|p| p.parse_identifier())?,
9262                )
9263            }
9264            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
9265                CopyLegacyCsvOption::ForceQuote(
9266                    self.parse_comma_separated(|p| p.parse_identifier())?,
9267                )
9268            }
9269            _ => self.expected("csv option", self.peek_token())?,
9270        };
9271        Ok(ret)
9272    }
9273
9274    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
9275        let s = self.parse_literal_string()?;
9276        if s.len() != 1 {
9277            let loc = self
9278                .tokens
9279                .get(self.index - 1)
9280                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
9281            return parser_err!(format!("Expect a char, found {s:?}"), loc);
9282        }
9283        Ok(s.chars().next().unwrap())
9284    }
9285
9286    /// Parse tab-separated values in a
9287    /// COPY payload.
9288    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
9289        self.parse_tab_value()
9290    }
9291
9292    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
9293        let mut values = vec![];
9294        let mut content = String::from("");
9295        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
9296            match t {
9297                Token::Whitespace(Whitespace::Tab) => {
9298                    values.push(Some(content.to_string()));
9299                    content.clear();
9300                }
9301                Token::Whitespace(Whitespace::Newline) => {
9302                    values.push(Some(content.to_string()));
9303                    content.clear();
9304                }
9305                Token::Backslash => {
9306                    if self.consume_token(&Token::Period) {
9307                        return values;
9308                    }
9309                    if let Token::Word(w) = self.next_token().token {
9310                        if w.value == "N" {
9311                            values.push(None);
9312                        }
9313                    }
9314                }
9315                _ => {
9316                    content.push_str(&t.to_string());
9317                }
9318            }
9319        }
9320        values
9321    }
9322
9323    /// Parse a literal value (numbers, strings, date/time, booleans)
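    ///
    /// Illustrative literals accepted here (boolean literals depend on
    /// `Dialect::supports_boolean_literals`):
    /// ```sql
    /// 42
    /// 'hello'
    /// TRUE
    /// NULL
    /// ```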
9324    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
9325        let next_token = self.next_token();
9326        let span = next_token.span;
9327        let ok_value = |value: Value| Ok(value.with_span(span));
9328        match next_token.token {
9329            Token::Word(w) => match w.keyword {
9330                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
9331                    ok_value(Value::Boolean(true))
9332                }
9333                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
9334                    ok_value(Value::Boolean(false))
9335                }
9336                Keyword::NULL => ok_value(Value::Null),
9337                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
9338                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
9339                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
9340                    _ => self.expected(
9341                        "A value?",
9342                        TokenWithSpan {
9343                            token: Token::Word(w),
9344                            span,
9345                        },
9346                    )?,
9347                },
9348                _ => self.expected(
9349                    "a concrete value",
9350                    TokenWithSpan {
9351                        token: Token::Word(w),
9352                        span,
9353                    },
9354                ),
9355            },
9356            // The call to n.parse() returns a bigdecimal when the
9357            // bigdecimal feature is enabled, and is otherwise a no-op
9358            // (i.e., it returns the input string).
9359            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
9360            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(s.to_string())),
9361            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(s.to_string())),
9362            Token::TripleSingleQuotedString(ref s) => {
9363                ok_value(Value::TripleSingleQuotedString(s.to_string()))
9364            }
9365            Token::TripleDoubleQuotedString(ref s) => {
9366                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
9367            }
9368            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
9369            Token::SingleQuotedByteStringLiteral(ref s) => {
9370                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
9371            }
9372            Token::DoubleQuotedByteStringLiteral(ref s) => {
9373                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
9374            }
9375            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
9376                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
9377            }
9378            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
9379                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
9380            }
9381            Token::SingleQuotedRawStringLiteral(ref s) => {
9382                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
9383            }
9384            Token::DoubleQuotedRawStringLiteral(ref s) => {
9385                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
9386            }
9387            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
9388                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
9389            }
9390            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
9391                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
9392            }
9393            Token::NationalStringLiteral(ref s) => {
9394                ok_value(Value::NationalStringLiteral(s.to_string()))
9395            }
9396            Token::EscapedStringLiteral(ref s) => {
9397                ok_value(Value::EscapedStringLiteral(s.to_string()))
9398            }
9399            Token::UnicodeStringLiteral(ref s) => {
9400                ok_value(Value::UnicodeStringLiteral(s.to_string()))
9401            }
9402            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
9403            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
9404            tok @ Token::Colon | tok @ Token::AtSign => {
9405                // Not calling self.parse_identifier(false)? because only in placeholders do we
9406                // want to accept numbers as identifiers, since Snowflake allows numeric placeholders.
9407                let next_token = self.next_token();
9408                let ident = match next_token.token {
9409                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
9410                    Token::Number(w, false) => Ok(Ident::new(w)),
9411                    _ => self.expected("placeholder", next_token),
9412                }?;
9413                let placeholder = tok.to_string() + &ident.value;
9414                ok_value(Value::Placeholder(placeholder))
9415            }
9416            unexpected => self.expected(
9417                "a value",
9418                TokenWithSpan {
9419                    token: unexpected,
9420                    span,
9421                },
9422            ),
9423        }
9424    }
9425
9426    /// Parse an unsigned numeric literal
9427    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
9428        let value_wrapper = self.parse_value()?;
9429        match &value_wrapper.value {
9430            Value::Number(_, _) => Ok(value_wrapper),
9431            Value::Placeholder(_) => Ok(value_wrapper),
9432            _ => {
9433                self.prev_token();
9434                self.expected("literal number", self.peek_token())
9435            }
9436        }
9437    }
9438
9439    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
9440    /// otherwise returns an [`Expr::Value`].
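    ///
    /// A minimal usage sketch (an illustration assuming the `GenericDialect`, not part of the
    /// original documentation):
    ///
    /// ```rust
    /// use sqlparser::ast::{Expr, UnaryOperator};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("-10").unwrap();
    /// // A leading minus sign is parsed as a unary operator wrapping the numeric literal.
    /// let expr = parser.parse_number().unwrap();
    /// assert!(matches!(expr, Expr::UnaryOp { op: UnaryOperator::Minus, .. }));
    /// ```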
9441    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
9442        let next_token = self.next_token();
9443        match next_token.token {
9444            Token::Plus => Ok(Expr::UnaryOp {
9445                op: UnaryOperator::Plus,
9446                expr: Box::new(Expr::Value(self.parse_number_value()?)),
9447            }),
9448            Token::Minus => Ok(Expr::UnaryOp {
9449                op: UnaryOperator::Minus,
9450                expr: Box::new(Expr::Value(self.parse_number_value()?)),
9451            }),
9452            _ => {
9453                self.prev_token();
9454                Ok(Expr::Value(self.parse_number_value()?))
9455            }
9456        }
9457    }
9458
9459    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
9460        let next_token = self.next_token();
9461        let span = next_token.span;
9462        match next_token.token {
9463            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
9464                Value::SingleQuotedString(s.to_string()).with_span(span),
9465            )),
9466            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
9467                Value::DoubleQuotedString(s.to_string()).with_span(span),
9468            )),
9469            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
9470                Value::HexStringLiteral(s.to_string()).with_span(span),
9471            )),
9472            unexpected => self.expected(
9473                "a string value",
9474                TokenWithSpan {
9475                    token: unexpected,
9476                    span,
9477                },
9478            ),
9479        }
9480    }
9481
9482    /// Parse an unsigned literal integer/long
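    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// // The numeric token is parsed into a plain `u64`.
    /// assert_eq!(parser.parse_literal_uint().unwrap(), 42);
    /// ```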
9483    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
9484        let next_token = self.next_token();
9485        match next_token.token {
9486            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
9487            _ => self.expected("literal int", next_token),
9488        }
9489    }
9490
9491    /// Parse the body of a `CREATE FUNCTION` specified as a string.
9492    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
9493    fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
9494        let peek_token = self.peek_token();
9495        let span = peek_token.span;
9496        match peek_token.token {
9497            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
9498            {
9499                self.next_token();
9500                Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
9501            }
9502            _ => Ok(Expr::Value(
9503                Value::SingleQuotedString(self.parse_literal_string()?).with_span(span),
9504            )),
9505        }
9506    }
9507
9508    /// Parse a literal string
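    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // Quoted strings and unquoted non-keyword words are both accepted here.
    /// let mut parser = Parser::new(&dialect).try_with_sql("'abc'").unwrap();
    /// assert_eq!(parser.parse_literal_string().unwrap(), "abc");
    /// ```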
9509    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
9510        let next_token = self.next_token();
9511        match next_token.token {
9512            Token::Word(Word {
9513                value,
9514                keyword: Keyword::NoKeyword,
9515                ..
9516            }) => Ok(value),
9517            Token::SingleQuotedString(s) => Ok(s),
9518            Token::DoubleQuotedString(s) => Ok(s),
9519            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
9520                Ok(s)
9521            }
9522            Token::UnicodeStringLiteral(s) => Ok(s),
9523            _ => self.expected("literal string", next_token),
9524        }
9525    }
9526
9527    /// Parse a literal unicode normalization clause
9528    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
9529        let neg = self.parse_keyword(Keyword::NOT);
9530        let normalized_form = self.maybe_parse(|parser| {
9531            match parser.parse_one_of_keywords(&[
9532                Keyword::NFC,
9533                Keyword::NFD,
9534                Keyword::NFKC,
9535                Keyword::NFKD,
9536            ]) {
9537                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
9538                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
9539                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
9540                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
9541                _ => parser.expected("unicode normalization form", parser.peek_token()),
9542            }
9543        })?;
9544        if self.parse_keyword(Keyword::NORMALIZED) {
9545            return Ok(Expr::IsNormalized {
9546                expr: Box::new(expr),
9547                form: normalized_form,
9548                negated: neg,
9549            });
9550        }
9551        self.expected("unicode normalization form", self.peek_token())
9552    }
9553
9554    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
9555        self.expect_token(&Token::LParen)?;
9556        let values = self.parse_comma_separated(|parser| {
9557            let name = parser.parse_literal_string()?;
9558            let e = if parser.consume_token(&Token::Eq) {
9559                let value = parser.parse_number()?;
9560                EnumMember::NamedValue(name, value)
9561            } else {
9562                EnumMember::Name(name)
9563            };
9564            Ok(e)
9565        })?;
9566        self.expect_token(&Token::RParen)?;
9567
9568        Ok(values)
9569    }
9570
9571    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
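    ///
    /// A minimal usage sketch (assuming the `GenericDialect` and that the parsed
    /// [`DataType`]'s `Display` output round-trips the input; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("VARCHAR(255)").unwrap();
    /// let data_type = parser.parse_data_type().unwrap();
    /// assert_eq!(data_type.to_string(), "VARCHAR(255)");
    /// ```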
9572    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
9573        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
9574        if trailing_bracket.0 {
9575            return parser_err!(
9576                format!("unmatched > after parsing data type {ty}"),
9577                self.peek_token()
9578            );
9579        }
9580
9581        Ok(ty)
9582    }
9583
9584    fn parse_data_type_helper(
9585        &mut self,
9586    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
9587        let dialect = self.dialect;
9588        self.advance_token();
9589        let next_token = self.get_current_token();
9590        let next_token_index = self.get_current_index();
9591
9592        let mut trailing_bracket: MatchedTrailingBracket = false.into();
9593        let mut data = match &next_token.token {
9594            Token::Word(w) => match w.keyword {
9595                Keyword::BOOLEAN => Ok(DataType::Boolean),
9596                Keyword::BOOL => Ok(DataType::Bool),
9597                Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
9598                Keyword::REAL => Ok(DataType::Real),
9599                Keyword::FLOAT4 => Ok(DataType::Float4),
9600                Keyword::FLOAT32 => Ok(DataType::Float32),
9601                Keyword::FLOAT64 => Ok(DataType::Float64),
9602                Keyword::FLOAT8 => Ok(DataType::Float8),
9603                Keyword::DOUBLE => {
9604                    if self.parse_keyword(Keyword::PRECISION) {
9605                        Ok(DataType::DoublePrecision)
9606                    } else {
9607                        Ok(DataType::Double(
9608                            self.parse_exact_number_optional_precision_scale()?,
9609                        ))
9610                    }
9611                }
9612                Keyword::TINYINT => {
9613                    let optional_precision = self.parse_optional_precision();
9614                    if self.parse_keyword(Keyword::UNSIGNED) {
9615                        Ok(DataType::TinyIntUnsigned(optional_precision?))
9616                    } else {
9617                        Ok(DataType::TinyInt(optional_precision?))
9618                    }
9619                }
9620                Keyword::INT2 => {
9621                    let optional_precision = self.parse_optional_precision();
9622                    if self.parse_keyword(Keyword::UNSIGNED) {
9623                        Ok(DataType::Int2Unsigned(optional_precision?))
9624                    } else {
9625                        Ok(DataType::Int2(optional_precision?))
9626                    }
9627                }
9628                Keyword::SMALLINT => {
9629                    let optional_precision = self.parse_optional_precision();
9630                    if self.parse_keyword(Keyword::UNSIGNED) {
9631                        Ok(DataType::SmallIntUnsigned(optional_precision?))
9632                    } else {
9633                        Ok(DataType::SmallInt(optional_precision?))
9634                    }
9635                }
9636                Keyword::MEDIUMINT => {
9637                    let optional_precision = self.parse_optional_precision();
9638                    if self.parse_keyword(Keyword::UNSIGNED) {
9639                        Ok(DataType::MediumIntUnsigned(optional_precision?))
9640                    } else {
9641                        Ok(DataType::MediumInt(optional_precision?))
9642                    }
9643                }
9644                Keyword::INT => {
9645                    let optional_precision = self.parse_optional_precision();
9646                    if self.parse_keyword(Keyword::UNSIGNED) {
9647                        Ok(DataType::IntUnsigned(optional_precision?))
9648                    } else {
9649                        Ok(DataType::Int(optional_precision?))
9650                    }
9651                }
9652                Keyword::INT4 => {
9653                    let optional_precision = self.parse_optional_precision();
9654                    if self.parse_keyword(Keyword::UNSIGNED) {
9655                        Ok(DataType::Int4Unsigned(optional_precision?))
9656                    } else {
9657                        Ok(DataType::Int4(optional_precision?))
9658                    }
9659                }
9660                Keyword::INT8 => {
9661                    let optional_precision = self.parse_optional_precision();
9662                    if self.parse_keyword(Keyword::UNSIGNED) {
9663                        Ok(DataType::Int8Unsigned(optional_precision?))
9664                    } else {
9665                        Ok(DataType::Int8(optional_precision?))
9666                    }
9667                }
9668                Keyword::INT16 => Ok(DataType::Int16),
9669                Keyword::INT32 => Ok(DataType::Int32),
9670                Keyword::INT64 => Ok(DataType::Int64),
9671                Keyword::INT128 => Ok(DataType::Int128),
9672                Keyword::INT256 => Ok(DataType::Int256),
9673                Keyword::INTEGER => {
9674                    let optional_precision = self.parse_optional_precision();
9675                    if self.parse_keyword(Keyword::UNSIGNED) {
9676                        Ok(DataType::IntegerUnsigned(optional_precision?))
9677                    } else {
9678                        Ok(DataType::Integer(optional_precision?))
9679                    }
9680                }
9681                Keyword::BIGINT => {
9682                    let optional_precision = self.parse_optional_precision();
9683                    if self.parse_keyword(Keyword::UNSIGNED) {
9684                        Ok(DataType::BigIntUnsigned(optional_precision?))
9685                    } else {
9686                        Ok(DataType::BigInt(optional_precision?))
9687                    }
9688                }
9689                Keyword::HUGEINT => Ok(DataType::HugeInt),
9690                Keyword::UBIGINT => Ok(DataType::UBigInt),
9691                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
9692                Keyword::USMALLINT => Ok(DataType::USmallInt),
9693                Keyword::UTINYINT => Ok(DataType::UTinyInt),
9694                Keyword::UINT8 => Ok(DataType::UInt8),
9695                Keyword::UINT16 => Ok(DataType::UInt16),
9696                Keyword::UINT32 => Ok(DataType::UInt32),
9697                Keyword::UINT64 => Ok(DataType::UInt64),
9698                Keyword::UINT128 => Ok(DataType::UInt128),
9699                Keyword::UINT256 => Ok(DataType::UInt256),
9700                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
9701                Keyword::NVARCHAR => {
9702                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
9703                }
9704                Keyword::CHARACTER => {
9705                    if self.parse_keyword(Keyword::VARYING) {
9706                        Ok(DataType::CharacterVarying(
9707                            self.parse_optional_character_length()?,
9708                        ))
9709                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
9710                        Ok(DataType::CharacterLargeObject(
9711                            self.parse_optional_precision()?,
9712                        ))
9713                    } else {
9714                        Ok(DataType::Character(self.parse_optional_character_length()?))
9715                    }
9716                }
9717                Keyword::CHAR => {
9718                    if self.parse_keyword(Keyword::VARYING) {
9719                        Ok(DataType::CharVarying(
9720                            self.parse_optional_character_length()?,
9721                        ))
9722                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
9723                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
9724                    } else {
9725                        Ok(DataType::Char(self.parse_optional_character_length()?))
9726                    }
9727                }
9728                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
9729                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
9730                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
9731                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
9732                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
9733                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
9734                Keyword::LONGBLOB => Ok(DataType::LongBlob),
9735                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
9736                Keyword::BIT => {
9737                    if self.parse_keyword(Keyword::VARYING) {
9738                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
9739                    } else {
9740                        Ok(DataType::Bit(self.parse_optional_precision()?))
9741                    }
9742                }
9743                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
9744                Keyword::UUID => Ok(DataType::Uuid),
9745                Keyword::DATE => Ok(DataType::Date),
9746                Keyword::DATE32 => Ok(DataType::Date32),
9747                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
9748                Keyword::DATETIME64 => {
9749                    self.prev_token();
9750                    let (precision, time_zone) = self.parse_datetime_64()?;
9751                    Ok(DataType::Datetime64(precision, time_zone))
9752                }
9753                Keyword::TIMESTAMP => {
9754                    let precision = self.parse_optional_precision()?;
9755                    let tz = if self.parse_keyword(Keyword::WITH) {
9756                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9757                        TimezoneInfo::WithTimeZone
9758                    } else if self.parse_keyword(Keyword::WITHOUT) {
9759                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9760                        TimezoneInfo::WithoutTimeZone
9761                    } else {
9762                        TimezoneInfo::None
9763                    };
9764                    Ok(DataType::Timestamp(precision, tz))
9765                }
9766                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
9767                    self.parse_optional_precision()?,
9768                    TimezoneInfo::Tz,
9769                )),
9770                Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz),
9771                Keyword::TIME => {
9772                    let precision = self.parse_optional_precision()?;
9773                    let tz = if self.parse_keyword(Keyword::WITH) {
9774                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9775                        TimezoneInfo::WithTimeZone
9776                    } else if self.parse_keyword(Keyword::WITHOUT) {
9777                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9778                        TimezoneInfo::WithoutTimeZone
9779                    } else {
9780                        TimezoneInfo::None
9781                    };
9782                    Ok(DataType::Time(precision, tz))
9783                }
9784                Keyword::TIMETZ => Ok(DataType::Time(
9785                    self.parse_optional_precision()?,
9786                    TimezoneInfo::Tz,
9787                )),
9788                // Interval types can be followed by a complicated interval
9789                // qualifier that we don't currently support. See
9790                // parse_interval for a taste.
9791                Keyword::INTERVAL => Ok(DataType::Interval),
9792                Keyword::JSON => Ok(DataType::JSON),
9793                Keyword::JSONB => Ok(DataType::JSONB),
9794                Keyword::REGCLASS => Ok(DataType::Regclass),
9795                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
9796                Keyword::FIXEDSTRING => {
9797                    self.expect_token(&Token::LParen)?;
9798                    let character_length = self.parse_literal_uint()?;
9799                    self.expect_token(&Token::RParen)?;
9800                    Ok(DataType::FixedString(character_length))
9801                }
9802                Keyword::TEXT => Ok(DataType::Text),
9803                Keyword::TINYTEXT => Ok(DataType::TinyText),
9804                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
9805                Keyword::LONGTEXT => Ok(DataType::LongText),
9806                Keyword::BYTEA => Ok(DataType::Bytea),
9807                Keyword::NUMERIC => Ok(DataType::Numeric(
9808                    self.parse_exact_number_optional_precision_scale()?,
9809                )),
9810                Keyword::DECIMAL => Ok(DataType::Decimal(
9811                    self.parse_exact_number_optional_precision_scale()?,
9812                )),
9813                Keyword::DEC => Ok(DataType::Dec(
9814                    self.parse_exact_number_optional_precision_scale()?,
9815                )),
9816                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
9817                    self.parse_exact_number_optional_precision_scale()?,
9818                )),
9819                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
9820                    self.parse_exact_number_optional_precision_scale()?,
9821                )),
9822                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
9823                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
9824                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
9825                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
9826                Keyword::ARRAY => {
9827                    if dialect_of!(self is SnowflakeDialect) {
9828                        Ok(DataType::Array(ArrayElemTypeDef::None))
9829                    } else if dialect_of!(self is ClickHouseDialect) {
9830                        Ok(self.parse_sub_type(|internal_type| {
9831                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
9832                        })?)
9833                    } else {
9834                        self.expect_token(&Token::Lt)?;
9835                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
9836                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
9837                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
9838                            inside_type,
9839                        ))))
9840                    }
9841                }
9842                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
9843                    self.prev_token();
9844                    let field_defs = self.parse_duckdb_struct_type_def()?;
9845                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
9846                }
9847                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
9848                    self.prev_token();
9849                    let (field_defs, _trailing_bracket) =
9850                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
9851                    trailing_bracket = _trailing_bracket;
9852                    Ok(DataType::Struct(
9853                        field_defs,
9854                        StructBracketKind::AngleBrackets,
9855                    ))
9856                }
9857                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
9858                    self.prev_token();
9859                    let fields = self.parse_union_type_def()?;
9860                    Ok(DataType::Union(fields))
9861                }
9862                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9863                    Ok(self.parse_sub_type(DataType::Nullable)?)
9864                }
9865                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9866                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
9867                }
9868                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9869                    self.prev_token();
9870                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
9871                    Ok(DataType::Map(
9872                        Box::new(key_data_type),
9873                        Box::new(value_data_type),
9874                    ))
9875                }
9876                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9877                    self.expect_token(&Token::LParen)?;
9878                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
9879                    self.expect_token(&Token::RParen)?;
9880                    Ok(DataType::Nested(field_defs))
9881                }
9882                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9883                    self.prev_token();
9884                    let field_defs = self.parse_click_house_tuple_def()?;
9885                    Ok(DataType::Tuple(field_defs))
9886                }
9887                Keyword::TRIGGER => Ok(DataType::Trigger),
9888                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
9889                    let _ = self.parse_keyword(Keyword::TYPE);
9890                    Ok(DataType::AnyType)
9891                }
9892                Keyword::TABLE => {
9893                    // an LParen after the TABLE keyword indicates that table columns are being defined
9894                    // whereas no LParen indicates an anonymous table expression will be returned
9895                    if self.peek_token() == Token::LParen {
9896                        let columns = self.parse_returns_table_columns()?;
9897                        Ok(DataType::Table(Some(columns)))
9898                    } else {
9899                        Ok(DataType::Table(None))
9900                    }
9901                }
9902                Keyword::SIGNED => {
9903                    if self.parse_keyword(Keyword::INTEGER) {
9904                        Ok(DataType::SignedInteger)
9905                    } else {
9906                        Ok(DataType::Signed)
9907                    }
9908                }
9909                Keyword::UNSIGNED => {
9910                    if self.parse_keyword(Keyword::INTEGER) {
9911                        Ok(DataType::UnsignedInteger)
9912                    } else {
9913                        Ok(DataType::Unsigned)
9914                    }
9915                }
9916                _ => {
9917                    self.prev_token();
9918                    let type_name = self.parse_object_name(false)?;
9919                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
9920                        Ok(DataType::Custom(type_name, modifiers))
9921                    } else {
9922                        Ok(DataType::Custom(type_name, vec![]))
9923                    }
9924                }
9925            },
9926            _ => self.expected_at("a data type name", next_token_index),
9927        }?;
9928
9929        if self.dialect.supports_array_typedef_with_brackets() {
9930            while self.consume_token(&Token::LBracket) {
9931                // Parse optional array data type size
9932                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
9933                self.expect_token(&Token::RBracket)?;
9934                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
9935            }
9936        }
9937        Ok((data, trailing_bracket))
9938    }
9939
9940    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
9941        self.parse_column_def()
9942    }
9943
9944    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
9945        self.expect_token(&Token::LParen)?;
9946        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
9947        self.expect_token(&Token::RParen)?;
9948        Ok(columns)
9949    }
9950
9951    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
9952        self.expect_token(&Token::LParen)?;
9953        let mut values = Vec::new();
9954        loop {
9955            let next_token = self.next_token();
9956            match next_token.token {
9957                Token::SingleQuotedString(value) => values.push(value),
9958                _ => self.expected("a string", next_token)?,
9959            }
9960            let next_token = self.next_token();
9961            match next_token.token {
9962                Token::Comma => (),
9963                Token::RParen => break,
9964                _ => self.expected(", or )", next_token)?,
9965            }
9966        }
9967        Ok(values)
9968    }
9969
9970    /// Strictly parse `identifier AS identifier`
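    ///
    /// A minimal usage sketch (assuming the `GenericDialect` and public fields on
    /// [`IdentWithAlias`]; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo AS bar").unwrap();
    /// let aliased = parser.parse_identifier_with_alias().unwrap();
    /// assert_eq!(aliased.ident.value, "foo");
    /// assert_eq!(aliased.alias.value, "bar");
    /// ```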
9971    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
9972        let ident = self.parse_identifier()?;
9973        self.expect_keyword_is(Keyword::AS)?;
9974        let alias = self.parse_identifier()?;
9975        Ok(IdentWithAlias { ident, alias })
9976    }
9977
9978    /// Optionally parses an alias for a select list item
9979    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
9980        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9981            parser.dialect.is_select_item_alias(explicit, kw, parser)
9982        }
9983        self.parse_optional_alias_inner(None, validator)
9984    }
9985
9986    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
9987    /// In this case, the alias is allowed to optionally name the columns in the table, in
9988    /// addition to the table itself.
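    ///
    /// A minimal usage sketch (assuming the `GenericDialect` and public fields on
    /// [`TableAlias`]; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t (a, b)").unwrap();
    /// // The alias name and its optional column list are both captured.
    /// let alias = parser.maybe_parse_table_alias().unwrap().unwrap();
    /// assert_eq!(alias.name.value, "t");
    /// assert_eq!(alias.columns.len(), 2);
    /// ```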
9989    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
9990        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9991            parser.dialect.is_table_factor_alias(explicit, kw, parser)
9992        }
9993        match self.parse_optional_alias_inner(None, validator)? {
9994            Some(name) => {
9995                let columns = self.parse_table_alias_column_defs()?;
9996                Ok(Some(TableAlias { name, columns }))
9997            }
9998            None => Ok(None),
9999        }
10000    }
10001
10002    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
10003        let mut hints = vec![];
10004        while let Some(hint_type) =
10005            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
10006        {
10007            let hint_type = match hint_type {
10008                Keyword::USE => TableIndexHintType::Use,
10009                Keyword::IGNORE => TableIndexHintType::Ignore,
10010                Keyword::FORCE => TableIndexHintType::Force,
10011                _ => {
10012                    return self.expected(
10013                        "expected to match USE/IGNORE/FORCE keyword",
10014                        self.peek_token(),
10015                    )
10016                }
10017            };
10018            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
10019                Some(Keyword::INDEX) => TableIndexType::Index,
10020                Some(Keyword::KEY) => TableIndexType::Key,
10021                _ => {
10022                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
10023                }
10024            };
10025            let for_clause = if self.parse_keyword(Keyword::FOR) {
10026                let clause = if self.parse_keyword(Keyword::JOIN) {
10027                    TableIndexHintForClause::Join
10028                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10029                    TableIndexHintForClause::OrderBy
10030                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
10031                    TableIndexHintForClause::GroupBy
10032                } else {
10033                    return self.expected(
10034                        "expected to match JOIN/ORDER BY/GROUP BY table hint in FOR clause",
10035                        self.peek_token(),
10036                    );
10037                };
10038                Some(clause)
10039            } else {
10040                None
10041            };
10042
10043            self.expect_token(&Token::LParen)?;
10044            let index_names = if self.peek_token().token != Token::RParen {
10045                self.parse_comma_separated(Parser::parse_identifier)?
10046            } else {
10047                vec![]
10048            };
10049            self.expect_token(&Token::RParen)?;
10050            hints.push(TableIndexHints {
10051                hint_type,
10052                index_type,
10053                for_clause,
10054                index_names,
10055            });
10056        }
10057        Ok(hints)
10058    }
10059
10060    /// Wrapper for `parse_optional_alias_inner`, kept for backwards-compatibility;
10061    /// new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
10062    /// and `maybe_parse_table_alias`.
10063    pub fn parse_optional_alias(
10064        &mut self,
10065        reserved_kwds: &[Keyword],
10066    ) -> Result<Option<Ident>, ParserError> {
10067        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
10068            false
10069        }
10070        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
10071    }
10072
10073    /// Parses an optional alias after a SQL element such as a select list item
10074    /// or a table name.
10075    ///
10076    /// This method accepts an optional list of reserved keywords or a function
10077    /// to call to validate if a keyword should be parsed as an alias, to allow
10078    /// callers to customize the parsing logic based on their context.
10079    fn parse_optional_alias_inner<F>(
10080        &mut self,
10081        reserved_kwds: Option<&[Keyword]>,
10082        validator: F,
10083    ) -> Result<Option<Ident>, ParserError>
10084    where
10085        F: Fn(bool, &Keyword, &mut Parser) -> bool,
10086    {
10087        let after_as = self.parse_keyword(Keyword::AS);
10088
10089        let next_token = self.next_token();
10090        match next_token.token {
10091            // By default, if a word is located after the `AS` keyword we consider it an alias
10092            // as long as it's not reserved.
10093            Token::Word(w)
10094                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
10095            {
10096                Ok(Some(w.into_ident(next_token.span)))
10097            }
10098            // This pattern allows for customizing the acceptance of words as aliases based on the caller's
10099            // context, such as to what SQL element this word is a potential alias of (select item alias, table name
10100            // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords.
10101            Token::Word(w) if validator(after_as, &w.keyword, self) => {
10102                Ok(Some(w.into_ident(next_token.span)))
10103            }
10104            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
10105            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
10106            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
10107            _ => {
10108                if after_as {
10109                    return self.expected("an identifier after AS", next_token);
10110                }
10111                self.prev_token();
10112                Ok(None) // no alias found
10113            }
10114        }
10115    }
10116
10117    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
10118        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
10119            let expressions = if self.parse_keyword(Keyword::ALL) {
10120                None
10121            } else {
10122                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
10123            };
10124
10125            let mut modifiers = vec![];
10126            if self.dialect.supports_group_by_with_modifier() {
10127                loop {
10128                    if !self.parse_keyword(Keyword::WITH) {
10129                        break;
10130                    }
10131                    let keyword = self.expect_one_of_keywords(&[
10132                        Keyword::ROLLUP,
10133                        Keyword::CUBE,
10134                        Keyword::TOTALS,
10135                    ])?;
10136                    modifiers.push(match keyword {
10137                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
10138                        Keyword::CUBE => GroupByWithModifier::Cube,
10139                        Keyword::TOTALS => GroupByWithModifier::Totals,
10140                        _ => {
10141                            return parser_err!(
10142                                "BUG: expected to match GroupBy modifier keyword",
10143                                self.peek_token().span.start
10144                            )
10145                        }
10146                    });
10147                }
10148            }
10149            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
10150                self.expect_token(&Token::LParen)?;
10151                let result = self.parse_comma_separated(|p| {
10152                    if p.peek_token_ref().token == Token::LParen {
10153                        p.parse_tuple(true, true)
10154                    } else {
10155                        Ok(vec![p.parse_expr()?])
10156                    }
10157                })?;
10158                self.expect_token(&Token::RParen)?;
10159                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
10160                    result,
10161                )));
10162            };
10163            let group_by = match expressions {
10164                None => GroupByExpr::All(modifiers),
10165                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
10166            };
10167            Ok(Some(group_by))
10168        } else {
10169            Ok(None)
10170        }
10171    }
10172
10173    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
10174        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10175            let order_by =
10176                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
10177                    let order_by_options = self.parse_order_by_options()?;
10178                    OrderBy {
10179                        kind: OrderByKind::All(order_by_options),
10180                        interpolate: None,
10181                    }
10182                } else {
10183                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
10184                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10185                        self.parse_interpolations()?
10186                    } else {
10187                        None
10188                    };
10189                    OrderBy {
10190                        kind: OrderByKind::Expressions(exprs),
10191                        interpolate,
10192                    }
10193                };
10194            Ok(Some(order_by))
10195        } else {
10196            Ok(None)
10197        }
10198    }
10199
10200    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
10201        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
10202            Some(self.parse_offset()?)
10203        } else {
10204            None
10205        };
10206
10207        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
10208            let expr = self.parse_limit()?;
10209
10210            if self.dialect.supports_limit_comma()
10211                && offset.is_none()
10212                && expr.is_some() // ALL not supported with comma
10213                && self.consume_token(&Token::Comma)
10214            {
10215                let offset = expr.ok_or_else(|| {
10216                    ParserError::ParserError(
10217                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
10218                    )
10219                })?;
10220                return Ok(Some(LimitClause::OffsetCommaLimit {
10221                    offset,
10222                    limit: self.parse_expr()?,
10223                }));
10224            }
10225
10226            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
10227                && self.parse_keyword(Keyword::BY)
10228            {
10229                Some(self.parse_comma_separated(Parser::parse_expr)?)
10230            } else {
10231                None
10232            };
10233
10234            (Some(expr), limit_by)
10235        } else {
10236            (None, None)
10237        };
10238
10239        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
10240            offset = Some(self.parse_offset()?);
10241        }
10242
10243        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
10244            Ok(Some(LimitClause::LimitOffset {
10245                limit: limit.unwrap_or_default(),
10246                offset,
10247                limit_by: limit_by.unwrap_or_default(),
10248            }))
10249        } else {
10250            Ok(None)
10251        }
10252    }
10253
10254    /// Parse a table object for insertion
10255    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
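    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("some_database.some_table").unwrap();
    /// let table = parser.parse_table_object().unwrap();
    /// // Without the FUNCTION keyword this parses as a plain table name.
    /// assert!(matches!(table, TableObject::TableName(_)));
    /// ```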
10256    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
10257        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
10258            let fn_name = self.parse_object_name(false)?;
10259            self.parse_function_call(fn_name)
10260                .map(TableObject::TableFunction)
10261        } else {
10262            self.parse_object_name(false).map(TableObject::TableName)
10263        }
10264    }
10265
10266    /// Parse a possibly qualified, possibly quoted identifier, optionally allowing for wildcards,
10267    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
10268    fn parse_object_name_with_wildcards(
10269        &mut self,
10270        in_table_clause: bool,
10271        allow_wildcards: bool,
10272    ) -> Result<ObjectName, ParserError> {
10273        let mut idents = vec![];
10274
10275        if dialect_of!(self is BigQueryDialect) && in_table_clause {
10276            loop {
10277                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
10278                idents.push(ident);
10279                if !self.consume_token(&Token::Period) && !end_with_period {
10280                    break;
10281                }
10282            }
10283        } else {
10284            loop {
10285                let ident = if allow_wildcards && self.peek_token().token == Token::Mul {
10286                    let span = self.next_token().span;
10287                    Ident {
10288                        value: Token::Mul.to_string(),
10289                        quote_style: None,
10290                        span,
10291                    }
10292                } else {
10293                    if self.dialect.supports_object_name_double_dot_notation()
10294                        && idents.len() == 1
10295                        && self.consume_token(&Token::Period)
10296                    {
10297                        // Empty string here means default schema
10298                        idents.push(Ident::new(""));
10299                    }
10300                    self.parse_identifier()?
10301                };
10302                idents.push(ident);
10303                if !self.consume_token(&Token::Period) {
10304                    break;
10305                }
10306            }
10307        }
10308        Ok(ObjectName::from(idents))
10309    }
10310
10311    /// Parse a possibly qualified, possibly quoted identifier, e.g.
10312    /// `foo` or `myschema."table"`
10313    ///
10314    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
10315    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
10316    /// in this context on BigQuery.
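    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"myschema."table""#).unwrap();
    /// // Quoted parts keep their quote style when the name is displayed again.
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.to_string(), r#"myschema."table""#);
    /// ```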
10317    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
10318        let ObjectName(mut idents) =
10319            self.parse_object_name_with_wildcards(in_table_clause, false)?;
10320
10321        // BigQuery accepts any number of quoted identifiers of a table name.
10322        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
10323        if dialect_of!(self is BigQueryDialect)
10324            && idents.iter().any(|part| {
10325                part.as_ident()
10326                    .is_some_and(|ident| ident.value.contains('.'))
10327            })
10328        {
10329            idents = idents
10330                .into_iter()
10331                .flat_map(|part| match part.as_ident() {
10332                    Some(ident) => ident
10333                        .value
10334                        .split('.')
10335                        .map(|value| {
10336                            ObjectNamePart::Identifier(Ident {
10337                                value: value.into(),
10338                                quote_style: ident.quote_style,
10339                                span: ident.span,
10340                            })
10341                        })
10342                        .collect::<Vec<_>>(),
10343                    None => vec![part],
10344                })
10345                .collect()
10346        }
10347
10348        Ok(ObjectName(idents))
10349    }
10350
10351    /// Parse identifiers
10352    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
10353        let mut idents = vec![];
10354        loop {
10355            match &self.peek_token_ref().token {
10356                Token::Word(w) => {
10357                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
10358                }
10359                Token::EOF | Token::Eq => break,
10360                _ => {}
10361            }
10362            self.advance_token();
10363        }
10364        Ok(idents)
10365    }
10366
10367    /// Parse identifiers of form ident1[.identN]*
10368    ///
10369    /// Similar in functionality to [parse_identifiers], with the difference
10370    /// that this function is much stricter about parsing a valid multipart identifier: it does
10371    /// not allow extraneous tokens and fails if it encounters any.
10372    ///
10373    /// For example:
10374    ///
10375    /// ```rust
10376    /// use sqlparser::ast::Ident;
10377    /// use sqlparser::dialect::GenericDialect;
10378    /// use sqlparser::parser::Parser;
10379    ///
10380    /// let dialect = GenericDialect {};
10381    /// let expected = vec![Ident::new("one"), Ident::new("two")];
10382    ///
10383    /// // expected usage
10384    /// let sql = "one.two";
10385    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
10386    /// let actual = parser.parse_multipart_identifier().unwrap();
10387    /// assert_eq!(&actual, &expected);
10388    ///
10389    /// // parse_identifiers is looser about what it allows and parses this successfully
10390    /// let sql = "one + two";
10391    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
10392    /// let actual = parser.parse_identifiers().unwrap();
10393    /// assert_eq!(&actual, &expected);
10394    ///
10395    /// // expected to strictly fail due to + separator
10396    /// let sql = "one + two";
10397    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
10398    /// let actual = parser.parse_multipart_identifier().unwrap_err();
10399    /// assert_eq!(
10400    ///     actual.to_string(),
10401    ///     "sql parser error: Unexpected token in identifier: +"
10402    /// );
10403    /// ```
10404    ///
10405    /// [parse_identifiers]: Parser::parse_identifiers
10406    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
10407        let mut idents = vec![];
10408
10409        // expecting at least one word for identifier
10410        let next_token = self.next_token();
10411        match next_token.token {
10412            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10413            Token::EOF => {
10414                return Err(ParserError::ParserError(
10415                    "Empty input when parsing identifier".to_string(),
10416                ))?
10417            }
10418            token => {
10419                return Err(ParserError::ParserError(format!(
10420                    "Unexpected token in identifier: {token}"
10421                )))?
10422            }
10423        };
10424
10425        // parse optional next parts if exist
10426        loop {
10427            match self.next_token().token {
10428                // ensure that optional period is succeeded by another identifier
10429                Token::Period => {
10430                    let next_token = self.next_token();
10431                    match next_token.token {
10432                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10433                        Token::EOF => {
10434                            return Err(ParserError::ParserError(
10435                                "Trailing period in identifier".to_string(),
10436                            ))?
10437                        }
10438                        token => {
10439                            return Err(ParserError::ParserError(format!(
10440                                "Unexpected token following period in identifier: {token}"
10441                            )))?
10442                        }
10443                    }
10444                }
10445                Token::EOF => break,
10446                token => {
10447                    return Err(ParserError::ParserError(format!(
10448                        "Unexpected token in identifier: {token}"
10449                    )))?
10450                }
10451            }
10452        }
10453
10454        Ok(idents)
10455    }
10456
10457    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
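    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; not from the original docs):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#""My Table""#).unwrap();
    /// // A delimited identifier keeps its unescaped value and records the quote character.
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident.value, "My Table");
    /// assert_eq!(ident.quote_style, Some('"'));
    /// ```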
10458    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
10459        let next_token = self.next_token();
10460        match next_token.token {
10461            Token::Word(w) => Ok(w.into_ident(next_token.span)),
10462            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
10463            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
10464            _ => self.expected("identifier", next_token),
10465        }
10466    }
10467
10468    /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
10469    /// TABLE clause.
10470    ///
10471    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
10472    /// with a digit. Subsequent segments must be either valid identifiers or
10473    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
10474    ///
10475    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
10476    ///
10477    /// Return a tuple of the identifier and a boolean indicating whether it ends with a period.
10478    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
10479        match self.peek_token().token {
10480            Token::Word(w) => {
10481                let quote_style_is_none = w.quote_style.is_none();
10482                let mut requires_whitespace = false;
10483                let mut ident = w.into_ident(self.next_token().span);
10484                if quote_style_is_none {
10485                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
10486                        self.next_token();
10487                        ident.value.push('-');
10488
10489                        let token = self
10490                            .next_token_no_skip()
10491                            .cloned()
10492                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
10493                        requires_whitespace = match token.token {
10494                            Token::Word(next_word) if next_word.quote_style.is_none() => {
10495                                ident.value.push_str(&next_word.value);
10496                                false
10497                            }
10498                            Token::Number(s, false) => {
10499                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
10500                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
10501                                //
10502                                // If a number token is followed by a period, it is part of an [ObjectName].
10503                                // Return the identifier with `true` if the number token is followed by a period, indicating that
10504                                // parsing should continue for the next part of the hyphenated identifier.
10505                                if s.ends_with('.') {
10506                                    let Some(s) = s.split('.').next().filter(|s| {
10507                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
10508                                    }) else {
10509                                        return self.expected(
10510                                            "continuation of hyphenated identifier",
10511                                            TokenWithSpan::new(Token::Number(s, false), token.span),
10512                                        );
10513                                    };
10514                                    ident.value.push_str(s);
10515                                    return Ok((ident, true));
10516                                } else {
10517                                    ident.value.push_str(&s);
10518                                }
10519                                // If the next token is a period, then it is part of an ObjectName
10520                                // and we don't expect whitespace after the number.
10521                                !matches!(self.peek_token().token, Token::Period)
10522                            }
10523                            _ => {
10524                                return self
10525                                    .expected("continuation of hyphenated identifier", token);
10526                            }
10527                        }
10528                    }
10529
10530                    // If the last segment was a number, we must check that it's followed by whitespace,
10531                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
10532                    if requires_whitespace {
10533                        let token = self.next_token();
10534                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
10535                            return self
10536                                .expected("whitespace following hyphenated identifier", token);
10537                        }
10538                    }
10539                }
10540                Ok((ident, false))
10541            }
10542            _ => Ok((self.parse_identifier()?, false)),
10543        }
10544    }
10545
10546    /// Parses a parenthesized, comma-separated list of column definitions within a view.
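    ///
    /// For example (illustrative), the `(a, b)` list in:
    /// ```sql
    /// CREATE VIEW v (a, b) AS SELECT 1, 2
    /// ```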
10547    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
10548        if self.consume_token(&Token::LParen) {
10549            if self.peek_token().token == Token::RParen {
10550                self.next_token();
10551                Ok(vec![])
10552            } else {
10553                let cols = self.parse_comma_separated_with_trailing_commas(
10554                    Parser::parse_view_column,
10555                    self.dialect.supports_column_definition_trailing_commas(),
10556                    Self::is_reserved_for_column_alias,
10557                )?;
10558                self.expect_token(&Token::RParen)?;
10559                Ok(cols)
10560            }
10561        } else {
10562            Ok(vec![])
10563        }
10564    }
10565
10566    /// Parses a column definition within a view.
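    ///
    /// For example (illustrative), the `a OPTIONS(description='an id')` definition in a
    /// BigQuery statement such as:
    /// ```sql
    /// CREATE VIEW v (a OPTIONS(description='an id')) AS SELECT 1 AS a
    /// ```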
10567    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
10568        let name = self.parse_identifier()?;
10569        let options = if (dialect_of!(self is BigQueryDialect | GenericDialect)
10570            && self.parse_keyword(Keyword::OPTIONS))
10571            || (dialect_of!(self is SnowflakeDialect | GenericDialect)
10572                && self.parse_keyword(Keyword::COMMENT))
10573        {
10574            self.prev_token();
10575            self.parse_optional_column_option()?
10576                .map(|option| vec![option])
10577        } else {
10578            None
10579        };
10580        let data_type = if dialect_of!(self is ClickHouseDialect) {
10581            Some(self.parse_data_type()?)
10582        } else {
10583            None
10584        };
10585        Ok(ViewColumnDef {
10586            name,
10587            data_type,
10588            options,
10589        })
10590    }
10591
10592    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
10593    /// For example: `(col1, "col 2", ...)`
10594    pub fn parse_parenthesized_column_list(
10595        &mut self,
10596        optional: IsOptional,
10597        allow_empty: bool,
10598    ) -> Result<Vec<Ident>, ParserError> {
10599        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
10600    }
10601
10602    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
10603    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
10604    pub fn parse_parenthesized_qualified_column_list(
10605        &mut self,
10606        optional: IsOptional,
10607        allow_empty: bool,
10608    ) -> Result<Vec<ObjectName>, ParserError> {
10609        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
10610            p.parse_object_name(true)
10611        })
10612    }
10613
10614    /// Parses a parenthesized comma-separated list of columns using
10615    /// the provided function to parse each element.
10616    fn parse_parenthesized_column_list_inner<F, T>(
10617        &mut self,
10618        optional: IsOptional,
10619        allow_empty: bool,
10620        mut f: F,
10621    ) -> Result<Vec<T>, ParserError>
10622    where
10623        F: FnMut(&mut Parser) -> Result<T, ParserError>,
10624    {
10625        if self.consume_token(&Token::LParen) {
10626            if allow_empty && self.peek_token().token == Token::RParen {
10627                self.next_token();
10628                Ok(vec![])
10629            } else {
10630                let cols = self.parse_comma_separated(|p| f(p))?;
10631                self.expect_token(&Token::RParen)?;
10632                Ok(cols)
10633            }
10634        } else if optional == Optional {
10635            Ok(vec![])
10636        } else {
10637            self.expected("a list of columns in parentheses", self.peek_token())
10638        }
10639    }
10640
10641    /// Parses a parenthesized comma-separated list of table alias column definitions.
10642    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
10643        if self.consume_token(&Token::LParen) {
10644            let cols = self.parse_comma_separated(|p| {
10645                let name = p.parse_identifier()?;
10646                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
10647                Ok(TableAliasColumnDef { name, data_type })
10648            })?;
10649            self.expect_token(&Token::RParen)?;
10650            Ok(cols)
10651        } else {
10652            Ok(vec![])
10653        }
10654    }
10655
10656    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
10657        self.expect_token(&Token::LParen)?;
10658        let n = self.parse_literal_uint()?;
10659        self.expect_token(&Token::RParen)?;
10660        Ok(n)
10661    }
10662
10663    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
10664        if self.consume_token(&Token::LParen) {
10665            let n = self.parse_literal_uint()?;
10666            self.expect_token(&Token::RParen)?;
10667            Ok(Some(n))
10668        } else {
10669            Ok(None)
10670        }
10671    }
10672
10673    /// Parse datetime64 [1]
10674    /// Syntax
10675    /// ```sql
10676    /// DateTime64(precision[, timezone])
10677    /// ```
10678    ///
10679    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
10680    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
10681        self.expect_keyword_is(Keyword::DATETIME64)?;
10682        self.expect_token(&Token::LParen)?;
10683        let precision = self.parse_literal_uint()?;
10684        let time_zone = if self.consume_token(&Token::Comma) {
10685            Some(self.parse_literal_string()?)
10686        } else {
10687            None
10688        };
10689        self.expect_token(&Token::RParen)?;
10690        Ok((precision, time_zone))
10691    }
10692
10693    pub fn parse_optional_character_length(
10694        &mut self,
10695    ) -> Result<Option<CharacterLength>, ParserError> {
10696        if self.consume_token(&Token::LParen) {
10697            let character_length = self.parse_character_length()?;
10698            self.expect_token(&Token::RParen)?;
10699            Ok(Some(character_length))
10700        } else {
10701            Ok(None)
10702        }
10703    }
10704
10705    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
10706        if self.consume_token(&Token::LParen) {
10707            let binary_length = self.parse_binary_length()?;
10708            self.expect_token(&Token::RParen)?;
10709            Ok(Some(binary_length))
10710        } else {
10711            Ok(None)
10712        }
10713    }
10714
10715    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
10716        if self.parse_keyword(Keyword::MAX) {
10717            return Ok(CharacterLength::Max);
10718        }
10719        let length = self.parse_literal_uint()?;
10720        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
10721            Some(CharLengthUnits::Characters)
10722        } else if self.parse_keyword(Keyword::OCTETS) {
10723            Some(CharLengthUnits::Octets)
10724        } else {
10725            None
10726        };
10727        Ok(CharacterLength::IntegerLength { length, unit })
10728    }
10729
10730    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
10731        if self.parse_keyword(Keyword::MAX) {
10732            return Ok(BinaryLength::Max);
10733        }
10734        let length = self.parse_literal_uint()?;
10735        Ok(BinaryLength::IntegerLength { length })
10736    }
10737
10738    pub fn parse_optional_precision_scale(
10739        &mut self,
10740    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
10741        if self.consume_token(&Token::LParen) {
10742            let n = self.parse_literal_uint()?;
10743            let scale = if self.consume_token(&Token::Comma) {
10744                Some(self.parse_literal_uint()?)
10745            } else {
10746                None
10747            };
10748            self.expect_token(&Token::RParen)?;
10749            Ok((Some(n), scale))
10750        } else {
10751            Ok((None, None))
10752        }
10753    }
10754
10755    pub fn parse_exact_number_optional_precision_scale(
10756        &mut self,
10757    ) -> Result<ExactNumberInfo, ParserError> {
10758        if self.consume_token(&Token::LParen) {
10759            let precision = self.parse_literal_uint()?;
10760            let scale = if self.consume_token(&Token::Comma) {
10761                Some(self.parse_literal_uint()?)
10762            } else {
10763                None
10764            };
10765
10766            self.expect_token(&Token::RParen)?;
10767
10768            match scale {
10769                None => Ok(ExactNumberInfo::Precision(precision)),
10770                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
10771            }
10772        } else {
10773            Ok(ExactNumberInfo::None)
10774        }
10775    }
10776
10777    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
10778        if self.consume_token(&Token::LParen) {
10779            let mut modifiers = Vec::new();
10780            loop {
10781                let next_token = self.next_token();
10782                match next_token.token {
10783                    Token::Word(w) => modifiers.push(w.to_string()),
10784                    Token::Number(n, _) => modifiers.push(n),
10785                    Token::SingleQuotedString(s) => modifiers.push(s),
10786
10787                    Token::Comma => {
10788                        continue;
10789                    }
10790                    Token::RParen => {
10791                        break;
10792                    }
10793                    _ => self.expected("type modifiers", next_token)?,
10794                }
10795            }
10796
10797            Ok(Some(modifiers))
10798        } else {
10799            Ok(None)
10800        }
10801    }
10802
10803    /// Parse a parenthesized sub data type
10804    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
10805    where
10806        F: FnOnce(Box<DataType>) -> DataType,
10807    {
10808        self.expect_token(&Token::LParen)?;
10809        let inside_type = self.parse_data_type()?;
10810        self.expect_token(&Token::RParen)?;
10811        Ok(parent_type(inside_type.into()))
10812    }
10813
10814    /// Parse a DELETE statement, returning a `Box`ed SetExpr
10815    ///
10816    /// This is used to reduce the size of the stack frames in debug builds
10817    fn parse_delete_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
10818        Ok(Box::new(SetExpr::Delete(self.parse_delete()?)))
10819    }
10820
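    /// Parses a `DELETE` statement.
    ///
    /// For example (illustrative; `USING` and `RETURNING` are dialect-specific):
    /// ```sql
    /// DELETE FROM t1 USING t2 WHERE t1.id = t2.id RETURNING t1.id
    /// ```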
10821    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
10822        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
10823            // `FROM` keyword is optional in BigQuery SQL.
10824            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
10825            if dialect_of!(self is BigQueryDialect | GenericDialect) {
10826                (vec![], false)
10827            } else {
10828                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
10829                self.expect_keyword_is(Keyword::FROM)?;
10830                (tables, true)
10831            }
10832        } else {
10833            (vec![], true)
10834        };
10835
10836        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
10837        let using = if self.parse_keyword(Keyword::USING) {
10838            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
10839        } else {
10840            None
10841        };
10842        let selection = if self.parse_keyword(Keyword::WHERE) {
10843            Some(self.parse_expr()?)
10844        } else {
10845            None
10846        };
10847        let returning = if self.parse_keyword(Keyword::RETURNING) {
10848            Some(self.parse_comma_separated(Parser::parse_select_item)?)
10849        } else {
10850            None
10851        };
10852        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10853            self.parse_comma_separated(Parser::parse_order_by_expr)?
10854        } else {
10855            vec![]
10856        };
10857        let limit = if self.parse_keyword(Keyword::LIMIT) {
10858            self.parse_limit()?
10859        } else {
10860            None
10861        };
10862
10863        Ok(Statement::Delete(Delete {
10864            tables,
10865            from: if with_from_keyword {
10866                FromTable::WithFromKeyword(from)
10867            } else {
10868                FromTable::WithoutKeyword(from)
10869            },
10870            using,
10871            selection,
10872            returning,
10873            order_by,
10874            limit,
10875        }))
10876    }
10877
10878    // KILL [CONNECTION | QUERY | MUTATION] processlist_id
10879    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
10880        let modifier_keyword =
10881            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
10882
10883        let id = self.parse_literal_uint()?;
10884
10885        let modifier = match modifier_keyword {
10886            Some(Keyword::CONNECTION) => Some(KillType::Connection),
10887            Some(Keyword::QUERY) => Some(KillType::Query),
10888            Some(Keyword::MUTATION) => {
10889                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10890                    Some(KillType::Mutation)
10891                } else {
10892                    self.expected(
10893                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
10894                        self.peek_token(),
10895                    )?
10896                }
10897            }
10898            _ => None,
10899        };
10900
10901        Ok(Statement::Kill { modifier, id })
10902    }
10903
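    /// Parses an `EXPLAIN`/`DESCRIBE` statement whose leading keyword, indicated by
    /// `describe_alias`, has already been consumed.
    ///
    /// For example (illustrative):
    /// ```sql
    /// EXPLAIN ANALYZE SELECT * FROM t
    /// ```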
10904    pub fn parse_explain(
10905        &mut self,
10906        describe_alias: DescribeAlias,
10907    ) -> Result<Statement, ParserError> {
10908        let mut analyze = false;
10909        let mut verbose = false;
10910        let mut query_plan = false;
10911        let mut estimate = false;
10912        let mut format = None;
10913        let mut options = None;
10914
10915        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
10916        // although not all features may be implemented.
10917        if describe_alias == DescribeAlias::Explain
10918            && self.dialect.supports_explain_with_utility_options()
10919            && self.peek_token().token == Token::LParen
10920        {
10921            options = Some(self.parse_utility_options()?)
10922        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
10923            query_plan = true;
10924        } else if self.parse_keyword(Keyword::ESTIMATE) {
10925            estimate = true;
10926        } else {
10927            analyze = self.parse_keyword(Keyword::ANALYZE);
10928            verbose = self.parse_keyword(Keyword::VERBOSE);
10929            if self.parse_keyword(Keyword::FORMAT) {
10930                format = Some(self.parse_analyze_format()?);
10931            }
10932        }
10933
10934        match self.maybe_parse(|parser| parser.parse_statement())? {
10935            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
10936                ParserError::ParserError("Explain must be root of the plan".to_string()),
10937            ),
10938            Some(statement) => Ok(Statement::Explain {
10939                describe_alias,
10940                analyze,
10941                verbose,
10942                query_plan,
10943                estimate,
10944                statement: Box::new(statement),
10945                format,
10946                options,
10947            }),
10948            _ => {
10949                let hive_format =
10950                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
10951                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
10952                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
10953                        _ => None,
10954                    };
10955
10956                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
10957                    // only allow the TABLE keyword to be used with a DESC|DESCRIBE statement
10958                    self.parse_keyword(Keyword::TABLE)
10959                } else {
10960                    false
10961                };
10962
10963                let table_name = self.parse_object_name(false)?;
10964                Ok(Statement::ExplainTable {
10965                    describe_alias,
10966                    hive_format,
10967                    has_table_keyword,
10968                    table_name,
10969                })
10970            }
10971        }
10972    }
10973
10974    /// Parse a query expression, i.e. a `SELECT` statement optionally
10975    /// preceded by some `WITH` CTE declarations and optionally followed
10976    /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't
10977    /// expect the initial keyword to have been consumed already.
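    ///
    /// For example (illustrative):
    /// ```sql
    /// WITH t AS (SELECT 1 AS x) SELECT x FROM t ORDER BY x LIMIT 10
    /// ```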
10978    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
10979        let _guard = self.recursion_counter.try_decrease()?;
10980        let with = if self.parse_keyword(Keyword::WITH) {
10981            let with_token = self.get_current_token();
10982            Some(With {
10983                with_token: with_token.clone().into(),
10984                recursive: self.parse_keyword(Keyword::RECURSIVE),
10985                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
10986            })
10987        } else {
10988            None
10989        };
10990        if self.parse_keyword(Keyword::INSERT) {
10991            Ok(Query {
10992                with,
10993                body: self.parse_insert_setexpr_boxed()?,
10994                order_by: None,
10995                limit_clause: None,
10996                fetch: None,
10997                locks: vec![],
10998                for_clause: None,
10999                settings: None,
11000                format_clause: None,
11001                pipe_operators: vec![],
11002            }
11003            .into())
11004        } else if self.parse_keyword(Keyword::UPDATE) {
11005            Ok(Query {
11006                with,
11007                body: self.parse_update_setexpr_boxed()?,
11008                order_by: None,
11009                limit_clause: None,
11010                fetch: None,
11011                locks: vec![],
11012                for_clause: None,
11013                settings: None,
11014                format_clause: None,
11015                pipe_operators: vec![],
11016            }
11017            .into())
11018        } else if self.parse_keyword(Keyword::DELETE) {
11019            Ok(Query {
11020                with,
11021                body: self.parse_delete_setexpr_boxed()?,
11022                limit_clause: None,
11023                order_by: None,
11024                fetch: None,
11025                locks: vec![],
11026                for_clause: None,
11027                settings: None,
11028                format_clause: None,
11029                pipe_operators: vec![],
11030            }
11031            .into())
11032        } else {
11033            let body = self.parse_query_body(self.dialect.prec_unknown())?;
11034
11035            let order_by = self.parse_optional_order_by()?;
11036
11037            let limit_clause = self.parse_optional_limit_clause()?;
11038
11039            let settings = self.parse_settings()?;
11040
11041            let fetch = if self.parse_keyword(Keyword::FETCH) {
11042                Some(self.parse_fetch()?)
11043            } else {
11044                None
11045            };
11046
11047            let mut for_clause = None;
11048            let mut locks = Vec::new();
11049            while self.parse_keyword(Keyword::FOR) {
11050                if let Some(parsed_for_clause) = self.parse_for_clause()? {
11051                    for_clause = Some(parsed_for_clause);
11052                    break;
11053                } else {
11054                    locks.push(self.parse_lock()?);
11055                }
11056            }
11057            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11058                && self.parse_keyword(Keyword::FORMAT)
11059            {
11060                if self.parse_keyword(Keyword::NULL) {
11061                    Some(FormatClause::Null)
11062                } else {
11063                    let ident = self.parse_identifier()?;
11064                    Some(FormatClause::Identifier(ident))
11065                }
11066            } else {
11067                None
11068            };
11069
11070            let pipe_operators = if self.dialect.supports_pipe_operator() {
11071                self.parse_pipe_operators()?
11072            } else {
11073                Vec::new()
11074            };
11075
11076            Ok(Query {
11077                with,
11078                body,
11079                order_by,
11080                limit_clause,
11081                fetch,
11082                locks,
11083                for_clause,
11084                settings,
11085                format_clause,
11086                pipe_operators,
11087            }
11088            .into())
11089        }
11090    }
11091
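    /// Parses a sequence of `|>` pipe operators following a query body, for dialects
    /// that support pipe syntax (e.g. BigQuery).
    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT * FROM t |> WHERE x > 0 |> SELECT x
    /// ```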
11092    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
11093        let mut pipe_operators = Vec::new();
11094
11095        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
11096            let kw = self.expect_one_of_keywords(&[
11097                Keyword::SELECT,
11098                Keyword::EXTEND,
11099                Keyword::SET,
11100                Keyword::DROP,
11101                Keyword::AS,
11102                Keyword::WHERE,
11103                Keyword::LIMIT,
11104                Keyword::AGGREGATE,
11105                Keyword::ORDER,
11106                Keyword::TABLESAMPLE,
11107            ])?;
11108            match kw {
11109                Keyword::SELECT => {
11110                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
11111                    pipe_operators.push(PipeOperator::Select { exprs })
11112                }
11113                Keyword::EXTEND => {
11114                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
11115                    pipe_operators.push(PipeOperator::Extend { exprs })
11116                }
11117                Keyword::SET => {
11118                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
11119                    pipe_operators.push(PipeOperator::Set { assignments })
11120                }
11121                Keyword::DROP => {
11122                    let columns = self.parse_identifiers()?;
11123                    pipe_operators.push(PipeOperator::Drop { columns })
11124                }
11125                Keyword::AS => {
11126                    let alias = self.parse_identifier()?;
11127                    pipe_operators.push(PipeOperator::As { alias })
11128                }
11129                Keyword::WHERE => {
11130                    let expr = self.parse_expr()?;
11131                    pipe_operators.push(PipeOperator::Where { expr })
11132                }
11133                Keyword::LIMIT => {
11134                    let expr = self.parse_expr()?;
11135                    let offset = if self.parse_keyword(Keyword::OFFSET) {
11136                        Some(self.parse_expr()?)
11137                    } else {
11138                        None
11139                    };
11140                    pipe_operators.push(PipeOperator::Limit { expr, offset })
11141                }
11142                Keyword::AGGREGATE => {
11143                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
11144                        vec![]
11145                    } else {
11146                        self.parse_comma_separated(|parser| {
11147                            parser.parse_expr_with_alias_and_order_by()
11148                        })?
11149                    };
11150
11151                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11152                        self.parse_comma_separated(|parser| {
11153                            parser.parse_expr_with_alias_and_order_by()
11154                        })?
11155                    } else {
11156                        vec![]
11157                    };
11158
11159                    pipe_operators.push(PipeOperator::Aggregate {
11160                        full_table_exprs,
11161                        group_by_expr,
11162                    })
11163                }
11164                Keyword::ORDER => {
11165                    self.expect_one_of_keywords(&[Keyword::BY])?;
11166                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11167                    pipe_operators.push(PipeOperator::OrderBy { exprs })
11168                }
11169                Keyword::TABLESAMPLE => {
11170                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
11171                    pipe_operators.push(PipeOperator::TableSample { sample });
11172                }
11173                unhandled => {
11174                    return Err(ParserError::ParserError(format!(
11175                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
11176                )))
11177                }
11178            }
11179        }
11180        Ok(pipe_operators)
11181    }
11182
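    /// Parses an optional trailing `SETTINGS key = value, ...` clause (ClickHouse).
    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT * FROM t SETTINGS max_threads = 8
    /// ```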
11183    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
11184        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11185            && self.parse_keyword(Keyword::SETTINGS)
11186        {
11187            let key_values = self.parse_comma_separated(|p| {
11188                let key = p.parse_identifier()?;
11189                p.expect_token(&Token::Eq)?;
11190                let value = p.parse_value()?.value;
11191                Ok(Setting { key, value })
11192            })?;
11193            Some(key_values)
11194        } else {
11195            None
11196        };
11197        Ok(settings)
11198    }
11199
11200    /// Parse an MSSQL `FOR [XML | JSON | BROWSE]` clause
11201    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
11202        if self.parse_keyword(Keyword::XML) {
11203            Ok(Some(self.parse_for_xml()?))
11204        } else if self.parse_keyword(Keyword::JSON) {
11205            Ok(Some(self.parse_for_json()?))
11206        } else if self.parse_keyword(Keyword::BROWSE) {
11207            Ok(Some(ForClause::Browse))
11208        } else {
11209            Ok(None)
11210        }
11211    }
11212
11213    /// Parse an MSSQL `FOR XML` clause
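    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT id, name FROM t FOR XML PATH('row'), ROOT('rows'), ELEMENTS
    /// ```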
11214    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
11215        let for_xml = if self.parse_keyword(Keyword::RAW) {
11216            let mut element_name = None;
11217            if self.peek_token().token == Token::LParen {
11218                self.expect_token(&Token::LParen)?;
11219                element_name = Some(self.parse_literal_string()?);
11220                self.expect_token(&Token::RParen)?;
11221            }
11222            ForXml::Raw(element_name)
11223        } else if self.parse_keyword(Keyword::AUTO) {
11224            ForXml::Auto
11225        } else if self.parse_keyword(Keyword::EXPLICIT) {
11226            ForXml::Explicit
11227        } else if self.parse_keyword(Keyword::PATH) {
11228            let mut element_name = None;
11229            if self.peek_token().token == Token::LParen {
11230                self.expect_token(&Token::LParen)?;
11231                element_name = Some(self.parse_literal_string()?);
11232                self.expect_token(&Token::RParen)?;
11233            }
11234            ForXml::Path(element_name)
11235        } else {
11236            return Err(ParserError::ParserError(
11237                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
11238            ));
11239        };
11240        let mut elements = false;
11241        let mut binary_base64 = false;
11242        let mut root = None;
11243        let mut r#type = false;
11244        while self.peek_token().token == Token::Comma {
11245            self.next_token();
11246            if self.parse_keyword(Keyword::ELEMENTS) {
11247                elements = true;
11248            } else if self.parse_keyword(Keyword::BINARY) {
11249                self.expect_keyword_is(Keyword::BASE64)?;
11250                binary_base64 = true;
11251            } else if self.parse_keyword(Keyword::ROOT) {
11252                self.expect_token(&Token::LParen)?;
11253                root = Some(self.parse_literal_string()?);
11254                self.expect_token(&Token::RParen)?;
11255            } else if self.parse_keyword(Keyword::TYPE) {
11256                r#type = true;
11257            }
11258        }
11259        Ok(ForClause::Xml {
11260            for_xml,
11261            elements,
11262            binary_base64,
11263            root,
11264            r#type,
11265        })
11266    }
11267
11268    /// Parse an MSSQL `FOR JSON` clause
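    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT id, name FROM t FOR JSON PATH, ROOT('data'), INCLUDE_NULL_VALUES
    /// ```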
11269    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
11270        let for_json = if self.parse_keyword(Keyword::AUTO) {
11271            ForJson::Auto
11272        } else if self.parse_keyword(Keyword::PATH) {
11273            ForJson::Path
11274        } else {
11275            return Err(ParserError::ParserError(
11276                "Expected FOR JSON [AUTO | PATH ]".to_string(),
11277            ));
11278        };
11279        let mut root = None;
11280        let mut include_null_values = false;
11281        let mut without_array_wrapper = false;
11282        while self.peek_token().token == Token::Comma {
11283            self.next_token();
11284            if self.parse_keyword(Keyword::ROOT) {
11285                self.expect_token(&Token::LParen)?;
11286                root = Some(self.parse_literal_string()?);
11287                self.expect_token(&Token::RParen)?;
11288            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
11289                include_null_values = true;
11290            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
11291                without_array_wrapper = true;
11292            }
11293        }
11294        Ok(ForClause::Json {
11295            for_json,
11296            root,
11297            include_null_values,
11298            without_array_wrapper,
11299        })
11300    }
11301
11302    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
11303    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
11304        let name = self.parse_identifier()?;
11305
11306        let mut cte = if self.parse_keyword(Keyword::AS) {
11307            let mut is_materialized = None;
11308            if dialect_of!(self is PostgreSqlDialect) {
11309                if self.parse_keyword(Keyword::MATERIALIZED) {
11310                    is_materialized = Some(CteAsMaterialized::Materialized);
11311                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11312                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
11313                }
11314            }
11315            self.expect_token(&Token::LParen)?;
11316
11317            let query = self.parse_query()?;
11318            let closing_paren_token = self.expect_token(&Token::RParen)?;
11319
11320            let alias = TableAlias {
11321                name,
11322                columns: vec![],
11323            };
11324            Cte {
11325                alias,
11326                query,
11327                from: None,
11328                materialized: is_materialized,
11329                closing_paren_token: closing_paren_token.into(),
11330            }
11331        } else {
11332            let columns = self.parse_table_alias_column_defs()?;
11333            self.expect_keyword_is(Keyword::AS)?;
11334            let mut is_materialized = None;
11335            if dialect_of!(self is PostgreSqlDialect) {
11336                if self.parse_keyword(Keyword::MATERIALIZED) {
11337                    is_materialized = Some(CteAsMaterialized::Materialized);
11338                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11339                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
11340                }
11341            }
11342            self.expect_token(&Token::LParen)?;
11343
11344            let query = self.parse_query()?;
11345            let closing_paren_token = self.expect_token(&Token::RParen)?;
11346
11347            let alias = TableAlias { name, columns };
11348            Cte {
11349                alias,
11350                query,
11351                from: None,
11352                materialized: is_materialized,
11353                closing_paren_token: closing_paren_token.into(),
11354            }
11355        };
11356        if self.parse_keyword(Keyword::FROM) {
11357            cte.from = Some(self.parse_identifier()?);
11358        }
11359        Ok(cte)
11360    }
11361
11362    /// Parse a "query body", which is an expression with roughly the
11363    /// following grammar:
11364    /// ```sql
11365    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
11366    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
11367    ///   subquery ::= query_body [ order_by_limit ]
11368    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
11369    /// ```
11370    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
11371        // We parse the expression using a Pratt parser, as in `parse_expr()`.
11372        // Start by parsing a restricted SELECT or a `(subquery)`:
11373        let expr = if self.peek_keyword(Keyword::SELECT)
11374            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
11375        {
11376            SetExpr::Select(self.parse_select().map(Box::new)?)
11377        } else if self.consume_token(&Token::LParen) {
11378            // CTEs are not allowed here, but the parser currently accepts them
11379            let subquery = self.parse_query()?;
11380            self.expect_token(&Token::RParen)?;
11381            SetExpr::Query(subquery)
11382        } else if self.parse_keyword(Keyword::VALUES) {
11383            let is_mysql = dialect_of!(self is MySqlDialect);
11384            SetExpr::Values(self.parse_values(is_mysql)?)
11385        } else if self.parse_keyword(Keyword::TABLE) {
11386            SetExpr::Table(Box::new(self.parse_as_table()?))
11387        } else {
11388            return self.expected(
11389                "SELECT, VALUES, or a subquery in the query body",
11390                self.peek_token(),
11391            );
11392        };
11393
11394        self.parse_remaining_set_exprs(expr, precedence)
11395    }
11396
11397    /// Parse any extra set expressions that may be present in a query body
11398    ///
11399    /// (this is its own function to reduce required stack size in debug builds)
11400    fn parse_remaining_set_exprs(
11401        &mut self,
11402        mut expr: SetExpr,
11403        precedence: u8,
11404    ) -> Result<Box<SetExpr>, ParserError> {
11405        loop {
11406            // The query can be optionally followed by a set operator:
11407            let op = self.parse_set_operator(&self.peek_token().token);
11408            let next_precedence = match op {
11409                // UNION and EXCEPT have the same binding power and evaluate left-to-right
11410                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
11411                    10
11412                }
11413                // INTERSECT has higher precedence than UNION/EXCEPT
11414                Some(SetOperator::Intersect) => 20,
11415                // Unexpected token or EOF => stop parsing the query body
11416                None => break,
11417            };
11418            if precedence >= next_precedence {
11419                break;
11420            }
11421            self.next_token(); // skip past the set operator
11422            let set_quantifier = self.parse_set_quantifier(&op);
11423            expr = SetExpr::SetOperation {
11424                left: Box::new(expr),
11425                op: op.unwrap(),
11426                set_quantifier,
11427                right: self.parse_query_body(next_precedence)?,
11428            };
11429        }
11430
11431        Ok(expr.into())
11432    }
11433
11434    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
11435        match token {
11436            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
11437            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
11438            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
11439            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
11440            _ => None,
11441        }
11442    }
11443
11444    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
11445        match op {
11446            Some(
11447                SetOperator::Except
11448                | SetOperator::Intersect
11449                | SetOperator::Union
11450                | SetOperator::Minus,
11451            ) => {
11452                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
11453                    SetQuantifier::DistinctByName
11454                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11455                    SetQuantifier::ByName
11456                } else if self.parse_keyword(Keyword::ALL) {
11457                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11458                        SetQuantifier::AllByName
11459                    } else {
11460                        SetQuantifier::All
11461                    }
11462                } else if self.parse_keyword(Keyword::DISTINCT) {
11463                    SetQuantifier::Distinct
11464                } else {
11465                    SetQuantifier::None
11466                }
11467            }
11468            _ => SetQuantifier::None,
11469        }
11470    }
11471
11472    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
11473    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
11474        let mut from_first = None;
11475
11476        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
11477            let from_token = self.expect_keyword(Keyword::FROM)?;
11478            let from = self.parse_table_with_joins()?;
11479            if !self.peek_keyword(Keyword::SELECT) {
11480                return Ok(Select {
11481                    select_token: AttachedToken(from_token),
11482                    distinct: None,
11483                    top: None,
11484                    top_before_distinct: false,
11485                    projection: vec![],
11486                    into: None,
11487                    from,
11488                    lateral_views: vec![],
11489                    prewhere: None,
11490                    selection: None,
11491                    group_by: GroupByExpr::Expressions(vec![], vec![]),
11492                    cluster_by: vec![],
11493                    distribute_by: vec![],
11494                    sort_by: vec![],
11495                    having: None,
11496                    named_window: vec![],
11497                    window_before_qualify: false,
11498                    qualify: None,
11499                    value_table_mode: None,
11500                    connect_by: None,
11501                    flavor: SelectFlavor::FromFirstNoSelect,
11502                });
11503            }
11504            from_first = Some(from);
11505        }
11506
11507        let select_token = self.expect_keyword(Keyword::SELECT)?;
11508        let value_table_mode = self.parse_value_table_mode()?;
11509
11510        let mut top_before_distinct = false;
11511        let mut top = None;
11512        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11513            top = Some(self.parse_top()?);
11514            top_before_distinct = true;
11515        }
11516        let distinct = self.parse_all_or_distinct()?;
11517        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11518            top = Some(self.parse_top()?);
11519        }
11520
11521        let projection =
11522            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
11523                vec![]
11524            } else {
11525                self.parse_projection()?
11526            };
11527
11528        let into = if self.parse_keyword(Keyword::INTO) {
11529            Some(self.parse_select_into()?)
11530        } else {
11531            None
11532        };
11533
11534        // Note that for keywords to be properly handled here, they need to be
11535        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
11536        // otherwise they may be parsed as an alias as part of the `projection`
11537        // or `from`.
11538
11539        let (from, from_first) = if let Some(from) = from_first.take() {
11540            (from, true)
11541        } else if self.parse_keyword(Keyword::FROM) {
11542            (self.parse_table_with_joins()?, false)
11543        } else {
11544            (vec![], false)
11545        };
11546
11547        let mut lateral_views = vec![];
11548        loop {
11549            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
11550                let outer = self.parse_keyword(Keyword::OUTER);
11551                let lateral_view = self.parse_expr()?;
11552                let lateral_view_name = self.parse_object_name(false)?;
11553                let lateral_col_alias = self
11554                    .parse_comma_separated(|parser| {
11555                        parser.parse_optional_alias(&[
11556                            Keyword::WHERE,
11557                            Keyword::GROUP,
11558                            Keyword::CLUSTER,
11559                            Keyword::HAVING,
11560                            Keyword::LATERAL,
11561                        ]) // keywords that cannot be taken as a column alias here
11562                    })?
11563                    .into_iter()
11564                    .flatten()
11565                    .collect();
11566
11567                lateral_views.push(LateralView {
11568                    lateral_view,
11569                    lateral_view_name,
11570                    lateral_col_alias,
11571                    outer,
11572                });
11573            } else {
11574                break;
11575            }
11576        }
11577
11578        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11579            && self.parse_keyword(Keyword::PREWHERE)
11580        {
11581            Some(self.parse_expr()?)
11582        } else {
11583            None
11584        };
11585
11586        let selection = if self.parse_keyword(Keyword::WHERE) {
11587            Some(self.parse_expr()?)
11588        } else {
11589            None
11590        };
11591
11592        let group_by = self
11593            .parse_optional_group_by()?
11594            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
11595
11596        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
11597            self.parse_comma_separated(Parser::parse_expr)?
11598        } else {
11599            vec![]
11600        };
11601
11602        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
11603            self.parse_comma_separated(Parser::parse_expr)?
11604        } else {
11605            vec![]
11606        };
11607
11608        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
11609            self.parse_comma_separated(Parser::parse_order_by_expr)?
11610        } else {
11611            vec![]
11612        };
11613
11614        let having = if self.parse_keyword(Keyword::HAVING) {
11615            Some(self.parse_expr()?)
11616        } else {
11617            None
11618        };
11619
11620        // Accept QUALIFY and WINDOW in any order and flag accordingly.
11621        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
11622        {
11623            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
11624            if self.parse_keyword(Keyword::QUALIFY) {
11625                (named_windows, Some(self.parse_expr()?), true)
11626            } else {
11627                (named_windows, None, true)
11628            }
11629        } else if self.parse_keyword(Keyword::QUALIFY) {
11630            let qualify = Some(self.parse_expr()?);
11631            if self.parse_keyword(Keyword::WINDOW) {
11632                (
11633                    self.parse_comma_separated(Parser::parse_named_window)?,
11634                    qualify,
11635                    false,
11636                )
11637            } else {
11638                (Default::default(), qualify, false)
11639            }
11640        } else {
11641            Default::default()
11642        };
11643
11644        let connect_by = if self.dialect.supports_connect_by()
11645            && self
11646                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
11647                .is_some()
11648        {
11649            self.prev_token();
11650            Some(self.parse_connect_by()?)
11651        } else {
11652            None
11653        };
11654
11655        Ok(Select {
11656            select_token: AttachedToken(select_token),
11657            distinct,
11658            top,
11659            top_before_distinct,
11660            projection,
11661            into,
11662            from,
11663            lateral_views,
11664            prewhere,
11665            selection,
11666            group_by,
11667            cluster_by,
11668            distribute_by,
11669            sort_by,
11670            having,
11671            named_window: named_windows,
11672            window_before_qualify,
11673            qualify,
11674            value_table_mode,
11675            connect_by,
11676            flavor: if from_first {
11677                SelectFlavor::FromFirst
11678            } else {
11679                SelectFlavor::Standard
11680            },
11681        })
11682    }
11683
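    /// Parses BigQuery's optional `[ALL | DISTINCT] AS [STRUCT | VALUE]` modifier that
    /// may follow `SELECT`.
    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT AS STRUCT 1 AS a, 2 AS b
    /// ```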
11684    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
11685        if !dialect_of!(self is BigQueryDialect) {
11686            return Ok(None);
11687        }
11688
11689        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
11690            Some(ValueTableMode::DistinctAsValue)
11691        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
11692            Some(ValueTableMode::DistinctAsStruct)
11693        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
11694            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
11695        {
11696            Some(ValueTableMode::AsValue)
11697        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
11698            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
11699        {
11700            Some(ValueTableMode::AsStruct)
11701        } else if self.parse_keyword(Keyword::AS) {
11702            self.expected("VALUE or STRUCT", self.peek_token())?
11703        } else {
11704            None
11705        };
11706
11707        Ok(mode)
11708    }
11709
11710    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
11711    ///
11712    /// Upon return, restores the parser's state to what it started at.
11713    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
11714    where
11715        F: FnMut(&mut Parser) -> Result<T, ParserError>,
11716    {
11717        let current_state = self.state;
11718        self.state = state;
11719        let res = f(self);
11720        self.state = current_state;
11721        res
11722    }
11723
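    /// Parses a hierarchical-query clause in either the `CONNECT BY ... START WITH ...`
    /// or the `START WITH ... CONNECT BY ...` order, as supported by e.g. Snowflake and Oracle.
    ///
    /// For example (illustrative), the trailing clause of:
    /// ```sql
    /// SELECT employee_id FROM employees
    ///   START WITH manager_id IS NULL
    ///   CONNECT BY manager_id = PRIOR employee_id
    /// ```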
11724    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
11725        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
11726            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
11727                parser.parse_comma_separated(Parser::parse_expr)
11728            })?;
11729            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
11730            let condition = self.parse_expr()?;
11731            (condition, relationships)
11732        } else {
11733            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
11734            let condition = self.parse_expr()?;
11735            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
11736            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
11737                parser.parse_comma_separated(Parser::parse_expr)
11738            })?;
11739            (condition, relationships)
11740        };
11741        Ok(ConnectBy {
11742            condition,
11743            relationships,
11744        })
11745    }
11746
11747    /// Parse `CREATE TABLE x AS TABLE y`
11748    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
11749        let token1 = self.next_token();
11750        let token2 = self.next_token();
11751        let token3 = self.next_token();
11752
11753        let table_name;
11754        let schema_name;
11755        if token2 == Token::Period {
11756            match token1.token {
11757                Token::Word(w) => {
11758                    schema_name = w.value;
11759                }
11760                _ => {
11761                    return self.expected("Schema name", token1);
11762                }
11763            }
11764            match token3.token {
11765                Token::Word(w) => {
11766                    table_name = w.value;
11767                }
11768                _ => {
11769                    return self.expected("Table name", token3);
11770                }
11771            }
11772            Ok(Table {
11773                table_name: Some(table_name),
11774                schema_name: Some(schema_name),
11775            })
11776        } else {
11777            match token1.token {
11778                Token::Word(w) => {
11779                    table_name = w.value;
11780                }
11781                _ => {
11782                    return self.expected("Table name", token1);
11783                }
11784            }
11785            Ok(Table {
11786                table_name: Some(table_name),
11787                schema_name: None,
11788            })
11789        }
11790    }
11791
11792    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
11793    fn parse_set_role(
11794        &mut self,
11795        modifier: Option<ContextModifier>,
11796    ) -> Result<Statement, ParserError> {
11797        self.expect_keyword_is(Keyword::ROLE)?;
11798
11799        let role_name = if self.parse_keyword(Keyword::NONE) {
11800            None
11801        } else {
11802            Some(self.parse_identifier()?)
11803        };
11804        Ok(Statement::Set(Set::SetRole {
11805            context_modifier: modifier,
11806            role_name,
11807        }))
11808    }
11809
11810    fn parse_set_values(
11811        &mut self,
11812        parenthesized_assignment: bool,
11813    ) -> Result<Vec<Expr>, ParserError> {
11814        let mut values = vec![];
11815
11816        if parenthesized_assignment {
11817            self.expect_token(&Token::LParen)?;
11818        }
11819
11820        loop {
11821            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
11822                expr
11823            } else if let Ok(expr) = self.parse_expr() {
11824                expr
11825            } else {
11826                self.expected("variable value", self.peek_token())?
11827            };
11828
11829            values.push(value);
11830            if self.consume_token(&Token::Comma) {
11831                continue;
11832            }
11833
11834            if parenthesized_assignment {
11835                self.expect_token(&Token::RParen)?;
11836            }
11837            return Ok(values);
11838        }
11839    }
11840
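    /// Parses an optional scope modifier (`SESSION`, `LOCAL`, or `GLOBAL`),
    /// e.g. the `SESSION` in `SET SESSION some_var = 'value'`.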
11841    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
11842        let modifier =
11843            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
11844
11845        Self::keyword_to_modifier(modifier)
11846    }
11847
11848    /// Parse a single SET statement assignment `var = expr`.
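    ///
    /// For example, each `name = value` pair in `SET a = 1, b = 2`;
    /// `TO` is also accepted as the assignment operator.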
11849    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
11850        let scope = self.parse_context_modifier();
11851
11852        let name = if self.dialect.supports_parenthesized_set_variables()
11853            && self.consume_token(&Token::LParen)
11854        {
11855            // Parenthesized assignments are handled in the `parse_set` function after
11856            // trying to parse a list of assignments using this function.
11857            // If a dialect supports both and we find an `LParen`, we exit early from this function.
11858            self.expected("Unparenthesized assignment", self.peek_token())?
11859        } else {
11860            self.parse_object_name(false)?
11861        };
11862
11863        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
11864            return self.expected("assignment operator", self.peek_token());
11865        }
11866
11867        let value = self.parse_expr()?;
11868
11869        Ok(SetAssignment { scope, name, value })
11870    }
11871
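    /// Parse a `SET` statement. Depending on the dialect this covers forms such as:
    ///
    /// ```sql
    /// SET TIME ZONE 'UTC';
    /// SET NAMES utf8;
    /// SET TRANSACTION ISOLATION LEVEL READ COMMITTED;
    /// SET SESSION my_var = 'value';
    /// SET (a, b) = (1, 2);
    /// ```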
11872    fn parse_set(&mut self) -> Result<Statement, ParserError> {
11873        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
11874
11875        // The modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc.), not both.
11876        let scope = if !hivevar {
11877            self.parse_context_modifier()
11878        } else {
11879            None
11880        };
11881
11882        if hivevar {
11883            self.expect_token(&Token::Colon)?;
11884        }
11885
11886        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
11887            return Ok(set_role_stmt);
11888        }
11889
11890        // Handle special cases first
11891        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
11892            || self.parse_keyword(Keyword::TIMEZONE)
11893        {
11894            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
11895                return Ok(Set::SingleAssignment {
11896                    scope,
11897                    hivevar,
11898                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
11899                    values: self.parse_set_values(false)?,
11900                }
11901                .into());
11902            } else {
11903                // A shorthand alias for SET TIME ZONE that doesn't require
11904                // the assignment operator. It's originally PostgreSQL-specific,
11905                // but we allow it for all dialects.
11906                return Ok(Set::SetTimeZone {
11907                    local: scope == Some(ContextModifier::Local),
11908                    value: self.parse_expr()?,
11909                }
11910                .into());
11911            }
11912        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
11913            if self.parse_keyword(Keyword::DEFAULT) {
11914                return Ok(Set::SetNamesDefault {}.into());
11915            }
11916            let charset_name = self.parse_identifier()?;
11917            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
11918                Some(self.parse_literal_string()?)
11919            } else {
11920                None
11921            };
11922
11923            return Ok(Set::SetNames {
11924                charset_name,
11925                collation_name,
11926            }
11927            .into());
11928        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
11929            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
11930            return Ok(Set::SetTransaction {
11931                modes: self.parse_transaction_modes()?,
11932                snapshot: None,
11933                session: true,
11934            }
11935            .into());
11936        } else if self.parse_keyword(Keyword::TRANSACTION) {
11937            if self.parse_keyword(Keyword::SNAPSHOT) {
11938                let snapshot_id = self.parse_value()?.value;
11939                return Ok(Set::SetTransaction {
11940                    modes: vec![],
11941                    snapshot: Some(snapshot_id),
11942                    session: false,
11943                }
11944                .into());
11945            }
11946            return Ok(Set::SetTransaction {
11947                modes: self.parse_transaction_modes()?,
11948                snapshot: None,
11949                session: false,
11950            }
11951            .into());
11952        }
11953
11954        if self.dialect.supports_comma_separated_set_assignments() {
11955            if scope.is_some() {
11956                self.prev_token();
11957            }
11958
11959            if let Some(assignments) = self
11960                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
11961            {
11962                return if assignments.len() > 1 {
11963                    Ok(Set::MultipleAssignments { assignments }.into())
11964                } else {
11965                    let SetAssignment { scope, name, value } =
11966                        assignments.into_iter().next().ok_or_else(|| {
11967                            ParserError::ParserError("Expected at least one assignment".to_string())
11968                        })?;
11969
11970                    Ok(Set::SingleAssignment {
11971                        scope,
11972                        hivevar,
11973                        variable: name,
11974                        values: vec![value],
11975                    }
11976                    .into())
11977                };
11978            }
11979        }
11980
11981        let variables = if self.dialect.supports_parenthesized_set_variables()
11982            && self.consume_token(&Token::LParen)
11983        {
11984            let vars = OneOrManyWithParens::Many(
11985                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
11986                    .into_iter()
11987                    .map(|ident| ObjectName::from(vec![ident]))
11988                    .collect(),
11989            );
11990            self.expect_token(&Token::RParen)?;
11991            vars
11992        } else {
11993            OneOrManyWithParens::One(self.parse_object_name(false)?)
11994        };
11995
11996        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
11997            let stmt = match variables {
11998                OneOrManyWithParens::One(var) => Set::SingleAssignment {
11999                    scope,
12000                    hivevar,
12001                    variable: var,
12002                    values: self.parse_set_values(false)?,
12003                },
12004                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
12005                    variables: vars,
12006                    values: self.parse_set_values(true)?,
12007                },
12008            };
12009
12010            return Ok(stmt.into());
12011        }
12012
12013        if self.dialect.supports_set_stmt_without_operator() {
12014            self.prev_token();
12015            return self.parse_set_session_params();
12016        };
12017
12018        self.expected("equals sign or TO", self.peek_token())
12019    }
12020
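    /// Parses the remainder of a `SET` statement that has no assignment operator
    /// (session parameters in the MSSQL style), e.g. `SET STATISTICS IO ON` or
    /// `SET IDENTITY_INSERT my_table ON`.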
12021    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
12022        if self.parse_keyword(Keyword::STATISTICS) {
12023            let topic = match self.parse_one_of_keywords(&[
12024                Keyword::IO,
12025                Keyword::PROFILE,
12026                Keyword::TIME,
12027                Keyword::XML,
12028            ]) {
12029                Some(Keyword::IO) => SessionParamStatsTopic::IO,
12030                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
12031                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
12032                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
12033                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
12034            };
12035            let value = self.parse_session_param_value()?;
12036            Ok(
12037                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
12038                    topic,
12039                    value,
12040                }))
12041                .into(),
12042            )
12043        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
12044            let obj = self.parse_object_name(false)?;
12045            let value = self.parse_session_param_value()?;
12046            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
12047                SetSessionParamIdentityInsert { obj, value },
12048            ))
12049            .into())
12050        } else if self.parse_keyword(Keyword::OFFSETS) {
12051            let keywords = self.parse_comma_separated(|parser| {
12052                let next_token = parser.next_token();
12053                match &next_token.token {
12054                    Token::Word(w) => Ok(w.to_string()),
12055                    _ => parser.expected("SQL keyword", next_token),
12056                }
12057            })?;
12058            let value = self.parse_session_param_value()?;
12059            Ok(
12060                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
12061                    keywords,
12062                    value,
12063                }))
12064                .into(),
12065            )
12066        } else {
12067            let names = self.parse_comma_separated(|parser| {
12068                let next_token = parser.next_token();
12069                match next_token.token {
12070                    Token::Word(w) => Ok(w.to_string()),
12071                    _ => parser.expected("Session param name", next_token),
12072                }
12073            })?;
12074            let value = self.parse_expr()?.to_string();
12075            Ok(
12076                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
12077                    names,
12078                    value,
12079                }))
12080                .into(),
12081            )
12082        }
12083    }
12084
12085    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
12086        if self.parse_keyword(Keyword::ON) {
12087            Ok(SessionParamValue::On)
12088        } else if self.parse_keyword(Keyword::OFF) {
12089            Ok(SessionParamValue::Off)
12090        } else {
12091            self.expected("ON or OFF", self.peek_token())
12092        }
12093    }
12094
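    /// Parse a `SHOW` statement, e.g.:
    ///
    /// ```sql
    /// SHOW TABLES;
    /// SHOW COLUMNS FROM my_table;
    /// SHOW CREATE TABLE my_table;
    /// SHOW DATABASES;
    /// ```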
12095    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
12096        let terse = self.parse_keyword(Keyword::TERSE);
12097        let extended = self.parse_keyword(Keyword::EXTENDED);
12098        let full = self.parse_keyword(Keyword::FULL);
12099        let session = self.parse_keyword(Keyword::SESSION);
12100        let global = self.parse_keyword(Keyword::GLOBAL);
12101        let external = self.parse_keyword(Keyword::EXTERNAL);
12102        if self
12103            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
12104            .is_some()
12105        {
12106            Ok(self.parse_show_columns(extended, full)?)
12107        } else if self.parse_keyword(Keyword::TABLES) {
12108            Ok(self.parse_show_tables(terse, extended, full, external)?)
12109        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
12110            Ok(self.parse_show_views(terse, true)?)
12111        } else if self.parse_keyword(Keyword::VIEWS) {
12112            Ok(self.parse_show_views(terse, false)?)
12113        } else if self.parse_keyword(Keyword::FUNCTIONS) {
12114            Ok(self.parse_show_functions()?)
12115        } else if extended || full {
12116            Err(ParserError::ParserError(
12117                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
12118            ))
12119        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
12120            Ok(self.parse_show_create()?)
12121        } else if self.parse_keyword(Keyword::COLLATION) {
12122            Ok(self.parse_show_collation()?)
12123        } else if self.parse_keyword(Keyword::VARIABLES)
12124            && dialect_of!(self is MySqlDialect | GenericDialect)
12125        {
12126            Ok(Statement::ShowVariables {
12127                filter: self.parse_show_statement_filter()?,
12128                session,
12129                global,
12130            })
12131        } else if self.parse_keyword(Keyword::STATUS)
12132            && dialect_of!(self is MySqlDialect | GenericDialect)
12133        {
12134            Ok(Statement::ShowStatus {
12135                filter: self.parse_show_statement_filter()?,
12136                session,
12137                global,
12138            })
12139        } else if self.parse_keyword(Keyword::DATABASES) {
12140            self.parse_show_databases(terse)
12141        } else if self.parse_keyword(Keyword::SCHEMAS) {
12142            self.parse_show_schemas(terse)
12143        } else {
12144            Ok(Statement::ShowVariable {
12145                variable: self.parse_identifiers()?,
12146            })
12147        }
12148    }
12149
12150    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
12151        let history = self.parse_keyword(Keyword::HISTORY);
12152        let show_options = self.parse_show_stmt_options()?;
12153        Ok(Statement::ShowDatabases {
12154            terse,
12155            history,
12156            show_options,
12157        })
12158    }
12159
12160    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
12161        let history = self.parse_keyword(Keyword::HISTORY);
12162        let show_options = self.parse_show_stmt_options()?;
12163        Ok(Statement::ShowSchemas {
12164            terse,
12165            history,
12166            show_options,
12167        })
12168    }
12169
12170    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
12171        let obj_type = match self.expect_one_of_keywords(&[
12172            Keyword::TABLE,
12173            Keyword::TRIGGER,
12174            Keyword::FUNCTION,
12175            Keyword::PROCEDURE,
12176            Keyword::EVENT,
12177            Keyword::VIEW,
12178        ])? {
12179            Keyword::TABLE => Ok(ShowCreateObject::Table),
12180            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
12181            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
12182            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
12183            Keyword::EVENT => Ok(ShowCreateObject::Event),
12184            Keyword::VIEW => Ok(ShowCreateObject::View),
12185            keyword => Err(ParserError::ParserError(format!(
12186                "Unable to map keyword to ShowCreateObject: {keyword:?}"
12187            ))),
12188        }?;
12189
12190        let obj_name = self.parse_object_name(false)?;
12191
12192        Ok(Statement::ShowCreate { obj_type, obj_name })
12193    }
12194
12195    pub fn parse_show_columns(
12196        &mut self,
12197        extended: bool,
12198        full: bool,
12199    ) -> Result<Statement, ParserError> {
12200        let show_options = self.parse_show_stmt_options()?;
12201        Ok(Statement::ShowColumns {
12202            extended,
12203            full,
12204            show_options,
12205        })
12206    }
12207
12208    fn parse_show_tables(
12209        &mut self,
12210        terse: bool,
12211        extended: bool,
12212        full: bool,
12213        external: bool,
12214    ) -> Result<Statement, ParserError> {
12215        let history = !external && self.parse_keyword(Keyword::HISTORY);
12216        let show_options = self.parse_show_stmt_options()?;
12217        Ok(Statement::ShowTables {
12218            terse,
12219            history,
12220            extended,
12221            full,
12222            external,
12223            show_options,
12224        })
12225    }
12226
12227    fn parse_show_views(
12228        &mut self,
12229        terse: bool,
12230        materialized: bool,
12231    ) -> Result<Statement, ParserError> {
12232        let show_options = self.parse_show_stmt_options()?;
12233        Ok(Statement::ShowViews {
12234            materialized,
12235            terse,
12236            show_options,
12237        })
12238    }
12239
12240    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
12241        let filter = self.parse_show_statement_filter()?;
12242        Ok(Statement::ShowFunctions { filter })
12243    }
12244
12245    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
12246        let filter = self.parse_show_statement_filter()?;
12247        Ok(Statement::ShowCollation { filter })
12248    }
12249
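    /// Parses an optional filter at the end of a `SHOW` statement, e.g. the
    /// `LIKE 'latin1%'` in `SHOW COLLATION LIKE 'latin1%'`; `ILIKE 'pattern'`
    /// and `WHERE <expr>` are accepted as well.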
12250    pub fn parse_show_statement_filter(
12251        &mut self,
12252    ) -> Result<Option<ShowStatementFilter>, ParserError> {
12253        if self.parse_keyword(Keyword::LIKE) {
12254            Ok(Some(ShowStatementFilter::Like(
12255                self.parse_literal_string()?,
12256            )))
12257        } else if self.parse_keyword(Keyword::ILIKE) {
12258            Ok(Some(ShowStatementFilter::ILike(
12259                self.parse_literal_string()?,
12260            )))
12261        } else if self.parse_keyword(Keyword::WHERE) {
12262            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
12263        } else {
12264            self.maybe_parse(|parser| -> Result<String, ParserError> {
12265                parser.parse_literal_string()
12266            })?
12267            .map_or(Ok(None), |filter| {
12268                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
12269            })
12270        }
12271    }
12272
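    /// Parse a `USE` statement, e.g. `USE my_db`, or a dialect-specific form such as
    /// `USE SCHEMA my_schema` (Snowflake, Databricks) or
    /// `USE SECONDARY ROLES ALL` (Snowflake).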
12273    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
12274        // Determine which keywords are recognized by the current dialect
12275        let parsed_keyword = if dialect_of!(self is HiveDialect) {
12276            // HiveDialect accepts a `USE DEFAULT;` statement without any database specified
12277            if self.parse_keyword(Keyword::DEFAULT) {
12278                return Ok(Statement::Use(Use::Default));
12279            }
12280            None // HiveDialect doesn't expect any other specific keyword after `USE`
12281        } else if dialect_of!(self is DatabricksDialect) {
12282            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
12283        } else if dialect_of!(self is SnowflakeDialect) {
12284            self.parse_one_of_keywords(&[
12285                Keyword::DATABASE,
12286                Keyword::SCHEMA,
12287                Keyword::WAREHOUSE,
12288                Keyword::ROLE,
12289                Keyword::SECONDARY,
12290            ])
12291        } else {
12292            None // No specific keywords for other dialects, including GenericDialect
12293        };
12294
12295        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
12296            self.parse_secondary_roles()?
12297        } else {
12298            let obj_name = self.parse_object_name(false)?;
12299            match parsed_keyword {
12300                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
12301                Some(Keyword::DATABASE) => Use::Database(obj_name),
12302                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
12303                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
12304                Some(Keyword::ROLE) => Use::Role(obj_name),
12305                _ => Use::Object(obj_name),
12306            }
12307        };
12308
12309        Ok(Statement::Use(result))
12310    }
12311
12312    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
12313        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
12314        if self.parse_keyword(Keyword::NONE) {
12315            Ok(Use::SecondaryRoles(SecondaryRoles::None))
12316        } else if self.parse_keyword(Keyword::ALL) {
12317            Ok(Use::SecondaryRoles(SecondaryRoles::All))
12318        } else {
12319            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
12320            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
12321        }
12322    }
12323
12324    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
12325        let relation = self.parse_table_factor()?;
12326        // Note that for keywords to be properly handled here, they need to be
12327        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
12328        // a table alias.
12329        let joins = self.parse_joins()?;
12330        Ok(TableWithJoins { relation, joins })
12331    }
12332
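    /// Parses any joins that follow a table factor, e.g. the
    /// `LEFT JOIN t2 ON t1.id = t2.id` and `CROSS JOIN t3` in
    /// `FROM t1 LEFT JOIN t2 ON t1.id = t2.id CROSS JOIN t3`.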
12333    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
12334        let mut joins = vec![];
12335        loop {
12336            let global = self.parse_keyword(Keyword::GLOBAL);
12337            let join = if self.parse_keyword(Keyword::CROSS) {
12338                let join_operator = if self.parse_keyword(Keyword::JOIN) {
12339                    JoinOperator::CrossJoin
12340                } else if self.parse_keyword(Keyword::APPLY) {
12341                    // MSSQL extension, similar to CROSS JOIN LATERAL
12342                    JoinOperator::CrossApply
12343                } else {
12344                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
12345                };
12346                Join {
12347                    relation: self.parse_table_factor()?,
12348                    global,
12349                    join_operator,
12350                }
12351            } else if self.parse_keyword(Keyword::OUTER) {
12352                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
12353                self.expect_keyword_is(Keyword::APPLY)?;
12354                Join {
12355                    relation: self.parse_table_factor()?,
12356                    global,
12357                    join_operator: JoinOperator::OuterApply,
12358                }
12359            } else if self.parse_keyword(Keyword::ASOF) {
12360                self.expect_keyword_is(Keyword::JOIN)?;
12361                let relation = self.parse_table_factor()?;
12362                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
12363                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
12364                Join {
12365                    relation,
12366                    global,
12367                    join_operator: JoinOperator::AsOf {
12368                        match_condition,
12369                        constraint: self.parse_join_constraint(false)?,
12370                    },
12371                }
12372            } else {
12373                let natural = self.parse_keyword(Keyword::NATURAL);
12374                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
12375                    w.keyword
12376                } else {
12377                    Keyword::NoKeyword
12378                };
12379
12380                let join_operator_type = match peek_keyword {
12381                    Keyword::INNER | Keyword::JOIN => {
12382                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
12383                        self.expect_keyword_is(Keyword::JOIN)?;
12384                        if inner {
12385                            JoinOperator::Inner
12386                        } else {
12387                            JoinOperator::Join
12388                        }
12389                    }
12390                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
12391                        let _ = self.next_token(); // consume LEFT/RIGHT
12392                        let is_left = kw == Keyword::LEFT;
12393                        let join_type = self.parse_one_of_keywords(&[
12394                            Keyword::OUTER,
12395                            Keyword::SEMI,
12396                            Keyword::ANTI,
12397                            Keyword::JOIN,
12398                        ]);
12399                        match join_type {
12400                            Some(Keyword::OUTER) => {
12401                                self.expect_keyword_is(Keyword::JOIN)?;
12402                                if is_left {
12403                                    JoinOperator::LeftOuter
12404                                } else {
12405                                    JoinOperator::RightOuter
12406                                }
12407                            }
12408                            Some(Keyword::SEMI) => {
12409                                self.expect_keyword_is(Keyword::JOIN)?;
12410                                if is_left {
12411                                    JoinOperator::LeftSemi
12412                                } else {
12413                                    JoinOperator::RightSemi
12414                                }
12415                            }
12416                            Some(Keyword::ANTI) => {
12417                                self.expect_keyword_is(Keyword::JOIN)?;
12418                                if is_left {
12419                                    JoinOperator::LeftAnti
12420                                } else {
12421                                    JoinOperator::RightAnti
12422                                }
12423                            }
12424                            Some(Keyword::JOIN) => {
12425                                if is_left {
12426                                    JoinOperator::Left
12427                                } else {
12428                                    JoinOperator::Right
12429                                }
12430                            }
12431                            _ => {
12432                                return Err(ParserError::ParserError(format!(
12433                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
12434                                )))
12435                            }
12436                        }
12437                    }
12438                    Keyword::ANTI => {
12439                        let _ = self.next_token(); // consume ANTI
12440                        self.expect_keyword_is(Keyword::JOIN)?;
12441                        JoinOperator::Anti
12442                    }
12443                    Keyword::SEMI => {
12444                        let _ = self.next_token(); // consume SEMI
12445                        self.expect_keyword_is(Keyword::JOIN)?;
12446                        JoinOperator::Semi
12447                    }
12448                    Keyword::FULL => {
12449                        let _ = self.next_token(); // consume FULL
12450                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
12451                        self.expect_keyword_is(Keyword::JOIN)?;
12452                        JoinOperator::FullOuter
12453                    }
12454                    Keyword::OUTER => {
12455                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
12456                    }
12457                    Keyword::STRAIGHT_JOIN => {
12458                        let _ = self.next_token(); // consume STRAIGHT_JOIN
12459                        JoinOperator::StraightJoin
12460                    }
12461                    _ if natural => {
12462                        return self.expected("a join type after NATURAL", self.peek_token());
12463                    }
12464                    _ => break,
12465                };
12466                let mut relation = self.parse_table_factor()?;
12467
12468                if self.peek_parens_less_nested_join() {
12469                    let joins = self.parse_joins()?;
12470                    relation = TableFactor::NestedJoin {
12471                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
12472                        alias: None,
12473                    };
12474                }
12475
12476                let join_constraint = self.parse_join_constraint(natural)?;
12477                Join {
12478                    relation,
12479                    global,
12480                    join_operator: join_operator_type(join_constraint),
12481                }
12482            };
12483            joins.push(join);
12484        }
12485        Ok(joins)
12486    }
12487
12488    fn peek_parens_less_nested_join(&self) -> bool {
12489        matches!(
12490            self.peek_token_ref().token,
12491            Token::Word(Word {
12492                keyword: Keyword::JOIN
12493                    | Keyword::INNER
12494                    | Keyword::LEFT
12495                    | Keyword::RIGHT
12496                    | Keyword::FULL,
12497                ..
12498            })
12499        )
12500    }
12501
12502    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
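    ///
    /// For example: `my_table AS t`, `(SELECT 1) AS sub`, `TABLE(my_function(42))`,
    /// or, depending on the dialect, `UNNEST(my_array)`.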
12503    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12504        if self.parse_keyword(Keyword::LATERAL) {
12505            // LATERAL must always be followed by a subquery or table function.
12506            if self.consume_token(&Token::LParen) {
12507                self.parse_derived_table_factor(Lateral)
12508            } else {
12509                let name = self.parse_object_name(false)?;
12510                self.expect_token(&Token::LParen)?;
12511                let args = self.parse_optional_args()?;
12512                let alias = self.maybe_parse_table_alias()?;
12513                Ok(TableFactor::Function {
12514                    lateral: true,
12515                    name,
12516                    args,
12517                    alias,
12518                })
12519            }
12520        } else if self.parse_keyword(Keyword::TABLE) {
12521            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
12522            self.expect_token(&Token::LParen)?;
12523            let expr = self.parse_expr()?;
12524            self.expect_token(&Token::RParen)?;
12525            let alias = self.maybe_parse_table_alias()?;
12526            Ok(TableFactor::TableFunction { expr, alias })
12527        } else if self.consume_token(&Token::LParen) {
12528            // A left paren introduces either a derived table (i.e., a subquery)
12529            // or a nested join. It's nearly impossible to determine ahead of
12530            // time which it is... so we just try to parse both.
12531            //
12532            // Here's an example that demonstrates the complexity:
12533            //                     /-------------------------------------------------------\
12534            //                     | /-----------------------------------\                 |
12535            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
12536            //                   ^ ^ ^ ^
12537            //                   | | | |
12538            //                   | | | |
12539            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
12540            //                   | | (3) starts a derived table (subquery)
12541            //                   | (2) starts a nested join
12542            //                   (1) an additional set of parens around a nested join
12543            //
12544
12545            // If the recently consumed '(' starts a derived table, the call to
12546            // `parse_derived_table_factor` below will return success after parsing the
12547            // subquery, followed by the closing ')', and the alias of the derived table.
12548            // In the example above this is case (3).
12549            if let Some(mut table) =
12550                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
12551            {
12552                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
12553                {
12554                    table = match kw {
12555                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
12556                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
12557                        _ => unreachable!(),
12558                    }
12559                }
12560                return Ok(table);
12561            }
12562
12563            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
12564            // recently consumed does not start a derived table (cases 1, 2, or 4).
12565            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
12566
12567            // Inside the parentheses we expect to find an (A) table factor
12568            // followed by some joins or (B) another level of nesting.
12569            let mut table_and_joins = self.parse_table_and_joins()?;
12570
12571            #[allow(clippy::if_same_then_else)]
12572            if !table_and_joins.joins.is_empty() {
12573                self.expect_token(&Token::RParen)?;
12574                let alias = self.maybe_parse_table_alias()?;
12575                Ok(TableFactor::NestedJoin {
12576                    table_with_joins: Box::new(table_and_joins),
12577                    alias,
12578                }) // (A)
12579            } else if let TableFactor::NestedJoin {
12580                table_with_joins: _,
12581                alias: _,
12582            } = &table_and_joins.relation
12583            {
12584                // (B): `table_and_joins` (what we found inside the parentheses)
12585                // is a nested join `(foo JOIN bar)`, not followed by other joins.
12586                self.expect_token(&Token::RParen)?;
12587                let alias = self.maybe_parse_table_alias()?;
12588                Ok(TableFactor::NestedJoin {
12589                    table_with_joins: Box::new(table_and_joins),
12590                    alias,
12591                })
12592            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
12593                // Dialect-specific behavior: Snowflake diverges from the
12594                // standard and from most of the other implementations by
12595                // allowing extra parentheses not only around a join (B), but
12596                // around lone table names (e.g. `FROM (mytable [AS alias])`)
12597                // and around derived tables (e.g. `FROM ((SELECT ...)
12598                // [AS alias])`) as well.
12599                self.expect_token(&Token::RParen)?;
12600
12601                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
12602                    // Snowflake also allows specifying an alias *after* parens
12603                    // e.g. `FROM (mytable) AS alias`
12604                    match &mut table_and_joins.relation {
12605                        TableFactor::Derived { alias, .. }
12606                        | TableFactor::Table { alias, .. }
12607                        | TableFactor::Function { alias, .. }
12608                        | TableFactor::UNNEST { alias, .. }
12609                        | TableFactor::JsonTable { alias, .. }
12610                        | TableFactor::XmlTable { alias, .. }
12611                        | TableFactor::OpenJsonTable { alias, .. }
12612                        | TableFactor::TableFunction { alias, .. }
12613                        | TableFactor::Pivot { alias, .. }
12614                        | TableFactor::Unpivot { alias, .. }
12615                        | TableFactor::MatchRecognize { alias, .. }
12616                        | TableFactor::NestedJoin { alias, .. } => {
12617                            // but not `FROM (mytable AS alias1) AS alias2`.
12618                            if let Some(inner_alias) = alias {
12619                                return Err(ParserError::ParserError(format!(
12620                                    "duplicate alias {inner_alias}"
12621                                )));
12622                            }
12623                            // Act as if the alias was specified normally next
12624                            // to the table name: `(mytable) AS alias` ->
12625                            // `(mytable AS alias)`
12626                            alias.replace(outer_alias);
12627                        }
12628                    };
12629                }
12630                // Do not store the extra set of parens in the AST
12631                Ok(table_and_joins.relation)
12632            } else {
12633                // The SQL spec prohibits derived tables and bare tables from
12634                // appearing alone in parentheses (e.g. `FROM (mytable)`)
12635                self.expected("joined table", self.peek_token())
12636            }
12637        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
12638            && matches!(
12639                self.peek_tokens(),
12640                [
12641                    Token::Word(Word {
12642                        keyword: Keyword::VALUES,
12643                        ..
12644                    }),
12645                    Token::LParen
12646                ]
12647            )
12648        {
12649            self.expect_keyword_is(Keyword::VALUES)?;
12650
12651            // Snowflake and Databricks allow syntax like below:
12652            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
12653            // where there are no parentheses around the VALUES clause.
12654            let values = SetExpr::Values(self.parse_values(false)?);
12655            let alias = self.maybe_parse_table_alias()?;
12656            Ok(TableFactor::Derived {
12657                lateral: false,
12658                subquery: Box::new(Query {
12659                    with: None,
12660                    body: Box::new(values),
12661                    order_by: None,
12662                    limit_clause: None,
12663                    fetch: None,
12664                    locks: vec![],
12665                    for_clause: None,
12666                    settings: None,
12667                    format_clause: None,
12668                    pipe_operators: vec![],
12669                }),
12670                alias,
12671            })
12672        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
12673            && self.parse_keyword(Keyword::UNNEST)
12674        {
12675            self.expect_token(&Token::LParen)?;
12676            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
12677            self.expect_token(&Token::RParen)?;
12678
12679            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
12680            let alias = self.maybe_parse_table_alias()?;
12681
12682            let with_offset = self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]).is_ok();
12683
12684            let with_offset_alias = if with_offset {
12685                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
12686            } else {
12687                None
12688            };
12700
12701            Ok(TableFactor::UNNEST {
12702                alias,
12703                array_exprs,
12704                with_offset,
12705                with_offset_alias,
12706                with_ordinality,
12707            })
12708        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
12709            let json_expr = self.parse_expr()?;
12710            self.expect_token(&Token::Comma)?;
12711            let json_path = self.parse_value()?.value;
12712            self.expect_keyword_is(Keyword::COLUMNS)?;
12713            self.expect_token(&Token::LParen)?;
12714            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
12715            self.expect_token(&Token::RParen)?;
12716            self.expect_token(&Token::RParen)?;
12717            let alias = self.maybe_parse_table_alias()?;
12718            Ok(TableFactor::JsonTable {
12719                json_expr,
12720                json_path,
12721                columns,
12722                alias,
12723            })
12724        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
12725            self.prev_token();
12726            self.parse_open_json_table_factor()
12727        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
12728            self.prev_token();
12729            self.parse_xml_table_factor()
12730        } else {
12731            let name = self.parse_object_name(true)?;
12732
12733            let json_path = match self.peek_token().token {
12734                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
12735                _ => None,
12736            };
12737
12738            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
12739                && self.parse_keyword(Keyword::PARTITION)
12740            {
12741                self.parse_parenthesized_identifiers()?
12742            } else {
12743                vec![]
12744            };
12745
12746            // Parse potential version qualifier
12747            let version = self.maybe_parse_table_version()?;
12748
12749            // Postgres, MSSQL, ClickHouse: table-valued functions:
12750            let args = if self.consume_token(&Token::LParen) {
12751                Some(self.parse_table_function_args()?)
12752            } else {
12753                None
12754            };
12755
12756            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
12757
12758            let mut sample = None;
12759            if self.dialect.supports_table_sample_before_alias() {
12760                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
12761                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
12762                }
12763            }
12764
12765            let alias = self.maybe_parse_table_alias()?;
12766
12767            // MySQL-specific table hints:
12768            let index_hints = if self.dialect.supports_table_hints() {
12769                self.maybe_parse(|p| p.parse_table_index_hints())?
12770                    .unwrap_or(vec![])
12771            } else {
12772                vec![]
12773            };
12774
12775            // MSSQL-specific table hints:
12776            let mut with_hints = vec![];
12777            if self.parse_keyword(Keyword::WITH) {
12778                if self.consume_token(&Token::LParen) {
12779                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
12780                    self.expect_token(&Token::RParen)?;
12781                } else {
12782                    // rewind, as WITH may belong to the next statement's CTE
12783                    self.prev_token();
12784                }
12785            };
12786
12787            if !self.dialect.supports_table_sample_before_alias() {
12788                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
12789                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
12790                }
12791            }
12792
12793            let mut table = TableFactor::Table {
12794                name,
12795                alias,
12796                args,
12797                with_hints,
12798                version,
12799                partitions,
12800                with_ordinality,
12801                json_path,
12802                sample,
12803                index_hints,
12804            };
12805
12806            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
12807                table = match kw {
12808                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
12809                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
12810                    _ => unreachable!(),
12811                }
12812            }
12813
12814            if self.dialect.supports_match_recognize()
12815                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
12816            {
12817                table = self.parse_match_recognize(table)?;
12818            }
12819
12820            Ok(table)
12821        }
12822    }
12823
12824    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
12825        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
12826            TableSampleModifier::TableSample
12827        } else if self.parse_keyword(Keyword::SAMPLE) {
12828            TableSampleModifier::Sample
12829        } else {
12830            return Ok(None);
12831        };
12832        self.parse_table_sample(modifier).map(Some)
12833    }
12834
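    /// Parses the body of a sampling clause once the introducing `TABLESAMPLE` or
    /// `SAMPLE` keyword has been consumed, e.g. the `BERNOULLI (10)` in
    /// `SELECT * FROM t TABLESAMPLE BERNOULLI (10)` or the Hive-style
    /// `(BUCKET 4 OUT OF 16 ON id)`.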
12835    fn parse_table_sample(
12836        &mut self,
12837        modifier: TableSampleModifier,
12838    ) -> Result<Box<TableSample>, ParserError> {
12839        let name = match self.parse_one_of_keywords(&[
12840            Keyword::BERNOULLI,
12841            Keyword::ROW,
12842            Keyword::SYSTEM,
12843            Keyword::BLOCK,
12844        ]) {
12845            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
12846            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
12847            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
12848            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
12849            _ => None,
12850        };
12851
12852        let parenthesized = self.consume_token(&Token::LParen);
12853
12854        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
12855            let selected_bucket = self.parse_number_value()?.value;
12856            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
12857            let total = self.parse_number_value()?.value;
12858            let on = if self.parse_keyword(Keyword::ON) {
12859                Some(self.parse_expr()?)
12860            } else {
12861                None
12862            };
12863            (
12864                None,
12865                Some(TableSampleBucket {
12866                    bucket: selected_bucket,
12867                    total,
12868                    on,
12869                }),
12870            )
12871        } else {
12872            let value = match self.maybe_parse(|p| p.parse_expr())? {
12873                Some(num) => num,
12874                None => {
12875                    let next_token = self.next_token();
12876                    if let Token::Word(w) = next_token.token {
12877                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
12878                    } else {
12879                        return parser_err!(
12880                            "Expecting number or byte length e.g. 100M",
12881                            self.peek_token().span.start
12882                        );
12883                    }
12884                }
12885            };
12886            let unit = if self.parse_keyword(Keyword::ROWS) {
12887                Some(TableSampleUnit::Rows)
12888            } else if self.parse_keyword(Keyword::PERCENT) {
12889                Some(TableSampleUnit::Percent)
12890            } else {
12891                None
12892            };
12893            (
12894                Some(TableSampleQuantity {
12895                    parenthesized,
12896                    value,
12897                    unit,
12898                }),
12899                None,
12900            )
12901        };
12902        if parenthesized {
12903            self.expect_token(&Token::RParen)?;
12904        }
12905
12906        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
12907            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
12908        } else if self.parse_keyword(Keyword::SEED) {
12909            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
12910        } else {
12911            None
12912        };
12913
12914        let offset = if self.parse_keyword(Keyword::OFFSET) {
12915            Some(self.parse_expr()?)
12916        } else {
12917            None
12918        };
12919
12920        Ok(Box::new(TableSample {
12921            modifier,
12922            name,
12923            quantity,
12924            seed,
12925            bucket,
12926            offset,
12927        }))
12928    }
12929
12930    fn parse_table_sample_seed(
12931        &mut self,
12932        modifier: TableSampleSeedModifier,
12933    ) -> Result<TableSampleSeed, ParserError> {
12934        self.expect_token(&Token::LParen)?;
12935        let value = self.parse_number_value()?.value;
12936        self.expect_token(&Token::RParen)?;
12937        Ok(TableSampleSeed { modifier, value })
12938    }
12939
12940    /// Parses an `OPENJSON( jsonExpression [ , path ] ) [ <with_clause> ]` clause,
12941    /// assuming the `OPENJSON` keyword was already consumed.
12942    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12943        self.expect_token(&Token::LParen)?;
12944        let json_expr = self.parse_expr()?;
12945        let json_path = if self.consume_token(&Token::Comma) {
12946            Some(self.parse_value()?.value)
12947        } else {
12948            None
12949        };
12950        self.expect_token(&Token::RParen)?;
12951        let columns = if self.parse_keyword(Keyword::WITH) {
12952            self.expect_token(&Token::LParen)?;
12953            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
12954            self.expect_token(&Token::RParen)?;
12955            columns
12956        } else {
12957            Vec::new()
12958        };
12959        let alias = self.maybe_parse_table_alias()?;
12960        Ok(TableFactor::OpenJsonTable {
12961            json_expr,
12962            json_path,
12963            columns,
12964            alias,
12965        })
12966    }
12967
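    /// Parses an `XMLTABLE(...)` table factor, assuming the `XMLTABLE` keyword has
    /// already been consumed, e.g.
    /// `XMLTABLE('/rows/row' PASSING xml_data COLUMNS id INT PATH '@id') AS x`.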
12968    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12969        self.expect_token(&Token::LParen)?;
12970        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
12971            self.expect_token(&Token::LParen)?;
12972            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
12973            self.expect_token(&Token::RParen)?;
12974            self.expect_token(&Token::Comma)?;
12975            namespaces
12976        } else {
12977            vec![]
12978        };
12979        let row_expression = self.parse_expr()?;
12980        let passing = self.parse_xml_passing_clause()?;
12981        self.expect_keyword_is(Keyword::COLUMNS)?;
12982        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
12983        self.expect_token(&Token::RParen)?;
12984        let alias = self.maybe_parse_table_alias()?;
12985        Ok(TableFactor::XmlTable {
12986            namespaces,
12987            row_expression,
12988            passing,
12989            columns,
12990            alias,
12991        })
12992    }
12993
12994    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
12995        let uri = self.parse_expr()?;
12996        self.expect_keyword_is(Keyword::AS)?;
12997        let name = self.parse_identifier()?;
12998        Ok(XmlNamespaceDefinition { uri, name })
12999    }
13000
13001    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
13002        let name = self.parse_identifier()?;
13003
13004        let option = if self.parse_keyword(Keyword::FOR) {
13005            self.expect_keyword(Keyword::ORDINALITY)?;
13006            XmlTableColumnOption::ForOrdinality
13007        } else {
13008            let r#type = self.parse_data_type()?;
13009            let mut path = None;
13010            let mut default = None;
13011
13012            if self.parse_keyword(Keyword::PATH) {
13013                path = Some(self.parse_expr()?);
13014            }
13015
13016            if self.parse_keyword(Keyword::DEFAULT) {
13017                default = Some(self.parse_expr()?);
13018            }
13019
13020            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
13021            if !not_null {
13022                // NULL is the default but can be specified explicitly
13023                let _ = self.parse_keyword(Keyword::NULL);
13024            }
13025
13026            XmlTableColumnOption::NamedInfo {
13027                r#type,
13028                path,
13029                default,
13030                nullable: !not_null,
13031            }
13032        };
13033        Ok(XmlTableColumn { name, option })
13034    }
13035
13036    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
13037        let mut arguments = vec![];
13038        if self.parse_keyword(Keyword::PASSING) {
13039            loop {
13040                let by_value =
13041                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
13042                let expr = self.parse_expr()?;
13043                let alias = if self.parse_keyword(Keyword::AS) {
13044                    Some(self.parse_identifier()?)
13045                } else {
13046                    None
13047                };
13048                arguments.push(XmlPassingArgument {
13049                    expr,
13050                    alias,
13051                    by_value,
13052                });
13053                if !self.consume_token(&Token::Comma) {
13054                    break;
13055                }
13056            }
13057        }
13058        Ok(XmlPassingClause { arguments })
13059    }
13060
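    /// Parses a `MATCH_RECOGNIZE (...)` clause attached to a table factor, assuming
    /// the `MATCH_RECOGNIZE` keyword has already been consumed, e.g.:
    ///
    /// ```sql
    /// SELECT * FROM my_table MATCH_RECOGNIZE (
    ///     PARTITION BY user_id
    ///     ORDER BY event_time
    ///     MEASURES MATCH_NUMBER() AS match_number
    ///     ONE ROW PER MATCH
    ///     PATTERN (a b+)
    ///     DEFINE b AS price > 10
    /// ) AS mr
    /// ```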
13061    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
13062        self.expect_token(&Token::LParen)?;
13063
13064        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
13065            self.parse_comma_separated(Parser::parse_expr)?
13066        } else {
13067            vec![]
13068        };
13069
13070        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13071            self.parse_comma_separated(Parser::parse_order_by_expr)?
13072        } else {
13073            vec![]
13074        };
13075
13076        let measures = if self.parse_keyword(Keyword::MEASURES) {
13077            self.parse_comma_separated(|p| {
13078                let expr = p.parse_expr()?;
13079                let _ = p.parse_keyword(Keyword::AS);
13080                let alias = p.parse_identifier()?;
13081                Ok(Measure { expr, alias })
13082            })?
13083        } else {
13084            vec![]
13085        };
13086
13087        let rows_per_match =
13088            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
13089                Some(RowsPerMatch::OneRow)
13090            } else if self.parse_keywords(&[
13091                Keyword::ALL,
13092                Keyword::ROWS,
13093                Keyword::PER,
13094                Keyword::MATCH,
13095            ]) {
13096                Some(RowsPerMatch::AllRows(
13097                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
13098                        Some(EmptyMatchesMode::Show)
13099                    } else if self.parse_keywords(&[
13100                        Keyword::OMIT,
13101                        Keyword::EMPTY,
13102                        Keyword::MATCHES,
13103                    ]) {
13104                        Some(EmptyMatchesMode::Omit)
13105                    } else if self.parse_keywords(&[
13106                        Keyword::WITH,
13107                        Keyword::UNMATCHED,
13108                        Keyword::ROWS,
13109                    ]) {
13110                        Some(EmptyMatchesMode::WithUnmatched)
13111                    } else {
13112                        None
13113                    },
13114                ))
13115            } else {
13116                None
13117            };
13118
13119        let after_match_skip =
13120            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
13121                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
13122                    Some(AfterMatchSkip::PastLastRow)
13123                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
13124                    Some(AfterMatchSkip::ToNextRow)
13125                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
13126                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
13127                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
13128                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
13129                } else {
13130                    let found = self.next_token();
13131                    return self.expected("after match skip option", found);
13132                }
13133            } else {
13134                None
13135            };
13136
13137        self.expect_keyword_is(Keyword::PATTERN)?;
13138        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
13139
13140        self.expect_keyword_is(Keyword::DEFINE)?;
13141
13142        let symbols = self.parse_comma_separated(|p| {
13143            let symbol = p.parse_identifier()?;
13144            p.expect_keyword_is(Keyword::AS)?;
13145            let definition = p.parse_expr()?;
13146            Ok(SymbolDefinition { symbol, definition })
13147        })?;
13148
13149        self.expect_token(&Token::RParen)?;
13150
13151        let alias = self.maybe_parse_table_alias()?;
13152
13153        Ok(TableFactor::MatchRecognize {
13154            table: Box::new(table),
13155            partition_by,
13156            order_by,
13157            measures,
13158            rows_per_match,
13159            after_match_skip,
13160            pattern,
13161            symbols,
13162            alias,
13163        })
13164    }
13165
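    /// Parses a primary row-pattern element: `^`, `$`, an exclusion
    /// `{- symbol -}`, `PERMUTE(...)`, a parenthesized group, or a named
    /// pattern symbol.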
13166    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13167        match self.next_token().token {
13168            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
13169            Token::Placeholder(s) if s == "$" => {
13170                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
13171            }
13172            Token::LBrace => {
13173                self.expect_token(&Token::Minus)?;
13174                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
13175                self.expect_token(&Token::Minus)?;
13176                self.expect_token(&Token::RBrace)?;
13177                Ok(MatchRecognizePattern::Exclude(symbol))
13178            }
13179            Token::Word(Word {
13180                value,
13181                quote_style: None,
13182                ..
13183            }) if value == "PERMUTE" => {
13184                self.expect_token(&Token::LParen)?;
13185                let symbols = self.parse_comma_separated(|p| {
13186                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
13187                })?;
13188                self.expect_token(&Token::RParen)?;
13189                Ok(MatchRecognizePattern::Permute(symbols))
13190            }
13191            Token::LParen => {
13192                let pattern = self.parse_pattern()?;
13193                self.expect_token(&Token::RParen)?;
13194                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
13195            }
13196            _ => {
13197                self.prev_token();
13198                self.parse_identifier()
13199                    .map(MatchRecognizeSymbol::Named)
13200                    .map(MatchRecognizePattern::Symbol)
13201            }
13202        }
13203    }
13204
13205    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13206        let mut pattern = self.parse_base_pattern()?;
13207        loop {
13208            let token = self.next_token();
13209            let quantifier = match token.token {
13210                Token::Mul => RepetitionQuantifier::ZeroOrMore,
13211                Token::Plus => RepetitionQuantifier::OneOrMore,
13212                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
13213                Token::LBrace => {
13214                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
13215                    let token = self.next_token();
13216                    match token.token {
13217                        Token::Comma => {
13218                            let next_token = self.next_token();
13219                            let Token::Number(n, _) = next_token.token else {
13220                                return self.expected("literal number", next_token);
13221                            };
13222                            self.expect_token(&Token::RBrace)?;
13223                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
13224                        }
13225                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
13226                            let next_token = self.next_token();
13227                            match next_token.token {
13228                                Token::Number(m, _) => {
13229                                    self.expect_token(&Token::RBrace)?;
13230                                    RepetitionQuantifier::Range(
13231                                        Self::parse(n, token.span.start)?,
13232                                        Self::parse(m, token.span.start)?,
13233                                    )
13234                                }
13235                                Token::RBrace => {
13236                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
13237                                }
13238                                _ => {
13239                                    return self.expected("} or upper bound", next_token);
13240                                }
13241                            }
13242                        }
13243                        Token::Number(n, _) => {
13244                            self.expect_token(&Token::RBrace)?;
13245                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
13246                        }
13247                        _ => return self.expected("quantifier range", token),
13248                    }
13249                }
13250                _ => {
13251                    self.prev_token();
13252                    break;
13253                }
13254            };
13255            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
13256        }
13257        Ok(pattern)
13258    }
13259
13260    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13261        let mut patterns = vec![self.parse_repetition_pattern()?];
13262        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
13263            patterns.push(self.parse_repetition_pattern()?);
13264        }
13265        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
13266            Ok([pattern]) => Ok(pattern),
13267            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
13268        }
13269    }
13270
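    /// Parses a complete `MATCH_RECOGNIZE` row pattern, handling `|`
    /// alternation on top of concatenation and repetition.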
13271    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13272        let pattern = self.parse_concat_pattern()?;
13273        if self.consume_token(&Token::Pipe) {
13274            match self.parse_pattern()? {
13275                // flatten nested alternations
13276                MatchRecognizePattern::Alternation(mut patterns) => {
13277                    patterns.insert(0, pattern);
13278                    Ok(MatchRecognizePattern::Alternation(patterns))
13279                }
13280                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
13281            }
13282        } else {
13283            Ok(pattern)
13284        }
13285    }
13286
    /// Parses the timestamp version specifier (i.e. for querying historical data)
13288    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
13289        if self.dialect.supports_timestamp_versioning() {
13290            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
13291            {
13292                let expr = self.parse_expr()?;
13293                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
13294            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
13295                let func_name = self.parse_object_name(true)?;
13296                let func = self.parse_function(func_name)?;
13297                return Ok(Some(TableVersion::Function(func)));
13298            }
13299        }
13300        Ok(None)
13301    }
13302
13303    /// Parses MySQL's JSON_TABLE column definition.
13304    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
13305    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
13306        if self.parse_keyword(Keyword::NESTED) {
13307            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
13308            let path = self.parse_value()?.value;
13309            self.expect_keyword_is(Keyword::COLUMNS)?;
13310            let columns = self.parse_parenthesized(|p| {
13311                p.parse_comma_separated(Self::parse_json_table_column_def)
13312            })?;
13313            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
13314                path,
13315                columns,
13316            }));
13317        }
13318        let name = self.parse_identifier()?;
13319        if self.parse_keyword(Keyword::FOR) {
13320            self.expect_keyword_is(Keyword::ORDINALITY)?;
13321            return Ok(JsonTableColumn::ForOrdinality(name));
13322        }
13323        let r#type = self.parse_data_type()?;
13324        let exists = self.parse_keyword(Keyword::EXISTS);
13325        self.expect_keyword_is(Keyword::PATH)?;
13326        let path = self.parse_value()?.value;
13327        let mut on_empty = None;
13328        let mut on_error = None;
13329        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
13330            if self.parse_keyword(Keyword::EMPTY) {
13331                on_empty = Some(error_handling);
13332            } else {
13333                self.expect_keyword_is(Keyword::ERROR)?;
13334                on_error = Some(error_handling);
13335            }
13336        }
13337        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
13338            name,
13339            r#type,
13340            path,
13341            exists,
13342            on_empty,
13343            on_error,
13344        }))
13345    }
13346
13347    /// Parses MSSQL's `OPENJSON WITH` column definition.
13348    ///
13349    /// ```sql
13350    /// colName type [ column_path ] [ AS JSON ]
13351    /// ```
13352    ///
13353    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
13354    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
13355        let name = self.parse_identifier()?;
13356        let r#type = self.parse_data_type()?;
13357        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
13358            self.next_token();
13359            Some(path)
13360        } else {
13361            None
13362        };
13363        let as_json = self.parse_keyword(Keyword::AS);
13364        if as_json {
13365            self.expect_keyword_is(Keyword::JSON)?;
13366        }
13367        Ok(OpenJsonTableColumn {
13368            name,
13369            r#type,
13370            path,
13371            as_json,
13372        })
13373    }
13374
13375    fn parse_json_table_column_error_handling(
13376        &mut self,
13377    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
13378        let res = if self.parse_keyword(Keyword::NULL) {
13379            JsonTableColumnErrorHandling::Null
13380        } else if self.parse_keyword(Keyword::ERROR) {
13381            JsonTableColumnErrorHandling::Error
13382        } else if self.parse_keyword(Keyword::DEFAULT) {
13383            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
13384        } else {
13385            return Ok(None);
13386        };
13387        self.expect_keyword_is(Keyword::ON)?;
13388        Ok(Some(res))
13389    }
13390
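    /// Parses a derived table factor (a parenthesized subquery) and its
    /// optional alias. The opening `(` is expected to have already been
    /// consumed by the caller.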
13391    pub fn parse_derived_table_factor(
13392        &mut self,
13393        lateral: IsLateral,
13394    ) -> Result<TableFactor, ParserError> {
13395        let subquery = self.parse_query()?;
13396        self.expect_token(&Token::RParen)?;
13397        let alias = self.maybe_parse_table_alias()?;
13398        Ok(TableFactor::Derived {
13399            lateral: match lateral {
13400                Lateral => true,
13401                NotLateral => false,
13402            },
13403            subquery,
13404            alias,
13405        })
13406    }
13407
13408    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
13409        let function_name = match self.next_token().token {
13410            Token::Word(w) => Ok(w.value),
13411            _ => self.expected("a function identifier", self.peek_token()),
13412        }?;
13413        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
13414        let alias = if self.parse_keyword(Keyword::AS) {
13415            Some(self.parse_identifier()?)
13416        } else {
13417            None
13418        };
13419
13420        Ok(ExprWithAlias { expr, alias })
    }

    /// Parses an expression with an optional alias
13423    ///
13424    /// Examples:
13425    ///
13426    /// ```sql
13427    /// SUM(price) AS total_price
13428    /// ```
13429    /// ```sql
13430    /// SUM(price)
13431    /// ```
13432    ///
13433    /// Example
13434    /// ```
13435    /// # use sqlparser::parser::{Parser, ParserError};
13436    /// # use sqlparser::dialect::GenericDialect;
13437    /// # fn main() ->Result<(), ParserError> {
13438    /// let sql = r#"SUM("a") as "b""#;
13439    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
13440    /// let expr_with_alias = parser.parse_expr_with_alias()?;
13441    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
13442    /// # Ok(())
    /// # }
    /// ```
13444    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
13445        let expr = self.parse_expr()?;
13446        let alias = if self.parse_keyword(Keyword::AS) {
13447            Some(self.parse_identifier()?)
13448        } else {
13449            None
13450        };
13451
13452        Ok(ExprWithAlias { expr, alias })
13453    }
13454
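    /// Parses the parenthesized body of a `PIVOT` operator applied to `table`,
    /// e.g. `PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB'))`, including the
    /// optional `DEFAULT ON NULL (<expr>)` clause and table alias.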
13455    pub fn parse_pivot_table_factor(
13456        &mut self,
13457        table: TableFactor,
13458    ) -> Result<TableFactor, ParserError> {
13459        self.expect_token(&Token::LParen)?;
13460        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
13461        self.expect_keyword_is(Keyword::FOR)?;
13462        let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
13463        self.expect_keyword_is(Keyword::IN)?;
13464
13465        self.expect_token(&Token::LParen)?;
13466        let value_source = if self.parse_keyword(Keyword::ANY) {
13467            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13468                self.parse_comma_separated(Parser::parse_order_by_expr)?
13469            } else {
13470                vec![]
13471            };
13472            PivotValueSource::Any(order_by)
13473        } else if self.peek_sub_query() {
13474            PivotValueSource::Subquery(self.parse_query()?)
13475        } else {
13476            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
13477        };
13478        self.expect_token(&Token::RParen)?;
13479
13480        let default_on_null =
13481            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
13482                self.expect_token(&Token::LParen)?;
13483                let expr = self.parse_expr()?;
13484                self.expect_token(&Token::RParen)?;
13485                Some(expr)
13486            } else {
13487                None
13488            };
13489
13490        self.expect_token(&Token::RParen)?;
13491        let alias = self.maybe_parse_table_alias()?;
13492        Ok(TableFactor::Pivot {
13493            table: Box::new(table),
13494            aggregate_functions,
13495            value_column,
13496            value_source,
13497            default_on_null,
13498            alias,
13499        })
13500    }
13501
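    /// Parses the body of an `UNPIVOT` operator applied to `table`, e.g.
    /// `UNPIVOT(value FOR name IN (col1, col2))`, with an optional leading
    /// `INCLUDE NULLS` or `EXCLUDE NULLS`.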
13502    pub fn parse_unpivot_table_factor(
13503        &mut self,
13504        table: TableFactor,
13505    ) -> Result<TableFactor, ParserError> {
13506        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
13507            self.expect_keyword_is(Keyword::NULLS)?;
13508            Some(NullInclusion::IncludeNulls)
13509        } else if self.parse_keyword(Keyword::EXCLUDE) {
13510            self.expect_keyword_is(Keyword::NULLS)?;
13511            Some(NullInclusion::ExcludeNulls)
13512        } else {
13513            None
13514        };
13515        self.expect_token(&Token::LParen)?;
13516        let value = self.parse_identifier()?;
13517        self.expect_keyword_is(Keyword::FOR)?;
13518        let name = self.parse_identifier()?;
13519        self.expect_keyword_is(Keyword::IN)?;
13520        let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
13521        self.expect_token(&Token::RParen)?;
13522        let alias = self.maybe_parse_table_alias()?;
13523        Ok(TableFactor::Unpivot {
13524            table: Box::new(table),
13525            value,
13526            null_inclusion,
13527            name,
13528            columns,
13529            alias,
13530        })
13531    }
13532
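    /// Parses the constraint of a `JOIN`: `ON <expr>`, `USING (<columns>)`,
    /// or no constraint at all.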
13533    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
13534        if natural {
13535            Ok(JoinConstraint::Natural)
13536        } else if self.parse_keyword(Keyword::ON) {
13537            let constraint = self.parse_expr()?;
13538            Ok(JoinConstraint::On(constraint))
13539        } else if self.parse_keyword(Keyword::USING) {
13540            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
13541            Ok(JoinConstraint::Using(columns))
13542        } else {
13543            Ok(JoinConstraint::None)
13544            //self.expected("ON, or USING after JOIN", self.peek_token())
13545        }
13546    }
13547
13548    /// Parse a GRANT statement.
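    ///
    /// A minimal usage sketch (illustrative example; assumes the permissive
    /// `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Illustrative input; any GRANT accepted by the dialect works here.
    /// let sql = "GRANT SELECT, INSERT ON my_table TO role1";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```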
13549    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
13550        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
13551
13552        self.expect_keyword_is(Keyword::TO)?;
13553        let grantees = self.parse_grantees()?;
13554
13555        let with_grant_option =
13556            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
13557
13558        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
13559            Some(self.parse_identifier()?)
13560        } else {
13561            None
13562        };
13563
13564        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
13565            Some(self.parse_identifier()?)
13566        } else {
13567            None
13568        };
13569
13570        Ok(Statement::Grant {
13571            privileges,
13572            objects,
13573            grantees,
13574            with_grant_option,
13575            as_grantor,
13576            granted_by,
13577        })
13578    }
13579
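    /// Parses the grantee list of a GRANT/DENY/REVOKE statement, including an
    /// optional grantee type (`ROLE`, `USER`, `GROUP`, `PUBLIC`, ...) and
    /// Redshift-style `namespace:name` grantees.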
13580    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
13581        let mut values = vec![];
13582        let mut grantee_type = GranteesType::None;
13583        loop {
13584            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
13585                GranteesType::Role
13586            } else if self.parse_keyword(Keyword::USER) {
13587                GranteesType::User
13588            } else if self.parse_keyword(Keyword::SHARE) {
13589                GranteesType::Share
13590            } else if self.parse_keyword(Keyword::GROUP) {
13591                GranteesType::Group
13592            } else if self.parse_keyword(Keyword::PUBLIC) {
13593                GranteesType::Public
13594            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
13595                GranteesType::DatabaseRole
13596            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
13597                GranteesType::ApplicationRole
13598            } else if self.parse_keyword(Keyword::APPLICATION) {
13599                GranteesType::Application
13600            } else {
                grantee_type.clone() // keep from previous iteration, if not specified
13602            };
13603
13604            if self
13605                .dialect
13606                .get_reserved_grantees_types()
13607                .contains(&new_grantee_type)
13608            {
13609                self.prev_token();
13610            } else {
13611                grantee_type = new_grantee_type;
13612            }
13613
13614            let grantee = if grantee_type == GranteesType::Public {
13615                Grantee {
13616                    grantee_type: grantee_type.clone(),
13617                    name: None,
13618                }
13619            } else {
13620                let mut name = self.parse_grantee_name()?;
13621                if self.consume_token(&Token::Colon) {
13622                    // Redshift supports namespace prefix for external users and groups:
13623                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
13624                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
13625                    let ident = self.parse_identifier()?;
13626                    if let GranteeName::ObjectName(namespace) = name {
13627                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
13628                            format!("{}:{}", namespace, ident),
13629                        )]));
13630                    };
13631                }
13632                Grantee {
13633                    grantee_type: grantee_type.clone(),
13634                    name: Some(name),
13635                }
13636            };
13637
13638            values.push(grantee);
13639
13640            if !self.consume_token(&Token::Comma) {
13641                break;
13642            }
13643        }
13644
13645        Ok(values)
13646    }
13647
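    /// Parses the privilege list and the optional `ON <objects>` clause shared
    /// by `GRANT`, `DENY`, and `REVOKE` statements.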
13648    pub fn parse_grant_deny_revoke_privileges_objects(
13649        &mut self,
13650    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
13651        let privileges = if self.parse_keyword(Keyword::ALL) {
13652            Privileges::All {
13653                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
13654            }
13655        } else {
13656            let actions = self.parse_actions_list()?;
13657            Privileges::Actions(actions)
13658        };
13659
13660        let objects = if self.parse_keyword(Keyword::ON) {
13661            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
13662                Some(GrantObjects::AllTablesInSchema {
13663                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13664                })
13665            } else if self.parse_keywords(&[
13666                Keyword::ALL,
13667                Keyword::SEQUENCES,
13668                Keyword::IN,
13669                Keyword::SCHEMA,
13670            ]) {
13671                Some(GrantObjects::AllSequencesInSchema {
13672                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13673                })
13674            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
13675                Some(GrantObjects::ResourceMonitors(self.parse_comma_separated(
13676                    |p| p.parse_object_name_with_wildcards(false, true),
13677                )?))
13678            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
13679                Some(GrantObjects::ComputePools(self.parse_comma_separated(
13680                    |p| p.parse_object_name_with_wildcards(false, true),
13681                )?))
13682            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
13683                Some(GrantObjects::FailoverGroup(self.parse_comma_separated(
13684                    |p| p.parse_object_name_with_wildcards(false, true),
13685                )?))
13686            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
13687                Some(GrantObjects::ReplicationGroup(self.parse_comma_separated(
13688                    |p| p.parse_object_name_with_wildcards(false, true),
13689                )?))
13690            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
13691                Some(GrantObjects::ExternalVolumes(self.parse_comma_separated(
13692                    |p| p.parse_object_name_with_wildcards(false, true),
13693                )?))
13694            } else {
                let object_type = self.parse_one_of_keywords(&[
                    Keyword::SEQUENCE,
                    Keyword::DATABASE,
                    Keyword::SCHEMA,
                    Keyword::TABLE,
                    Keyword::VIEW,
                    Keyword::WAREHOUSE,
                    Keyword::INTEGRATION,
                    Keyword::USER,
                    Keyword::CONNECTION,
                ]);
13709                let objects =
13710                    self.parse_comma_separated(|p| p.parse_object_name_with_wildcards(false, true));
13711                match object_type {
13712                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
13713                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
13714                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
13715                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
13716                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
13717                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
13718                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
13719                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
13720                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
13721                    _ => unreachable!(),
13722                }
13723            }
13724        } else {
13725            None
13726        };
13727
13728        Ok((privileges, objects))
13729    }
13730
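    /// Parses a single privilege in a `GRANT`/`REVOKE` privilege list, e.g.
    /// `SELECT (col1, col2)`, `USAGE`, or a multi-word privilege such as
    /// `IMPORTED PRIVILEGES`.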
13731    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
13732        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
13733            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
13734            if columns.is_empty() {
13735                Ok(None)
13736            } else {
13737                Ok(Some(columns))
13738            }
13739        }
13740
13741        // Multi-word privileges
13742        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
13743            Ok(Action::ImportedPrivileges)
13744        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
13745            Ok(Action::AddSearchOptimization)
13746        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
13747            Ok(Action::AttachListing)
13748        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
13749            Ok(Action::AttachPolicy)
13750        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
13751            Ok(Action::BindServiceEndpoint)
13752        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
13753            let role = self.parse_object_name(false)?;
13754            Ok(Action::DatabaseRole { role })
13755        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
13756            Ok(Action::EvolveSchema)
13757        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
13758            Ok(Action::ImportShare)
13759        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
13760            Ok(Action::ManageVersions)
13761        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
13762            Ok(Action::ManageReleases)
13763        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
13764            Ok(Action::OverrideShareRestrictions)
13765        } else if self.parse_keywords(&[
13766            Keyword::PURCHASE,
13767            Keyword::DATA,
13768            Keyword::EXCHANGE,
13769            Keyword::LISTING,
13770        ]) {
13771            Ok(Action::PurchaseDataExchangeListing)
13772        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
13773            Ok(Action::ResolveAll)
13774        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
13775            Ok(Action::ReadSession)
13776
13777        // Single-word privileges
13778        } else if self.parse_keyword(Keyword::APPLY) {
13779            let apply_type = self.parse_action_apply_type()?;
13780            Ok(Action::Apply { apply_type })
13781        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
13782            Ok(Action::ApplyBudget)
13783        } else if self.parse_keyword(Keyword::AUDIT) {
13784            Ok(Action::Audit)
13785        } else if self.parse_keyword(Keyword::CONNECT) {
13786            Ok(Action::Connect)
13787        } else if self.parse_keyword(Keyword::CREATE) {
13788            let obj_type = self.maybe_parse_action_create_object_type();
13789            Ok(Action::Create { obj_type })
13790        } else if self.parse_keyword(Keyword::DELETE) {
13791            Ok(Action::Delete)
13792        } else if self.parse_keyword(Keyword::EXEC) {
13793            let obj_type = self.maybe_parse_action_execute_obj_type();
13794            Ok(Action::Exec { obj_type })
13795        } else if self.parse_keyword(Keyword::EXECUTE) {
13796            let obj_type = self.maybe_parse_action_execute_obj_type();
13797            Ok(Action::Execute { obj_type })
13798        } else if self.parse_keyword(Keyword::FAILOVER) {
13799            Ok(Action::Failover)
13800        } else if self.parse_keyword(Keyword::INSERT) {
13801            Ok(Action::Insert {
13802                columns: parse_columns(self)?,
13803            })
13804        } else if self.parse_keyword(Keyword::MANAGE) {
13805            let manage_type = self.parse_action_manage_type()?;
13806            Ok(Action::Manage { manage_type })
13807        } else if self.parse_keyword(Keyword::MODIFY) {
13808            let modify_type = self.parse_action_modify_type();
13809            Ok(Action::Modify { modify_type })
13810        } else if self.parse_keyword(Keyword::MONITOR) {
13811            let monitor_type = self.parse_action_monitor_type();
13812            Ok(Action::Monitor { monitor_type })
13813        } else if self.parse_keyword(Keyword::OPERATE) {
13814            Ok(Action::Operate)
13815        } else if self.parse_keyword(Keyword::REFERENCES) {
13816            Ok(Action::References {
13817                columns: parse_columns(self)?,
13818            })
13819        } else if self.parse_keyword(Keyword::READ) {
13820            Ok(Action::Read)
13821        } else if self.parse_keyword(Keyword::REPLICATE) {
13822            Ok(Action::Replicate)
13823        } else if self.parse_keyword(Keyword::ROLE) {
13824            let role = self.parse_identifier()?;
13825            Ok(Action::Role { role })
13826        } else if self.parse_keyword(Keyword::SELECT) {
13827            Ok(Action::Select {
13828                columns: parse_columns(self)?,
13829            })
13830        } else if self.parse_keyword(Keyword::TEMPORARY) {
13831            Ok(Action::Temporary)
13832        } else if self.parse_keyword(Keyword::TRIGGER) {
13833            Ok(Action::Trigger)
13834        } else if self.parse_keyword(Keyword::TRUNCATE) {
13835            Ok(Action::Truncate)
13836        } else if self.parse_keyword(Keyword::UPDATE) {
13837            Ok(Action::Update {
13838                columns: parse_columns(self)?,
13839            })
13840        } else if self.parse_keyword(Keyword::USAGE) {
13841            Ok(Action::Usage)
13842        } else if self.parse_keyword(Keyword::OWNERSHIP) {
13843            Ok(Action::Ownership)
13844        } else {
13845            self.expected("a privilege keyword", self.peek_token())?
13846        }
13847    }
13848
13849    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
13850        // Multi-word object types
13851        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
13852            Some(ActionCreateObjectType::ApplicationPackage)
13853        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
13854            Some(ActionCreateObjectType::ComputePool)
13855        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
13856            Some(ActionCreateObjectType::DataExchangeListing)
13857        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
13858            Some(ActionCreateObjectType::ExternalVolume)
13859        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
13860            Some(ActionCreateObjectType::FailoverGroup)
13861        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
13862            Some(ActionCreateObjectType::NetworkPolicy)
13863        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
13864            Some(ActionCreateObjectType::OrganiationListing)
13865        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
13866            Some(ActionCreateObjectType::ReplicationGroup)
13867        }
13868        // Single-word object types
13869        else if self.parse_keyword(Keyword::ACCOUNT) {
13870            Some(ActionCreateObjectType::Account)
13871        } else if self.parse_keyword(Keyword::APPLICATION) {
13872            Some(ActionCreateObjectType::Application)
13873        } else if self.parse_keyword(Keyword::DATABASE) {
13874            Some(ActionCreateObjectType::Database)
13875        } else if self.parse_keyword(Keyword::INTEGRATION) {
13876            Some(ActionCreateObjectType::Integration)
13877        } else if self.parse_keyword(Keyword::ROLE) {
13878            Some(ActionCreateObjectType::Role)
13879        } else if self.parse_keyword(Keyword::SHARE) {
13880            Some(ActionCreateObjectType::Share)
13881        } else if self.parse_keyword(Keyword::USER) {
13882            Some(ActionCreateObjectType::User)
13883        } else if self.parse_keyword(Keyword::WAREHOUSE) {
13884            Some(ActionCreateObjectType::Warehouse)
13885        } else {
13886            None
13887        }
13888    }
13889
13890    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
13891        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
13892            Ok(ActionApplyType::AggregationPolicy)
13893        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
13894            Ok(ActionApplyType::AuthenticationPolicy)
13895        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
13896            Ok(ActionApplyType::JoinPolicy)
13897        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
13898            Ok(ActionApplyType::MaskingPolicy)
13899        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
13900            Ok(ActionApplyType::PackagesPolicy)
13901        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
13902            Ok(ActionApplyType::PasswordPolicy)
13903        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
13904            Ok(ActionApplyType::ProjectionPolicy)
13905        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
13906            Ok(ActionApplyType::RowAccessPolicy)
13907        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
13908            Ok(ActionApplyType::SessionPolicy)
13909        } else if self.parse_keyword(Keyword::TAG) {
13910            Ok(ActionApplyType::Tag)
13911        } else {
13912            self.expected("GRANT APPLY type", self.peek_token())
13913        }
13914    }
13915
13916    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
13917        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
13918            Some(ActionExecuteObjectType::DataMetricFunction)
13919        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
13920            Some(ActionExecuteObjectType::ManagedAlert)
13921        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
13922            Some(ActionExecuteObjectType::ManagedTask)
13923        } else if self.parse_keyword(Keyword::ALERT) {
13924            Some(ActionExecuteObjectType::Alert)
13925        } else if self.parse_keyword(Keyword::TASK) {
13926            Some(ActionExecuteObjectType::Task)
13927        } else {
13928            None
13929        }
13930    }
13931
13932    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
13933        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
13934            Ok(ActionManageType::AccountSupportCases)
13935        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
13936            Ok(ActionManageType::EventSharing)
13937        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
13938            Ok(ActionManageType::ListingAutoFulfillment)
13939        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
13940            Ok(ActionManageType::OrganizationSupportCases)
13941        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
13942            Ok(ActionManageType::UserSupportCases)
13943        } else if self.parse_keyword(Keyword::GRANTS) {
13944            Ok(ActionManageType::Grants)
13945        } else if self.parse_keyword(Keyword::WAREHOUSES) {
13946            Ok(ActionManageType::Warehouses)
13947        } else {
13948            self.expected("GRANT MANAGE type", self.peek_token())
13949        }
13950    }
13951
13952    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
13953        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
13954            Some(ActionModifyType::LogLevel)
13955        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
13956            Some(ActionModifyType::TraceLevel)
13957        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
13958            Some(ActionModifyType::SessionLogLevel)
13959        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
13960            Some(ActionModifyType::SessionTraceLevel)
13961        } else {
13962            None
13963        }
13964    }
13965
13966    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
13967        if self.parse_keyword(Keyword::EXECUTION) {
13968            Some(ActionMonitorType::Execution)
13969        } else if self.parse_keyword(Keyword::SECURITY) {
13970            Some(ActionMonitorType::Security)
13971        } else if self.parse_keyword(Keyword::USAGE) {
13972            Some(ActionMonitorType::Usage)
13973        } else {
13974            None
13975        }
13976    }
13977
13978    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
13979        let mut name = self.parse_object_name(false)?;
13980        if self.dialect.supports_user_host_grantee()
13981            && name.0.len() == 1
13982            && name.0[0].as_ident().is_some()
13983            && self.consume_token(&Token::AtSign)
13984        {
13985            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
13986            let host = self.parse_identifier()?;
13987            Ok(GranteeName::UserHost { user, host })
13988        } else {
13989            Ok(GranteeName::ObjectName(name))
13990        }
13991    }
13992
13993    /// Parse [`Statement::Deny`]
13994    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
13995        self.expect_keyword(Keyword::DENY)?;
13996
13997        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
13998        let objects = match objects {
13999            Some(o) => o,
14000            None => {
14001                return parser_err!(
14002                    "DENY statements must specify an object",
14003                    self.peek_token().span.start
14004                )
14005            }
14006        };
14007
14008        self.expect_keyword_is(Keyword::TO)?;
14009        let grantees = self.parse_grantees()?;
14010        let cascade = self.parse_cascade_option();
14011        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
14012            Some(self.parse_identifier()?)
14013        } else {
14014            None
14015        };
14016
14017        Ok(Statement::Deny(DenyStatement {
14018            privileges,
14019            objects,
14020            grantees,
14021            cascade,
14022            granted_by,
14023        }))
14024    }
14025
14026    /// Parse a REVOKE statement
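    ///
    /// A minimal usage sketch (illustrative example; assumes the permissive
    /// `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Illustrative input; any REVOKE accepted by the dialect works here.
    /// let sql = "REVOKE SELECT ON my_table FROM role1";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```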
14027    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
14028        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
14029
14030        self.expect_keyword_is(Keyword::FROM)?;
14031        let grantees = self.parse_grantees()?;
14032
14033        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
14034            Some(self.parse_identifier()?)
14035        } else {
14036            None
14037        };
14038
14039        let cascade = self.parse_cascade_option();
14040
14041        Ok(Statement::Revoke {
14042            privileges,
14043            objects,
14044            grantees,
14045            granted_by,
14046            cascade,
14047        })
14048    }
14049
    /// Parse a REPLACE statement
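    ///
    /// `REPLACE` is only accepted for the MySQL and generic dialects. A
    /// minimal usage sketch (illustrative example; assumes `MySqlDialect`):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MySqlDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Illustrative input; parsed as an INSERT with `replace_into` set.
    /// let dialect = MySqlDialect {};
    /// let statements = Parser::parse_sql(&dialect, "REPLACE INTO t (a) VALUES (1)")?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```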
14051    pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
14052        if !dialect_of!(self is MySqlDialect | GenericDialect) {
14053            return parser_err!(
14054                "Unsupported statement REPLACE",
14055                self.peek_token().span.start
14056            );
14057        }
14058
14059        let mut insert = self.parse_insert()?;
14060        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
14061            *replace_into = true;
14062        }
14063
14064        Ok(insert)
14065    }
14066
14067    /// Parse an INSERT statement, returning a `Box`ed SetExpr
14068    ///
14069    /// This is used to reduce the size of the stack frames in debug builds
14070    fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
14071        Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
14072    }
14073
14074    /// Parse an INSERT statement
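    ///
    /// A minimal usage sketch (illustrative example; assumes the permissive
    /// `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Illustrative input; any INSERT accepted by the dialect works here.
    /// let sql = "INSERT INTO customers (name) VALUES ('John')";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```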
14075    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
14076        let or = self.parse_conflict_clause();
14077        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
14078            None
14079        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
14080            Some(MysqlInsertPriority::LowPriority)
14081        } else if self.parse_keyword(Keyword::DELAYED) {
14082            Some(MysqlInsertPriority::Delayed)
14083        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
14084            Some(MysqlInsertPriority::HighPriority)
14085        } else {
14086            None
14087        };
14088
14089        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
14090            && self.parse_keyword(Keyword::IGNORE);
14091
14092        let replace_into = false;
14093
14094        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
14095        let into = self.parse_keyword(Keyword::INTO);
14096
14097        let local = self.parse_keyword(Keyword::LOCAL);
14098
14099        if self.parse_keyword(Keyword::DIRECTORY) {
14100            let path = self.parse_literal_string()?;
14101            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
14102                Some(self.parse_file_format()?)
14103            } else {
14104                None
14105            };
14106            let source = self.parse_query()?;
14107            Ok(Statement::Directory {
14108                local,
14109                path,
14110                overwrite,
14111                file_format,
14112                source,
14113            })
14114        } else {
            // Hive allows an optional TABLE keyword here (e.g. `INSERT INTO TABLE t`)
14116            let table = self.parse_keyword(Keyword::TABLE);
14117            let table_object = self.parse_table_object()?;
14118
14119            let table_alias =
14120                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
14121                    Some(self.parse_identifier()?)
14122                } else {
14123                    None
14124                };
14125
14126            let is_mysql = dialect_of!(self is MySqlDialect);
14127
14128            let (columns, partitioned, after_columns, source, assignments) = if self
14129                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
14130            {
14131                (vec![], None, vec![], None, vec![])
14132            } else {
14133                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
14134                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
14135
14136                    let partitioned = self.parse_insert_partition()?;
14137                    // Hive allows you to specify columns after partitions as well if you want.
14138                    let after_columns = if dialect_of!(self is HiveDialect) {
14139                        self.parse_parenthesized_column_list(Optional, false)?
14140                    } else {
14141                        vec![]
14142                    };
14143                    (columns, partitioned, after_columns)
14144                } else {
14145                    Default::default()
14146                };
14147
14148                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
14149                    || self.peek_keyword(Keyword::SETTINGS)
14150                {
14151                    (None, vec![])
14152                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
14153                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
14154                } else {
14155                    (Some(self.parse_query()?), vec![])
14156                };
14157
14158                (columns, partitioned, after_columns, source, assignments)
14159            };
14160
14161            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                // The SETTINGS clause always comes before `FORMAT` for ClickHouse:
14163                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
14164                let settings = self.parse_settings()?;
14165
14166                let format = if self.parse_keyword(Keyword::FORMAT) {
14167                    Some(self.parse_input_format_clause()?)
14168                } else {
14169                    None
14170                };
14171
14172                (format, settings)
14173            } else {
14174                Default::default()
14175            };
14176
14177            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
14178                && self.parse_keyword(Keyword::AS)
14179            {
14180                let row_alias = self.parse_object_name(false)?;
14181                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
14182                Some(InsertAliases {
14183                    row_alias,
14184                    col_aliases,
14185                })
14186            } else {
14187                None
14188            };
14189
14190            let on = if self.parse_keyword(Keyword::ON) {
14191                if self.parse_keyword(Keyword::CONFLICT) {
14192                    let conflict_target =
14193                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
14194                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
14195                        } else if self.peek_token() == Token::LParen {
14196                            Some(ConflictTarget::Columns(
14197                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
14198                            ))
14199                        } else {
14200                            None
14201                        };
14202
14203                    self.expect_keyword_is(Keyword::DO)?;
14204                    let action = if self.parse_keyword(Keyword::NOTHING) {
14205                        OnConflictAction::DoNothing
14206                    } else {
14207                        self.expect_keyword_is(Keyword::UPDATE)?;
14208                        self.expect_keyword_is(Keyword::SET)?;
14209                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
14210                        let selection = if self.parse_keyword(Keyword::WHERE) {
14211                            Some(self.parse_expr()?)
14212                        } else {
14213                            None
14214                        };
14215                        OnConflictAction::DoUpdate(DoUpdate {
14216                            assignments,
14217                            selection,
14218                        })
14219                    };
14220
14221                    Some(OnInsert::OnConflict(OnConflict {
14222                        conflict_target,
14223                        action,
14224                    }))
14225                } else {
14226                    self.expect_keyword_is(Keyword::DUPLICATE)?;
14227                    self.expect_keyword_is(Keyword::KEY)?;
14228                    self.expect_keyword_is(Keyword::UPDATE)?;
14229                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
14230
14231                    Some(OnInsert::DuplicateKeyUpdate(l))
14232                }
14233            } else {
14234                None
14235            };
14236
14237            let returning = if self.parse_keyword(Keyword::RETURNING) {
14238                Some(self.parse_comma_separated(Parser::parse_select_item)?)
14239            } else {
14240                None
14241            };
14242
14243            Ok(Statement::Insert(Insert {
14244                or,
14245                table: table_object,
14246                table_alias,
14247                ignore,
14248                into,
14249                overwrite,
14250                partitioned,
14251                columns,
14252                after_columns,
14253                source,
14254                assignments,
14255                has_table_keyword: table,
14256                on,
14257                returning,
14258                replace_into,
14259                priority,
14260                insert_alias,
14261                settings,
14262                format_clause,
14263            }))
14264        }
14265    }
14266
    /// Parses the input format clause used by [ClickHouse].
    ///
    /// [ClickHouse]: <https://clickhouse.com/docs/en/interfaces/formats>
14270    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
14271        let ident = self.parse_identifier()?;
14272        let values = self
14273            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
14274            .unwrap_or_default();
14275
14276        Ok(InputFormatClause { ident, values })
14277    }
14278
14279    /// Returns true if the immediate tokens look like the
14280    /// beginning of a subquery. `(SELECT ...`
14281    fn peek_subquery_start(&mut self) -> bool {
14282        let [maybe_lparen, maybe_select] = self.peek_tokens();
14283        Token::LParen == maybe_lparen
14284            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
14285    }
14286
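    /// Parses an optional SQLite-style conflict clause on INSERT/UPDATE:
    /// `OR REPLACE`, `OR ROLLBACK`, `OR ABORT`, `OR FAIL`, `OR IGNORE`, or a
    /// bare `REPLACE`.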
14287    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
14288        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
14289            Some(SqliteOnConflict::Replace)
14290        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
14291            Some(SqliteOnConflict::Rollback)
14292        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
14293            Some(SqliteOnConflict::Abort)
14294        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
14295            Some(SqliteOnConflict::Fail)
14296        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
14297            Some(SqliteOnConflict::Ignore)
14298        } else if self.parse_keyword(Keyword::REPLACE) {
14299            Some(SqliteOnConflict::Replace)
14300        } else {
14301            None
14302        }
14303    }
14304
14305    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
14306        if self.parse_keyword(Keyword::PARTITION) {
14307            self.expect_token(&Token::LParen)?;
14308            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
14309            self.expect_token(&Token::RParen)?;
14310            Ok(partition_cols)
14311        } else {
14312            Ok(None)
14313        }
14314    }
14315
14316    pub fn parse_load_data_table_format(
14317        &mut self,
14318    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
14319        if self.parse_keyword(Keyword::INPUTFORMAT) {
14320            let input_format = self.parse_expr()?;
14321            self.expect_keyword_is(Keyword::SERDE)?;
14322            let serde = self.parse_expr()?;
14323            Ok(Some(HiveLoadDataFormat {
14324                input_format,
14325                serde,
14326            }))
14327        } else {
14328            Ok(None)
14329        }
14330    }
14331
14332    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
14333    ///
14334    /// This is used to reduce the size of the stack frames in debug builds
14335    fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
14336        Ok(Box::new(SetExpr::Update(self.parse_update()?)))
14337    }
14338
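    /// Parse the body of an `UPDATE` statement; the leading `UPDATE` keyword is typically
    /// consumed by the caller. A minimal illustrative example (table and column names are
    /// made up):
    ///
    /// ```sql
    /// UPDATE orders SET status = 'shipped' WHERE id = 42 RETURNING id
    /// ```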
14339    pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
14340        let or = self.parse_conflict_clause();
14341        let table = self.parse_table_and_joins()?;
14342        let from_before_set = if self.parse_keyword(Keyword::FROM) {
14343            Some(UpdateTableFromKind::BeforeSet(
14344                self.parse_table_with_joins()?,
14345            ))
14346        } else {
14347            None
14348        };
14349        self.expect_keyword(Keyword::SET)?;
14350        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
14351        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
14352            Some(UpdateTableFromKind::AfterSet(
14353                self.parse_table_with_joins()?,
14354            ))
14355        } else {
14356            from_before_set
14357        };
14358        let selection = if self.parse_keyword(Keyword::WHERE) {
14359            Some(self.parse_expr()?)
14360        } else {
14361            None
14362        };
14363        let returning = if self.parse_keyword(Keyword::RETURNING) {
14364            Some(self.parse_comma_separated(Parser::parse_select_item)?)
14365        } else {
14366            None
14367        };
14368        Ok(Statement::Update {
14369            table,
14370            assignments,
14371            from,
14372            selection,
14373            returning,
14374            or,
14375        })
14376    }
14377
14378    /// Parse a `var = expr` assignment, used in an UPDATE statement
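    ///
    /// Illustrative examples of assignments this accepts (column names are made up):
    ///
    /// ```sql
    /// price = price * 1.1
    /// (a, b) = (1, 2)
    /// ```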
14379    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
14380        let target = self.parse_assignment_target()?;
14381        self.expect_token(&Token::Eq)?;
14382        let value = self.parse_expr()?;
14383        Ok(Assignment { target, value })
14384    }
14385
14386    /// Parse the left-hand side of an assignment, used in an UPDATE statement
14387    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
14388        if self.consume_token(&Token::LParen) {
14389            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
14390            self.expect_token(&Token::RParen)?;
14391            Ok(AssignmentTarget::Tuple(columns))
14392        } else {
14393            let column = self.parse_object_name(false)?;
14394            Ok(AssignmentTarget::ColumnName(column))
14395        }
14396    }
14397
14398    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
14399        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
14400            self.maybe_parse(|p| {
14401                let name = p.parse_expr()?;
14402                let operator = p.parse_function_named_arg_operator()?;
14403                let arg = p.parse_wildcard_expr()?.into();
14404                Ok(FunctionArg::ExprNamed {
14405                    name,
14406                    arg,
14407                    operator,
14408                })
14409            })?
14410        } else {
14411            self.maybe_parse(|p| {
14412                let name = p.parse_identifier()?;
14413                let operator = p.parse_function_named_arg_operator()?;
14414                let arg = p.parse_wildcard_expr()?.into();
14415                Ok(FunctionArg::Named {
14416                    name,
14417                    arg,
14418                    operator,
14419                })
14420            })?
14421        };
14422        if let Some(arg) = arg {
14423            return Ok(arg);
14424        }
14425        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
14426    }
14427
14428    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
14429        if self.parse_keyword(Keyword::VALUE) {
14430            return Ok(FunctionArgOperator::Value);
14431        }
14432        let tok = self.next_token();
14433        match tok.token {
14434            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
14435                Ok(FunctionArgOperator::RightArrow)
14436            }
14437            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
14438                Ok(FunctionArgOperator::Equals)
14439            }
14440            Token::Assignment
14441                if self
14442                    .dialect
14443                    .supports_named_fn_args_with_assignment_operator() =>
14444            {
14445                Ok(FunctionArgOperator::Assignment)
14446            }
14447            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
14448                Ok(FunctionArgOperator::Colon)
14449            }
14450            _ => {
14451                self.prev_token();
14452                self.expected("argument operator", tok)
14453            }
14454        }
14455    }
14456
14457    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
14458        if self.consume_token(&Token::RParen) {
14459            Ok(vec![])
14460        } else {
14461            let args = self.parse_comma_separated(Parser::parse_function_args)?;
14462            self.expect_token(&Token::RParen)?;
14463            Ok(args)
14464        }
14465    }
14466
14467    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
14468        if self.consume_token(&Token::RParen) {
14469            return Ok(TableFunctionArgs {
14470                args: vec![],
14471                settings: None,
14472            });
14473        }
14474        let mut args = vec![];
14475        let settings = loop {
14476            if let Some(settings) = self.parse_settings()? {
14477                break Some(settings);
14478            }
14479            args.push(self.parse_function_args()?);
14480            if self.is_parse_comma_separated_end() {
14481                break None;
14482            }
14483        };
14484        self.expect_token(&Token::RParen)?;
14485        Ok(TableFunctionArgs { args, settings })
14486    }
14487
14488    /// Parses a potentially empty list of arguments to a window function
14489    /// (including the closing parenthesis).
14490    ///
14491    /// Examples:
14492    /// ```sql
14493    /// FIRST_VALUE(x ORDER BY 1,2,3);
14494    /// FIRST_VALUE(x IGNORE NULLS);
14495    /// ```
14496    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
14497        let mut clauses = vec![];
14498
14499        // Handle MSSQL's empty argument list with a json-null-clause, e.g. `JSON_ARRAY(NULL ON NULL)`
14500        if let Some(null_clause) = self.parse_json_null_clause() {
14501            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
14502        }
14503
14504        if self.consume_token(&Token::RParen) {
14505            return Ok(FunctionArgumentList {
14506                duplicate_treatment: None,
14507                args: vec![],
14508                clauses,
14509            });
14510        }
14511
14512        let duplicate_treatment = self.parse_duplicate_treatment()?;
14513        let args = self.parse_comma_separated(Parser::parse_function_args)?;
14514
14515        if self.dialect.supports_window_function_null_treatment_arg() {
14516            if let Some(null_treatment) = self.parse_null_treatment()? {
14517                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
14518            }
14519        }
14520
14521        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14522            clauses.push(FunctionArgumentClause::OrderBy(
14523                self.parse_comma_separated(Parser::parse_order_by_expr)?,
14524            ));
14525        }
14526
14527        if self.parse_keyword(Keyword::LIMIT) {
14528            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
14529        }
14530
14531        if dialect_of!(self is GenericDialect | BigQueryDialect)
14532            && self.parse_keyword(Keyword::HAVING)
14533        {
14534            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
14535                Keyword::MIN => HavingBoundKind::Min,
14536                Keyword::MAX => HavingBoundKind::Max,
14537                _ => unreachable!(),
14538            };
14539            clauses.push(FunctionArgumentClause::Having(HavingBound(
14540                kind,
14541                self.parse_expr()?,
14542            )))
14543        }
14544
14545        if dialect_of!(self is GenericDialect | MySqlDialect)
14546            && self.parse_keyword(Keyword::SEPARATOR)
14547        {
14548            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
14549        }
14550
14551        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
14552            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
14553        }
14554
14555        if let Some(null_clause) = self.parse_json_null_clause() {
14556            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
14557        }
14558
14559        self.expect_token(&Token::RParen)?;
14560        Ok(FunctionArgumentList {
14561            duplicate_treatment,
14562            args,
14563            clauses,
14564        })
14565    }
14566
14567    /// Parses MSSQL's json-null-clause
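    ///
    /// For illustration, this is the `ABSENT ON NULL` / `NULL ON NULL` part of expressions
    /// such as `JSON_ARRAY(col ABSENT ON NULL)` (column name made up).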
14568    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
14569        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
14570            Some(JsonNullClause::AbsentOnNull)
14571        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
14572            Some(JsonNullClause::NullOnNull)
14573        } else {
14574            None
14575        }
14576    }
14577
14578    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
14579        let loc = self.peek_token().span.start;
14580        match (
14581            self.parse_keyword(Keyword::ALL),
14582            self.parse_keyword(Keyword::DISTINCT),
14583        ) {
14584            (true, false) => Ok(Some(DuplicateTreatment::All)),
14585            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
14586            (false, false) => Ok(None),
14587            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
14588        }
14589    }
14590
14591    /// Parse a single projection (select item) from the comma-delimited list that follows SELECT
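    ///
    /// Illustrative examples of individual items this can parse (names are made up):
    ///
    /// ```sql
    /// t.*
    /// amount * 2 AS doubled
    /// ```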
14592    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
14593        let prefix = self
14594            .parse_one_of_keywords(
14595                self.dialect
14596                    .get_reserved_keywords_for_select_item_operator(),
14597            )
14598            .map(|keyword| Ident::new(format!("{:?}", keyword)));
14599
14600        match self.parse_wildcard_expr()? {
14601            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
14602                SelectItemQualifiedWildcardKind::ObjectName(prefix),
14603                self.parse_wildcard_additional_options(token.0)?,
14604            )),
14605            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
14606                self.parse_wildcard_additional_options(token.0)?,
14607            )),
14608            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
14609                parser_err!(
14610                    format!("Expected an expression, found: {}", v),
14611                    self.peek_token().span.start
14612                )
14613            }
14614            Expr::BinaryOp {
14615                left,
14616                op: BinaryOperator::Eq,
14617                right,
14618            } if self.dialect.supports_eq_alias_assignment()
14619                && matches!(left.as_ref(), Expr::Identifier(_)) =>
14620            {
14621                let Expr::Identifier(alias) = *left else {
14622                    return parser_err!(
14623                        "BUG: expected identifier expression as alias",
14624                        self.peek_token().span.start
14625                    );
14626                };
14627                Ok(SelectItem::ExprWithAlias {
14628                    expr: *right,
14629                    alias,
14630                })
14631            }
14632            expr if self.dialect.supports_select_expr_star()
14633                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
14634            {
14635                let wildcard_token = self.get_previous_token().clone();
14636                Ok(SelectItem::QualifiedWildcard(
14637                    SelectItemQualifiedWildcardKind::Expr(expr),
14638                    self.parse_wildcard_additional_options(wildcard_token)?,
14639                ))
14640            }
14641            expr => self
14642                .maybe_parse_select_item_alias()
14643                .map(|alias| match alias {
14644                    Some(alias) => SelectItem::ExprWithAlias {
14645                        expr: maybe_prefixed_expr(expr, prefix),
14646                        alias,
14647                    },
14648                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
14649                }),
14650        }
14651    }
14652
14653    /// Parse the [`WildcardAdditionalOptions`] that may follow a wildcard select item.
14654    ///
14655    /// Options that are not present (or not supported by the dialect) are left as `None`.
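    ///
    /// For illustration, in dialects that support them these are the clauses trailing a `*`,
    /// e.g. (names are made up):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (secret) REPLACE (amount / 100 AS amount) FROM t
    /// ```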
14656    pub fn parse_wildcard_additional_options(
14657        &mut self,
14658        wildcard_token: TokenWithSpan,
14659    ) -> Result<WildcardAdditionalOptions, ParserError> {
14660        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
14661            self.parse_optional_select_item_ilike()?
14662        } else {
14663            None
14664        };
14665        let opt_exclude = if opt_ilike.is_none()
14666            && dialect_of!(self is GenericDialect | DuckDbDialect | SnowflakeDialect)
14667        {
14668            self.parse_optional_select_item_exclude()?
14669        } else {
14670            None
14671        };
14672        let opt_except = if self.dialect.supports_select_wildcard_except() {
14673            self.parse_optional_select_item_except()?
14674        } else {
14675            None
14676        };
14677        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
14678        {
14679            self.parse_optional_select_item_replace()?
14680        } else {
14681            None
14682        };
14683        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
14684            self.parse_optional_select_item_rename()?
14685        } else {
14686            None
14687        };
14688
14689        Ok(WildcardAdditionalOptions {
14690            wildcard_token: wildcard_token.into(),
14691            opt_ilike,
14692            opt_exclude,
14693            opt_except,
14694            opt_rename,
14695            opt_replace,
14696        })
14697    }
14698
14699    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for wildcard select items.
14700    ///
14701    /// Returns `Ok(None)` if the clause is not present.
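    ///
    /// Illustrative example (Snowflake-style, names made up): `SELECT * ILIKE '%id%' FROM t`.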
14702    pub fn parse_optional_select_item_ilike(
14703        &mut self,
14704    ) -> Result<Option<IlikeSelectItem>, ParserError> {
14705        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
14706            let next_token = self.next_token();
14707            let pattern = match next_token.token {
14708                Token::SingleQuotedString(s) => s,
14709                _ => return self.expected("ilike pattern", next_token),
14710            };
14711            Some(IlikeSelectItem { pattern })
14712        } else {
14713            None
14714        };
14715        Ok(opt_ilike)
14716    }
14717
14718    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for wildcard select items.
14719    ///
14720    /// Returns `Ok(None)` if the clause is not present.
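    ///
    /// Illustrative examples (names made up): `SELECT * EXCLUDE secret FROM t` or
    /// `SELECT * EXCLUDE (secret, internal_id) FROM t`.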
14721    pub fn parse_optional_select_item_exclude(
14722        &mut self,
14723    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
14724        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
14725            if self.consume_token(&Token::LParen) {
14726                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14727                self.expect_token(&Token::RParen)?;
14728                Some(ExcludeSelectItem::Multiple(columns))
14729            } else {
14730                let column = self.parse_identifier()?;
14731                Some(ExcludeSelectItem::Single(column))
14732            }
14733        } else {
14734            None
14735        };
14736
14737        Ok(opt_exclude)
14738    }
14739
14740    /// Parse an optional [`Except`](ExceptSelectItem) clause for wildcard select items.
14741    ///
14742    /// Returns `Ok(None)` if the clause is not present.
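    ///
    /// Illustrative examples (names made up): `SELECT * EXCEPT (secret) FROM t`, or the
    /// ClickHouse single-column form `SELECT * EXCEPT secret FROM t`.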
14743    pub fn parse_optional_select_item_except(
14744        &mut self,
14745    ) -> Result<Option<ExceptSelectItem>, ParserError> {
14746        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
14747            if self.peek_token().token == Token::LParen {
14748                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
14749                match &idents[..] {
14750                    [] => {
14751                        return self.expected(
14752                            "at least one column should be parsed by the except clause",
14753                            self.peek_token(),
14754                        )?;
14755                    }
14756                    [first, idents @ ..] => Some(ExceptSelectItem {
14757                        first_element: first.clone(),
14758                        additional_elements: idents.to_vec(),
14759                    }),
14760                }
14761            } else {
14762                // ClickHouse allows EXCEPT column_name
14763                let ident = self.parse_identifier()?;
14764                Some(ExceptSelectItem {
14765                    first_element: ident,
14766                    additional_elements: vec![],
14767                })
14768            }
14769        } else {
14770            None
14771        };
14772
14773        Ok(opt_except)
14774    }
14775
14776    /// Parse an optional [`Rename`](RenameSelectItem) clause for wildcard select items.
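    ///
    /// Illustrative examples (names made up): `SELECT * RENAME col_a AS a FROM t` or
    /// `SELECT * RENAME (col_a AS a, col_b AS b) FROM t`.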
14777    pub fn parse_optional_select_item_rename(
14778        &mut self,
14779    ) -> Result<Option<RenameSelectItem>, ParserError> {
14780        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
14781            if self.consume_token(&Token::LParen) {
14782                let idents =
14783                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
14784                self.expect_token(&Token::RParen)?;
14785                Some(RenameSelectItem::Multiple(idents))
14786            } else {
14787                let ident = self.parse_identifier_with_alias()?;
14788                Some(RenameSelectItem::Single(ident))
14789            }
14790        } else {
14791            None
14792        };
14793
14794        Ok(opt_rename)
14795    }
14796
14797    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for wildcard select items.
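    ///
    /// Illustrative example (names made up): `SELECT * REPLACE (amount / 100 AS amount) FROM t`.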
14798    pub fn parse_optional_select_item_replace(
14799        &mut self,
14800    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
14801        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
14802            if self.consume_token(&Token::LParen) {
14803                let items = self.parse_comma_separated(|parser| {
14804                    Ok(Box::new(parser.parse_replace_elements()?))
14805                })?;
14806                self.expect_token(&Token::RParen)?;
14807                Some(ReplaceSelectItem { items })
14808            } else {
14809                let tok = self.next_token();
14810                return self.expected("`(` after REPLACE", tok);
14811            }
14812        } else {
14813            None
14814        };
14815
14816        Ok(opt_replace)
14817    }

14818    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
14819        let expr = self.parse_expr()?;
14820        let as_keyword = self.parse_keyword(Keyword::AS);
14821        let ident = self.parse_identifier()?;
14822        Ok(ReplaceSelectElement {
14823            expr,
14824            column_name: ident,
14825            as_keyword,
14826        })
14827    }
14828
14829    /// Parse ASC or DESC; returns `Some(true)` for ASC, `Some(false)` for DESC, or `None` if
14830    /// neither is present.
14831    pub fn parse_asc_desc(&mut self) -> Option<bool> {
14832        if self.parse_keyword(Keyword::ASC) {
14833            Some(true)
14834        } else if self.parse_keyword(Keyword::DESC) {
14835            Some(false)
14836        } else {
14837            None
14838        }
14839    }
14840
14841    /// Parse an [OrderByExpr] expression.
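    ///
    /// For illustration, this parses a single element of an ORDER BY list such as
    /// `total DESC NULLS LAST` (column name made up).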
14842    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
14843        self.parse_order_by_expr_inner(false)
14844            .map(|(order_by, _)| order_by)
14845    }
14846
14847    /// Parse an [IndexColumn].
14848    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
14849        self.parse_order_by_expr_inner(true)
14850            .map(|(column, operator_class)| IndexColumn {
14851                column,
14852                operator_class,
14853            })
14854    }
14855
14856    fn parse_order_by_expr_inner(
14857        &mut self,
14858        with_operator_class: bool,
14859    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
14860        let expr = self.parse_expr()?;
14861
14862        let operator_class: Option<Ident> = if with_operator_class {
14863            // If none of the following keywords is present, parse an identifier
14864            // as the operator class.
14865            if self
14866                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
14867                .is_some()
14868            {
14869                None
14870            } else {
14871                self.maybe_parse(|parser| parser.parse_identifier())?
14872            }
14873        } else {
14874            None
14875        };
14876
14877        let options = self.parse_order_by_options()?;
14878
14879        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
14880            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
14881        {
14882            Some(self.parse_with_fill()?)
14883        } else {
14884            None
14885        };
14886
14887        Ok((
14888            OrderByExpr {
14889                expr,
14890                options,
14891                with_fill,
14892            },
14893            operator_class,
14894        ))
14895    }
14896
14897    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
14898        let asc = self.parse_asc_desc();
14899
14900        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
14901            Some(true)
14902        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
14903            Some(false)
14904        } else {
14905            None
14906        };
14907
14908        Ok(OrderByOptions { asc, nulls_first })
14909    }
14910
14911    // Parse a WITH FILL clause (ClickHouse dialect)
14912    // that follows the WITH FILL keywords in an ORDER BY clause
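    //
    // Illustrative example (column and bounds made up):
    //   ORDER BY d WITH FILL FROM 1 TO 10 STEP 2
    // where this method parses the part after the WITH FILL keywords.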
14913    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
14914        let from = if self.parse_keyword(Keyword::FROM) {
14915            Some(self.parse_expr()?)
14916        } else {
14917            None
14918        };
14919
14920        let to = if self.parse_keyword(Keyword::TO) {
14921            Some(self.parse_expr()?)
14922        } else {
14923            None
14924        };
14925
14926        let step = if self.parse_keyword(Keyword::STEP) {
14927            Some(self.parse_expr()?)
14928        } else {
14929            None
14930        };
14931
14932        Ok(WithFill { from, to, step })
14933    }
14934
14935    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
14936    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
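    //
    // Illustrative example (names made up):
    //   ORDER BY d WITH FILL INTERPOLATE (col1 AS col1 + 1, col2)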
14937    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
14938        if !self.parse_keyword(Keyword::INTERPOLATE) {
14939            return Ok(None);
14940        }
14941
14942        if self.consume_token(&Token::LParen) {
14943            let interpolations =
14944                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
14945            self.expect_token(&Token::RParen)?;
14946            // INTERPOLATE () and INTERPOLATE ( ... ) variants
14947            return Ok(Some(Interpolate {
14948                exprs: Some(interpolations),
14949            }));
14950        }
14951
14952        // INTERPOLATE
14953        Ok(Some(Interpolate { exprs: None }))
14954    }
14955
14956    // Parse an INTERPOLATE expression (ClickHouse dialect)
14957    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
14958        let column = self.parse_identifier()?;
14959        let expr = if self.parse_keyword(Keyword::AS) {
14960            Some(self.parse_expr()?)
14961        } else {
14962            None
14963        };
14964        Ok(InterpolateExpr { column, expr })
14965    }
14966
14967    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
14968    /// that follows `SELECT [DISTINCT]`.
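    ///
    /// Illustrative examples of statements containing a TOP clause (quantities made up):
    ///
    /// ```sql
    /// SELECT TOP 10 PERCENT * FROM t;
    /// SELECT TOP (5) WITH TIES * FROM t ORDER BY x;
    /// ```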
14969    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
14970        let quantity = if self.consume_token(&Token::LParen) {
14971            let quantity = self.parse_expr()?;
14972            self.expect_token(&Token::RParen)?;
14973            Some(TopQuantity::Expr(quantity))
14974        } else {
14975            let next_token = self.next_token();
14976            let quantity = match next_token.token {
14977                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
14978                _ => self.expected("literal int", next_token)?,
14979            };
14980            Some(TopQuantity::Constant(quantity))
14981        };
14982
14983        let percent = self.parse_keyword(Keyword::PERCENT);
14984
14985        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
14986
14987        Ok(Top {
14988            with_ties,
14989            percent,
14990            quantity,
14991        })
14992    }
14993
14994    /// Parse a LIMIT clause
14995    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
14996        if self.parse_keyword(Keyword::ALL) {
14997            Ok(None)
14998        } else {
14999            Ok(Some(self.parse_expr()?))
15000        }
15001    }
15002
15003    /// Parse an OFFSET clause
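    ///
    /// For illustration, this parses the portion following the `OFFSET` keyword, e.g. the
    /// `10 ROWS` in `OFFSET 10 ROWS`.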
15004    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
15005        let value = self.parse_expr()?;
15006        let rows = if self.parse_keyword(Keyword::ROW) {
15007            OffsetRows::Row
15008        } else if self.parse_keyword(Keyword::ROWS) {
15009            OffsetRows::Rows
15010        } else {
15011            OffsetRows::None
15012        };
15013        Ok(Offset { value, rows })
15014    }
15015
15016    /// Parse a FETCH clause
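    ///
    /// Illustrative examples (quantities made up): `FETCH FIRST 10 ROWS ONLY`,
    /// `FETCH NEXT 5 PERCENT ROWS WITH TIES`. The leading `FETCH` keyword is expected to
    /// have been consumed by the caller.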
15017    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
15018        self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
15019        let (quantity, percent) = if self
15020            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
15021            .is_some()
15022        {
15023            (None, false)
15024        } else {
15025            let quantity = Expr::Value(self.parse_value()?);
15026            let percent = self.parse_keyword(Keyword::PERCENT);
15027            self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
15028            (Some(quantity), percent)
15029        };
15030        let with_ties = if self.parse_keyword(Keyword::ONLY) {
15031            false
15032        } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
15033            true
15034        } else {
15035            return self.expected("one of ONLY or WITH TIES", self.peek_token());
15036        };
15037        Ok(Fetch {
15038            with_ties,
15039            percent,
15040            quantity,
15041        })
15042    }
15043
15044    /// Parse a FOR UPDATE/FOR SHARE clause
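    ///
    /// Illustrative examples (table name made up): `FOR UPDATE OF orders NOWAIT`,
    /// `FOR SHARE SKIP LOCKED`. The leading `FOR` keyword is expected to have been consumed
    /// by the caller.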
15045    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
15046        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
15047            Keyword::UPDATE => LockType::Update,
15048            Keyword::SHARE => LockType::Share,
15049            _ => unreachable!(),
15050        };
15051        let of = if self.parse_keyword(Keyword::OF) {
15052            Some(self.parse_object_name(false)?)
15053        } else {
15054            None
15055        };
15056        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
15057            Some(NonBlock::Nowait)
15058        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
15059            Some(NonBlock::SkipLocked)
15060        } else {
15061            None
15062        };
15063        Ok(LockClause {
15064            lock_type,
15065            of,
15066            nonblock,
15067        })
15068    }
15069
15070    pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
15071        let mut explicit_row = false;
15072
15073        let rows = self.parse_comma_separated(|parser| {
15074            if parser.parse_keyword(Keyword::ROW) {
15075                explicit_row = true;
15076            }
15077
15078            parser.expect_token(&Token::LParen)?;
15079            if allow_empty && parser.peek_token().token == Token::RParen {
15080                parser.next_token();
15081                Ok(vec![])
15082            } else {
15083                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
15084                parser.expect_token(&Token::RParen)?;
15085                Ok(exprs)
15086            }
15087        })?;
15088        Ok(Values { explicit_row, rows })
15089    }
15090
15091    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
15092        self.expect_keyword_is(Keyword::TRANSACTION)?;
15093        Ok(Statement::StartTransaction {
15094            modes: self.parse_transaction_modes()?,
15095            begin: false,
15096            transaction: Some(BeginTransactionKind::Transaction),
15097            modifier: None,
15098            statements: vec![],
15099            exception_statements: None,
15100            has_end_keyword: false,
15101        })
15102    }
15103
15104    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
15105        let modifier = if !self.dialect.supports_start_transaction_modifier() {
15106            None
15107        } else if self.parse_keyword(Keyword::DEFERRED) {
15108            Some(TransactionModifier::Deferred)
15109        } else if self.parse_keyword(Keyword::IMMEDIATE) {
15110            Some(TransactionModifier::Immediate)
15111        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
15112            Some(TransactionModifier::Exclusive)
15113        } else if self.parse_keyword(Keyword::TRY) {
15114            Some(TransactionModifier::Try)
15115        } else if self.parse_keyword(Keyword::CATCH) {
15116            Some(TransactionModifier::Catch)
15117        } else {
15118            None
15119        };
15120        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
15121            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
15122            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
15123            _ => None,
15124        };
15125        Ok(Statement::StartTransaction {
15126            modes: self.parse_transaction_modes()?,
15127            begin: true,
15128            transaction,
15129            modifier,
15130            statements: vec![],
15131            exception_statements: None,
15132            has_end_keyword: false,
15133        })
15134    }
15135
15136    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
15137        let modifier = if !self.dialect.supports_end_transaction_modifier() {
15138            None
15139        } else if self.parse_keyword(Keyword::TRY) {
15140            Some(TransactionModifier::Try)
15141        } else if self.parse_keyword(Keyword::CATCH) {
15142            Some(TransactionModifier::Catch)
15143        } else {
15144            None
15145        };
15146        Ok(Statement::Commit {
15147            chain: self.parse_commit_rollback_chain()?,
15148            end: true,
15149            modifier,
15150        })
15151    }
15152
15153    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
15154        let mut modes = vec![];
15155        let mut required = false;
15156        loop {
15157            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
15158                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
15159                    TransactionIsolationLevel::ReadUncommitted
15160                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
15161                    TransactionIsolationLevel::ReadCommitted
15162                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
15163                    TransactionIsolationLevel::RepeatableRead
15164                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
15165                    TransactionIsolationLevel::Serializable
15166                } else if self.parse_keyword(Keyword::SNAPSHOT) {
15167                    TransactionIsolationLevel::Snapshot
15168                } else {
15169                    self.expected("isolation level", self.peek_token())?
15170                };
15171                TransactionMode::IsolationLevel(iso_level)
15172            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
15173                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
15174            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
15175                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
15176            } else if required {
15177                self.expected("transaction mode", self.peek_token())?
15178            } else {
15179                break;
15180            };
15181            modes.push(mode);
15182            // ANSI requires a comma after each transaction mode, but
15183            // PostgreSQL, for historical reasons, does not. We follow
15184            // PostgreSQL in making the comma optional, since that is strictly
15185            // more general.
15186            required = self.consume_token(&Token::Comma);
15187        }
15188        Ok(modes)
15189    }
15190
15191    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
15192        Ok(Statement::Commit {
15193            chain: self.parse_commit_rollback_chain()?,
15194            end: false,
15195            modifier: None,
15196        })
15197    }
15198
15199    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
15200        let chain = self.parse_commit_rollback_chain()?;
15201        let savepoint = self.parse_rollback_savepoint()?;
15202
15203        Ok(Statement::Rollback { chain, savepoint })
15204    }
15205
15206    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
15207        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
15208        if self.parse_keyword(Keyword::AND) {
15209            let chain = !self.parse_keyword(Keyword::NO);
15210            self.expect_keyword_is(Keyword::CHAIN)?;
15211            Ok(chain)
15212        } else {
15213            Ok(false)
15214        }
15215    }
15216
15217    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
15218        if self.parse_keyword(Keyword::TO) {
15219            let _ = self.parse_keyword(Keyword::SAVEPOINT);
15220            let savepoint = self.parse_identifier()?;
15221
15222            Ok(Some(savepoint))
15223        } else {
15224            Ok(None)
15225        }
15226    }
15227
15228    /// Parse a `RAISERROR` statement
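    ///
    /// Illustrative example (message, severity and state are made up):
    ///
    /// ```sql
    /// RAISERROR('Something failed: %s', 16, 1, 'details') WITH NOWAIT
    /// ```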
15229    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
15230        self.expect_token(&Token::LParen)?;
15231        let message = Box::new(self.parse_expr()?);
15232        self.expect_token(&Token::Comma)?;
15233        let severity = Box::new(self.parse_expr()?);
15234        self.expect_token(&Token::Comma)?;
15235        let state = Box::new(self.parse_expr()?);
15236        let arguments = if self.consume_token(&Token::Comma) {
15237            self.parse_comma_separated(Parser::parse_expr)?
15238        } else {
15239            vec![]
15240        };
15241        self.expect_token(&Token::RParen)?;
15242        let options = if self.parse_keyword(Keyword::WITH) {
15243            self.parse_comma_separated(Parser::parse_raiserror_option)?
15244        } else {
15245            vec![]
15246        };
15247        Ok(Statement::RaisError {
15248            message,
15249            severity,
15250            state,
15251            arguments,
15252            options,
15253        })
15254    }
15255
15256    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
15257        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
15258            Keyword::LOG => Ok(RaisErrorOption::Log),
15259            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
15260            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
15261            _ => self.expected(
15262                "LOG, NOWAIT OR SETERROR raiserror option",
15263                self.peek_token(),
15264            ),
15265        }
15266    }
15267
15268    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
15269        let prepare = self.parse_keyword(Keyword::PREPARE);
15270        let name = self.parse_identifier()?;
15271        Ok(Statement::Deallocate { name, prepare })
15272    }
15273
15274    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
15275        let name = if self.dialect.supports_execute_immediate()
15276            && self.parse_keyword(Keyword::IMMEDIATE)
15277        {
15278            None
15279        } else {
15280            let name = self.parse_object_name(false)?;
15281            Some(name)
15282        };
15283
15284        let has_parentheses = self.consume_token(&Token::LParen);
15285
15286        let end_token = match (has_parentheses, self.peek_token().token) {
15287            (true, _) => Token::RParen,
15288            (false, Token::EOF) => Token::EOF,
15289            (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w),
15290            (false, _) => Token::SemiColon,
15291        };
15292
15293        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
15294
15295        if has_parentheses {
15296            self.expect_token(&Token::RParen)?;
15297        }
15298
15299        let into = if self.parse_keyword(Keyword::INTO) {
15300            self.parse_comma_separated(Self::parse_identifier)?
15301        } else {
15302            vec![]
15303        };
15304
15305        let using = if self.parse_keyword(Keyword::USING) {
15306            self.parse_comma_separated(Self::parse_expr_with_alias)?
15307        } else {
15308            vec![]
15309        };
15310
15311        Ok(Statement::Execute {
15312            immediate: name.is_none(),
15313            name,
15314            parameters,
15315            has_parentheses,
15316            into,
15317            using,
15318        })
15319    }
15320
15321    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
15322        let name = self.parse_identifier()?;
15323
15324        let mut data_types = vec![];
15325        if self.consume_token(&Token::LParen) {
15326            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
15327            self.expect_token(&Token::RParen)?;
15328        }
15329
15330        self.expect_keyword_is(Keyword::AS)?;
15331        let statement = Box::new(self.parse_statement()?);
15332        Ok(Statement::Prepare {
15333            name,
15334            data_types,
15335            statement,
15336        })
15337    }
15338
15339    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
15340        self.expect_token(&Token::LParen)?;
15341        let query = self.parse_query()?;
15342        self.expect_token(&Token::RParen)?;
15343
15344        self.expect_keyword_is(Keyword::TO)?;
15345        let to = self.parse_identifier()?;
15346
15347        let with_options = self.parse_options(Keyword::WITH)?;
15348
15349        Ok(Statement::Unload {
15350            query,
15351            to,
15352            with: with_options,
15353        })
15354    }
15355
15356    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
15357        let mut clauses = vec![];
15358        loop {
15359            if !self.parse_keyword(Keyword::WHEN) {
15360                break;
15361            }
15362
15363            let mut clause_kind = MergeClauseKind::Matched;
15364            if self.parse_keyword(Keyword::NOT) {
15365                clause_kind = MergeClauseKind::NotMatched;
15366            }
15367            self.expect_keyword_is(Keyword::MATCHED)?;
15368
15369            if matches!(clause_kind, MergeClauseKind::NotMatched)
15370                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
15371            {
15372                clause_kind = MergeClauseKind::NotMatchedBySource;
15373            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
15374                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
15375            {
15376                clause_kind = MergeClauseKind::NotMatchedByTarget;
15377            }
15378
15379            let predicate = if self.parse_keyword(Keyword::AND) {
15380                Some(self.parse_expr()?)
15381            } else {
15382                None
15383            };
15384
15385            self.expect_keyword_is(Keyword::THEN)?;
15386
15387            let merge_clause = match self.parse_one_of_keywords(&[
15388                Keyword::UPDATE,
15389                Keyword::INSERT,
15390                Keyword::DELETE,
15391            ]) {
15392                Some(Keyword::UPDATE) => {
15393                    if matches!(
15394                        clause_kind,
15395                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
15396                    ) {
15397                        return Err(ParserError::ParserError(format!(
15398                            "UPDATE is not allowed in a {clause_kind} merge clause"
15399                        )));
15400                    }
15401                    self.expect_keyword_is(Keyword::SET)?;
15402                    MergeAction::Update {
15403                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
15404                    }
15405                }
15406                Some(Keyword::DELETE) => {
15407                    if matches!(
15408                        clause_kind,
15409                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
15410                    ) {
15411                        return Err(ParserError::ParserError(format!(
15412                            "DELETE is not allowed in a {clause_kind} merge clause"
15413                        )));
15414                    }
15415                    MergeAction::Delete
15416                }
15417                Some(Keyword::INSERT) => {
15418                    if !matches!(
15419                        clause_kind,
15420                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
15421                    ) {
15422                        return Err(ParserError::ParserError(format!(
15423                            "INSERT is not allowed in a {clause_kind} merge clause"
15424                        )));
15425                    }
15426                    let is_mysql = dialect_of!(self is MySqlDialect);
15427
15428                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
15429                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
15430                        && self.parse_keyword(Keyword::ROW)
15431                    {
15432                        MergeInsertKind::Row
15433                    } else {
15434                        self.expect_keyword_is(Keyword::VALUES)?;
15435                        let values = self.parse_values(is_mysql)?;
15436                        MergeInsertKind::Values(values)
15437                    };
15438                    MergeAction::Insert(MergeInsertExpr { columns, kind })
15439                }
15440                _ => {
15441                    return Err(ParserError::ParserError(
15442                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
15443                    ));
15444                }
15445            };
15446            clauses.push(MergeClause {
15447                clause_kind,
15448                predicate,
15449                action: merge_clause,
15450            });
15451        }
15452        Ok(clauses)
15453    }
15454
15455    fn parse_output(&mut self) -> Result<OutputClause, ParserError> {
15456        self.expect_keyword_is(Keyword::OUTPUT)?;
15457        let select_items = self.parse_projection()?;
15458        self.expect_keyword_is(Keyword::INTO)?;
15459        let into_table = self.parse_select_into()?;
15460
15461        Ok(OutputClause {
15462            select_items,
15463            into_table,
15464        })
15465    }
15466
15467    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
15468        let temporary = self
15469            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
15470            .is_some();
15471        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
15472        let table = self.parse_keyword(Keyword::TABLE);
15473        let name = self.parse_object_name(false)?;
15474
15475        Ok(SelectInto {
15476            temporary,
15477            unlogged,
15478            table,
15479            name,
15480        })
15481    }
15482
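    /// Parse the body of a MERGE statement; the leading `MERGE` keyword is expected to have
    /// been consumed by the caller. A minimal illustrative sketch (names are made up):
    ///
    /// ```sql
    /// MERGE INTO target USING source ON target.id = source.id
    /// WHEN MATCHED THEN UPDATE SET target.v = source.v
    /// WHEN NOT MATCHED THEN INSERT (id, v) VALUES (source.id, source.v)
    /// ```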
15483    pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
15484        let into = self.parse_keyword(Keyword::INTO);
15485
15486        let table = self.parse_table_factor()?;
15487
15488        self.expect_keyword_is(Keyword::USING)?;
15489        let source = self.parse_table_factor()?;
15490        self.expect_keyword_is(Keyword::ON)?;
15491        let on = self.parse_expr()?;
15492        let clauses = self.parse_merge_clauses()?;
15493        let output = if self.peek_keyword(Keyword::OUTPUT) {
15494            Some(self.parse_output()?)
15495        } else {
15496            None
15497        };
15498
15499        Ok(Statement::Merge {
15500            into,
15501            table,
15502            source,
15503            on: Box::new(on),
15504            clauses,
15505            output,
15506        })
15507    }
15508
15509    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
15510        match self.parse_value()?.value {
15511            v @ Value::SingleQuotedString(_) => Ok(v),
15512            v @ Value::DoubleQuotedString(_) => Ok(v),
15513            v @ Value::Number(_, _) => Ok(v),
15514            v @ Value::Placeholder(_) => Ok(v),
15515            _ => {
15516                self.prev_token();
15517                self.expected("number or string or ? placeholder", self.peek_token())
15518            }
15519        }
15520    }
15521
15522    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
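    //
    // Illustrative SQLite-style examples covering the `=` and parenthesized forms:
    //   PRAGMA busy_timeout = 5000;
    //   PRAGMA cache_size(4000);
    //   PRAGMA integrity_check;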
15523    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
15524        let name = self.parse_object_name(false)?;
15525        if self.consume_token(&Token::LParen) {
15526            let value = self.parse_pragma_value()?;
15527            self.expect_token(&Token::RParen)?;
15528            Ok(Statement::Pragma {
15529                name,
15530                value: Some(value),
15531                is_eq: false,
15532            })
15533        } else if self.consume_token(&Token::Eq) {
15534            Ok(Statement::Pragma {
15535                name,
15536                value: Some(self.parse_pragma_value()?),
15537                is_eq: true,
15538            })
15539        } else {
15540            Ok(Statement::Pragma {
15541                name,
15542                value: None,
15543                is_eq: false,
15544            })
15545        }
15546    }
15547
15548    /// `INSTALL [extension_name]`
15549    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
15550        let extension_name = self.parse_identifier()?;
15551
15552        Ok(Statement::Install { extension_name })
15553    }
15554
15555    /// Parse a SQL LOAD statement
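    ///
    /// Illustrative examples (paths and names are made up): the DuckDB-style extension form
    /// `LOAD spatial`, or the Hive form
    ///
    /// ```sql
    /// LOAD DATA LOCAL INPATH '/tmp/data.txt' OVERWRITE INTO TABLE t PARTITION (year = 2024)
    /// ```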
15556    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
15557        if self.dialect.supports_load_extension() {
15558            let extension_name = self.parse_identifier()?;
15559            Ok(Statement::Load { extension_name })
15560        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
15561            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
15562            self.expect_keyword_is(Keyword::INPATH)?;
15563            let inpath = self.parse_literal_string()?;
15564            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
15565            self.expect_keyword_is(Keyword::INTO)?;
15566            self.expect_keyword_is(Keyword::TABLE)?;
15567            let table_name = self.parse_object_name(false)?;
15568            let partitioned = self.parse_insert_partition()?;
15569            let table_format = self.parse_load_data_table_format()?;
15570            Ok(Statement::LoadData {
15571                local,
15572                inpath,
15573                overwrite,
15574                table_name,
15575                partitioned,
15576                table_format,
15577            })
15578        } else {
15579            self.expected(
15580                "`DATA` or an extension name after `LOAD`",
15581                self.peek_token(),
15582            )
15583        }
15584    }
15585
15586    /// ```sql
15587    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
15588    /// ```
15589    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
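    ///
    /// Illustrative example (names made up):
    ///
    /// ```sql
    /// OPTIMIZE TABLE db.t ON CLUSTER main PARTITION ID 'p1' FINAL DEDUPLICATE BY id
    /// ```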
15590    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
15591        self.expect_keyword_is(Keyword::TABLE)?;
15592        let name = self.parse_object_name(false)?;
15593        let on_cluster = self.parse_optional_on_cluster()?;
15594
15595        let partition = if self.parse_keyword(Keyword::PARTITION) {
15596            if self.parse_keyword(Keyword::ID) {
15597                Some(Partition::Identifier(self.parse_identifier()?))
15598            } else {
15599                Some(Partition::Expr(self.parse_expr()?))
15600            }
15601        } else {
15602            None
15603        };
15604
15605        let include_final = self.parse_keyword(Keyword::FINAL);
15606        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
15607            if self.parse_keyword(Keyword::BY) {
15608                Some(Deduplicate::ByExpression(self.parse_expr()?))
15609            } else {
15610                Some(Deduplicate::All)
15611            }
15612        } else {
15613            None
15614        };
15615
15616        Ok(Statement::OptimizeTable {
15617            name,
15618            on_cluster,
15619            partition,
15620            include_final,
15621            deduplicate,
15622        })
15623    }
15624
15625    /// ```sql
15626    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
15627    /// ```
15628    ///
15629    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
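    ///
    /// Illustrative example (names and numbers made up):
    ///
    /// ```sql
    /// CREATE TEMPORARY SEQUENCE IF NOT EXISTS seq AS BIGINT
    ///     INCREMENT BY 2 MINVALUE 1 MAXVALUE 1000 START WITH 10 CACHE 5 NO CYCLE OWNED BY t.id
    /// ```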
15630    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
15631        // [ IF NOT EXISTS ]
15632        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
15633        // name
15634        let name = self.parse_object_name(false)?;
15635        // [ AS data_type ]
15636        let mut data_type: Option<DataType> = None;
15637        if self.parse_keywords(&[Keyword::AS]) {
15638            data_type = Some(self.parse_data_type()?)
15639        }
15640        let sequence_options = self.parse_create_sequence_options()?;
15641        // [ OWNED BY { table_name.column_name | NONE } ]
15642        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
15643            if self.parse_keywords(&[Keyword::NONE]) {
15644                Some(ObjectName::from(vec![Ident::new("NONE")]))
15645            } else {
15646                Some(self.parse_object_name(false)?)
15647            }
15648        } else {
15649            None
15650        };
15651        Ok(Statement::CreateSequence {
15652            temporary,
15653            if_not_exists,
15654            name,
15655            data_type,
15656            sequence_options,
15657            owned_by,
15658        })
15659    }
15660
15661    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
15662        let mut sequence_options = vec![];
15663        // [ INCREMENT [ BY ] increment ]
15664        if self.parse_keywords(&[Keyword::INCREMENT]) {
15665            if self.parse_keywords(&[Keyword::BY]) {
15666                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
15667            } else {
15668                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
15669            }
15670        }
15671        // [ MINVALUE minvalue | NO MINVALUE ]
15672        if self.parse_keyword(Keyword::MINVALUE) {
15673            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
15674        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
15675            sequence_options.push(SequenceOptions::MinValue(None));
15676        }
15677        // [ MAXVALUE maxvalue | NO MAXVALUE ]
15678        if self.parse_keywords(&[Keyword::MAXVALUE]) {
15679            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
15680        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
15681            sequence_options.push(SequenceOptions::MaxValue(None));
15682        }
15683
15684        // [ START [ WITH ] start ]
15685        if self.parse_keywords(&[Keyword::START]) {
15686            if self.parse_keywords(&[Keyword::WITH]) {
15687                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
15688            } else {
15689                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
15690            }
15691        }
15692        // [ CACHE cache ]
15693        if self.parse_keywords(&[Keyword::CACHE]) {
15694            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
15695        }
15696        // [ [ NO ] CYCLE ]
15697        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
15698            sequence_options.push(SequenceOptions::Cycle(true));
15699        } else if self.parse_keywords(&[Keyword::CYCLE]) {
15700            sequence_options.push(SequenceOptions::Cycle(false));
15701        }
15702
15703        Ok(sequence_options)
15704    }
15705
15706    /// The index of the first unprocessed token.
15707    pub fn index(&self) -> usize {
15708        self.index
15709    }
15710
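    /// Parse a single named window definition in a `WINDOW` clause, e.g.
    /// `w AS (PARTITION BY a ORDER BY b)` or, for dialects that allow
    /// referencing another named window, `w AS other_window`.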
15711    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
15712        let ident = self.parse_identifier()?;
15713        self.expect_keyword_is(Keyword::AS)?;
15714
15715        let window_expr = if self.consume_token(&Token::LParen) {
15716            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
15717        } else if self.dialect.supports_window_clause_named_window_reference() {
15718            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
15719        } else {
15720            return self.expected("(", self.peek_token());
15721        };
15722
15723        Ok(NamedWindowDefinition(ident, window_expr))
15724    }
15725
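    /// Parse the remainder of a `CREATE [ OR ALTER ] PROCEDURE` statement:
    /// the procedure name, an optional parameter list, the `AS` keyword, and
    /// a body of statements terminated by `END`.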
15726    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
15727        let name = self.parse_object_name(false)?;
15728        let params = self.parse_optional_procedure_parameters()?;
15729        self.expect_keyword_is(Keyword::AS)?;
15730
15731        let body = self.parse_conditional_statements(&[Keyword::END])?;
15732
15733        Ok(Statement::CreateProcedure {
15734            name,
15735            or_alter,
15736            params,
15737            body,
15738        })
15739    }
15740
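    /// Parse the contents of a window specification: an optional base window
    /// name, `PARTITION BY`, `ORDER BY`, and a window frame, each optional.
    /// The opening `(` is expected to have been consumed already; the closing
    /// `)` is consumed here.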
15741    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
15742        let window_name = match self.peek_token().token {
15743            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
15744                self.parse_optional_ident()?
15745            }
15746            _ => None,
15747        };
15748
15749        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
15750            self.parse_comma_separated(Parser::parse_expr)?
15751        } else {
15752            vec![]
15753        };
15754        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15755            self.parse_comma_separated(Parser::parse_order_by_expr)?
15756        } else {
15757            vec![]
15758        };
15759
15760        let window_frame = if !self.consume_token(&Token::RParen) {
15761            let window_frame = self.parse_window_frame()?;
15762            self.expect_token(&Token::RParen)?;
15763            Some(window_frame)
15764        } else {
15765            None
15766        };
15767        Ok(WindowSpec {
15768            window_name,
15769            partition_by,
15770            order_by,
15771            window_frame,
15772        })
15773    }
15774
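    /// Parse the remainder of a `CREATE TYPE` statement: the type name
    /// followed by either `AS ENUM (...)` or an `AS (...)` composite
    /// attribute list.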
15775    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
15776        let name = self.parse_object_name(false)?;
15777        self.expect_keyword_is(Keyword::AS)?;
15778
15779        if self.parse_keyword(Keyword::ENUM) {
15780            return self.parse_create_type_enum(name);
15781        }
15782
15783        let mut attributes = vec![];
15784        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
15785            return Ok(Statement::CreateType {
15786                name,
15787                representation: UserDefinedTypeRepresentation::Composite { attributes },
15788            });
15789        }
15790
15791        loop {
15792            let attr_name = self.parse_identifier()?;
15793            let attr_data_type = self.parse_data_type()?;
15794            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
15795                Some(self.parse_object_name(false)?)
15796            } else {
15797                None
15798            };
15799            attributes.push(UserDefinedTypeCompositeAttributeDef {
15800                name: attr_name,
15801                data_type: attr_data_type,
15802                collation: attr_collation,
15803            });
15804            let comma = self.consume_token(&Token::Comma);
15805            if self.consume_token(&Token::RParen) {
15806                // allow a trailing comma
15807                break;
15808            } else if !comma {
15809                return self.expected("',' or ')' after attribute definition", self.peek_token());
15810            }
15811        }
15812
15813        Ok(Statement::CreateType {
15814            name,
15815            representation: UserDefinedTypeRepresentation::Composite { attributes },
15816        })
15817    }
15818
15819    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
15820    ///
15821    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
15822    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
15823        self.expect_token(&Token::LParen)?;
15824        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
15825        self.expect_token(&Token::RParen)?;
15826
15827        Ok(Statement::CreateType {
15828            name,
15829            representation: UserDefinedTypeRepresentation::Enum { labels },
15830        })
15831    }
15832
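    /// Parse a parenthesized, comma-separated list of identifiers,
    /// e.g. `(p0, p1)`.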
15833    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
15834        self.expect_token(&Token::LParen)?;
15835        let partitions = self.parse_comma_separated(|p| p.parse_identifier())?;
15836        self.expect_token(&Token::RParen)?;
15837        Ok(partitions)
15838    }
15839
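    /// Parse an optional MySQL-style column position, `FIRST` or
    /// `AFTER <column>`; returns `None` for dialects other than MySQL
    /// and Generic.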
15840    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
15841        if dialect_of!(self is MySqlDialect | GenericDialect) {
15842            if self.parse_keyword(Keyword::FIRST) {
15843                Ok(Some(MySQLColumnPosition::First))
15844            } else if self.parse_keyword(Keyword::AFTER) {
15845                let ident = self.parse_identifier()?;
15846                Ok(Some(MySQLColumnPosition::After(ident)))
15847            } else {
15848                Ok(None)
15849            }
15850        } else {
15851            Ok(None)
15852        }
15853    }
15854
15855    /// Parse [Statement::Print]
15856    fn parse_print(&mut self) -> Result<Statement, ParserError> {
15857        Ok(Statement::Print(PrintStatement {
15858            message: Box::new(self.parse_expr()?),
15859        }))
15860    }
15861
15862    /// Parse [Statement::Return]
15863    fn parse_return(&mut self) -> Result<Statement, ParserError> {
15864        match self.maybe_parse(|p| p.parse_expr())? {
15865            Some(expr) => Ok(Statement::Return(ReturnStatement {
15866                value: Some(ReturnStatementValue::Expr(expr)),
15867            })),
15868            None => Ok(Statement::Return(ReturnStatement { value: None })),
15869        }
15870    }
15871
15872    /// Consume the parser and return its underlying token buffer
15873    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
15874        self.tokens
15875    }
15876
15877    /// Returns true if the next keyword indicates a subquery, i.e. SELECT or WITH
15878    fn peek_sub_query(&mut self) -> bool {
15879        if self
15880            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
15881            .is_some()
15882        {
15883            self.prev_token();
15884            return true;
15885        }
15886        false
15887    }
15888
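    /// Parse the trailing options of a `SHOW` statement: an optional filter
    /// (placed before or after the `FROM`/`IN` clause depending on the
    /// dialect), plus optional `STARTS WITH`, `LIMIT`, and `FROM` clauses.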
15889    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
15890        let show_in;
15891        let mut filter_position = None;
15892        if self.dialect.supports_show_like_before_in() {
15893            if let Some(filter) = self.parse_show_statement_filter()? {
15894                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
15895            }
15896            show_in = self.maybe_parse_show_stmt_in()?;
15897        } else {
15898            show_in = self.maybe_parse_show_stmt_in()?;
15899            if let Some(filter) = self.parse_show_statement_filter()? {
15900                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
15901            }
15902        }
15903        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
15904        let limit = self.maybe_parse_show_stmt_limit()?;
15905        let from = self.maybe_parse_show_stmt_from()?;
15906        Ok(ShowStatementOptions {
15907            filter_position,
15908            show_in,
15909            starts_with,
15910            limit,
15911            limit_from: from,
15912        })
15913    }
15914
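    /// Parse an optional `FROM | IN` clause of a `SHOW` statement, e.g.
    /// `IN ACCOUNT`, `IN DATABASE db`, or the MySQL-style
    /// `FROM tbl_name FROM db_name`.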
15915    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
15916        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
15917            Some(Keyword::FROM) => ShowStatementInClause::FROM,
15918            Some(Keyword::IN) => ShowStatementInClause::IN,
15919            None => return Ok(None),
15920            _ => return self.expected("FROM or IN", self.peek_token()),
15921        };
15922
15923        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
15924            Keyword::ACCOUNT,
15925            Keyword::DATABASE,
15926            Keyword::SCHEMA,
15927            Keyword::TABLE,
15928            Keyword::VIEW,
15929        ]) {
15930            // If we see these next keywords it means we don't have a parent name
15931            Some(Keyword::DATABASE)
15932                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
15933                    | self.peek_keyword(Keyword::LIMIT) =>
15934            {
15935                (Some(ShowStatementInParentType::Database), None)
15936            }
15937            Some(Keyword::SCHEMA)
15938                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
15939                    | self.peek_keyword(Keyword::LIMIT) =>
15940            {
15941                (Some(ShowStatementInParentType::Schema), None)
15942            }
15943            Some(parent_kw) => {
15944                // The parent name here is still optional, for example:
15945                // SHOW TABLES IN ACCOUNT, so parsing the object name
15946                // may fail because the statement ends.
15947                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
15948                match parent_kw {
15949                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
15950                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
15951                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
15952                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
15953                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
15954                    _ => {
15955                        return self.expected(
15956                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
15957                            self.peek_token(),
15958                        )
15959                    }
15960                }
15961            }
15962            None => {
15963                // Parsing MySQL style FROM tbl_name FROM db_name,
15964                // which is equivalent to FROM db_name.tbl_name
15965                let mut parent_name = self.parse_object_name(false)?;
15966                if self
15967                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
15968                    .is_some()
15969                {
15970                    parent_name
15971                        .0
15972                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
15973                }
15974                (None, Some(parent_name))
15975            }
15976        };
15977
15978        Ok(Some(ShowStatementIn {
15979            clause,
15980            parent_type,
15981            parent_name,
15982        }))
15983    }
15984
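    /// Parse an optional `STARTS WITH <value>` clause of a `SHOW` statement.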
15985    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
15986        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
15987            Ok(Some(self.parse_value()?.value))
15988        } else {
15989            Ok(None)
15990        }
15991    }
15992
15993    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
15994        if self.parse_keyword(Keyword::LIMIT) {
15995            Ok(self.parse_limit()?)
15996        } else {
15997            Ok(None)
15998        }
15999    }
16000
16001    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
16002        if self.parse_keyword(Keyword::FROM) {
16003            Ok(Some(self.parse_value()?.value))
16004        } else {
16005            Ok(None)
16006        }
16007    }
16008}
16009
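/// Wrap `expr` in [`Expr::Prefixed`] when a prefix identifier is present;
/// otherwise return `expr` unchanged.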
16010fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
16011    if let Some(prefix) = prefix {
16012        Expr::Prefixed {
16013            prefix,
16014            value: Box::new(expr),
16015        }
16016    } else {
16017        expr
16018    }
16019}
16020
16021impl Word {
16022    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
16023    pub fn to_ident(&self, span: Span) -> Ident {
16024        Ident {
16025            value: self.value.clone(),
16026            quote_style: self.quote_style,
16027            span,
16028        }
16029    }
16030
16031    /// Convert this word into an [`Ident`] identifier
16032    pub fn into_ident(self, span: Span) -> Ident {
16033        Ident {
16034            value: self.value,
16035            quote_style: self.quote_style,
16036            span,
16037        }
16038    }
16039}
16040
16041#[cfg(test)]
16042mod tests {
16043    use crate::test_utils::{all_dialects, TestedDialects};
16044
16045    use super::*;
16046
16047    #[test]
16048    fn test_prev_index() {
16049        let sql = "SELECT version";
16050        all_dialects().run_parser_method(sql, |parser| {
16051            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
16052            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
16053            parser.prev_token();
16054            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
16055            assert_eq!(parser.next_token(), Token::make_word("version", None));
16056            parser.prev_token();
16057            assert_eq!(parser.peek_token(), Token::make_word("version", None));
16058            assert_eq!(parser.next_token(), Token::make_word("version", None));
16059            assert_eq!(parser.peek_token(), Token::EOF);
16060            parser.prev_token();
16061            assert_eq!(parser.next_token(), Token::make_word("version", None));
16062            assert_eq!(parser.next_token(), Token::EOF);
16063            assert_eq!(parser.next_token(), Token::EOF);
16064            parser.prev_token();
16065        });
16066    }
16067
16068    #[test]
16069    fn test_peek_tokens() {
16070        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
16071            assert!(matches!(
16072                parser.peek_tokens(),
16073                [Token::Word(Word {
16074                    keyword: Keyword::SELECT,
16075                    ..
16076                })]
16077            ));
16078
16079            assert!(matches!(
16080                parser.peek_tokens(),
16081                [
16082                    Token::Word(Word {
16083                        keyword: Keyword::SELECT,
16084                        ..
16085                    }),
16086                    Token::Word(_),
16087                    Token::Word(Word {
16088                        keyword: Keyword::AS,
16089                        ..
16090                    }),
16091                ]
16092            ));
16093
16094            for _ in 0..4 {
16095                parser.next_token();
16096            }
16097
16098            assert!(matches!(
16099                parser.peek_tokens(),
16100                [
16101                    Token::Word(Word {
16102                        keyword: Keyword::FROM,
16103                        ..
16104                    }),
16105                    Token::Word(_),
16106                    Token::EOF,
16107                    Token::EOF,
16108                ]
16109            ))
16110        })
16111    }
16112
16113    #[cfg(test)]
16114    mod test_parse_data_type {
16115        use crate::ast::{
16116            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
16117        };
16118        use crate::dialect::{AnsiDialect, GenericDialect};
16119        use crate::test_utils::TestedDialects;
16120
16121        macro_rules! test_parse_data_type {
16122            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
16123                $dialect.run_parser_method(&*$input, |parser| {
16124                    let data_type = parser.parse_data_type().unwrap();
16125                    assert_eq!($expected_type, data_type);
16126                    assert_eq!($input.to_string(), data_type.to_string());
16127                });
16128            }};
16129        }
16130
16131        #[test]
16132        fn test_ansii_character_string_types() {
16133            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
16134            let dialect =
16135                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16136
16137            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
16138
16139            test_parse_data_type!(
16140                dialect,
16141                "CHARACTER(20)",
16142                DataType::Character(Some(CharacterLength::IntegerLength {
16143                    length: 20,
16144                    unit: None
16145                }))
16146            );
16147
16148            test_parse_data_type!(
16149                dialect,
16150                "CHARACTER(20 CHARACTERS)",
16151                DataType::Character(Some(CharacterLength::IntegerLength {
16152                    length: 20,
16153                    unit: Some(CharLengthUnits::Characters)
16154                }))
16155            );
16156
16157            test_parse_data_type!(
16158                dialect,
16159                "CHARACTER(20 OCTETS)",
16160                DataType::Character(Some(CharacterLength::IntegerLength {
16161                    length: 20,
16162                    unit: Some(CharLengthUnits::Octets)
16163                }))
16164            );
16165
16166            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
16167
16168            test_parse_data_type!(
16169                dialect,
16170                "CHAR(20)",
16171                DataType::Char(Some(CharacterLength::IntegerLength {
16172                    length: 20,
16173                    unit: None
16174                }))
16175            );
16176
16177            test_parse_data_type!(
16178                dialect,
16179                "CHAR(20 CHARACTERS)",
16180                DataType::Char(Some(CharacterLength::IntegerLength {
16181                    length: 20,
16182                    unit: Some(CharLengthUnits::Characters)
16183                }))
16184            );
16185
16186            test_parse_data_type!(
16187                dialect,
16188                "CHAR(20 OCTETS)",
16189                DataType::Char(Some(CharacterLength::IntegerLength {
16190                    length: 20,
16191                    unit: Some(CharLengthUnits::Octets)
16192                }))
16193            );
16194
16195            test_parse_data_type!(
16196                dialect,
16197                "CHARACTER VARYING(20)",
16198                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
16199                    length: 20,
16200                    unit: None
16201                }))
16202            );
16203
16204            test_parse_data_type!(
16205                dialect,
16206                "CHARACTER VARYING(20 CHARACTERS)",
16207                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
16208                    length: 20,
16209                    unit: Some(CharLengthUnits::Characters)
16210                }))
16211            );
16212
16213            test_parse_data_type!(
16214                dialect,
16215                "CHARACTER VARYING(20 OCTETS)",
16216                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
16217                    length: 20,
16218                    unit: Some(CharLengthUnits::Octets)
16219                }))
16220            );
16221
16222            test_parse_data_type!(
16223                dialect,
16224                "CHAR VARYING(20)",
16225                DataType::CharVarying(Some(CharacterLength::IntegerLength {
16226                    length: 20,
16227                    unit: None
16228                }))
16229            );
16230
16231            test_parse_data_type!(
16232                dialect,
16233                "CHAR VARYING(20 CHARACTERS)",
16234                DataType::CharVarying(Some(CharacterLength::IntegerLength {
16235                    length: 20,
16236                    unit: Some(CharLengthUnits::Characters)
16237                }))
16238            );
16239
16240            test_parse_data_type!(
16241                dialect,
16242                "CHAR VARYING(20 OCTETS)",
16243                DataType::CharVarying(Some(CharacterLength::IntegerLength {
16244                    length: 20,
16245                    unit: Some(CharLengthUnits::Octets)
16246                }))
16247            );
16248
16249            test_parse_data_type!(
16250                dialect,
16251                "VARCHAR(20)",
16252                DataType::Varchar(Some(CharacterLength::IntegerLength {
16253                    length: 20,
16254                    unit: None
16255                }))
16256            );
16257        }
16258
16259        #[test]
16260        fn test_ansii_character_large_object_types() {
16261            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
16262            let dialect =
16263                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16264
16265            test_parse_data_type!(
16266                dialect,
16267                "CHARACTER LARGE OBJECT",
16268                DataType::CharacterLargeObject(None)
16269            );
16270            test_parse_data_type!(
16271                dialect,
16272                "CHARACTER LARGE OBJECT(20)",
16273                DataType::CharacterLargeObject(Some(20))
16274            );
16275
16276            test_parse_data_type!(
16277                dialect,
16278                "CHAR LARGE OBJECT",
16279                DataType::CharLargeObject(None)
16280            );
16281            test_parse_data_type!(
16282                dialect,
16283                "CHAR LARGE OBJECT(20)",
16284                DataType::CharLargeObject(Some(20))
16285            );
16286
16287            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
16288            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
16289        }
16290
16291        #[test]
16292        fn test_parse_custom_types() {
16293            let dialect =
16294                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16295
16296            test_parse_data_type!(
16297                dialect,
16298                "GEOMETRY",
16299                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
16300            );
16301
16302            test_parse_data_type!(
16303                dialect,
16304                "GEOMETRY(POINT)",
16305                DataType::Custom(
16306                    ObjectName::from(vec!["GEOMETRY".into()]),
16307                    vec!["POINT".to_string()]
16308                )
16309            );
16310
16311            test_parse_data_type!(
16312                dialect,
16313                "GEOMETRY(POINT, 4326)",
16314                DataType::Custom(
16315                    ObjectName::from(vec!["GEOMETRY".into()]),
16316                    vec!["POINT".to_string(), "4326".to_string()]
16317                )
16318            );
16319        }
16320
16321        #[test]
16322        fn test_ansii_exact_numeric_types() {
16323            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
16324            let dialect =
16325                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16326
16327            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
16328
16329            test_parse_data_type!(
16330                dialect,
16331                "NUMERIC(2)",
16332                DataType::Numeric(ExactNumberInfo::Precision(2))
16333            );
16334
16335            test_parse_data_type!(
16336                dialect,
16337                "NUMERIC(2,10)",
16338                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
16339            );
16340
16341            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
16342
16343            test_parse_data_type!(
16344                dialect,
16345                "DECIMAL(2)",
16346                DataType::Decimal(ExactNumberInfo::Precision(2))
16347            );
16348
16349            test_parse_data_type!(
16350                dialect,
16351                "DECIMAL(2,10)",
16352                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
16353            );
16354
16355            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
16356
16357            test_parse_data_type!(
16358                dialect,
16359                "DEC(2)",
16360                DataType::Dec(ExactNumberInfo::Precision(2))
16361            );
16362
16363            test_parse_data_type!(
16364                dialect,
16365                "DEC(2,10)",
16366                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
16367            );
16368        }
16369
16370        #[test]
16371        fn test_ansii_date_type() {
16372            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
16373            let dialect =
16374                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16375
16376            test_parse_data_type!(dialect, "DATE", DataType::Date);
16377
16378            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
16379
16380            test_parse_data_type!(
16381                dialect,
16382                "TIME(6)",
16383                DataType::Time(Some(6), TimezoneInfo::None)
16384            );
16385
16386            test_parse_data_type!(
16387                dialect,
16388                "TIME WITH TIME ZONE",
16389                DataType::Time(None, TimezoneInfo::WithTimeZone)
16390            );
16391
16392            test_parse_data_type!(
16393                dialect,
16394                "TIME(6) WITH TIME ZONE",
16395                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
16396            );
16397
16398            test_parse_data_type!(
16399                dialect,
16400                "TIME WITHOUT TIME ZONE",
16401                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
16402            );
16403
16404            test_parse_data_type!(
16405                dialect,
16406                "TIME(6) WITHOUT TIME ZONE",
16407                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
16408            );
16409
16410            test_parse_data_type!(
16411                dialect,
16412                "TIMESTAMP",
16413                DataType::Timestamp(None, TimezoneInfo::None)
16414            );
16415
16416            test_parse_data_type!(
16417                dialect,
16418                "TIMESTAMP(22)",
16419                DataType::Timestamp(Some(22), TimezoneInfo::None)
16420            );
16421
16422            test_parse_data_type!(
16423                dialect,
16424                "TIMESTAMP(22) WITH TIME ZONE",
16425                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
16426            );
16427
16428            test_parse_data_type!(
16429                dialect,
16430                "TIMESTAMP(33) WITHOUT TIME ZONE",
16431                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
16432            );
16433        }
16434    }
16435
16436    #[test]
16437    fn test_parse_schema_name() {
16438        // The expected structure must serialize back to the input string, so the macro takes only the input and the expected parsed structure
16439        macro_rules! test_parse_schema_name {
16440            ($input:expr, $expected_name:expr $(,)?) => {{
16441                all_dialects().run_parser_method(&*$input, |parser| {
16442                    let schema_name = parser.parse_schema_name().unwrap();
16443                    // Validate that the structure is the same as expected
16444                    assert_eq!(schema_name, $expected_name);
16445                    // Validate that the input and the expected structure serialization are the same
16446                    assert_eq!(schema_name.to_string(), $input.to_string());
16447                });
16448            }};
16449        }
16450
16451        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
16452        let dummy_authorization = Ident::new("dummy_authorization");
16453
16454        test_parse_schema_name!(
16455            format!("{dummy_name}"),
16456            SchemaName::Simple(dummy_name.clone())
16457        );
16458
16459        test_parse_schema_name!(
16460            format!("AUTHORIZATION {dummy_authorization}"),
16461            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
16462        );
16463        test_parse_schema_name!(
16464            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
16465            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
16466        );
16467    }
16468
16469    #[test]
16470    fn mysql_parse_index_table_constraint() {
16471        macro_rules! test_parse_table_constraint {
16472            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
16473                $dialect.run_parser_method(&*$input, |parser| {
16474                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
16475                    // Validate that the structure is the same as expected
16476                    assert_eq!(constraint, $expected);
16477                    // Validate that the input and the expected structure serialization are the same
16478                    assert_eq!(constraint.to_string(), $input.to_string());
16479                });
16480            }};
16481        }
16482
16483        let dialect =
16484            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
16485
16486        test_parse_table_constraint!(
16487            dialect,
16488            "INDEX (c1)",
16489            TableConstraint::Index {
16490                display_as_key: false,
16491                name: None,
16492                index_type: None,
16493                columns: vec![Ident::new("c1")],
16494            }
16495        );
16496
16497        test_parse_table_constraint!(
16498            dialect,
16499            "KEY (c1)",
16500            TableConstraint::Index {
16501                display_as_key: true,
16502                name: None,
16503                index_type: None,
16504                columns: vec![Ident::new("c1")],
16505            }
16506        );
16507
16508        test_parse_table_constraint!(
16509            dialect,
16510            "INDEX 'index' (c1, c2)",
16511            TableConstraint::Index {
16512                display_as_key: false,
16513                name: Some(Ident::with_quote('\'', "index")),
16514                index_type: None,
16515                columns: vec![Ident::new("c1"), Ident::new("c2")],
16516            }
16517        );
16518
16519        test_parse_table_constraint!(
16520            dialect,
16521            "INDEX USING BTREE (c1)",
16522            TableConstraint::Index {
16523                display_as_key: false,
16524                name: None,
16525                index_type: Some(IndexType::BTree),
16526                columns: vec![Ident::new("c1")],
16527            }
16528        );
16529
16530        test_parse_table_constraint!(
16531            dialect,
16532            "INDEX USING HASH (c1)",
16533            TableConstraint::Index {
16534                display_as_key: false,
16535                name: None,
16536                index_type: Some(IndexType::Hash),
16537                columns: vec![Ident::new("c1")],
16538            }
16539        );
16540
16541        test_parse_table_constraint!(
16542            dialect,
16543            "INDEX idx_name USING BTREE (c1)",
16544            TableConstraint::Index {
16545                display_as_key: false,
16546                name: Some(Ident::new("idx_name")),
16547                index_type: Some(IndexType::BTree),
16548                columns: vec![Ident::new("c1")],
16549            }
16550        );
16551
16552        test_parse_table_constraint!(
16553            dialect,
16554            "INDEX idx_name USING HASH (c1)",
16555            TableConstraint::Index {
16556                display_as_key: false,
16557                name: Some(Ident::new("idx_name")),
16558                index_type: Some(IndexType::Hash),
16559                columns: vec![Ident::new("c1")],
16560            }
16561        );
16562    }
16563
16564    #[test]
16565    fn test_tokenizer_error_loc() {
16566        let sql = "foo '";
16567        let ast = Parser::parse_sql(&GenericDialect, sql);
16568        assert_eq!(
16569            ast,
16570            Err(ParserError::TokenizerError(
16571                "Unterminated string literal at Line: 1, Column: 5".to_string()
16572            ))
16573        );
16574    }
16575
16576    #[test]
16577    fn test_parser_error_loc() {
16578        let sql = "SELECT this is a syntax error";
16579        let ast = Parser::parse_sql(&GenericDialect, sql);
16580        assert_eq!(
16581            ast,
16582            Err(ParserError::ParserError(
16583                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
16584                    .to_string()
16585            ))
16586        );
16587    }
16588
16589    #[test]
16590    fn test_nested_explain_error() {
16591        let sql = "EXPLAIN EXPLAIN SELECT 1";
16592        let ast = Parser::parse_sql(&GenericDialect, sql);
16593        assert_eq!(
16594            ast,
16595            Err(ParserError::ParserError(
16596                "Explain must be root of the plan".to_string()
16597            ))
16598        );
16599    }
16600
16601    #[test]
16602    fn test_parse_multipart_identifier_positive() {
16603        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
16604
16605        // parse multipart with quotes
16606        let expected = vec![
16607            Ident {
16608                value: "CATALOG".to_string(),
16609                quote_style: None,
16610                span: Span::empty(),
16611            },
16612            Ident {
16613                value: "F(o)o. \"bar".to_string(),
16614                quote_style: Some('"'),
16615                span: Span::empty(),
16616            },
16617            Ident {
16618                value: "table".to_string(),
16619                quote_style: None,
16620                span: Span::empty(),
16621            },
16622        ];
16623        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
16624            let actual = parser.parse_multipart_identifier().unwrap();
16625            assert_eq!(expected, actual);
16626        });
16627
16628        // allow whitespace between ident parts
16629        let expected = vec![
16630            Ident {
16631                value: "CATALOG".to_string(),
16632                quote_style: None,
16633                span: Span::empty(),
16634            },
16635            Ident {
16636                value: "table".to_string(),
16637                quote_style: None,
16638                span: Span::empty(),
16639            },
16640        ];
16641        dialect.run_parser_method("CATALOG . table", |parser| {
16642            let actual = parser.parse_multipart_identifier().unwrap();
16643            assert_eq!(expected, actual);
16644        });
16645    }
16646
16647    #[test]
16648    fn test_parse_multipart_identifier_negative() {
16649        macro_rules! test_parse_multipart_identifier_error {
16650            ($input:expr, $expected_err:expr $(,)?) => {{
16651                all_dialects().run_parser_method(&*$input, |parser| {
16652                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
16653                    assert_eq!(actual_err.to_string(), $expected_err);
16654                });
16655            }};
16656        }
16657
16658        test_parse_multipart_identifier_error!(
16659            "",
16660            "sql parser error: Empty input when parsing identifier",
16661        );
16662
16663        test_parse_multipart_identifier_error!(
16664            "*schema.table",
16665            "sql parser error: Unexpected token in identifier: *",
16666        );
16667
16668        test_parse_multipart_identifier_error!(
16669            "schema.table*",
16670            "sql parser error: Unexpected token in identifier: *",
16671        );
16672
16673        test_parse_multipart_identifier_error!(
16674            "schema.table.",
16675            "sql parser error: Trailing period in identifier",
16676        );
16677
16678        test_parse_multipart_identifier_error!(
16679            "schema.*",
16680            "sql parser error: Unexpected token following period in identifier: *",
16681        );
16682    }
16683
16684    #[test]
16685    fn test_mysql_partition_selection() {
16686        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
16687        let expected = vec!["p0", "p2"];
16688
16689        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
16690        assert_eq!(ast.len(), 1);
16691        if let Statement::Query(v) = &ast[0] {
16692            if let SetExpr::Select(select) = &*v.body {
16693                assert_eq!(select.from.len(), 1);
16694                let from: &TableWithJoins = &select.from[0];
16695                let table_factor = &from.relation;
16696                if let TableFactor::Table { partitions, .. } = table_factor {
16697                    let actual: Vec<&str> = partitions
16698                        .iter()
16699                        .map(|ident| ident.value.as_str())
16700                        .collect();
16701                    assert_eq!(expected, actual);
16702                }
16703            }
16704        } else {
16705            panic!("failed to parse MySQL partition selection");
16706        }
16707    }
16708
16709    #[test]
16710    fn test_replace_into_placeholders() {
16711        let sql = "REPLACE INTO t (a) VALUES (&a)";
16712
16713        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
16714    }
16715
16716    #[test]
16717    fn test_replace_into_set_placeholder() {
16718        let sql = "REPLACE INTO t SET ?";
16719
16720        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
16721    }
16722
16723    #[test]
16724    fn test_replace_incomplete() {
16725        let sql = r#"REPLACE"#;
16726
16727        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
16728    }
16729}