#[cfg(not(feature = "std"))]
use crate::alloc::string::ToString;
use crate::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType, KeyValueOptions};
use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
use crate::ast::helpers::stmt_data_loading::{
    FileStagingCommand, StageLoadSelectItem, StageLoadSelectItemKind, StageParamsObject,
};
use crate::ast::{
    ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident,
    IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind,
    IdentityPropertyOrder, ObjectName, RowAccessPolicy, ShowObjects, SqlOption, Statement,
    TagsColumnOption, WrappedCollection,
};
use crate::dialect::{Dialect, Precedence};
use crate::keywords::Keyword;
use crate::parser::{IsOptional, Parser, ParserError};
use crate::tokenizer::{Token, Word};
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use alloc::string::String;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(not(feature = "std"))]
use alloc::{format, vec};

use super::keywords::RESERVED_FOR_IDENTIFIER;
use crate::ast::StorageSerializationPolicy;

const RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR: [Keyword; 1] = [Keyword::CONNECT_BY_ROOT];
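
/// A [`Dialect`] for [Snowflake](https://www.snowflake.com/)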
#[derive(Debug, Default)]
pub struct SnowflakeDialect;

impl Dialect for SnowflakeDialect {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_'
    }

    fn supports_projection_trailing_commas(&self) -> bool {
        true
    }

    fn supports_from_trailing_commas(&self) -> bool {
        true
    }

    fn supports_object_name_double_dot_notation(&self) -> bool {
        true
    }

    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_lowercase()
            || ch.is_ascii_uppercase()
            || ch.is_ascii_digit()
            || ch == '$'
            || ch == '_'
    }

    fn supports_string_literal_backslash_escape(&self) -> bool {
        true
    }

    fn supports_within_after_array_aggregation(&self) -> bool {
        true
    }

    fn supports_outer_join_operator(&self) -> bool {
        true
    }

    fn supports_connect_by(&self) -> bool {
        true
    }

    fn supports_execute_immediate(&self) -> bool {
        true
    }

    fn supports_match_recognize(&self) -> bool {
        true
    }

    fn supports_dictionary_syntax(&self) -> bool {
        true
    }

    fn supports_window_function_null_treatment_arg(&self) -> bool {
        true
    }

    fn supports_parenthesized_set_variables(&self) -> bool {
        true
    }

    fn supports_comment_on(&self) -> bool {
        true
    }

    fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
        if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
            let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
                Some(Keyword::SET) => true,
                Some(Keyword::UNSET) => false,
                _ => return Some(parser.expected("SET or UNSET", parser.peek_token())),
            };
            return Some(parse_alter_session(parser, set));
        }

        if parser.parse_keyword(Keyword::CREATE) {
            let or_replace = parser.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
            let global = match parser.parse_one_of_keywords(&[Keyword::LOCAL, Keyword::GLOBAL]) {
                Some(Keyword::LOCAL) => Some(false),
                Some(Keyword::GLOBAL) => Some(true),
                _ => None,
            };

            let mut temporary = false;
            let mut volatile = false;
            let mut transient = false;
            let mut iceberg = false;

            match parser.parse_one_of_keywords(&[
                Keyword::TEMP,
                Keyword::TEMPORARY,
                Keyword::VOLATILE,
                Keyword::TRANSIENT,
                Keyword::ICEBERG,
            ]) {
                Some(Keyword::TEMP | Keyword::TEMPORARY) => temporary = true,
                Some(Keyword::VOLATILE) => volatile = true,
                Some(Keyword::TRANSIENT) => transient = true,
                Some(Keyword::ICEBERG) => iceberg = true,
                _ => {}
            }

            if parser.parse_keyword(Keyword::STAGE) {
                return Some(parse_create_stage(or_replace, temporary, parser));
            } else if parser.parse_keyword(Keyword::TABLE) {
                return Some(parse_create_table(
                    or_replace, global, temporary, volatile, transient, iceberg, parser,
                ));
            } else {
                let mut back = 1;
                if or_replace {
                    back += 2
                }
                if temporary {
                    back += 1
                }
                for _i in 0..back {
                    parser.prev_token();
                }
            }
        }
        if parser.parse_keywords(&[Keyword::COPY, Keyword::INTO]) {
            return Some(parse_copy_into(parser));
        }

        if let Some(kw) = parser.parse_one_of_keywords(&[
            Keyword::LIST,
            Keyword::LS,
            Keyword::REMOVE,
            Keyword::RM,
        ]) {
            return Some(parse_file_staging_command(kw, parser));
        }

        if parser.parse_keyword(Keyword::SHOW) {
            let terse = parser.parse_keyword(Keyword::TERSE);
            if parser.parse_keyword(Keyword::OBJECTS) {
                return Some(parse_show_objects(terse, parser));
            }
            if terse {
                parser.prev_token();
            }
            parser.prev_token();
        }

        None
    }

    fn parse_column_option(
        &self,
        parser: &mut Parser,
    ) -> Result<Option<Result<Option<ColumnOption>, ParserError>>, ParserError> {
        parser.maybe_parse(|parser| {
            let with = parser.parse_keyword(Keyword::WITH);

            if parser.parse_keyword(Keyword::IDENTITY) {
                Ok(parse_identity_property(parser)
                    .map(|p| Some(ColumnOption::Identity(IdentityPropertyKind::Identity(p)))))
            } else if parser.parse_keyword(Keyword::AUTOINCREMENT) {
                Ok(parse_identity_property(parser).map(|p| {
                    Some(ColumnOption::Identity(IdentityPropertyKind::Autoincrement(
                        p,
                    )))
                }))
            } else if parser.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::MaskingPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
                Ok(parse_column_policy_property(parser, with)
                    .map(|p| Some(ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(p)))))
            } else if parser.parse_keywords(&[Keyword::TAG]) {
                Ok(parse_column_tags(parser, with).map(|p| Some(ColumnOption::Tags(p))))
            } else {
                Err(ParserError::ParserError("no match found".to_string()))
            }
        })
    }

    fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
        let token = parser.peek_token();
        match token.token {
            Token::Colon => Some(Ok(self.prec_value(Precedence::DoubleColon))),
            _ => None,
        }
    }

    fn describe_requires_table_keyword(&self) -> bool {
        true
    }

    fn allow_extract_custom(&self) -> bool {
        true
    }

    fn allow_extract_single_quotes(&self) -> bool {
        true
    }

    fn supports_show_like_before_in(&self) -> bool {
        true
    }

    fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
        if matches!(kw, Keyword::INTERVAL) {
            false
        } else {
            RESERVED_FOR_IDENTIFIER.contains(&kw)
        }
    }

    fn supports_partiql(&self) -> bool {
        true
    }

    fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
        explicit
            || match kw {
                Keyword::EXCEPT
                | Keyword::LIMIT
                | Keyword::OFFSET
                | Keyword::RETURNING
                    if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) =>
                {
                    false
                }

                Keyword::FETCH
                    if parser.peek_keyword(Keyword::FIRST) || parser.peek_keyword(Keyword::NEXT) =>
                {
                    false
                }

                Keyword::FROM
                | Keyword::GROUP
                | Keyword::HAVING
                | Keyword::INTERSECT
                | Keyword::INTO
                | Keyword::MINUS
                | Keyword::ORDER
                | Keyword::SELECT
                | Keyword::UNION
                | Keyword::WHERE
                | Keyword::WITH => false,

                _ => true,
            }
    }

    fn supports_timestamp_versioning(&self) -> bool {
        true
    }

    fn supports_group_by_expr(&self) -> bool {
        true
    }

    fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
        &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
    }
}

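/// Parses the file-staging commands `LIST`/`LS` and `REMOVE`/`RM`,
/// e.g. `LIST @my_stage PATTERN='<regex>'`.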
fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
    let stage = parse_snowflake_stage_name(parser)?;
    let pattern = if parser.parse_keyword(Keyword::PATTERN) {
        parser.expect_token(&Token::Eq)?;
        Some(parser.parse_literal_string()?)
    } else {
        None
    };

    match kw {
        Keyword::LIST | Keyword::LS => Ok(Statement::List(FileStagingCommand { stage, pattern })),
        Keyword::REMOVE | Keyword::RM => {
            Ok(Statement::Remove(FileStagingCommand { stage, pattern }))
        }
        _ => Err(ParserError::ParserError(
            "unexpected stage command, expecting LIST, LS, REMOVE or RM".to_string(),
        )),
    }
}

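/// Parses `ALTER SESSION SET <options>` / `ALTER SESSION UNSET <names>`;
/// the leading `ALTER SESSION SET|UNSET` keywords have already been consumed.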
fn parse_alter_session(parser: &mut Parser, set: bool) -> Result<Statement, ParserError> {
    let session_options = parse_session_options(parser, set)?;
    Ok(Statement::AlterSession {
        set,
        session_params: KeyValueOptions {
            options: session_options,
        },
    })
}

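/// Parses a Snowflake `CREATE TABLE` statement. Assumes the `CREATE` keyword, any
/// modifiers (`OR REPLACE`, `TEMPORARY`, `TRANSIENT`, `ICEBERG`, ...) and the `TABLE`
/// keyword have already been consumed by the caller.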
pub fn parse_create_table(
    or_replace: bool,
    global: Option<bool>,
    temporary: bool,
    volatile: bool,
    transient: bool,
    iceberg: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let table_name = parser.parse_object_name(false)?;

    let mut builder = CreateTableBuilder::new(table_name)
        .or_replace(or_replace)
        .if_not_exists(if_not_exists)
        .temporary(temporary)
        .transient(transient)
        .volatile(volatile)
        .iceberg(iceberg)
        .global(global)
        .hive_formats(Some(Default::default()));

    let mut plain_options = vec![];

    loop {
        let next_token = parser.next_token();
        match &next_token.token {
            Token::Word(word) => match word.keyword {
                Keyword::COPY => {
                    parser.expect_keyword_is(Keyword::GRANTS)?;
                    builder = builder.copy_grants(true);
                }
                Keyword::COMMENT => {
                    parser.prev_token();
                    if let Some(comment_def) = parser.parse_optional_inline_comment()? {
                        plain_options.push(SqlOption::Comment(comment_def))
                    }
                }
                Keyword::AS => {
                    let query = parser.parse_query()?;
                    builder = builder.query(Some(query));
                    break;
                }
                Keyword::CLONE => {
                    let clone = parser.parse_object_name(false).ok();
                    builder = builder.clone_clause(clone);
                    break;
                }
                Keyword::LIKE => {
                    let like = parser.parse_object_name(false).ok();
                    builder = builder.like(like);
                    break;
                }
                Keyword::CLUSTER => {
                    parser.expect_keyword_is(Keyword::BY)?;
                    parser.expect_token(&Token::LParen)?;
                    let cluster_by = Some(WrappedCollection::Parentheses(
                        parser.parse_comma_separated(|p| p.parse_expr())?,
                    ));
                    parser.expect_token(&Token::RParen)?;

                    builder = builder.cluster_by(cluster_by)
                }
                Keyword::ENABLE_SCHEMA_EVOLUTION => {
                    parser.expect_token(&Token::Eq)?;
                    let enable_schema_evolution =
                        match parser.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
                            Some(Keyword::TRUE) => true,
                            Some(Keyword::FALSE) => false,
                            _ => {
                                return parser.expected("TRUE or FALSE", next_token);
                            }
                        };

                    builder = builder.enable_schema_evolution(Some(enable_schema_evolution));
                }
                Keyword::CHANGE_TRACKING => {
                    parser.expect_token(&Token::Eq)?;
                    let change_tracking =
                        match parser.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
                            Some(Keyword::TRUE) => true,
                            Some(Keyword::FALSE) => false,
                            _ => {
                                return parser.expected("TRUE or FALSE", next_token);
                            }
                        };

                    builder = builder.change_tracking(Some(change_tracking));
                }
                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let data_retention_time_in_days = parser.parse_literal_uint()?;
                    builder =
                        builder.data_retention_time_in_days(Some(data_retention_time_in_days));
                }
                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
                    parser.expect_token(&Token::Eq)?;
                    let max_data_extension_time_in_days = parser.parse_literal_uint()?;
                    builder = builder
                        .max_data_extension_time_in_days(Some(max_data_extension_time_in_days));
                }
                Keyword::DEFAULT_DDL_COLLATION => {
                    parser.expect_token(&Token::Eq)?;
                    let default_ddl_collation = parser.parse_literal_string()?;
                    builder = builder.default_ddl_collation(Some(default_ddl_collation));
                }
                Keyword::WITH => {
                    parser.expect_one_of_keywords(&[
                        Keyword::AGGREGATION,
                        Keyword::TAG,
                        Keyword::ROW,
                    ])?;
                    parser.prev_token();
                }
                Keyword::AGGREGATION => {
                    parser.expect_keyword_is(Keyword::POLICY)?;
                    let aggregation_policy = parser.parse_object_name(false)?;
                    builder = builder.with_aggregation_policy(Some(aggregation_policy));
                }
                Keyword::ROW => {
                    parser.expect_keywords(&[Keyword::ACCESS, Keyword::POLICY])?;
                    let policy = parser.parse_object_name(false)?;
                    parser.expect_keyword_is(Keyword::ON)?;
                    parser.expect_token(&Token::LParen)?;
                    let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
                    parser.expect_token(&Token::RParen)?;

                    builder =
                        builder.with_row_access_policy(Some(RowAccessPolicy::new(policy, columns)))
                }
                Keyword::TAG => {
                    parser.expect_token(&Token::LParen)?;
                    let tags = parser.parse_comma_separated(Parser::parse_tag)?;
                    parser.expect_token(&Token::RParen)?;
                    builder = builder.with_tags(Some(tags));
                }
                Keyword::ON if parser.parse_keyword(Keyword::COMMIT) => {
                    let on_commit = Some(parser.parse_create_table_on_commit()?);
                    builder = builder.on_commit(on_commit);
                }
                Keyword::EXTERNAL_VOLUME => {
                    parser.expect_token(&Token::Eq)?;
                    builder.external_volume = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog = Some(parser.parse_literal_string()?);
                }
                Keyword::BASE_LOCATION => {
                    parser.expect_token(&Token::Eq)?;
                    builder.base_location = Some(parser.parse_literal_string()?);
                }
                Keyword::CATALOG_SYNC => {
                    parser.expect_token(&Token::Eq)?;
                    builder.catalog_sync = Some(parser.parse_literal_string()?);
                }
                Keyword::STORAGE_SERIALIZATION_POLICY => {
                    parser.expect_token(&Token::Eq)?;

                    builder.storage_serialization_policy =
                        Some(parse_storage_serialization_policy(parser)?);
                }
                Keyword::IF if parser.parse_keywords(&[Keyword::NOT, Keyword::EXISTS]) => {
                    builder = builder.if_not_exists(true);
                }
                _ => {
                    return parser.expected("end of statement", next_token);
                }
            },
            Token::LParen => {
                parser.prev_token();
                let (columns, constraints) = parser.parse_columns()?;
                builder = builder.columns(columns).constraints(constraints);
            }
            Token::EOF => {
                if builder.columns.is_empty() {
                    return Err(ParserError::ParserError(
                        "unexpected end of input".to_string(),
                    ));
                }

                break;
            }
            Token::SemiColon => {
                if builder.columns.is_empty() {
                    return Err(ParserError::ParserError(
                        "unexpected end of input".to_string(),
                    ));
                }

                parser.prev_token();
                break;
            }
            _ => {
                return parser.expected("end of statement", next_token);
            }
        }
    }
    let table_options = if !plain_options.is_empty() {
        crate::ast::CreateTableOptions::Plain(plain_options)
    } else {
        crate::ast::CreateTableOptions::None
    };

    builder = builder.table_options(table_options);

    if iceberg && builder.base_location.is_none() {
        return Err(ParserError::ParserError(
            "BASE_LOCATION is required for ICEBERG tables".to_string(),
        ));
    }

    Ok(builder.build())
}

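/// Parses the value of a `STORAGE_SERIALIZATION_POLICY` property: `COMPATIBLE` or `OPTIMIZED`.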
pub fn parse_storage_serialization_policy(
    parser: &mut Parser,
) -> Result<StorageSerializationPolicy, ParserError> {
    let next_token = parser.next_token();
    match &next_token.token {
        Token::Word(w) => match w.keyword {
            Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
            Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
            _ => parser.expected("storage_serialization_policy", next_token),
        },
        _ => parser.expected("storage_serialization_policy", next_token),
    }
}

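/// Parses a Snowflake `CREATE STAGE` statement; the `CREATE [OR REPLACE] [TEMPORARY] STAGE`
/// prefix has already been consumed by the caller.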
pub fn parse_create_stage(
    or_replace: bool,
    temporary: bool,
    parser: &mut Parser,
) -> Result<Statement, ParserError> {
    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let name = parser.parse_object_name(false)?;
    let mut directory_table_params = Vec::new();
    let mut file_format = Vec::new();
    let mut copy_options = Vec::new();
    let mut comment = None;

    let stage_params = parse_stage_params(parser)?;

    if parser.parse_keyword(Keyword::DIRECTORY) {
        parser.expect_token(&Token::Eq)?;
        directory_table_params = parse_parentheses_options(parser)?;
    }

    if parser.parse_keyword(Keyword::FILE_FORMAT) {
        parser.expect_token(&Token::Eq)?;
        file_format = parse_parentheses_options(parser)?;
    }

    if parser.parse_keyword(Keyword::COPY_OPTIONS) {
        parser.expect_token(&Token::Eq)?;
        copy_options = parse_parentheses_options(parser)?;
    }

    if parser.parse_keyword(Keyword::COMMENT) {
        parser.expect_token(&Token::Eq)?;
        comment = Some(parser.parse_comment_value()?);
    }

    Ok(Statement::CreateStage {
        or_replace,
        temporary,
        if_not_exists,
        name,
        stage_params,
        directory_table_params: KeyValueOptions {
            options: directory_table_params,
        },
        file_format: KeyValueOptions {
            options: file_format,
        },
        copy_options: KeyValueOptions {
            options: copy_options,
        },
        comment,
    })
}

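/// Parses a single stage-name segment, accepting the `@`, `~`, `%` and `/` characters
/// that may appear in Snowflake stage references.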
pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result<Ident, ParserError> {
    let mut ident = String::new();
    while let Some(next_token) = parser.next_token_no_skip() {
        match &next_token.token {
            Token::Whitespace(_) | Token::SemiColon => break,
            Token::Period => {
                parser.prev_token();
                break;
            }
            Token::RParen => {
                parser.prev_token();
                break;
            }
            Token::AtSign => ident.push('@'),
            Token::Tilde => ident.push('~'),
            Token::Mod => ident.push('%'),
            Token::Div => ident.push('/'),
            Token::Word(w) => ident.push_str(&w.to_string()),
            _ => return parser.expected("stage name identifier", parser.peek_token()),
        }
    }
    Ok(Ident::new(ident))
}

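/// Parses a stage reference: either an `@`-prefixed stage name (parsed segment by segment)
/// or a regular object name.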
pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result<ObjectName, ParserError> {
    match parser.next_token().token {
        Token::AtSign => {
            parser.prev_token();
            let mut idents = vec![];
            loop {
                idents.push(parse_stage_name_identifier(parser)?);
                if !parser.consume_token(&Token::Period) {
                    break;
                }
            }
            Ok(ObjectName::from(idents))
        }
        _ => {
            parser.prev_token();
            Ok(parser.parse_object_name(false)?)
        }
    }
}

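/// Parses `COPY INTO`, covering both data loading into a table and data unloading into a
/// stage or external location; the `COPY INTO` keywords have already been consumed.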
pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
    let kind = match parser.peek_token().token {
        Token::AtSign => CopyIntoSnowflakeKind::Location,
        Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location,
        _ => CopyIntoSnowflakeKind::Table,
    };

    let mut files: Vec<String> = vec![];
    let mut from_transformations: Option<Vec<StageLoadSelectItemKind>> = None;
    let mut from_stage_alias = None;
    let mut from_stage = None;
    let mut stage_params = StageParamsObject {
        url: None,
        encryption: KeyValueOptions { options: vec![] },
        endpoint: None,
        storage_integration: None,
        credentials: KeyValueOptions { options: vec![] },
    };
    let mut from_query = None;
    let mut partition = None;
    let mut file_format = Vec::new();
    let mut pattern = None;
    let mut validation_mode = None;
    let mut copy_options = Vec::new();

    let into: ObjectName = parse_snowflake_stage_name(parser)?;
    if kind == CopyIntoSnowflakeKind::Location {
        stage_params = parse_stage_params(parser)?;
    }

    let into_columns = match &parser.peek_token().token {
        Token::LParen => Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?),
        _ => None,
    };

    parser.expect_keyword_is(Keyword::FROM)?;
    match parser.next_token().token {
        Token::LParen if kind == CopyIntoSnowflakeKind::Table => {
            parser.expect_keyword_is(Keyword::SELECT)?;
            from_transformations = parse_select_items_for_data_load(parser)?;

            parser.expect_keyword_is(Keyword::FROM)?;
            from_stage = Some(parse_snowflake_stage_name(parser)?);
            stage_params = parse_stage_params(parser)?;

            from_stage_alias = parser
                .maybe_parse_table_alias()?
                .map(|table_alias| table_alias.name);
            parser.expect_token(&Token::RParen)?;
        }
        Token::LParen if kind == CopyIntoSnowflakeKind::Location => {
            from_query = Some(parser.parse_query()?);
            parser.expect_token(&Token::RParen)?;
        }
        _ => {
            parser.prev_token();
            from_stage = Some(parse_snowflake_stage_name(parser)?);
            stage_params = parse_stage_params(parser)?;

            from_stage_alias = if parser.parse_keyword(Keyword::AS) {
                Some(match parser.next_token().token {
                    Token::Word(w) => Ok(Ident::new(w.value)),
                    _ => parser.expected("stage alias", parser.peek_token()),
                }?)
            } else {
                None
            };
        }
    }

    loop {
        if parser.parse_keyword(Keyword::FILE_FORMAT) {
            parser.expect_token(&Token::Eq)?;
            file_format = parse_parentheses_options(parser)?;
        } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            partition = Some(Box::new(parser.parse_expr()?))
        } else if parser.parse_keyword(Keyword::FILES) {
            parser.expect_token(&Token::Eq)?;
            parser.expect_token(&Token::LParen)?;
            let mut continue_loop = true;
            while continue_loop {
                continue_loop = false;
                let next_token = parser.next_token();
                match next_token.token {
                    Token::SingleQuotedString(s) => files.push(s),
                    _ => parser.expected("file token", next_token)?,
                };
                if parser.next_token().token.eq(&Token::Comma) {
                    continue_loop = true;
                } else {
                    parser.prev_token();
                }
            }
            parser.expect_token(&Token::RParen)?;
        } else if parser.parse_keyword(Keyword::PATTERN) {
            parser.expect_token(&Token::Eq)?;
            let next_token = parser.next_token();
            pattern = Some(match next_token.token {
                Token::SingleQuotedString(s) => s,
                _ => parser.expected("pattern", next_token)?,
            });
        } else if parser.parse_keyword(Keyword::VALIDATION_MODE) {
            parser.expect_token(&Token::Eq)?;
            validation_mode = Some(parser.next_token().token.to_string());
        } else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
            parser.expect_token(&Token::Eq)?;
            copy_options = parse_parentheses_options(parser)?;
        } else {
            match parser.next_token().token {
                Token::SemiColon | Token::EOF => break,
                Token::Comma => continue,
                Token::Word(key) => copy_options.push(parse_option(parser, key)?),
                _ => {
                    return parser.expected("another copy option, ; or EOF", parser.peek_token())
                }
            }
        }
    }

    Ok(Statement::CopyIntoSnowflake {
        kind,
        into,
        into_columns,
        from_obj: from_stage,
        from_obj_alias: from_stage_alias,
        stage_params,
        from_transformations,
        from_query,
        files: if files.is_empty() { None } else { Some(files) },
        pattern,
        file_format: KeyValueOptions {
            options: file_format,
        },
        copy_options: KeyValueOptions {
            options: copy_options,
        },
        validation_mode,
        partition,
    })
}

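/// Parses the projection of the inner `SELECT` used when loading data with transformations,
/// e.g. the `$1, $2:name` items in `COPY INTO t FROM (SELECT $1, $2:name FROM @stage)`.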
fn parse_select_items_for_data_load(
    parser: &mut Parser,
) -> Result<Option<Vec<StageLoadSelectItemKind>>, ParserError> {
    let mut select_items: Vec<StageLoadSelectItemKind> = vec![];
    loop {
        match parser.maybe_parse(parse_select_item_for_data_load)? {
            Some(item) => select_items.push(StageLoadSelectItemKind::StageLoadSelectItem(item)),
            None => select_items.push(StageLoadSelectItemKind::SelectItem(
                parser.parse_select_item()?,
            )),
        }
        if matches!(parser.peek_token_ref().token, Token::Comma) {
            parser.advance_token();
        } else {
            break;
        }
    }
    Ok(Some(select_items))
}

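/// Parses a single staged-file column reference such as `$1`, `t.$1` or `$1:name`,
/// optionally followed by `AS <alias>`.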
fn parse_select_item_for_data_load(
    parser: &mut Parser,
) -> Result<StageLoadSelectItem, ParserError> {
    let mut alias: Option<Ident> = None;
    let mut file_col_num: i32 = 0;
    let mut element: Option<Ident> = None;
    let mut item_as: Option<Ident> = None;

    let next_token = parser.next_token();
    match next_token.token {
        Token::Placeholder(w) => {
            file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
                ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
            })?;
            Ok(())
        }
        Token::Word(w) => {
            alias = Some(Ident::new(w.value));
            Ok(())
        }
        _ => parser.expected("alias or file_col_num", next_token),
    }?;

    if alias.is_some() {
        parser.expect_token(&Token::Period)?;
        let col_num_token = parser.next_token();
        match col_num_token.token {
            Token::Placeholder(w) => {
                file_col_num = w.to_string().split_off(1).parse::<i32>().map_err(|e| {
                    ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}"))
                })?;
                Ok(())
            }
            _ => parser.expected("file_col_num", col_num_token),
        }?;
    }

    match parser.next_token().token {
        Token::Colon => {
            element = Some(Ident::new(match parser.next_token().token {
                Token::Word(w) => Ok(w.value),
                _ => parser.expected("element", parser.peek_token()),
            }?));
        }
        _ => {
            parser.prev_token();
        }
    }

    if parser.parse_keyword(Keyword::AS) {
        item_as = Some(match parser.next_token().token {
            Token::Word(w) => Ok(Ident::new(w.value)),
            _ => parser.expected("column item alias", parser.peek_token()),
        }?);
    }

    Ok(StageLoadSelectItem {
        alias,
        file_col_num,
        element,
        item_as,
    })
}

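/// Parses stage parameters: `URL`, `STORAGE_INTEGRATION`, `ENDPOINT`, `CREDENTIALS`
/// and `ENCRYPTION`, each of which is optional.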
fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
    let (mut url, mut storage_integration, mut endpoint) = (None, None, None);
    let mut encryption: KeyValueOptions = KeyValueOptions { options: vec![] };
    let mut credentials: KeyValueOptions = KeyValueOptions { options: vec![] };

    if parser.parse_keyword(Keyword::URL) {
        parser.expect_token(&Token::Eq)?;
        url = Some(match parser.next_token().token {
            Token::SingleQuotedString(word) => Ok(word),
            _ => parser.expected("a URL statement", parser.peek_token()),
        }?)
    }

    if parser.parse_keyword(Keyword::STORAGE_INTEGRATION) {
        parser.expect_token(&Token::Eq)?;
        storage_integration = Some(parser.next_token().token.to_string());
    }

    if parser.parse_keyword(Keyword::ENDPOINT) {
        parser.expect_token(&Token::Eq)?;
        endpoint = Some(match parser.next_token().token {
            Token::SingleQuotedString(word) => Ok(word),
            _ => parser.expected("an endpoint statement", parser.peek_token()),
        }?)
    }

    if parser.parse_keyword(Keyword::CREDENTIALS) {
        parser.expect_token(&Token::Eq)?;
        credentials = KeyValueOptions {
            options: parse_parentheses_options(parser)?,
        };
    }

    if parser.parse_keyword(Keyword::ENCRYPTION) {
        parser.expect_token(&Token::Eq)?;
        encryption = KeyValueOptions {
            options: parse_parentheses_options(parser)?,
        };
    }

    Ok(StageParamsObject {
        url,
        encryption,
        endpoint,
        storage_integration,
        credentials,
    })
}

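/// Parses a list of `ALTER SESSION` options. When `set` is true each option is parsed as
/// `<name> = <value>`; otherwise (the `UNSET` form) only the option names are expected.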
fn parse_session_options(
    parser: &mut Parser,
    set: bool,
) -> Result<Vec<KeyValueOption>, ParserError> {
    let mut options: Vec<KeyValueOption> = Vec::new();
    let empty = String::new;
    loop {
        let next_token = parser.peek_token();
        match next_token.token {
            Token::SemiColon | Token::EOF => break,
            Token::Comma => {
                parser.advance_token();
                continue;
            }
            Token::Word(key) => {
                parser.advance_token();
                if set {
                    let option = parse_option(parser, key)?;
                    options.push(option);
                } else {
                    options.push(KeyValueOption {
                        option_name: key.value,
                        option_type: KeyValueOptionType::STRING,
                        value: empty(),
                    });
                }
            }
            _ => {
                return parser.expected("another option or end of statement", next_token);
            }
        }
    }
    if options.is_empty() {
        Err(ParserError::ParserError(
            "expected at least one option".to_string(),
        ))
    } else {
        Ok(options)
    }
}

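/// Parses a parenthesized list of key-value options,
/// e.g. `(TYPE='CSV' COMPRESSION=AUTO)`.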
fn parse_parentheses_options(parser: &mut Parser) -> Result<Vec<KeyValueOption>, ParserError> {
    let mut options: Vec<KeyValueOption> = Vec::new();
    parser.expect_token(&Token::LParen)?;
    loop {
        match parser.next_token().token {
            Token::RParen => break,
            Token::Comma => continue,
            Token::Word(key) => options.push(parse_option(parser, key)?),
            _ => return parser.expected("another option or ')'", parser.peek_token()),
        };
    }
    Ok(options)
}

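/// Parses a single `<key> = <value>` option, classifying the value as a boolean,
/// string, enum-like word, or number.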
fn parse_option(parser: &mut Parser, key: Word) -> Result<KeyValueOption, ParserError> {
    parser.expect_token(&Token::Eq)?;
    if parser.parse_keyword(Keyword::TRUE) {
        Ok(KeyValueOption {
            option_name: key.value,
            option_type: KeyValueOptionType::BOOLEAN,
            value: "TRUE".to_string(),
        })
    } else if parser.parse_keyword(Keyword::FALSE) {
        Ok(KeyValueOption {
            option_name: key.value,
            option_type: KeyValueOptionType::BOOLEAN,
            value: "FALSE".to_string(),
        })
    } else {
        match parser.next_token().token {
            Token::SingleQuotedString(value) => Ok(KeyValueOption {
                option_name: key.value,
                option_type: KeyValueOptionType::STRING,
                value,
            }),
            Token::Word(word) => Ok(KeyValueOption {
                option_name: key.value,
                option_type: KeyValueOptionType::ENUM,
                value: word.value,
            }),
            Token::Number(n, _) => Ok(KeyValueOption {
                option_name: key.value,
                option_type: KeyValueOptionType::NUMBER,
                value: n,
            }),
            _ => parser.expected("option value", parser.peek_token()),
        }
    }
}

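/// Parses the arguments of an `IDENTITY`/`AUTOINCREMENT` column property:
/// an optional `(seed, increment)` or `START <num> INCREMENT <num>` clause,
/// followed by an optional `ORDER`/`NOORDER` keyword.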
fn parse_identity_property(parser: &mut Parser) -> Result<IdentityProperty, ParserError> {
    let parameters = if parser.consume_token(&Token::LParen) {
        let seed = parser.parse_number()?;
        parser.expect_token(&Token::Comma)?;
        let increment = parser.parse_number()?;
        parser.expect_token(&Token::RParen)?;

        Some(IdentityPropertyFormatKind::FunctionCall(
            IdentityParameters { seed, increment },
        ))
    } else if parser.parse_keyword(Keyword::START) {
        let seed = parser.parse_number()?;
        parser.expect_keyword_is(Keyword::INCREMENT)?;
        let increment = parser.parse_number()?;

        Some(IdentityPropertyFormatKind::StartAndIncrement(
            IdentityParameters { seed, increment },
        ))
    } else {
        None
    };
    let order = match parser.parse_one_of_keywords(&[Keyword::ORDER, Keyword::NOORDER]) {
        Some(Keyword::ORDER) => Some(IdentityPropertyOrder::Order),
        Some(Keyword::NOORDER) => Some(IdentityPropertyOrder::NoOrder),
        _ => None,
    };
    Ok(IdentityProperty { parameters, order })
}

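/// Parses a `MASKING POLICY` / `PROJECTION POLICY` column property:
/// the policy name and an optional `USING (col, ...)` clause.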
fn parse_column_policy_property(
    parser: &mut Parser,
    with: bool,
) -> Result<ColumnPolicyProperty, ParserError> {
    let policy_name = parser.parse_identifier()?;
    let using_columns = if parser.parse_keyword(Keyword::USING) {
        parser.expect_token(&Token::LParen)?;
        let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
        parser.expect_token(&Token::RParen)?;
        Some(columns)
    } else {
        None
    };

    Ok(ColumnPolicyProperty {
        with,
        policy_name,
        using_columns,
    })
}

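/// Parses the parenthesized, comma-separated tag list of a column `TAG (...)` option.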
fn parse_column_tags(parser: &mut Parser, with: bool) -> Result<TagsColumnOption, ParserError> {
    parser.expect_token(&Token::LParen)?;
    let tags = parser.parse_comma_separated(Parser::parse_tag)?;
    parser.expect_token(&Token::RParen)?;

    Ok(TagsColumnOption { with, tags })
}

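/// Parses a `SHOW OBJECTS` statement; the leading `SHOW [TERSE] OBJECTS` keywords
/// have already been consumed by the caller.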
fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result<Statement, ParserError> {
    let show_options = parser.parse_show_stmt_options()?;
    Ok(Statement::ShowObjects(ShowObjects {
        terse,
        show_options,
    }))
}