From 07c07c3c3b0392036a5f5a4cf0fda87ec63dd68a Mon Sep 17 00:00:00 2001
From: Yoav Cohen
Date: Mon, 20 Jan 2025 10:53:01 +0100
Subject: [PATCH 1/5] Parse Snowflake COPY INTO <location>

---
 src/ast/helpers/stmt_data_loading.rs |  10 +-
 src/ast/mod.rs                       |  46 +++++++-
 src/ast/spans.rs                     |  11 +-
 src/dialect/snowflake.rs             | 161 ++++++++++++++++++++------
 tests/sqlparser_snowflake.rs         | 120 ++++++++++++++++----
 5 files changed, 276 insertions(+), 72 deletions(-)

diff --git a/src/ast/helpers/stmt_data_loading.rs b/src/ast/helpers/stmt_data_loading.rs
index 42e1df06b..77de5d9ec 100644
--- a/src/ast/helpers/stmt_data_loading.rs
+++ b/src/ast/helpers/stmt_data_loading.rs
@@ -58,6 +58,7 @@ pub enum DataLoadingOptionType {
     STRING,
     BOOLEAN,
     ENUM,
+    NUMBER,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -128,12 +129,9 @@ impl fmt::Display for DataLoadingOption {
             DataLoadingOptionType::STRING => {
                 write!(f, "{}='{}'", self.option_name, self.value)?;
             }
-            DataLoadingOptionType::ENUM => {
-                // single quote is omitted
-                write!(f, "{}={}", self.option_name, self.value)?;
-            }
-            DataLoadingOptionType::BOOLEAN => {
-                // single quote is omitted
+            DataLoadingOptionType::ENUM
+            | DataLoadingOptionType::BOOLEAN
+            | DataLoadingOptionType::NUMBER => {
                 write!(f, "{}={}", self.option_name, self.value)?;
             }
         }
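The collapsed match arm means `ENUM`, `BOOLEAN`, and the new `NUMBER` option types all render unquoted, while `STRING` keeps its single quotes. A minimal sketch of the expected rendering, assuming the public `DataLoadingOption` fields used by the tests below:

```rust
use sqlparser::ast::helpers::stmt_data_loading::{DataLoadingOption, DataLoadingOptionType};

fn main() {
    // STRING values keep single quotes on output...
    let quoted = DataLoadingOption {
        option_name: "ESCAPE".to_string(),
        option_type: DataLoadingOptionType::STRING,
        value: r"\\".to_string(),
    };
    // ...while NUMBER (like ENUM and BOOLEAN) prints bare.
    let unquoted = DataLoadingOption {
        option_name: "SIZE_LIMIT".to_string(),
        option_type: DataLoadingOptionType::NUMBER,
        value: "10000".to_string(),
    };
    assert_eq!(quoted.to_string(), r"ESCAPE='\\'");
    assert_eq!(unquoted.to_string(), "SIZE_LIMIT=10000");
}
```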
diff --git a/src/ast/mod.rs b/src/ast/mod.rs
index b473dc11f..c29946f21 100644
--- a/src/ast/mod.rs
+++ b/src/ast/mod.rs
@@ -2491,14 +2491,14 @@ pub enum Statement {
         values: Vec<Vec<Option<String>>>,
     },
     /// ```sql
-    /// COPY INTO
+    /// COPY INTO <table>
     /// ```
     /// See <https://docs.snowflake.com/en/sql-reference/sql/copy-into-table>
     /// Copy Into syntax available for Snowflake is different from the one implemented in
     /// Postgres. Although they share a common prefix, it is reasonable to implement them
     /// in different enums. This can be refactored later once custom dialects
     /// are allowed to have custom Statements.
-    CopyIntoSnowflake {
+    CopyIntoSnowflakeTable {
         into: ObjectName,
         from_stage: ObjectName,
         from_stage_alias: Option<Ident>,
         stage_params: StageParamsObject,
         from_transformations: Option<Vec<StageLoadSelectItem>>,
         files: Option<Vec<String>>,
         pattern: Option<String>,
         file_format: DataLoadingOptions,
         copy_options: DataLoadingOptions,
         validation_mode: Option<String>,
     },
     /// ```sql
+    /// COPY INTO <location>
+    /// ```
+    /// See <https://docs.snowflake.com/en/sql-reference/sql/copy-into-location>
+    CopyIntoSnowflakeLocation {
+        into: ObjectName,
+        from_table: Option<ObjectName>,
+        from_query: Option<Box<Query>>,
+        stage_params: StageParamsObject,
+        partition: Option<Expr>,
+        file_format: DataLoadingOptions,
+        copy_options: DataLoadingOptions,
+    },
+    /// ```sql
     /// CLOSE
     /// ```
     /// Closes the portal underlying an open cursor.
@@ -4981,7 +4994,7 @@ impl fmt::Display for Statement {
                 }
                 Ok(())
             }
-            Statement::CopyIntoSnowflake {
+            Statement::CopyIntoSnowflakeTable {
                 into,
                 from_stage,
                 from_stage_alias,
@@ -5039,6 +5052,33 @@ impl fmt::Display for Statement {
                 }
                 Ok(())
             }
+            Statement::CopyIntoSnowflakeLocation {
+                into,
+                from_table,
+                from_query,
+                stage_params,
+                partition,
+                file_format,
+                copy_options,
+            } => {
+                write!(f, "COPY INTO {into} FROM")?;
+                if let Some(from_table) = from_table {
+                    write!(f, " {from_table}")?;
+                } else if let Some(from_query) = from_query {
+                    write!(f, " ({from_query})")?;
+                }
+                write!(f, "{stage_params}")?;
+                if let Some(partition) = partition {
+                    write!(f, " PARTITION BY {partition}")?;
+                }
+                if !file_format.options.is_empty() {
+                    write!(f, " FILE_FORMAT=({})", file_format)?;
+                }
+                if !copy_options.options.is_empty() {
+                    write!(f, " {}", copy_options)?;
+                }
+                Ok(())
+            }
             Statement::CreateType {
                 name,
                 representation,
diff --git a/src/ast/spans.rs b/src/ast/spans.rs
index 5316bfbda..a3acc969b 100644
--- a/src/ast/spans.rs
+++ b/src/ast/spans.rs
@@ -330,7 +330,7 @@ impl Spanned for Statement {
                 legacy_options: _,
                 values: _,
             } => source.span(),
-            Statement::CopyIntoSnowflake {
+            Statement::CopyIntoSnowflakeTable {
                 into: _,
                 from_stage: _,
                 from_stage_alias: _,
@@ -342,6 +342,15 @@ impl Spanned for Statement {
                 copy_options: _,
                 validation_mode: _,
             } => Span::empty(),
+            Statement::CopyIntoSnowflakeLocation {
+                into: _,
+                from_table: _,
+                from_query: _,
+                stage_params: _,
+                partition: _,
+                file_format: _,
+                copy_options: _,
+            } => Span::empty(),
             Statement::Close { cursor } => match cursor {
                 CloseCursor::All => Span::empty(),
                 CloseCursor::Specific { name } => name.span,
diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs
index eb8ea4de9..64d2f3c9a 100644
--- a/src/dialect/snowflake.rs
+++ b/src/dialect/snowflake.rs
@@ -30,7 +30,7 @@ use crate::ast::{
 use crate::dialect::{Dialect, Precedence};
 use crate::keywords::Keyword;
 use crate::parser::{Parser, ParserError};
-use crate::tokenizer::Token;
+use crate::tokenizer::{Token, Word};
 #[cfg(not(feature = "std"))]
 use alloc::string::String;
 #[cfg(not(feature = "std"))]
@@ -665,7 +665,82 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result<ObjectName, Pa
     }
 }
 
+/// Parses a `COPY INTO` statement, which is either `COPY INTO <table>`
+/// or `COPY INTO <location>`; the two have different syntax.
 pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
+    if is_copy_into_location(parser) {
+        parse_copy_into_location(parser)
+    } else {
+        parse_copy_into_table(parser)
+    }
+}
+
+/// Returns true if the `COPY INTO` statement is a `COPY INTO <location>`
+/// by peeking at the prefix of the target's object name and trying to
+/// determine if it's a Snowflake stage or a table.
+fn is_copy_into_location(parser: &mut Parser) -> bool {
+    match parser.peek_token().token {
+        // Indicates an internal stage
+        Token::AtSign => true,
+        // Indicates an external stage, i.e. s3://, gcs:// or azure://
+        Token::SingleQuotedString(s) if s.contains("://") => true,
+        _ => false,
+    }
+}
+
+fn parse_copy_into_location(parser: &mut Parser) -> Result<Statement, ParserError> {
+    let into: ObjectName = parse_snowflake_stage_name(parser)?;
+    parser.expect_keyword_is(Keyword::FROM)?;
+    // Two options: `FROM (query)` or `FROM <table>`
+    let (from_table, from_query) = match parser.next_token().token {
+        Token::LParen => {
+            let query = parser.parse_query()?;
+            parser.expect_token(&Token::RParen)?;
+            (None, Some(query))
+        }
+        _ => {
+            parser.prev_token();
+            let table = parser.parse_object_name(true)?;
+            (Some(table), None)
+        }
+    };
+    let stage_params = parse_stage_params(parser)?;
+
+    // The order of the next options is not defined, so we need to loop
+    // until we reach the end of the statement
+    let mut partition = None;
+    let mut file_format = Vec::new();
+    let mut options: Vec<DataLoadingOption> = Vec::new();
+    loop {
+        if parser.parse_keyword(Keyword::FILE_FORMAT) {
+            parser.expect_token(&Token::Eq)?;
+            file_format = parse_parentheses_options(parser)?;
+        } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
+            partition = Some(parser.parse_expr()?)
+        } else {
+            match parser.next_token().token {
+                Token::SemiColon | Token::EOF => break,
+                Token::Comma => continue,
+                Token::Word(key) => options.push(parse_copy_option(parser, key)?),
+                _ => return parser.expected("another option, ; or EOF", parser.peek_token()),
+            }
+        }
+    }
+
+    Ok(Statement::CopyIntoSnowflakeLocation {
+        into,
+        from_table,
+        from_query,
+        stage_params,
+        partition,
+        file_format: DataLoadingOptions {
+            options: file_format,
+        },
+        copy_options: DataLoadingOptions { options },
+    })
+}
+
+fn parse_copy_into_table(parser: &mut Parser) -> Result<Statement, ParserError> {
     let into: ObjectName = parse_snowflake_stage_name(parser)?;
     let mut files: Vec<String> = vec![];
     let mut from_transformations: Option<Vec<StageLoadSelectItem>> = None;
@@ -766,7 +841,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
         validation_mode = Some(parser.next_token().token.to_string());
     }
 
-    Ok(Statement::CopyIntoSnowflake {
+    Ok(Statement::CopyIntoSnowflakeTable {
         into,
         from_stage,
         from_stage_alias,
@@ -930,55 +1005,55 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserE
 fn parse_parentheses_options(parser: &mut Parser) -> Result<Vec<DataLoadingOption>, ParserError> {
     let mut options: Vec<DataLoadingOption> = Vec::new();
-
     parser.expect_token(&Token::LParen)?;
     loop {
         match parser.next_token().token {
             Token::RParen => break,
-            Token::Word(key) => {
-                parser.expect_token(&Token::Eq)?;
-                if parser.parse_keyword(Keyword::TRUE) {
-                    options.push(DataLoadingOption {
-                        option_name: key.value,
-                        option_type: DataLoadingOptionType::BOOLEAN,
-                        value: "TRUE".to_string(),
-                    });
-                    Ok(())
-                } else if parser.parse_keyword(Keyword::FALSE) {
-                    options.push(DataLoadingOption {
-                        option_name: key.value,
-                        option_type: DataLoadingOptionType::BOOLEAN,
-                        value: "FALSE".to_string(),
-                    });
-                    Ok(())
-                } else {
-                    match parser.next_token().token {
-                        Token::SingleQuotedString(value) => {
-                            options.push(DataLoadingOption {
-                                option_name: key.value,
-                                option_type: DataLoadingOptionType::STRING,
-                                value,
-                            });
-                            Ok(())
-                        }
-                        Token::Word(word) => {
-                            options.push(DataLoadingOption {
-                                option_name: key.value,
-                                option_type: DataLoadingOptionType::ENUM,
-                                value: word.value,
-                            });
-                            Ok(())
-                        }
-                        _ => parser.expected("expected option value", parser.peek_token()),
-                    }
-                }
-            }
-            _ => parser.expected("another option or ')'", parser.peek_token()),
-        }?;
+            Token::Comma => continue,
+            Token::Word(key) => options.push(parse_copy_option(parser, key)?),
+            _ => return parser.expected("another option or ')'", parser.peek_token()),
+        };
     }
     Ok(options)
 }
 
+/// Parses a `KEY = VALUE` construct based on the specified key
+fn parse_copy_option(parser: &mut Parser, key: Word) -> Result<DataLoadingOption, ParserError> {
+    parser.expect_token(&Token::Eq)?;
+    if parser.parse_keyword(Keyword::TRUE) {
+        Ok(DataLoadingOption {
+            option_name: key.value,
+            option_type: DataLoadingOptionType::BOOLEAN,
+            value: "TRUE".to_string(),
+        })
+    } else if parser.parse_keyword(Keyword::FALSE) {
+        Ok(DataLoadingOption {
+            option_name: key.value,
+            option_type: DataLoadingOptionType::BOOLEAN,
+            value: "FALSE".to_string(),
+        })
+    } else {
+        match parser.next_token().token {
+            Token::SingleQuotedString(value) => Ok(DataLoadingOption {
+                option_name: key.value,
+                option_type: DataLoadingOptionType::STRING,
+                value,
+            }),
+            Token::Word(word) => Ok(DataLoadingOption {
+                option_name: key.value,
+                option_type: DataLoadingOptionType::ENUM,
+                value: word.value,
+            }),
+            Token::Number(n, _) => Ok(DataLoadingOption {
+                option_name: key.value,
+                option_type: DataLoadingOptionType::NUMBER,
+                value: n,
+            }),
+            _ => parser.expected("expected option value", parser.peek_token()),
+        }
+    }
+}
+
 /// Parsing a property of identity or autoincrement column option
 /// Syntax:
 /// ```sql
diff --git a/tests/sqlparser_snowflake.rs b/tests/sqlparser_snowflake.rs
index 2b2350936..238e4e7e8 100644
--- a/tests/sqlparser_snowflake.rs
+++ b/tests/sqlparser_snowflake.rs
@@ -2027,7 +2027,7 @@ fn test_copy_into() {
         "FROM 'gcs://mybucket/./../a.csv'"
     );
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake {
+        Statement::CopyIntoSnowflakeTable {
             into,
             from_stage,
             files,
@@ -2050,6 +2050,28 @@ fn test_copy_into() {
         _ => unreachable!(),
     };
     assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
+
+    let sql = concat!(
+        "COPY INTO 's3://a/b/c/data.parquet' ",
+        "FROM (SELECT * FROM tbl)"
+    );
+    match snowflake().verified_stmt(sql) {
+        Statement::CopyIntoSnowflakeLocation {
+            into,
+            from_table,
+            from_query,
+            ..
+        } => {
+            assert_eq!(
+                into,
+                ObjectName::from(vec![Ident::with_quote('\'', "s3://a/b/c/data.parquet")])
+            );
+            assert!(from_query.is_some());
+            assert!(from_table.is_none());
+        }
+        _ => unreachable!(),
+    };
+    assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
 }
 
 #[test]
@@ -2064,7 +2086,7 @@ fn test_copy_into_with_stage_params() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake {
+        Statement::CopyIntoSnowflakeTable {
             from_stage,
             stage_params,
             ..
@@ -2124,7 +2146,7 @@ fn test_copy_into_with_stage_params() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake {
+        Statement::CopyIntoSnowflakeTable {
             from_stage,
             stage_params,
             ..
@@ -2150,7 +2172,7 @@ fn test_copy_into_with_files_and_pattern_and_verification() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake {
+        Statement::CopyIntoSnowflakeTable {
             files,
             pattern,
             validation_mode,
@@ -2178,7 +2200,7 @@ fn test_copy_into_with_transformations() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake {
+        Statement::CopyIntoSnowflakeTable {
             from_stage,
             from_transformations,
             ..
@@ -2231,7 +2253,7 @@ fn test_copy_into_file_format() {
     );
 
     match snowflake_without_unescape().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake { file_format, .. } => {
+        Statement::CopyIntoSnowflakeTable { file_format, .. } => {
             assert!(file_format.options.contains(&DataLoadingOption {
                 option_name: "COMPRESSION".to_string(),
                 option_type: DataLoadingOptionType::ENUM,
@@ -2254,6 +2276,41 @@ fn test_copy_into_file_format() {
         snowflake_without_unescape().verified_stmt(sql).to_string(),
         sql
     );
+
+    // Test commas in file format
+    let sql = concat!(
+        "COPY INTO my_company.emp_basic ",
+        "FROM 'gcs://mybucket/./../a.csv' ",
+        "FILES = ('file1.json', 'file2.json') ",
+        "PATTERN = '.*employees0[1-5].csv.gz' ",
+        r#"FILE_FORMAT=(COMPRESSION=AUTO, BINARY_FORMAT=HEX, ESCAPE='\\')"#
+    );
+
+    match snowflake_without_unescape()
+        .parse_sql_statements(sql)
+        .unwrap()
+        .first()
+        .unwrap()
+    {
+        Statement::CopyIntoSnowflakeTable { file_format, .. } => {
+            assert!(file_format.options.contains(&DataLoadingOption {
+                option_name: "COMPRESSION".to_string(),
+                option_type: DataLoadingOptionType::ENUM,
+                value: "AUTO".to_string()
+            }));
+            assert!(file_format.options.contains(&DataLoadingOption {
+                option_name: "BINARY_FORMAT".to_string(),
+                option_type: DataLoadingOptionType::ENUM,
+                value: "HEX".to_string()
+            }));
+            assert!(file_format.options.contains(&DataLoadingOption {
+                option_name: "ESCAPE".to_string(),
+                option_type: DataLoadingOptionType::STRING,
+                value: r#"\\"#.to_string()
+            }));
+        }
+        _ => unreachable!(),
+    }
 }
 
 #[test]
@@ -2267,7 +2324,7 @@ fn test_copy_into_copy_options() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake { copy_options, .. } => {
+        Statement::CopyIntoSnowflakeTable { copy_options, .. } => {
             assert!(copy_options.options.contains(&DataLoadingOption {
                 option_name: "ON_ERROR".to_string(),
                 option_type: DataLoadingOptionType::ENUM,
@@ -2285,14 +2342,7 @@ fn test_copy_into_copy_options() {
 }
 
 #[test]
-fn test_snowflake_stage_object_names() {
-    let allowed_formatted_names = [
-        "my_company.emp_basic",
-        "@namespace.%table_name",
-        "@namespace.%table_name/path",
-        "@namespace.stage_name/path",
-        "@~/path",
-    ];
+fn test_snowflake_stage_object_names_into_location() {
     let mut allowed_object_names = [
         ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")]),
         ObjectName::from(vec![Ident::new("@namespace"), Ident::new("%table_name")]),
@@ -2307,7 +2357,39 @@ fn test_snowflake_stage_object_names() {
         ObjectName::from(vec![Ident::new("@~/path")]),
     ];
 
-    for it in allowed_formatted_names
+    let allowed_names_into_location = [
+        "@namespace.%table_name",
+        "@namespace.%table_name/path",
+        "@namespace.stage_name/path",
+        "@~/path",
+    ];
+    for it in allowed_names_into_location
+        .iter()
+        .zip(allowed_object_names.iter_mut())
+    {
+        let (formatted_name, object_name) = it;
+        let sql = format!(
+            "COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
+            formatted_name
+        );
+        match snowflake().verified_stmt(&sql) {
+            Statement::CopyIntoSnowflakeLocation { into, .. } => {
+                assert_eq!(into.0, object_name.0)
+            }
+            _ => unreachable!(),
+        }
+    }
+}
+
+#[test]
+fn test_snowflake_stage_object_names_into_table() {
+    let mut allowed_object_names = [
+        ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")]),
+        ObjectName::from(vec![Ident::new("emp_basic")]),
+    ];
+
+    let allowed_names_into_table = ["my_company.emp_basic", "emp_basic"];
+    for it in allowed_names_into_table
         .iter()
         .zip(allowed_object_names.iter_mut())
     {
@@ -2317,7 +2399,7 @@ fn test_snowflake_stage_object_names() {
             formatted_name
         );
         match snowflake().verified_stmt(&sql) {
-            Statement::CopyIntoSnowflake { into, .. } => {
+            Statement::CopyIntoSnowflakeTable { into, .. } => {
                 assert_eq!(into.0, object_name.0)
             }
             _ => unreachable!(),
@@ -2330,7 +2412,7 @@ fn test_snowflake_copy_into() {
     let sql = "COPY INTO a.b FROM @namespace.stage_name";
     assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake {
+        Statement::CopyIntoSnowflakeTable {
             into, from_stage, ..
         } => {
             assert_eq!(
@@ -2351,7 +2433,7 @@ fn test_snowflake_copy_into_stage_name_ends_with_parens() {
     let sql = "COPY INTO SCHEMA.SOME_MONITORING_SYSTEM FROM (SELECT t.$1:st AS st FROM @schema.general_finished)";
     assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflake {
+        Statement::CopyIntoSnowflakeTable {
             into, from_stage, ..
         } => {
            assert_eq!(
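With patch 1 applied, the dialect accepts unloads as well as loads. A short sketch of what the (pre-refactor) `CopyIntoSnowflakeLocation` variant captures for an internal-stage target; the stage and table names are illustrative:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() -> Result<(), sqlparser::parser::ParserError> {
    // `@my_stage/out` starts with Token::AtSign, so parse_copy_into takes
    // the COPY INTO <location> path rather than the table path.
    let sql = "COPY INTO @my_stage/out FROM my_table";
    match Parser::parse_sql(&SnowflakeDialect {}, sql)?.remove(0) {
        Statement::CopyIntoSnowflakeLocation { into, from_table, .. } => {
            assert_eq!(into.to_string(), "@my_stage/out");
            assert!(from_table.is_some());
        }
        _ => unreachable!("expected the location variant"),
    }
    Ok(())
}
```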
} => {
                 assert_eq!(into.0, object_name.0)
             }
             _ => unreachable!(),

From 28f706b141a2eb6ea2aec4b9059de123daf3e996 Mon Sep 17 00:00:00 2001
From: Yoav Cohen
Date: Mon, 20 Jan 2025 10:57:38 +0100
Subject: [PATCH 2/5] Docs fix

---
 src/ast/spans.rs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/ast/spans.rs b/src/ast/spans.rs
index a3acc969b..4e432adcd 100644
--- a/src/ast/spans.rs
+++ b/src/ast/spans.rs
@@ -211,7 +211,8 @@ impl Spanned for Values {
 /// # partial span
 ///
 /// Missing spans:
-/// - [Statement::CopyIntoSnowflake]
+/// - [Statement::CopyIntoSnowflakeTable]
+/// - [Statement::CopyIntoSnowflakeLocation]
 /// - [Statement::CreateSecret]
 /// - [Statement::CreateRole]
 /// - [Statement::AlterRole]

From 784e94c246c54c1ac39c824815277f9908d23c31 Mon Sep 17 00:00:00 2001
From: Yoav Cohen
Date: Fri, 24 Jan 2025 10:26:32 +0100
Subject: [PATCH 3/5] Refactor to use the existing struct for Snowflake COPY
 INTO

---
 src/ast/mod.rs               | 142 ++++++++++------------
 src/ast/spans.rs             |  17 +--
 src/dialect/snowflake.rs     | 223 +++++++++++++++--------------------
 tests/sqlparser_snowflake.rs | 124 ++++++++++++-------
 4 files changed, 245 insertions(+), 261 deletions(-)

diff --git a/src/ast/mod.rs b/src/ast/mod.rs
index c29946f21..a317f3307 100644
--- a/src/ast/mod.rs
+++ b/src/ast/mod.rs
@@ -2491,37 +2491,30 @@ pub enum Statement {
         values: Vec<Vec<Option<String>>>,
     },
     /// ```sql
-    /// COPY INTO <table>
+    /// COPY INTO <table> | <location>
     /// ```
-    /// See <https://docs.snowflake.com/en/sql-reference/sql/copy-into-table>
+    /// See:
+    /// <https://docs.snowflake.com/en/sql-reference/sql/copy-into-table>
+    /// <https://docs.snowflake.com/en/sql-reference/sql/copy-into-location>
+    ///
     /// Copy Into syntax available for Snowflake is different from the one implemented in
     /// Postgres. Although they share a common prefix, it is reasonable to implement them
     /// in different enums. This can be refactored later once custom dialects
     /// are allowed to have custom Statements.
-    CopyIntoSnowflakeTable {
+    CopyIntoSnowflake {
+        kind: CopyIntoSnowflakeKind,
         into: ObjectName,
-        from_stage: ObjectName,
-        from_stage_alias: Option<Ident>,
+        from_obj: Option<ObjectName>,
+        from_obj_alias: Option<Ident>,
         stage_params: StageParamsObject,
         from_transformations: Option<Vec<StageLoadSelectItem>>,
+        from_query: Option<Box<Query>>,
         files: Option<Vec<String>>,
         pattern: Option<String>,
         file_format: DataLoadingOptions,
         copy_options: DataLoadingOptions,
         validation_mode: Option<String>,
-    },
-    /// ```sql
-    /// COPY INTO <location>
-    /// ```
-    /// See <https://docs.snowflake.com/en/sql-reference/sql/copy-into-location>
-    CopyIntoSnowflakeLocation {
-        into: ObjectName,
-        from_table: Option<ObjectName>,
-        from_query: Option<Box<Query>>,
-        stage_params: StageParamsObject,
-        partition: Option<Expr>,
-        file_format: DataLoadingOptions,
-        copy_options: DataLoadingOptions,
+        partition: Option<Box<Expr>>,
     },
     /// ```sql
     /// CLOSE
@@ -4994,80 +4987,54 @@ impl fmt::Display for Statement {
                 }
                 Ok(())
             }
-            Statement::CopyIntoSnowflakeTable {
+            Statement::CopyIntoSnowflake {
+                kind,
                 into,
-                from_stage,
-                from_stage_alias,
+                from_obj,
+                from_obj_alias,
                 stage_params,
                 from_transformations,
+                from_query,
                 files,
                 pattern,
                 file_format,
                 copy_options,
                 validation_mode,
+                partition,
             } => {
                 write!(f, "COPY INTO {}", into)?;
-                if from_transformations.is_none() {
-                    // Standard data load
-                    write!(f, " FROM {}{}", from_stage, stage_params)?;
-                    if from_stage_alias.as_ref().is_some() {
-                        write!(f, " AS {}", from_stage_alias.as_ref().unwrap())?;
-                    }
-                } else {
+                if let Some(from_transformations) = from_transformations {
                     // Data load with transformation
-                    write!(
-                        f,
-                        " FROM (SELECT {} FROM {}{}",
-                        display_separated(from_transformations.as_ref().unwrap(), ", "),
-                        from_stage,
-                        stage_params,
-                    )?;
-                    if from_stage_alias.as_ref().is_some() {
-                        write!(f, " AS {}", from_stage_alias.as_ref().unwrap())?;
+                    if let Some(from_stage) = from_obj {
+                        write!(
+                            f,
+                            " FROM (SELECT {} FROM {}{}",
+                            display_separated(from_transformations, ", "),
+                            from_stage,
+                            stage_params
+                        )?;
+                    }
+                    if let Some(from_obj_alias) = from_obj_alias {
+                        write!(f, " AS {}", from_obj_alias)?;
                     }
                     write!(f, ")")?;
+                } else if let Some(from_obj) = from_obj {
+                    // Standard data load
+                    write!(f, " FROM {}{}", from_obj, stage_params)?;
+                    if let Some(from_obj_alias) = from_obj_alias {
+                        write!(f, " AS {from_obj_alias}")?;
+                    }
+                } else if let Some(from_query) = from_query {
+                    // Data unload from query
+                    write!(f, " FROM ({from_query})")?;
                 }
-                if files.is_some() {
-                    write!(
-                        f,
-                        " FILES = ('{}')",
-                        display_separated(files.as_ref().unwrap(), "', '")
-                    )?;
-                }
-                if pattern.is_some() {
-                    write!(f, " PATTERN = '{}'", pattern.as_ref().unwrap())?;
-                }
-                if validation_mode.is_some() {
-                    write!(
-                        f,
-                        " VALIDATION_MODE = {}",
-                        validation_mode.as_ref().unwrap()
-                    )?;
+
+                if let Some(files) = files {
+                    write!(f, " FILES = ('{}')", display_separated(files, "', '"))?;
                 }
-                Ok(())
-            }
-            Statement::CopyIntoSnowflakeLocation {
-                into,
-                from_table,
-                from_query,
-                stage_params,
-                partition,
-                file_format,
-                copy_options,
-            } => {
-                write!(f, "COPY INTO {into} FROM")?;
-                if let Some(from_table) = from_table {
-                    write!(f, " {from_table}")?;
-                } else if let Some(from_query) = from_query {
-                    write!(f, " ({from_query})")?;
+                if let Some(pattern) = pattern {
+                    write!(f, " PATTERN = '{}'", pattern)?;
                 }
-                write!(f, "{stage_params}")?;
                 if let Some(partition) = partition {
                     write!(f, " PARTITION BY {partition}")?;
                 }
@@ -5075,7 +5042,15 @@ impl fmt::Display for Statement {
                 if !file_format.options.is_empty() {
                     write!(f, " FILE_FORMAT=({})", file_format)?;
                 }
                 if !copy_options.options.is_empty() {
-                    write!(f, " {}", copy_options)?;
+                    match kind {
+                        CopyIntoSnowflakeKind::Table => {
+                            write!(f, " COPY_OPTIONS=({})", copy_options)?
+                        }
+                        CopyIntoSnowflakeKind::Location => write!(f, " {copy_options}")?,
+                    }
+                }
+                if let Some(validation_mode) = validation_mode {
+                    write!(f, " VALIDATION_MODE = {}", validation_mode)?;
                 }
                 Ok(())
             }
             Statement::CreateType {
                 name,
                 representation,
@@ -8489,6 +8464,19 @@ impl Display for StorageSerializationPolicy {
     }
 }
 
+/// Variants of the Snowflake `COPY INTO` statement
+#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub enum CopyIntoSnowflakeKind {
+    /// Loads data from files to a table
+    /// See: <https://docs.snowflake.com/en/sql-reference/sql/copy-into-table>
+    Table,
+    /// Unloads data from a table or query to external files
+    /// See: <https://docs.snowflake.com/en/sql-reference/sql/copy-into-location>
+    Location,
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
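Since both directions now share `Statement::CopyIntoSnowflake`, consumers branch on the new `kind` field instead of matching two variants. A minimal sketch; the `summarize` helper is illustrative, not part of the patch:

```rust
use sqlparser::ast::{CopyIntoSnowflakeKind, Statement};

// Illustrative helper: describe a parsed COPY INTO statement in one line.
fn summarize(stmt: &Statement) -> String {
    match stmt {
        Statement::CopyIntoSnowflake { kind, into, .. } => match kind {
            CopyIntoSnowflakeKind::Table => format!("load into table {into}"),
            CopyIntoSnowflakeKind::Location => format!("unload to stage {into}"),
        },
        _ => "not a COPY INTO".to_string(),
    }
}
```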
diff --git a/src/ast/spans.rs b/src/ast/spans.rs
index 4e432adcd..1f9c2b5f9 100644
--- a/src/ast/spans.rs
+++ b/src/ast/spans.rs
@@ -211,8 +211,7 @@ impl Spanned for Values {
 /// # partial span
 ///
 /// Missing spans:
-/// - [Statement::CopyIntoSnowflakeTable]
-/// - [Statement::CopyIntoSnowflakeLocation]
+/// - [Statement::CopyIntoSnowflake]
 /// - [Statement::CreateSecret]
 /// - [Statement::CreateRole]
 /// - [Statement::AlterRole]
@@ -331,10 +330,10 @@ impl Spanned for Statement {
                 legacy_options: _,
                 values: _,
             } => source.span(),
-            Statement::CopyIntoSnowflakeTable {
+            Statement::CopyIntoSnowflake {
                 into: _,
-                from_stage: _,
-                from_stage_alias: _,
+                from_obj: _,
+                from_obj_alias: _,
                 stage_params: _,
                 from_transformations: _,
                 files: _,
@@ -342,15 +341,9 @@ impl Spanned for Statement {
                 file_format: _,
                 copy_options: _,
                 validation_mode: _,
-            } => Span::empty(),
-            Statement::CopyIntoSnowflakeLocation {
-                into: _,
-                from_table: _,
+                kind: _,
                 from_query: _,
-                stage_params: _,
                 partition: _,
-                file_format: _,
-                copy_options: _,
             } => Span::empty(),
             Statement::Close { cursor } => match cursor {
                 CloseCursor::All => Span::empty(),
                 CloseCursor::Specific { name } => name.span,
diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs
index 64d2f3c9a..19e443b40 100644
--- a/src/dialect/snowflake.rs
+++ b/src/dialect/snowflake.rs
@@ -23,9 +23,10 @@ use crate::ast::helpers::stmt_data_loading::{
     StageLoadSelectItem, StageParamsObject,
 };
 use crate::ast::{
-    ColumnOption, ColumnPolicy, ColumnPolicyProperty, Ident, IdentityParameters, IdentityProperty,
-    IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder, ObjectName,
-    RowAccessPolicy, Statement, TagsColumnOption, WrappedCollection,
+    ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident,
+    IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind,
+    IdentityPropertyOrder, ObjectName, RowAccessPolicy, Statement, TagsColumnOption,
+    WrappedCollection,
 };
 use crate::dialect::{Dialect, Precedence};
 use crate::keywords::Keyword;
@@ -668,96 +669,46 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result<ObjectName, Pa
     }
 }
 
 /// Parses a `COPY INTO` statement, which is either `COPY INTO <table>`
 /// or `COPY INTO <location>`; the two have different syntax.
 pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
-    if is_copy_into_location(parser) {
-        parse_copy_into_location(parser)
-    } else {
-        parse_copy_into_table(parser)
-    }
-}
-
-/// Returns true if the `COPY INTO` statement is a `COPY INTO <location>`
-/// by peeking at the prefix of the target's object name and trying to
-/// determine if it's a Snowflake stage or a table.
-fn is_copy_into_location(parser: &mut Parser) -> bool {
-    match parser.peek_token().token {
+    let kind = match parser.peek_token().token {
         // Indicates an internal stage
-        Token::AtSign => true,
+        Token::AtSign => CopyIntoSnowflakeKind::Location,
         // Indicates an external stage, i.e. s3://, gcs:// or azure://
-        Token::SingleQuotedString(s) if s.contains("://") => true,
-        _ => false,
-    }
-}
-
-fn parse_copy_into_location(parser: &mut Parser) -> Result<Statement, ParserError> {
-    let into: ObjectName = parse_snowflake_stage_name(parser)?;
-    parser.expect_keyword_is(Keyword::FROM)?;
-    // Two options: `FROM (query)` or `FROM <table>`
-    let (from_table, from_query) = match parser.next_token().token {
-        Token::LParen => {
-            let query = parser.parse_query()?;
-            parser.expect_token(&Token::RParen)?;
-            (None, Some(query))
-        }
-        _ => {
-            parser.prev_token();
-            let table = parser.parse_object_name(true)?;
-            (Some(table), None)
-        }
+        Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location,
+        _ => CopyIntoSnowflakeKind::Table,
     };
-    let stage_params = parse_stage_params(parser)?;
 
-    // The order of the next options is not defined, so we need to loop
-    // until we reach the end of the statement
+    let mut files: Vec<String> = vec![];
+    let mut from_transformations: Option<Vec<StageLoadSelectItem>> = None;
+    let mut from_stage_alias = None;
+    let mut from_stage = None;
+    let mut stage_params = StageParamsObject {
+        url: None,
+        encryption: DataLoadingOptions { options: vec![] },
+        endpoint: None,
+        storage_integration: None,
+        credentials: DataLoadingOptions { options: vec![] },
+    };
+    let mut from_query = None;
     let mut partition = None;
     let mut file_format = Vec::new();
-    let mut options: Vec<DataLoadingOption> = Vec::new();
-    loop {
-        if parser.parse_keyword(Keyword::FILE_FORMAT) {
-            parser.expect_token(&Token::Eq)?;
-            file_format = parse_parentheses_options(parser)?;
-        } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
-            partition = Some(parser.parse_expr()?)
-        } else {
-            match parser.next_token().token {
-                Token::SemiColon | Token::EOF => break,
-                Token::Comma => continue,
-                Token::Word(key) => options.push(parse_copy_option(parser, key)?),
-                _ => return parser.expected("another option, ; or EOF", parser.peek_token()),
-            }
-        }
-    }
-
-    Ok(Statement::CopyIntoSnowflakeLocation {
-        into,
-        from_table,
-        from_query,
-        stage_params,
-        partition,
-        file_format: DataLoadingOptions {
-            options: file_format,
-        },
-        copy_options: DataLoadingOptions { options },
-    })
-}
+    let mut pattern = None;
+    let mut validation_mode = None;
+    let mut copy_options = Vec::new();
 
-fn parse_copy_into_table(parser: &mut Parser) -> Result<Statement, ParserError> {
     let into: ObjectName = parse_snowflake_stage_name(parser)?;
-    let mut files: Vec<String> = vec![];
-    let mut from_transformations: Option<Vec<StageLoadSelectItem>> = None;
-    let from_stage_alias;
-    let from_stage: ObjectName;
-    let stage_params: StageParamsObject;
+    if kind == CopyIntoSnowflakeKind::Location {
+        stage_params = parse_stage_params(parser)?;
+    }
 
     parser.expect_keyword_is(Keyword::FROM)?;
-    // check if data load transformations are present
     match parser.next_token().token {
-        Token::LParen => {
-            // data load with transformations
+        Token::LParen if kind == CopyIntoSnowflakeKind::Table => {
+            // Data load with transformations
             parser.expect_keyword_is(Keyword::SELECT)?;
             from_transformations = parse_select_items_for_data_load(parser)?;
 
             parser.expect_keyword_is(Keyword::FROM)?;
-            from_stage = parse_snowflake_stage_name(parser)?;
+            from_stage = Some(parse_snowflake_stage_name(parser)?);
             stage_params = parse_stage_params(parser)?;
 
             // as
@@ -771,9 +722,14 @@ fn parse_copy_into_table(parser: &mut Parser) -> Result<Statement, ParserError>
             };
             parser.expect_token(&Token::RParen)?;
         }
+        Token::LParen if kind == CopyIntoSnowflakeKind::Location => {
+            // Data unload with a query
+            from_query = Some(parser.parse_query()?);
+            parser.expect_token(&Token::RParen)?;
+        }
         _ => {
             parser.prev_token();
-            from_stage = parse_snowflake_stage_name(parser)?;
+            from_stage = Some(parse_snowflake_stage_name(parser)?);
             stage_params = parse_stage_params(parser)?;
 
             // as
@@ -786,67 +742,71 @@ fn parse_copy_into_table(parser: &mut Parser) -> Result<Statement, ParserError>
                 None
             };
         }
-    };
+    }
 
-    // [ files ]
-    if parser.parse_keyword(Keyword::FILES) {
-        parser.expect_token(&Token::Eq)?;
-        parser.expect_token(&Token::LParen)?;
-        let mut continue_loop = true;
-        while continue_loop {
-            continue_loop = false;
+    loop {
+        // FILE_FORMAT
+        if parser.parse_keyword(Keyword::FILE_FORMAT) {
+            parser.expect_token(&Token::Eq)?;
+            file_format = parse_parentheses_options(parser)?;
+        // PARTITION BY
+        } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
+            partition = Some(Box::new(parser.parse_expr()?))
+        // FILES
+        } else if parser.parse_keyword(Keyword::FILES) {
+            parser.expect_token(&Token::Eq)?;
+            parser.expect_token(&Token::LParen)?;
+            let mut continue_loop = true;
+            while continue_loop {
+                continue_loop = false;
+                let next_token = parser.next_token();
+                match next_token.token {
+                    Token::SingleQuotedString(s) => files.push(s),
+                    _ => parser.expected("file token", next_token)?,
+                };
+                if parser.next_token().token.eq(&Token::Comma) {
+                    continue_loop = true;
+                } else {
+                    parser.prev_token(); // not a comma, need to go back
+                }
+            }
+            parser.expect_token(&Token::RParen)?;
+        // PATTERN
+        } else if parser.parse_keyword(Keyword::PATTERN) {
+            parser.expect_token(&Token::Eq)?;
             let next_token = parser.next_token();
-            match next_token.token {
-                Token::SingleQuotedString(s) => files.push(s),
-                _ => parser.expected("file token", next_token)?,
-            };
-            if parser.next_token().token.eq(&Token::Comma) {
-                continue_loop = true;
-            } else {
-                parser.prev_token(); // not a comma, need to go back
+            pattern = Some(match next_token.token {
+                Token::SingleQuotedString(s) => s,
+                _ => parser.expected("pattern", next_token)?,
+            });
+        // VALIDATION MODE
+        } else if parser.parse_keyword(Keyword::VALIDATION_MODE) {
+            parser.expect_token(&Token::Eq)?;
+            validation_mode = Some(parser.next_token().token.to_string());
+        // COPY OPTIONS
+        } else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
+            parser.expect_token(&Token::Eq)?;
+            copy_options = parse_parentheses_options(parser)?;
+        } else {
+            match parser.next_token().token {
+                Token::SemiColon | Token::EOF => break,
+                Token::Comma => continue,
+                // In `COPY INTO <location>` the copy options do not have a shared key
+                // like in `COPY INTO <table>`
+                Token::Word(key) => copy_options.push(parse_copy_option(parser, key)?),
+                _ => return parser.expected("another copy option, ; or EOF", parser.peek_token()),
             }
         }
-        parser.expect_token(&Token::RParen)?;
     }
 
-    // [ pattern ]
-    let mut pattern = None;
-    if parser.parse_keyword(Keyword::PATTERN) {
-        parser.expect_token(&Token::Eq)?;
-        let next_token = parser.next_token();
-        pattern = Some(match next_token.token {
-            Token::SingleQuotedString(s) => s,
-            _ => parser.expected("pattern", next_token)?,
-        });
-    }
-
-    // [ file_format ]
-    let mut file_format = Vec::new();
-    if parser.parse_keyword(Keyword::FILE_FORMAT) {
-        parser.expect_token(&Token::Eq)?;
-        file_format = parse_parentheses_options(parser)?;
-    }
-
-    // [ copy_options ]
-    let mut copy_options = Vec::new();
-    if parser.parse_keyword(Keyword::COPY_OPTIONS) {
-        parser.expect_token(&Token::Eq)?;
-        copy_options = parse_parentheses_options(parser)?;
-    }
-
-    // [ VALIDATION_MODE ]
-    let mut validation_mode = None;
-    if parser.parse_keyword(Keyword::VALIDATION_MODE) {
-        parser.expect_token(&Token::Eq)?;
-        validation_mode = Some(parser.next_token().token.to_string());
-    }
-
-    Ok(Statement::CopyIntoSnowflakeTable {
+    Ok(Statement::CopyIntoSnowflake {
+        kind,
         into,
-        from_stage,
-        from_stage_alias,
+        from_obj: from_stage,
+        from_obj_alias: from_stage_alias,
         stage_params,
         from_transformations,
+        from_query,
         files: if files.is_empty() { None } else { Some(files) },
         pattern,
         file_format: DataLoadingOptions {
@@ -856,6 +816,7 @@ fn parse_copy_into_table(parser: &mut Parser) -> Result<Statement, ParserError>
             options: copy_options,
         },
         validation_mode,
+        partition,
    })
 }
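Because the trailing clauses are now consumed in one loop, `FILES`, `PATTERN`, `FILE_FORMAT`, and the rest may appear in any order. A sketch of that behavior; the statement text is illustrative:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() -> Result<(), sqlparser::parser::ParserError> {
    // The same clauses in two different orders; both parse because the
    // option loop is order-insensitive.
    for sql in [
        "COPY INTO emp FROM @stage FILES = ('a.csv') PATTERN = '.*' FILE_FORMAT=(TYPE=CSV)",
        "COPY INTO emp FROM @stage FILE_FORMAT=(TYPE=CSV) PATTERN = '.*' FILES = ('a.csv')",
    ] {
        let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql)?;
        assert_eq!(stmts.len(), 1);
    }
    Ok(())
}
```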
diff --git a/tests/sqlparser_snowflake.rs b/tests/sqlparser_snowflake.rs
index 238e4e7e8..1840df17c 100644
--- a/tests/sqlparser_snowflake.rs
+++ b/tests/sqlparser_snowflake.rs
@@ -2027,21 +2027,23 @@ fn test_copy_into() {
         "FROM 'gcs://mybucket/./../a.csv'"
     );
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable {
+        Statement::CopyIntoSnowflake {
+            kind,
             into,
-            from_stage,
+            from_obj,
             files,
             pattern,
             validation_mode,
             ..
         } => {
+            assert_eq!(kind, CopyIntoSnowflakeKind::Table);
             assert_eq!(
                 into,
                 ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")])
             );
             assert_eq!(
-                from_stage,
-                ObjectName::from(vec![Ident::with_quote('\'', "gcs://mybucket/./../a.csv")])
+                from_obj,
+                Some(ObjectName::from(vec![Ident::with_quote('\'', "gcs://mybucket/./../a.csv")]))
             );
             assert!(files.is_none());
             assert!(pattern.is_none());
@@ -2051,23 +2053,55 @@ fn test_copy_into() {
     };
     assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
 
+    let sql = concat!("COPY INTO 's3://a/b/c/data.parquet' ", "FROM db.sc.tbl ", "PARTITION BY ('date=' || to_varchar(dt, 'YYYY-MM-DD') || '/hour=' || to_varchar(date_part(hour, ts)))");
+    match snowflake().verified_stmt(sql) {
+        Statement::CopyIntoSnowflake {
+            kind,
+            into,
+            from_obj,
+            from_query,
+            partition,
+            ..
+        } => {
+            assert_eq!(kind, CopyIntoSnowflakeKind::Location);
+            assert_eq!(
+                into,
+                ObjectName::from(vec![Ident::with_quote('\'', "s3://a/b/c/data.parquet")])
+            );
+            assert_eq!(
+                from_obj,
+                Some(ObjectName::from(vec![
+                    Ident::new("db"),
+                    Ident::new("sc"),
+                    Ident::new("tbl")
+                ]))
+            );
+            assert!(from_query.is_none());
+            assert!(partition.is_some());
+        }
+        _ => unreachable!(),
+    };
+    assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
+
     let sql = concat!(
         "COPY INTO 's3://a/b/c/data.parquet' ",
         "FROM (SELECT * FROM tbl)"
     );
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeLocation {
+        Statement::CopyIntoSnowflake {
+            kind,
             into,
-            from_table,
+            from_obj,
             from_query,
             ..
         } => {
+            assert_eq!(kind, CopyIntoSnowflakeKind::Location);
             assert_eq!(
                 into,
                 ObjectName::from(vec![Ident::with_quote('\'', "s3://a/b/c/data.parquet")])
             );
             assert!(from_query.is_some());
-            assert!(from_table.is_none());
+            assert!(from_obj.is_none());
         }
         _ => unreachable!(),
     };
@@ -2086,15 +2120,18 @@ fn test_copy_into_with_stage_params() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable {
-            from_stage,
+        Statement::CopyIntoSnowflake {
+            from_obj,
             stage_params,
             ..
         } => {
             //assert_eq!("s3://load/files/", stage_params.url.unwrap());
             assert_eq!(
-                from_stage,
-                ObjectName::from(vec![Ident::with_quote('\'', "s3://load/files/")])
+                from_obj,
+                Some(ObjectName::from(vec![Ident::with_quote(
+                    '\'',
+                    "s3://load/files/"
+                )]))
             );
             assert_eq!("myint", stage_params.storage_integration.unwrap());
             assert_eq!(
@@ -2146,14 +2183,17 @@ fn test_copy_into_with_stage_params() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable {
-            from_stage,
+        Statement::CopyIntoSnowflake {
+            from_obj,
             stage_params,
             ..
         } => {
             assert_eq!(
-                from_stage,
-                ObjectName::from(vec![Ident::with_quote('\'', "s3://load/files/")])
+                from_obj,
+                Some(ObjectName::from(vec![Ident::with_quote(
+                    '\'',
+                    "s3://load/files/"
+                )]))
             );
             assert_eq!("myint", stage_params.storage_integration.unwrap());
         }
@@ -2172,17 +2212,17 @@ fn test_copy_into_with_files_and_pattern_and_verification() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable {
+        Statement::CopyIntoSnowflake {
             files,
             pattern,
             validation_mode,
-            from_stage_alias,
+            from_obj_alias,
             ..
         } => {
             assert_eq!(files.unwrap(), vec!["file1.json", "file2.json"]);
             assert_eq!(pattern.unwrap(), ".*employees0[1-5].csv.gz");
             assert_eq!(validation_mode.unwrap(), "RETURN_7_ROWS");
-            assert_eq!(from_stage_alias.unwrap(), Ident::new("some_alias"));
+            assert_eq!(from_obj_alias.unwrap(), Ident::new("some_alias"));
         }
         _ => unreachable!(),
     }
@@ -2200,14 +2240,17 @@ fn test_copy_into_with_transformations() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable {
-            from_stage,
+        Statement::CopyIntoSnowflake {
+            from_obj,
             from_transformations,
             ..
         } => {
             assert_eq!(
-                from_stage,
-                ObjectName::from(vec![Ident::new("@schema"), Ident::new("general_finished")])
+                from_obj,
+                Some(ObjectName::from(vec![
+                    Ident::new("@schema"),
+                    Ident::new("general_finished")
+                ]))
             );
             assert_eq!(
                 from_transformations.as_ref().unwrap()[0],
@@ -2253,7 +2296,7 @@ fn test_copy_into_file_format() {
     );
 
     match snowflake_without_unescape().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable { file_format, .. } => {
+        Statement::CopyIntoSnowflake { file_format, .. } => {
             assert!(file_format.options.contains(&DataLoadingOption {
                 option_name: "COMPRESSION".to_string(),
                 option_type: DataLoadingOptionType::ENUM,
@@ -2292,7 +2335,7 @@ fn test_copy_into_file_format() {
         .first()
         .unwrap()
     {
-        Statement::CopyIntoSnowflakeTable { file_format, .. } => {
+        Statement::CopyIntoSnowflake { file_format, .. } => {
             assert!(file_format.options.contains(&DataLoadingOption {
                 option_name: "COMPRESSION".to_string(),
                 option_type: DataLoadingOptionType::ENUM,
@@ -2324,7 +2367,7 @@ fn test_copy_into_copy_options() {
     );
 
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable { copy_options, .. } => {
+        Statement::CopyIntoSnowflake { copy_options, .. } => {
             assert!(copy_options.options.contains(&DataLoadingOption {
                 option_name: "ON_ERROR".to_string(),
                 option_type: DataLoadingOptionType::ENUM,
@@ -2373,7 +2416,7 @@ fn test_snowflake_stage_object_names_into_location() {
             formatted_name
         );
         match snowflake().verified_stmt(&sql) {
-            Statement::CopyIntoSnowflakeLocation { into, .. } => {
+            Statement::CopyIntoSnowflake { into, .. } => {
                 assert_eq!(into.0, object_name.0)
             }
             _ => unreachable!(),
@@ -2399,7 +2442,7 @@ fn test_snowflake_stage_object_names_into_table() {
             formatted_name
        );
         match snowflake().verified_stmt(&sql) {
-            Statement::CopyIntoSnowflakeTable { into, .. } => {
+            Statement::CopyIntoSnowflake { into, .. } => {
                 assert_eq!(into.0, object_name.0)
             }
             _ => unreachable!(),
@@ -2412,16 +2455,14 @@ fn test_snowflake_copy_into() {
     let sql = "COPY INTO a.b FROM @namespace.stage_name";
     assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable {
-            into, from_stage, ..
-        } => {
+        Statement::CopyIntoSnowflake { into, from_obj, .. } => {
+            assert_eq!(into, ObjectName::from(vec![Ident::new("a"), Ident::new("b")]));
             assert_eq!(
-                into,
-                ObjectName::from(vec![Ident::new("a"), Ident::new("b")])
-            );
-            assert_eq!(
-                from_stage,
-                ObjectName::from(vec![Ident::new("@namespace"), Ident::new("stage_name")])
+                from_obj,
+                Some(ObjectName::from(vec![
+                    Ident::new("@namespace"),
+                    Ident::new("stage_name")
+                ]))
             )
         }
         _ => unreachable!(),
@@ -2433,9 +2474,7 @@ fn test_snowflake_copy_into_stage_name_ends_with_parens() {
     let sql = "COPY INTO SCHEMA.SOME_MONITORING_SYSTEM FROM (SELECT t.$1:st AS st FROM @schema.general_finished)";
     assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
     match snowflake().verified_stmt(sql) {
-        Statement::CopyIntoSnowflakeTable {
-            into, from_stage, ..
-        } => {
+        Statement::CopyIntoSnowflake { into, from_obj, .. } => {
             assert_eq!(
                 into,
                 ObjectName::from(vec![
@@ -2444,8 +2483,11 @@ fn test_snowflake_copy_into_stage_name_ends_with_parens() {
                 ])
             );
             assert_eq!(
-                from_stage,
-                ObjectName::from(vec![Ident::new("@schema"), Ident::new("general_finished")])
+                from_obj,
+                Some(ObjectName::from(vec![
+                    Ident::new("@schema"),
+                    Ident::new("general_finished")
+                ]))
             )
         }
         _ => unreachable!(),

From 61e8175d6996561b6c27a465e15d4d48db4372a7 Mon Sep 17 00:00:00 2001
From: Yoav Cohen
Date: Fri, 24 Jan 2025 10:32:48 +0100
Subject: [PATCH 4/5] Fix build

---
 src/dialect/snowflake.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs
index 19e443b40..89d7fd854 100644
--- a/src/dialect/snowflake.rs
+++ b/src/dialect/snowflake.rs
@@ -33,14 +33,16 @@ use crate::keywords::Keyword;
 use crate::parser::{Parser, ParserError};
 use crate::tokenizer::{Token, Word};
 #[cfg(not(feature = "std"))]
+use alloc::boxed::Box;
+#[cfg(not(feature = "std"))]
 use alloc::string::String;
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 #[cfg(not(feature = "std"))]
 use alloc::{format, vec};
-use sqlparser::ast::StorageSerializationPolicy;
 
 use super::keywords::RESERVED_FOR_IDENTIFIER;
+use sqlparser::ast::StorageSerializationPolicy;

From ccb7cd26134043bdbfe52a58901dbcef283e6564 Mon Sep 17 00:00:00 2001
From: Yoav Cohen
Date: Mon, 27 Jan 2025 09:16:59 +0100
Subject: [PATCH 5/5] Fix unit test after merge

---
 tests/sqlparser_snowflake.rs | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/tests/sqlparser_snowflake.rs b/tests/sqlparser_snowflake.rs
index 1840df17c..3bf9cd39a 100644
--- a/tests/sqlparser_snowflake.rs
+++ b/tests/sqlparser_snowflake.rs
@@ -2043,7 +2043,10 @@ fn test_copy_into() {
             );
             assert_eq!(
                 from_obj,
-                Some(ObjectName::from(vec![Ident::with_quote('\'', "gcs://mybucket/./../a.csv")]))
+                Some(ObjectName::from(vec![Ident::with_quote(
+                    '\'',
+                    "gcs://mybucket/./../a.csv"
+                )]))
             );
             assert!(files.is_none());
             assert!(pattern.is_none());
@@ -2387,7 +2390,6 @@ fn test_copy_into_copy_options() {
 #[test]
 fn test_snowflake_stage_object_names_into_location() {
     let mut allowed_object_names = [
-        ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")]),
         ObjectName::from(vec![Ident::new("@namespace"), Ident::new("%table_name")]),
         ObjectName::from(vec![
             Ident::new("@namespace"),
@@ -2456,7 +2458,10 @@ fn test_snowflake_copy_into() {
     assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
     match snowflake().verified_stmt(sql) {
         Statement::CopyIntoSnowflake { into, from_obj, .. } => {
-            assert_eq!(into, ObjectName::from(vec![Ident::new("a"), Ident::new("b")]));
+            assert_eq!(
+                into,
+                ObjectName::from(vec![Ident::new("a"), Ident::new("b")])
+            );
             assert_eq!(
                 from_obj,
                 Some(ObjectName::from(vec![
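The end state of the series is a single entry point for loads and unloads. A round-trip sketch against the final shape of the AST; the stage URL and table names are illustrative:

```rust
use sqlparser::ast::{CopyIntoSnowflakeKind, Statement};
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() -> Result<(), sqlparser::parser::ParserError> {
    // An unload: the quoted external-stage URL puts the parser on the Location path.
    let sql = "COPY INTO 's3://bucket/out/' FROM (SELECT * FROM emp) PARTITION BY (dept)";
    let stmt = Parser::parse_sql(&SnowflakeDialect {}, sql)?.remove(0);
    if let Statement::CopyIntoSnowflake { kind, from_query, partition, .. } = &stmt {
        assert_eq!(*kind, CopyIntoSnowflakeKind::Location);
        assert!(from_query.is_some() && partition.is_some());
    }
    // Display should reproduce an equivalent statement.
    assert_eq!(stmt.to_string(), sql);
    Ok(())
}
```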