diff --git a/Cargo.lock b/Cargo.lock
index 3d0b2ad72030d..7619857c4c156 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -302,12 +302,6 @@ version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a"
 
-[[package]]
-name = "arrayvec"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
-
 [[package]]
 name = "arrayvec"
 version = "0.7.4"
@@ -1489,7 +1483,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780"
 dependencies = [
  "arrayref",
- "arrayvec 0.7.4",
+ "arrayvec",
  "constant_time_eq",
 ]
 
@@ -1500,7 +1494,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e9ec96fe9a81b5e365f9db71fe00edc4fe4ca2cc7dcb7861f0603012a7caa210"
 dependencies = [
  "arrayref",
- "arrayvec 0.7.4",
+ "arrayvec",
  "cc",
  "cfg-if",
  "constant_time_eq",
@@ -2984,7 +2978,7 @@ dependencies = [
 
 [[package]]
 name = "databend-common-ast"
-version = "0.0.4"
+version = "0.1.3"
 dependencies = [
  "criterion",
  "derive-visitor",
@@ -3001,7 +2995,6 @@ dependencies = [
  "ordered-float 4.5.0",
  "percent-encoding",
  "pratt",
- "pretty",
  "pretty_assertions",
  "recursive",
  "rspack-codespan-reporting",
@@ -10186,7 +10179,7 @@ version = "0.4.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3"
 dependencies = [
- "arrayvec 0.7.4",
+ "arrayvec",
  "itoa",
 ]
 
@@ -11228,18 +11221,6 @@ dependencies = [
  "termtree",
 ]
 
-[[package]]
-name = "pretty"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83f3aa1e3ca87d3b124db7461265ac176b40c277f37e503eaa29c9c75c037846"
-dependencies = [
- "arrayvec 0.5.2",
- "log",
- "typed-arena",
- "unicode-segmentation",
-]
-
 [[package]]
 name = "pretty_assertions"
 version = "1.4.0"
@@ -12538,7 +12519,7 @@ version = "1.35.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a"
 dependencies = [
- "arrayvec 0.7.4",
+ "arrayvec",
  "borsh",
  "bytes",
  "num-traits",
@@ -14841,12 +14822,6 @@ dependencies = [
  "static_assertions",
 ]
 
-[[package]]
-name = "typed-arena"
-version = "2.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
-
 [[package]]
 name = "typed-builder"
 version = "0.19.1"
@@ -14944,7 +14919,7 @@ version = "3.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7c8a2469e56e6e5095c82ccd3afb98dad95f7af7929aab6d8ba8d6e0f73657da"
 dependencies = [
- "arrayvec 0.7.4",
+ "arrayvec",
 ]
 
 [[package]]
diff --git a/src/query/ast/Cargo.toml b/src/query/ast/Cargo.toml
index 8230b245d4a99..db67adf4d2572 100644
--- a/src/query/ast/Cargo.toml
+++ b/src/query/ast/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "databend-common-ast"
-version = "0.0.4"
+version = "0.1.3"
 publish = true
 description = "SQL parser for Databend"
 authors = { workspace = true }
@@ -25,7 +25,6 @@ nom-rule = { workspace = true }
 ordered-float = { workspace = true }
 percent-encoding = { workspace = true }
 pratt = { workspace = true }
-pretty = { workspace = true }
 pretty_assertions = { workspace = true }
 recursive = { workspace = true }
 rspack-codespan-reporting = { workspace = true }
diff --git a/src/query/ast/README.md b/src/query/ast/README.md
new file mode 100644
index 0000000000000..724de3745528c
--- /dev/null
+++ b/src/query/ast/README.md
@@ -0,0 +1,3 @@
+# databend-common-ast
+
+`databend-common-ast` is a module of the Databend project. It provides the Abstract Syntax Tree (AST) used for parsing and interpreting SQL queries.
diff --git a/src/query/ast/src/ast/expr.rs b/src/query/ast/src/ast/expr.rs
index 9ddae7e4d5866..9f4c5fb3515f3 100644
--- a/src/query/ast/src/ast/expr.rs
+++ b/src/query/ast/src/ast/expr.rs
@@ -1241,27 +1241,42 @@ pub struct WindowSpec {
 impl Display for WindowSpec {
     fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
         write!(f, "(")?;
+
+        let mut write = false;
+
         if let Some(existing_window_name) = &self.existing_window_name {
-            write!(f, " {existing_window_name}")?;
+            write!(f, "{existing_window_name}")?;
+            write = true;
         }
 
         if !self.partition_by.is_empty() {
-            write!(f, " PARTITION BY ")?;
+            if write {
+                write!(f, " ")?;
+            }
+            write = true;
+            write!(f, "PARTITION BY ")?;
             write_comma_separated_list(f, &self.partition_by)?;
         }
 
         if !self.order_by.is_empty() {
-            write!(f, " ORDER BY ")?;
+            if write {
+                write!(f, " ")?;
+            }
+            write = true;
+            write!(f, "ORDER BY ")?;
             write_comma_separated_list(f, &self.order_by)?;
         }
 
         if let Some(frame) = &self.window_frame {
+            if write {
+                write!(f, " ")?;
+            }
             match frame.units {
                 WindowFrameUnits::Rows => {
-                    write!(f, " ROWS")?;
+                    write!(f, "ROWS")?;
                 }
                 WindowFrameUnits::Range => {
-                    write!(f, " RANGE")?;
+                    write!(f, "RANGE")?;
                 }
             }
 
@@ -1281,7 +1296,7 @@ impl Display for WindowSpec {
                 format_frame(&frame.end_bound)
             )?
         }
-        write!(f, " )")?;
+        write!(f, ")")?;
         Ok(())
     }
 }
@@ -1868,10 +1883,10 @@ impl ExprReplacer {
                 self.replace_expr(expr);
             }
             SelectTarget::StarColumns { column_filter, .. } => {
-                if let Some(column_filter) = column_filter
-                    && let ColumnFilter::Lambda(lambda) = column_filter
-                {
-                    self.replace_expr(&mut lambda.expr);
+                if let Some(column_filter) = column_filter {
+                    if let ColumnFilter::Lambda(lambda) = column_filter {
+                        self.replace_expr(&mut lambda.expr);
+                    }
                 }
             }
         }
@@ -2006,10 +2021,10 @@ impl ExprReplacer {
                 }
             }
             Expr::CountAll { window, .. } => {
-                if let Some(window) = window
-                    && let Window::WindowSpec(window_spec) = window
-                {
-                    self.replace_window_spec(window_spec);
+                if let Some(window) = window {
+                    if let Window::WindowSpec(window_spec) = window {
+                        self.replace_window_spec(window_spec);
+                    }
                 }
             }
             Expr::Tuple { exprs, ..
} => { @@ -2024,10 +2039,10 @@ impl ExprReplacer { for param in func.params.iter_mut() { self.replace_expr(param); } - if let Some(window_desc) = &mut func.window - && let Window::WindowSpec(window_spec) = &mut window_desc.window - { - self.replace_window_spec(window_spec); + if let Some(window_desc) = &mut func.window { + if let Window::WindowSpec(window_spec) = &mut window_desc.window { + self.replace_window_spec(window_spec); + } } if let Some(lambda) = &mut func.lambda { self.replace_expr(&mut lambda.expr); diff --git a/src/query/ast/src/ast/format/mod.rs b/src/query/ast/src/ast/format/mod.rs index 1515db8817c0d..60ac3b2699a1d 100644 --- a/src/query/ast/src/ast/format/mod.rs +++ b/src/query/ast/src/ast/format/mod.rs @@ -14,12 +14,9 @@ mod indent_format; mod pretty_format; -mod syntax; use std::fmt::Display; -pub use syntax::pretty_statement; - #[derive(Clone)] pub struct FormatTreeNode { pub payload: T, diff --git a/src/query/ast/src/ast/format/syntax/ddl.rs b/src/query/ast/src/ast/format/syntax/ddl.rs deleted file mode 100644 index fc275f13a1e37..0000000000000 --- a/src/query/ast/src/ast/format/syntax/ddl.rs +++ /dev/null @@ -1,509 +0,0 @@ -// Copyright 2021 Datafuse Labs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use pretty::RcDoc; - -use super::expr::pretty_expr; -use super::inline_comma; -use super::query::pretty_query; -use super::query::pretty_table; -use crate::ast::format::syntax::interweave_comma; -use crate::ast::format::syntax::parenthesized; -use crate::ast::format::syntax::NEST_FACTOR; -use crate::ast::AddColumnOption; -use crate::ast::AlterTableAction; -use crate::ast::AlterTableStmt; -use crate::ast::AlterViewStmt; -use crate::ast::ClusterType; -use crate::ast::CreateDictionaryStmt; -use crate::ast::CreateOption; -use crate::ast::CreateStreamStmt; -use crate::ast::CreateTableSource; -use crate::ast::CreateTableStmt; -use crate::ast::CreateViewStmt; -use crate::ast::TableType; -use crate::ast::TimeTravelPoint; - -pub(crate) fn pretty_create_table(stmt: CreateTableStmt) -> RcDoc<'static> { - RcDoc::text("CREATE") - .append(if let CreateOption::CreateOrReplace = stmt.create_option { - RcDoc::space().append(RcDoc::text("OR REPLACE")) - } else { - RcDoc::nil() - }) - .append(match stmt.table_type { - TableType::Transient => RcDoc::space().append(RcDoc::text("TRANSIENT")), - TableType::Temporary => RcDoc::space().append(RcDoc::text("TEMPORARY")), - TableType::Normal => RcDoc::nil(), - }) - .append(RcDoc::space().append(RcDoc::text("TABLE"))) - .append(match stmt.create_option { - CreateOption::Create => RcDoc::nil(), - CreateOption::CreateIfNotExists => RcDoc::space().append(RcDoc::text("IF NOT EXISTS")), - CreateOption::CreateOrReplace => RcDoc::nil(), - }) - .append( - RcDoc::space() - .append(if let Some(catalog) = stmt.catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = stmt.database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) 
- .append(RcDoc::text(stmt.table.to_string())), - ) - .append(if let Some(source) = stmt.source { - pretty_table_source(source) - } else { - RcDoc::nil() - }) - .append(if let Some(engine) = stmt.engine { - RcDoc::space() - .append(RcDoc::text("ENGINE =")) - .append(RcDoc::space()) - .append(engine.to_string()) - } else { - RcDoc::nil() - }) - .append(if let Some(cluster_by) = stmt.cluster_by { - RcDoc::line() - .append(RcDoc::text("CLUSTER BY ")) - .append(match cluster_by.cluster_type { - ClusterType::Linear => RcDoc::text("LINEAR"), - ClusterType::Hilbert => RcDoc::text("HILBERT"), - }) - .append(parenthesized( - interweave_comma(cluster_by.cluster_exprs.into_iter().map(pretty_expr)).group(), - )) - } else { - RcDoc::nil() - }) - .append(if !stmt.table_options.is_empty() { - RcDoc::line() - .append(interweave_comma(stmt.table_options.iter().map(|(k, v)| { - RcDoc::text(k.clone()) - .append(RcDoc::space()) - .append(RcDoc::text("=")) - .append(RcDoc::space()) - .append(RcDoc::text("'")) - .append(RcDoc::text(v.clone())) - .append(RcDoc::text("'")) - }))) - .group() - } else { - RcDoc::nil() - }) - .append(if let Some(as_query) = stmt.as_query { - RcDoc::line().append(RcDoc::text("AS")).append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_query(*as_query).nest(NEST_FACTOR).group()), - ) - } else { - RcDoc::nil() - }) -} - -fn pretty_table_source(source: CreateTableSource) -> RcDoc<'static> { - match source { - CreateTableSource::Columns(columns, inverted_indexes) => RcDoc::space() - .append(parenthesized( - interweave_comma( - columns - .into_iter() - .map(|column| RcDoc::text(column.to_string())), - ) - .group(), - )) - .append(if let Some(inverted_indexes) = inverted_indexes { - parenthesized( - interweave_comma( - inverted_indexes - .into_iter() - .map(|inverted_index| RcDoc::text(inverted_index.to_string())), - ) - .group(), - ) - } else { - RcDoc::nil() - }), - CreateTableSource::Like { - catalog, - database, - table, - } => RcDoc::space() - .append(RcDoc::text("LIKE")) - .append(RcDoc::space()) - .append(if let Some(catalog) = catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(table.to_string())), - } -} - -pub(crate) fn pretty_alter_table(stmt: AlterTableStmt) -> RcDoc<'static> { - RcDoc::text("ALTER TABLE") - .append(if stmt.if_exists { - RcDoc::space().append(RcDoc::text("IF EXISTS")) - } else { - RcDoc::nil() - }) - .append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_table(stmt.table_reference)), - ) - .append(pretty_alter_table_action(stmt.action)) -} - -pub(crate) fn pretty_alter_table_action(action: AlterTableAction) -> RcDoc<'static> { - match action { - AlterTableAction::RenameTable { new_table } => RcDoc::line() - .append(RcDoc::text("RENAME TO ")) - .append(RcDoc::text(new_table.to_string())), - AlterTableAction::ModifyTableComment { new_comment } => RcDoc::line() - .append(RcDoc::text("COMMENT='")) - .append(RcDoc::text(new_comment)) - .append(RcDoc::text("'")), - AlterTableAction::RenameColumn { - old_column, - new_column, - } => RcDoc::line() - .append(RcDoc::text("RENAME COLUMN ")) - .append(RcDoc::text(old_column.to_string())) - .append(RcDoc::text(" TO ")) - .append(RcDoc::text(new_column.to_string())), - AlterTableAction::AddColumn { column, option } => RcDoc::line() - .append(RcDoc::text("ADD COLUMN ")) - 
.append(RcDoc::text(column.to_string())) - .append(match option { - AddColumnOption::First => RcDoc::space().append(RcDoc::text("FIRST")), - AddColumnOption::After(ident) => { - RcDoc::space().append(RcDoc::text(format!("AFTER {ident}"))) - } - AddColumnOption::End => RcDoc::nil(), - }), - AlterTableAction::ModifyColumn { action } => RcDoc::line() - .append(RcDoc::text("MODIFY COLUMN ")) - .append(RcDoc::text(action.to_string())) - .append(RcDoc::text(format!(" {}", action))), - AlterTableAction::DropColumn { column } => RcDoc::line() - .append(RcDoc::text("DROP COLUMN ")) - .append(RcDoc::text(column.to_string())), - AlterTableAction::AlterTableClusterKey { cluster_by } => RcDoc::line() - .append(RcDoc::text("CLUSTER BY ")) - .append(match cluster_by.cluster_type { - ClusterType::Linear => RcDoc::text("LINEAR"), - ClusterType::Hilbert => RcDoc::text("HILBERT"), - }) - .append(parenthesized( - interweave_comma(cluster_by.cluster_exprs.into_iter().map(pretty_expr)).group(), - )), - AlterTableAction::DropTableClusterKey => { - RcDoc::line().append(RcDoc::text("DROP CLUSTER KEY")) - } - AlterTableAction::ReclusterTable { - is_final, - selection, - limit, - } => RcDoc::line() - .append(RcDoc::text("RECLUSTER")) - .append(if is_final { - RcDoc::space().append(RcDoc::text("FINAL")) - } else { - RcDoc::nil() - }) - .append(if let Some(selection) = selection { - RcDoc::line().append(RcDoc::text("WHERE")).append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_expr(selection).nest(NEST_FACTOR).group()), - ) - } else { - RcDoc::nil() - }) - .append(if let Some(limit) = limit { - RcDoc::text(format!(" LIMIT {limit}")) - } else { - RcDoc::nil() - }), - AlterTableAction::FlashbackTo { point } => match point { - TimeTravelPoint::Snapshot(sid) => RcDoc::text(format!(" AT (SNAPSHOT => {sid})")), - TimeTravelPoint::Timestamp(ts) => RcDoc::text(format!(" AT (TIMESTAMP => {ts})")), - TimeTravelPoint::Offset(num) => RcDoc::text(format!(" AT (OFFSET => {num})")), - TimeTravelPoint::Stream { - catalog, - database, - name, - } => RcDoc::space() - .append(RcDoc::text("AT (STREAM => ")) - .append( - RcDoc::space() - .append(if let Some(catalog) = catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(name.to_string())), - ) - .append(RcDoc::text(")")), - }, - AlterTableAction::SetOptions { set_options } => { - let mut doc = RcDoc::line(); - doc = doc.append(RcDoc::text("SET OPTIONS: ")); - for (key, value) in set_options.into_iter() { - doc = doc.append(RcDoc::text(format!("{key} to {value} "))); - } - doc - } - AlterTableAction::UnsetOptions { targets } => { - let mut doc = RcDoc::line(); - doc = doc.append(RcDoc::text("UNSET OPTIONS: ")); - for opt in targets.into_iter() { - doc = doc.append(RcDoc::text(format!("{opt} "))); - } - doc - } - } -} - -pub(crate) fn pretty_create_view(stmt: CreateViewStmt) -> RcDoc<'static> { - RcDoc::text("CREATE") - .append(if let CreateOption::CreateOrReplace = stmt.create_option { - RcDoc::space().append(RcDoc::text("OR REPLACE")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space().append(RcDoc::text("VIEW"))) - .append(match stmt.create_option { - CreateOption::Create => RcDoc::nil(), - CreateOption::CreateIfNotExists => RcDoc::space().append(RcDoc::text("IF NOT EXISTS")), - CreateOption::CreateOrReplace => RcDoc::nil(), - }) - .append( - RcDoc::space() - .append(if 
let Some(catalog) = stmt.catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = stmt.database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(stmt.view.to_string())), - ) - .append( - RcDoc::line().append(RcDoc::text("AS")).append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_query(*stmt.query).nest(NEST_FACTOR).group()), - ), - ) -} - -pub(crate) fn pretty_alter_view(stmt: AlterViewStmt) -> RcDoc<'static> { - RcDoc::text("ALTER VIEW") - .append( - RcDoc::space() - .append(if let Some(catalog) = stmt.catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = stmt.database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(stmt.view.to_string())), - ) - .append( - RcDoc::line().append(RcDoc::text("AS")).append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_query(*stmt.query).nest(NEST_FACTOR).group()), - ), - ) -} - -pub(crate) fn pretty_create_stream(stmt: CreateStreamStmt) -> RcDoc<'static> { - RcDoc::text("CREATE") - .append(if let CreateOption::CreateOrReplace = stmt.create_option { - RcDoc::space().append(RcDoc::text("OR REPLACE")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space().append(RcDoc::text("STREAM"))) - .append(match stmt.create_option { - CreateOption::Create => RcDoc::nil(), - CreateOption::CreateIfNotExists => RcDoc::space().append(RcDoc::text("IF NOT EXISTS")), - CreateOption::CreateOrReplace => RcDoc::nil(), - }) - .append( - RcDoc::space() - .append(if let Some(catalog) = stmt.catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = stmt.database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(stmt.stream.to_string())), - ) - .append( - RcDoc::space().append(RcDoc::text("ON TABLE")).append( - RcDoc::space() - .append(if let Some(database) = stmt.table_database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(stmt.table.to_string())), - ), - ) - .append(match stmt.travel_point { - Some(TimeTravelPoint::Snapshot(sid)) => RcDoc::text(format!(" AT (SNAPSHOT => {sid})")), - Some(TimeTravelPoint::Timestamp(ts)) => RcDoc::text(format!(" AT (TIMESTAMP => {ts})")), - Some(TimeTravelPoint::Offset(num)) => RcDoc::text(format!(" AT (OFFSET => {num})")), - Some(TimeTravelPoint::Stream { - catalog, - database, - name, - }) => RcDoc::space() - .append(RcDoc::text("AT (STREAM => ")) - .append( - RcDoc::space() - .append(if let Some(catalog) = catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(name.to_string())), - ) - .append(RcDoc::text(")")), - None => RcDoc::nil(), - }) - .append(if !stmt.append_only { - RcDoc::space().append(RcDoc::text("APPEND_ONLY = false")) - } else { - RcDoc::nil() - }) - .append(if let Some(comment) = stmt.comment { - RcDoc::space().append(RcDoc::text(format!("COMMENT = '{comment}'"))) - } else { - RcDoc::nil() - }) -} - -pub(crate) fn pretty_create_dictionary(stmt: CreateDictionaryStmt) -> RcDoc<'static> { - RcDoc::text("CREATE") - 
.append(if let CreateOption::CreateOrReplace = stmt.create_option { - RcDoc::space().append(RcDoc::text("OR REPLACE")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space().append(RcDoc::text("DICTIONARY"))) - .append(match stmt.create_option { - CreateOption::Create => RcDoc::nil(), - CreateOption::CreateIfNotExists => RcDoc::space().append(RcDoc::text("IF NOT EXISTS")), - CreateOption::CreateOrReplace => RcDoc::nil(), - }) - .append( - RcDoc::space() - .append(if let Some(catalog) = stmt.catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = stmt.database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(stmt.dictionary_name.to_string())), - ) - .append(parenthesized( - interweave_comma( - stmt.columns - .iter() - .map(|column| RcDoc::text(column.to_string())), - ) - .group(), - )) - .append(if !stmt.primary_keys.is_empty() { - RcDoc::line().append(RcDoc::text("PRIMARY KEY ")).append( - inline_comma( - stmt.primary_keys - .iter() - .map(|k| -> RcDoc<'static> { RcDoc::text(k.to_string()) }), - ) - .group(), - ) - } else { - RcDoc::nil() - }) - .append(RcDoc::text("SOURCE ")) - .append(parenthesized( - RcDoc::text(stmt.source_name.to_string()).append(parenthesized( - if !stmt.source_options.is_empty() { - RcDoc::line() - .append(interweave_comma(stmt.source_options.iter().map( - |(k, v)| { - RcDoc::text(k.clone()) - .append(RcDoc::space()) - .append(RcDoc::text("=")) - .append(RcDoc::space()) - .append(RcDoc::text("'")) - .append(RcDoc::text(v.clone())) - .append(RcDoc::text("'")) - }, - ))) - .group() - } else { - RcDoc::nil() - }, - )), - )) - .append(if let Some(comment) = stmt.comment { - RcDoc::text("COMMENT ").append(RcDoc::text(comment)) - } else { - RcDoc::nil() - }) -} diff --git a/src/query/ast/src/ast/format/syntax/dml.rs b/src/query/ast/src/ast/format/syntax/dml.rs deleted file mode 100644 index 440368b25e7a8..0000000000000 --- a/src/query/ast/src/ast/format/syntax/dml.rs +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2021 Datafuse Labs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -use pretty::RcDoc; - -use super::expr::pretty_expr; -use super::query::pretty_query; -use super::query::pretty_table; -use crate::ast::format::syntax::inline_comma; -use crate::ast::format::syntax::interweave_comma; -use crate::ast::format::syntax::parenthesized; -use crate::ast::format::syntax::NEST_FACTOR; -use crate::ast::CopyIntoLocationSource; -use crate::ast::CopyIntoLocationStmt; -use crate::ast::CopyIntoTableSource; -use crate::ast::CopyIntoTableStmt; -use crate::ast::DeleteStmt; -use crate::ast::FileFormatOptions; -use crate::ast::InsertSource; -use crate::ast::InsertStmt; -use crate::ast::UpdateExpr; -use crate::ast::UpdateStmt; - -pub(crate) fn pretty_insert(insert_stmt: InsertStmt) -> RcDoc<'static> { - RcDoc::text("INSERT") - .append(RcDoc::space()) - .append(if insert_stmt.overwrite { - RcDoc::text("OVERWRITE") - } else { - RcDoc::text("INTO") - }) - .append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(if let Some(catalog) = insert_stmt.catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(if let Some(database) = insert_stmt.database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(insert_stmt.table.to_string())) - .append(if !insert_stmt.columns.is_empty() { - RcDoc::space() - .append(RcDoc::text("(")) - .append(inline_comma( - insert_stmt - .columns - .into_iter() - .map(|ident| RcDoc::text(ident.to_string())), - )) - .append(RcDoc::text(")")) - } else { - RcDoc::nil() - }), - ) - .append(pretty_source(insert_stmt.source)) -} - -fn pretty_source(source: InsertSource) -> RcDoc<'static> { - RcDoc::line().append(match source { - InsertSource::Values { rows } => RcDoc::text("VALUES").append( - RcDoc::line().nest(NEST_FACTOR).append( - interweave_comma(rows.into_iter().map(|row| { - RcDoc::text("(") - .append(inline_comma(row.into_iter().map(pretty_expr))) - .append(RcDoc::text(")")) - })) - .nest(NEST_FACTOR) - .group(), - ), - ), - InsertSource::RawValues { rest_str, .. 
} => RcDoc::text("VALUES").append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(RcDoc::text(rest_str)), - ), - InsertSource::Select { query } => pretty_query(*query), - }) -} - -pub(crate) fn pretty_delete(delete_stmt: DeleteStmt) -> RcDoc<'static> { - RcDoc::text("DELETE FROM") - .append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_table(delete_stmt.table)), - ) - .append(if let Some(selection) = delete_stmt.selection { - RcDoc::line().append(RcDoc::text("WHERE")).append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_expr(selection).nest(NEST_FACTOR).group()), - ) - } else { - RcDoc::nil() - }) -} - -pub(crate) fn pretty_update(update_stmt: UpdateStmt) -> RcDoc<'static> { - RcDoc::text("UPDATE") - .append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_table(update_stmt.table)), - ) - .append(RcDoc::line().append(RcDoc::text("SET"))) - .append(pretty_update_list(update_stmt.update_list)) - .append(if let Some(selection) = update_stmt.selection { - RcDoc::line().append(RcDoc::text("WHERE")).append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_expr(selection).nest(NEST_FACTOR).group()), - ) - } else { - RcDoc::nil() - }) -} - -fn pretty_update_list(update_list: Vec) -> RcDoc<'static> { - if update_list.len() > 1 { - RcDoc::line() - } else { - RcDoc::space() - } - .nest(NEST_FACTOR) - .append( - interweave_comma(update_list.into_iter().map(|update_expr| { - RcDoc::text(update_expr.name.to_string()) - .append(RcDoc::space()) - .append(RcDoc::text("=")) - .append(RcDoc::space()) - .append(pretty_expr(update_expr.expr)) - })) - .nest(NEST_FACTOR) - .group(), - ) -} - -pub(crate) fn pretty_copy_into_table(copy_stmt: CopyIntoTableStmt) -> RcDoc<'static> { - RcDoc::text("COPY") - .append(RcDoc::line().append(RcDoc::text("INTO "))) - .append(RcDoc::text(format!("{}", copy_stmt.dst))) - .append(if let Some(cols) = ©_stmt.dst_columns { - parenthesized( - interweave_comma(cols.iter().map(|file| RcDoc::text(format!("{:?}", file)))) - .group(), - ) - } else { - RcDoc::nil() - }) - .append(RcDoc::line().append(RcDoc::text("FROM "))) - .append(match copy_stmt.src { - CopyIntoTableSource::Location(v) => RcDoc::text(format!("{v}")), - CopyIntoTableSource::Query(query) => RcDoc::text("(") - .append(pretty_query(*query)) - .append(RcDoc::text(")")), - }) - .append(pretty_file_format(©_stmt.file_format)) - .append(if let Some(pattern) = ©_stmt.pattern { - RcDoc::line() - .append(RcDoc::text("PATTERN = ")) - .append(RcDoc::text(format!("'{}'", pattern))) - } else { - RcDoc::nil() - }) - .append(if let Some(files) = ©_stmt.files { - RcDoc::line() - .append(RcDoc::text("FILES = ")) - .append(parenthesized( - interweave_comma(files.iter().map(|file| RcDoc::text(format!("{:?}", file)))) - .group(), - )) - } else { - RcDoc::nil() - }) - .append(if !copy_stmt.options.validation_mode.is_empty() { - RcDoc::line() - .append(RcDoc::text("VALIDATION_MODE = ")) - .append(RcDoc::text(copy_stmt.options.validation_mode)) - } else { - RcDoc::nil() - }) - .append(if copy_stmt.options.size_limit != 0 { - RcDoc::line() - .append(RcDoc::text("SIZE_LIMIT = ")) - .append(RcDoc::text(format!("{}", copy_stmt.options.size_limit))) - } else { - RcDoc::nil() - }) - .append(if copy_stmt.options.max_files != 0 { - RcDoc::line() - .append(RcDoc::text("MAX_FILES = ")) - .append(RcDoc::text(format!("{}", copy_stmt.options.max_files))) - } else { - RcDoc::nil() - }) - .append( - RcDoc::line() - .append(RcDoc::text("PURGE = ")) - .append(RcDoc::text(format!("{}", copy_stmt.options.purge))), - ) - .append( - 
RcDoc::line() - .append(RcDoc::text("DISABLE_VARIANT_CHECK = ")) - .append(RcDoc::text(format!( - "{}", - copy_stmt.options.disable_variant_check - ))), - ) -} - -pub(crate) fn pretty_copy_into_location(copy_stmt: CopyIntoLocationStmt) -> RcDoc<'static> { - RcDoc::text("COPY") - .append(RcDoc::line().append(RcDoc::text("INTO "))) - .append(RcDoc::text(format!("{:?}", copy_stmt.dst))) - .append(RcDoc::line().append(RcDoc::text("FROM "))) - .append(match copy_stmt.src { - CopyIntoLocationSource::Table(v) => RcDoc::text(format!("{v}")), - CopyIntoLocationSource::Query(query) => RcDoc::text("(") - .append(pretty_query(*query)) - .append(RcDoc::text(")")), - }) - .append(pretty_file_format(©_stmt.file_format)) - .append( - RcDoc::line() - .append(RcDoc::text("SINGLE = ")) - .append(RcDoc::text(copy_stmt.options.single.to_string())), - ) -} - -fn pretty_file_format(file_format: &FileFormatOptions) -> RcDoc<'static> { - if !file_format.is_empty() { - RcDoc::line() - .append(RcDoc::text("FILE_FORMAT = ")) - .append(parenthesized( - interweave_comma(file_format.options.iter().map(|(k, v)| { - RcDoc::text(k.to_string()) - .append(RcDoc::space()) - .append(RcDoc::text("=")) - .append(RcDoc::space()) - .append(RcDoc::text(format!("{}", v))) - })) - .group(), - )) - } else { - RcDoc::nil() - } -} diff --git a/src/query/ast/src/ast/format/syntax/expr.rs b/src/query/ast/src/ast/format/syntax/expr.rs deleted file mode 100644 index cba0c8d2e786c..0000000000000 --- a/src/query/ast/src/ast/format/syntax/expr.rs +++ /dev/null @@ -1,463 +0,0 @@ -// Copyright 2021 Datafuse Labs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -use pretty::RcDoc; - -use super::query::pretty_query; -use crate::ast::format::syntax::inline_comma; -use crate::ast::format::syntax::interweave_comma; -use crate::ast::format::syntax::parenthesized; -use crate::ast::format::syntax::NEST_FACTOR; -use crate::ast::BinaryOperator; -use crate::ast::Expr; -use crate::ast::FunctionCall; -use crate::ast::MapAccessor; - -pub(crate) fn pretty_expr(expr: Expr) -> RcDoc<'static> { - match expr { - Expr::ColumnRef { column, .. } => if let Some(database) = column.database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - } - .append(if let Some(table) = column.table { - RcDoc::text(table.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(column.column.to_string())), - Expr::IsNull { expr, not, .. } => pretty_expr(*expr) - .append(RcDoc::space()) - .append(RcDoc::text("IS")) - .append(if not { - RcDoc::space().append(RcDoc::text("NOT")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space()) - .append(RcDoc::text("NULL")), - Expr::IsDistinctFrom { - left, right, not, .. 
- } => pretty_expr(*left) - .append(RcDoc::space()) - .append(RcDoc::text("IS")) - .append(if not { - RcDoc::space().append(RcDoc::text("NOT")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space()) - .append(RcDoc::text("DISTINCT FROM")) - .append(RcDoc::space()) - .append(pretty_expr(*right)), - Expr::InList { - expr, list, not, .. - } => pretty_expr(*expr) - .append(if not { - RcDoc::space().append(RcDoc::text("NOT")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space()) - .append(RcDoc::text("IN (")) - .append(inline_comma(list.into_iter().map(pretty_expr))) - .append(RcDoc::text(")")), - Expr::InSubquery { - expr, - subquery, - not, - .. - } => pretty_expr(*expr) - .append(if not { - RcDoc::space().append(RcDoc::text("NOT")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space()) - .append(RcDoc::text("IN (")) - .append(pretty_query(*subquery)) - .append(RcDoc::text(")")), - Expr::Between { - expr, - low, - high, - not, - .. - } => pretty_expr(*expr) - .append(if not { - RcDoc::space().append(RcDoc::text("NOT")) - } else { - RcDoc::nil() - }) - .append(RcDoc::space()) - .append(RcDoc::text("BETWEEN")) - .append(RcDoc::space()) - .append(pretty_expr(*low)) - .append(RcDoc::space()) - .append(RcDoc::text("AND")) - .append(RcDoc::space()) - .append(pretty_expr(*high)), - Expr::UnaryOp { op, expr, .. } => RcDoc::text("(") - .append(RcDoc::text(op.to_string())) - .append(RcDoc::space()) - .append(pretty_expr(*expr)) - .append(RcDoc::text(")")), - Expr::BinaryOp { - op, left, right, .. - } => match op { - BinaryOperator::And | BinaryOperator::Or | BinaryOperator::Xor => parenthesized( - pretty_expr(*left) - .append(RcDoc::line_()) - .append(RcDoc::text(op.to_string())) - .append(RcDoc::space()) - .append(pretty_expr(*right)), - ), - _ => RcDoc::text("(") - .append(pretty_expr(*left)) - .append(RcDoc::space()) - .append(RcDoc::text(op.to_string())) - .append(RcDoc::space()) - .append(pretty_expr(*right)) - .append(RcDoc::text(")")), - }, - Expr::JsonOp { - op, left, right, .. - } => RcDoc::text("(") - .append(pretty_expr(*left)) - .append(RcDoc::space()) - .append(RcDoc::text(op.to_string())) - .append(RcDoc::space()) - .append(pretty_expr(*right)) - .append(RcDoc::text(")")), - Expr::Cast { - expr, - target_type, - pg_style, - .. - } => { - if pg_style { - pretty_expr(*expr) - .append(RcDoc::text("::")) - .append(RcDoc::text(target_type.to_string())) - } else { - RcDoc::text("CAST(") - .append(pretty_expr(*expr)) - .append(RcDoc::space()) - .append(RcDoc::text("AS")) - .append(RcDoc::space()) - .append(RcDoc::text(target_type.to_string())) - .append(RcDoc::text(")")) - } - } - Expr::TryCast { - expr, target_type, .. - } => RcDoc::text("TRY_CAST(") - .append(pretty_expr(*expr)) - .append(RcDoc::space()) - .append(RcDoc::text("AS")) - .append(RcDoc::space()) - .append(RcDoc::text(target_type.to_string())) - .append(RcDoc::text(")")), - Expr::Extract { - kind: field, expr, .. - } => RcDoc::text("EXTRACT(") - .append(RcDoc::text(field.to_string())) - .append(RcDoc::space()) - .append(RcDoc::text("FROM")) - .append(RcDoc::space()) - .append(pretty_expr(*expr)) - .append(RcDoc::text(")")), - Expr::DatePart { - kind: field, expr, .. - } => RcDoc::text("DATE_PART(") - .append(RcDoc::text(field.to_string())) - .append(RcDoc::space()) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(pretty_expr(*expr)) - .append(RcDoc::text(")")), - Expr::Position { - substr_expr, - str_expr, - .. 
- } => RcDoc::text("POSITION(") - .append(pretty_expr(*substr_expr)) - .append(RcDoc::space()) - .append(RcDoc::text("IN")) - .append(RcDoc::space()) - .append(pretty_expr(*str_expr)) - .append(RcDoc::text(")")), - Expr::Substring { - expr, - substring_from, - substring_for, - .. - } => RcDoc::text("SUBSTRING(") - .append(pretty_expr(*expr)) - .append(RcDoc::space()) - .append(RcDoc::text("FROM")) - .append(RcDoc::space()) - .append(pretty_expr(*substring_from)) - .append(if let Some(substring_for) = substring_for { - RcDoc::space() - .append(RcDoc::text("FOR")) - .append(RcDoc::space()) - .append(pretty_expr(*substring_for)) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(")")), - Expr::Trim { - expr, trim_where, .. - } => RcDoc::text("TRIM(") - .append(if let Some((trim_where, trim_expr)) = trim_where { - RcDoc::text(trim_where.to_string()) - .append(RcDoc::space()) - .append(pretty_expr(*trim_expr)) - .append(RcDoc::space()) - .append(RcDoc::text("FROM")) - .append(RcDoc::space()) - } else { - RcDoc::nil() - }) - .append(pretty_expr(*expr)) - .append(RcDoc::text(")")), - Expr::Literal { value, .. } => RcDoc::text(value.to_string()), - Expr::CountAll { window, .. } => { - RcDoc::text("COUNT(*)").append(if let Some(window) = window { - RcDoc::text(" OVER (") - .append(RcDoc::text(window.to_string())) - .append(")") - } else { - RcDoc::nil() - }) - } - Expr::Tuple { exprs, .. } => RcDoc::text("(") - .append(inline_comma(exprs.into_iter().map(pretty_expr))) - .append(RcDoc::text(")")), - Expr::FunctionCall { func, .. } => { - let FunctionCall { - name, - distinct, - args, - params, - window, - lambda, - } = func; - - RcDoc::text(name.to_string()) - .append(if !params.is_empty() { - RcDoc::text("(") - .append(inline_comma( - params - .into_iter() - .map(|literal| RcDoc::text(literal.to_string())), - )) - .append(")") - } else { - RcDoc::nil() - }) - .append(RcDoc::text("(")) - .append(if distinct { - RcDoc::text("DISTINCT").append(RcDoc::space()) - } else { - RcDoc::nil() - }) - .append(inline_comma(args.into_iter().map(pretty_expr))) - .append(if let Some(lambda) = lambda { - if lambda.params.len() == 1 { - RcDoc::text(lambda.params[0].to_string()) - } else { - RcDoc::text("(") - .append(inline_comma( - lambda - .params - .iter() - .map(|param| RcDoc::text(param.to_string())), - )) - .append(RcDoc::text(")")) - } - .append(RcDoc::text(" -> ")) - .append(pretty_expr(*lambda.expr)) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(")")) - .append(if let Some(window) = window { - if let Some(ignore_null) = window.ignore_nulls { - if ignore_null { - RcDoc::text(" IGNORE NULLS ") - } else { - RcDoc::text(" RESPECT NULLS ") - } - .append("OVER (") - } else { - RcDoc::text(" OVER (") - } - .append(RcDoc::text(window.window.to_string())) - .append(")") - } else { - RcDoc::nil() - }) - } - Expr::Case { - operand, - conditions, - results, - else_result, - .. 
- } => RcDoc::text("CASE") - .append(if let Some(op) = operand { - RcDoc::space().append(RcDoc::text(op.to_string())) - } else { - RcDoc::nil() - }) - .append( - RcDoc::line() - .append(interweave_comma(conditions.iter().zip(results).map( - |(cond, res)| { - RcDoc::text("WHEN") - .append(RcDoc::space()) - .append(pretty_expr(cond.clone())) - .append(RcDoc::space()) - .append(RcDoc::text("THEN")) - .append(RcDoc::space()) - .append(pretty_expr(res)) - }, - ))) - .nest(NEST_FACTOR) - .group(), - ) - .append(if let Some(el) = else_result { - RcDoc::line() - .nest(NEST_FACTOR) - .append(RcDoc::text("ELSE")) - .append(RcDoc::space()) - .append(pretty_expr(*el)) - } else { - RcDoc::nil() - }) - .append(RcDoc::line()) - .append(RcDoc::text("END")), - Expr::Exists { not, subquery, .. } => if not { - RcDoc::text("NOT").append(RcDoc::space()) - } else { - RcDoc::nil() - } - .append(RcDoc::text("EXISTS")) - .append(RcDoc::space()) - .append(parenthesized(pretty_query(*subquery))), - Expr::Subquery { - subquery, modifier, .. - } => if let Some(m) = modifier { - RcDoc::text(m.to_string()).append(RcDoc::space()) - } else { - RcDoc::nil() - } - .append(parenthesized(pretty_query(*subquery))), - Expr::MapAccess { expr, accessor, .. } => pretty_expr(*expr).append(match accessor { - MapAccessor::Bracket { key } => RcDoc::text("[") - .append(RcDoc::text(key.to_string())) - .append(RcDoc::text("]")), - MapAccessor::DotNumber { key } => RcDoc::text(".").append(RcDoc::text(key.to_string())), - MapAccessor::Colon { key } => RcDoc::text(":").append(RcDoc::text(key.to_string())), - }), - Expr::Array { exprs, .. } => RcDoc::text("[") - .append(inline_comma(exprs.into_iter().map(pretty_expr))) - .append(RcDoc::text("]")), - Expr::Map { kvs, .. } => RcDoc::text("{") - .append(inline_comma(kvs.into_iter().map(|(k, v)| { - RcDoc::text(k.to_string()) - .append(RcDoc::text(":")) - .append(pretty_expr(v)) - }))) - .append(RcDoc::text("}")), - Expr::Interval { expr, unit, .. } => RcDoc::text("INTERVAL") - .append(RcDoc::space()) - .append(pretty_expr(*expr)) - .append(RcDoc::space()) - .append(RcDoc::text(unit.to_string())), - Expr::DateAdd { - unit, - interval, - date, - .. - } => RcDoc::text("DATE_ADD(") - .append(RcDoc::text(unit.to_string())) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(RcDoc::text("INTERVAL")) - .append(RcDoc::space()) - .append(pretty_expr(*interval)) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(pretty_expr(*date)) - .append(RcDoc::text(")")), - Expr::DateDiff { - unit, - date_start, - date_end, - .. - } => RcDoc::text("DATE_DIFF(") - .append(RcDoc::text(unit.to_string())) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(pretty_expr(*date_start)) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(pretty_expr(*date_end)) - .append(RcDoc::text(")")), - Expr::DateSub { - unit, - interval, - date, - .. - } => RcDoc::text("DATE_SUB(") - .append(RcDoc::text(unit.to_string())) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(RcDoc::text("INTERVAL")) - .append(RcDoc::space()) - .append(pretty_expr(*interval)) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(pretty_expr(*date)) - .append(RcDoc::text(")")), - Expr::DateTrunc { unit, date, .. } => RcDoc::text("DATE_TRUNC(") - .append(RcDoc::text(unit.to_string())) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(pretty_expr(*date)) - .append(RcDoc::text(")")), - Expr::LastDay { unit, date, .. 
} => RcDoc::text("LAST_DAY(") - .append(pretty_expr(*date)) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(RcDoc::text(unit.to_string())) - .append(RcDoc::text(")")), - Expr::PreviousDay { unit, date, .. } => RcDoc::text("PREVIOUS_DAY(") - .append(pretty_expr(*date)) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(RcDoc::text(unit.to_string())) - .append(RcDoc::text(")")), - Expr::NextDay { unit, date, .. } => RcDoc::text("NEXT_DAY(") - .append(pretty_expr(*date)) - .append(RcDoc::text(",")) - .append(RcDoc::space()) - .append(RcDoc::text(unit.to_string())) - .append(RcDoc::text(")")), - Expr::Hole { name, .. } => RcDoc::text(":").append(RcDoc::text(name.to_string())), - } -} diff --git a/src/query/ast/src/ast/format/syntax/mod.rs b/src/query/ast/src/ast/format/syntax/mod.rs deleted file mode 100644 index b7ab5f4d30596..0000000000000 --- a/src/query/ast/src/ast/format/syntax/mod.rs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2021 Datafuse Labs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -mod ddl; -mod dml; -mod expr; -mod query; - -use ddl::*; -use dml::*; -use pretty::RcDoc; -use query::*; - -use crate::ast::Statement; -use crate::ParseError; -use crate::Result; - -pub fn pretty_statement(stmt: Statement, max_width: usize) -> Result { - let pretty_stmt = match stmt { - // Format and beautify large SQL statements to make them easy to read. - Statement::Query(query) => pretty_query(*query), - Statement::Insert(insert_stmt) => pretty_insert(insert_stmt), - Statement::Delete(delete_stmt) => pretty_delete(delete_stmt), - Statement::CopyIntoTable(copy_stmt) => pretty_copy_into_table(copy_stmt), - Statement::CopyIntoLocation(copy_stmt) => pretty_copy_into_location(copy_stmt), - Statement::Update(update_stmt) => pretty_update(update_stmt), - Statement::CreateTable(create_table_stmt) => pretty_create_table(create_table_stmt), - Statement::AlterTable(alter_table_stmt) => pretty_alter_table(alter_table_stmt), - Statement::CreateView(create_view_stmt) => pretty_create_view(create_view_stmt), - Statement::AlterView(alter_view_stmt) => pretty_alter_view(alter_view_stmt), - Statement::CreateStream(create_stream_stmt) => pretty_create_stream(create_stream_stmt), - Statement::CreateDictionary(create_dictionary_stmt) => { - pretty_create_dictionary(create_dictionary_stmt) - } - // Other SQL statements are relatively short and don't need extra format. 
- _ => RcDoc::text(stmt.to_string()), - }; - - let mut bs = Vec::new(); - pretty_stmt - .render(max_width, &mut bs) - .map_err(|err| ParseError(None, err.to_string()))?; - String::from_utf8(bs).map_err(|err| ParseError(None, err.to_string())) -} - -pub(crate) const NEST_FACTOR: isize = 4; - -pub(crate) fn interweave_comma<'a, D>(docs: D) -> RcDoc<'a> -where D: Iterator> { - RcDoc::intersperse(docs, RcDoc::text(",").append(RcDoc::line())) -} - -pub(crate) fn inline_comma<'a, D>(docs: D) -> RcDoc<'a> -where D: Iterator> { - RcDoc::intersperse(docs, RcDoc::text(",").append(RcDoc::space())) -} - -pub(crate) fn inline_dot<'a, D>(docs: D) -> RcDoc<'a> -where D: Iterator> { - RcDoc::intersperse(docs, RcDoc::text(".")) -} - -pub(crate) fn parenthesized(doc: RcDoc<'_>) -> RcDoc<'_> { - RcDoc::text("(") - .append(RcDoc::line_()) - .append(doc) - .nest(NEST_FACTOR) - .append(RcDoc::line_()) - .append(RcDoc::text(")")) - .group() -} diff --git a/src/query/ast/src/ast/format/syntax/query.rs b/src/query/ast/src/ast/format/syntax/query.rs deleted file mode 100644 index 0e93bfc94acb9..0000000000000 --- a/src/query/ast/src/ast/format/syntax/query.rs +++ /dev/null @@ -1,567 +0,0 @@ -// Copyright 2021 Datafuse Labs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -use pretty::RcDoc; - -use super::expr::pretty_expr; -use crate::ast::format::syntax::inline_comma; -use crate::ast::format::syntax::inline_dot; -use crate::ast::format::syntax::interweave_comma; -use crate::ast::format::syntax::parenthesized; -use crate::ast::format::syntax::NEST_FACTOR; -use crate::ast::Expr; -use crate::ast::GroupBy; -use crate::ast::JoinCondition; -use crate::ast::JoinOperator; -use crate::ast::OrderByExpr; -use crate::ast::Query; -use crate::ast::SelectTarget; -use crate::ast::SetExpr; -use crate::ast::SetOperator; -use crate::ast::TableReference; -use crate::ast::WindowDefinition; -use crate::ast::With; -use crate::ast::CTE; - -pub(crate) fn pretty_query(query: Query) -> RcDoc<'static> { - pretty_with(query.with) - .append(pretty_body(query.body)) - .append(pretty_order_by(query.order_by)) - .append(pretty_limit(query.limit)) - .append(pretty_offset(query.offset)) - .group() -} - -fn pretty_with(with: Option) -> RcDoc<'static> { - if let Some(with) = with { - RcDoc::text("WITH") - .append(if with.recursive { - RcDoc::space().append(RcDoc::text("RECURSIVE")) - } else { - RcDoc::nil() - }) - .append(RcDoc::line().nest(NEST_FACTOR)) - .append( - interweave_comma(with.ctes.into_iter().map(pretty_cte)) - .nest(NEST_FACTOR) - .group(), - ) - .append(RcDoc::line()) - } else { - RcDoc::nil() - } -} - -fn pretty_cte(cte: CTE) -> RcDoc<'static> { - RcDoc::text(format!("{} AS ", cte.alias)) - .append(RcDoc::softline()) - .append(if cte.materialized { - RcDoc::text("MATERIALIZED ".to_string()) - } else { - RcDoc::nil() - }) - .append(parenthesized(pretty_query(*cte.query))) -} - -fn pretty_body(body: SetExpr) -> RcDoc<'static> { - match body { - SetExpr::Select(select_stmt) => if select_stmt.distinct { - RcDoc::text("SELECT DISTINCT") - } else { - RcDoc::text("SELECT") - } - .append(pretty_select_list(select_stmt.select_list)) - .append(pretty_from(select_stmt.from)) - .append(pretty_selection(select_stmt.selection)) - .append(pretty_group_by(select_stmt.group_by)) - .append(pretty_having(select_stmt.having)) - .append(pretty_window(select_stmt.window_list)), - SetExpr::Query(query) => parenthesized(pretty_query(*query)), - SetExpr::SetOperation(set_operation) => pretty_body(*set_operation.left) - .append( - RcDoc::line() - .append(match set_operation.op { - SetOperator::Union => RcDoc::text("UNION"), - SetOperator::Except => RcDoc::text("EXCEPT"), - SetOperator::Intersect => RcDoc::text("INTERSECT"), - }) - .append(if set_operation.all { - RcDoc::space().append(RcDoc::text("ALL")) - } else { - RcDoc::nil() - }), - ) - .append(RcDoc::line()) - .append(pretty_body(*set_operation.right)), - SetExpr::Values { values, .. 
} => { - RcDoc::text("VALUES").append(inline_comma(values.into_iter().map(|row_values| { - RcDoc::text("(") - .append(inline_comma(row_values.into_iter().map(pretty_expr))) - .append(RcDoc::text(")")) - }))) - } - } -} - -fn pretty_select_list(select_list: Vec) -> RcDoc<'static> { - if select_list.len() > 1 { - RcDoc::line() - } else { - RcDoc::space() - } - .nest(NEST_FACTOR) - .append( - interweave_comma( - select_list - .into_iter() - .map(|select_target| match select_target { - SelectTarget::AliasedExpr { expr, alias } => { - pretty_expr(*expr).append(if let Some(alias) = alias { - RcDoc::space() - .append(RcDoc::text("AS")) - .append(RcDoc::space()) - .append(RcDoc::text(alias.to_string())) - } else { - RcDoc::nil() - }) - } - SelectTarget::StarColumns { - qualified: object_name, - column_filter, - } => { - let docs = inline_dot( - object_name - .into_iter() - .map(|indirection| RcDoc::text(indirection.to_string())), - ) - .group(); - docs.append(if let Some(filter) = column_filter { - match filter { - crate::ast::ColumnFilter::Excludes(exclude) => RcDoc::line() - .append( - RcDoc::text("EXCLUDE").append( - if exclude.len() > 1 { - RcDoc::line() - } else { - RcDoc::space() - } - .nest(NEST_FACTOR), - ), - ) - .append( - interweave_comma(exclude.into_iter().map(|ident| { - RcDoc::space() - .append(RcDoc::space()) - .append(RcDoc::text(ident.to_string())) - })) - .nest(NEST_FACTOR) - .group(), - ), - crate::ast::ColumnFilter::Lambda(lambda) => RcDoc::line() - .append(RcDoc::text("(")) - .append(inline_comma( - lambda - .params - .iter() - .map(|ident| RcDoc::text(ident.to_string())), - )) - .append(RcDoc::text("->")) - .append(pretty_expr(*lambda.expr)), - } - } else { - RcDoc::nil() - }) - } - }), - ) - .nest(NEST_FACTOR) - .group(), - ) -} - -fn pretty_from(from: Vec) -> RcDoc<'static> { - if !from.is_empty() { - RcDoc::line() - .append(RcDoc::text("FROM").append(RcDoc::line().nest(NEST_FACTOR))) - .append( - interweave_comma(from.into_iter().map(pretty_table)) - .nest(NEST_FACTOR) - .group(), - ) - } else { - RcDoc::nil() - } -} - -fn pretty_selection(selection: Option) -> RcDoc<'static> { - if let Some(selection) = selection { - RcDoc::line().append(RcDoc::text("WHERE")).append( - RcDoc::line() - .nest(NEST_FACTOR) - .append(pretty_expr(selection).nest(NEST_FACTOR).group()), - ) - } else { - RcDoc::nil() - } -} - -fn pretty_group_set(set: Vec) -> RcDoc<'static> { - RcDoc::nil() - .append(RcDoc::text("(")) - .append(inline_comma(set.into_iter().map(pretty_expr))) - .append(RcDoc::text(")")) -} - -fn pretty_group_by(group_by: Option) -> RcDoc<'static> { - if let Some(group_by) = group_by { - match group_by { - GroupBy::Normal(exprs) => RcDoc::line() - .append( - RcDoc::text("GROUP BY").append( - if exprs.len() > 1 { - RcDoc::line() - } else { - RcDoc::space() - } - .nest(NEST_FACTOR), - ), - ) - .append( - interweave_comma(exprs.into_iter().map(pretty_expr)) - .nest(NEST_FACTOR) - .group(), - ), - GroupBy::All => RcDoc::line().append(RcDoc::text("GROUP BY ALL")), - GroupBy::GroupingSets(sets) => RcDoc::line() - .append( - RcDoc::text("GROUP BY GROUPING SETS (").append(RcDoc::line().nest(NEST_FACTOR)), - ) - .append( - interweave_comma(sets.into_iter().map(pretty_group_set)) - .nest(NEST_FACTOR) - .group(), - ) - .append(RcDoc::line()) - .append(RcDoc::text(")")), - GroupBy::Rollup(exprs) => RcDoc::line() - .append(RcDoc::text("GROUP BY ROLLUP (").append(RcDoc::line().nest(NEST_FACTOR))) - .append( - interweave_comma(exprs.into_iter().map(pretty_expr)) - .nest(NEST_FACTOR) - .group(), 
- ) - .append(RcDoc::line()) - .append(RcDoc::text(")")), - GroupBy::Cube(exprs) => RcDoc::line() - .append(RcDoc::text("GROUP BY CUBE (").append(RcDoc::line().nest(NEST_FACTOR))) - .append( - interweave_comma(exprs.into_iter().map(pretty_expr)) - .nest(NEST_FACTOR) - .group(), - ) - .append(RcDoc::line()) - .append(RcDoc::text(")")), - - GroupBy::Combined(sets) => RcDoc::line() - .append(RcDoc::text("GROUP BY ").append(RcDoc::line().nest(NEST_FACTOR))) - .append( - interweave_comma(sets.into_iter().map(|s| RcDoc::text(s.to_string()))) - .nest(NEST_FACTOR) - .group(), - ) - .append(RcDoc::line()), - } - } else { - RcDoc::nil() - } -} - -fn pretty_having(having: Option) -> RcDoc<'static> { - if let Some(having) = having { - RcDoc::line() - .append(RcDoc::text("HAVING").append(RcDoc::line().nest(NEST_FACTOR))) - .append(pretty_expr(having)) - } else { - RcDoc::nil() - } -} - -fn pretty_window(window: Option>) -> RcDoc<'static> { - if let Some(window) = window { - RcDoc::line() - .append(RcDoc::text("WINDOW").append(RcDoc::line().nest(NEST_FACTOR))) - .append( - interweave_comma(window.into_iter().map(pretty_window_def)) - .nest(NEST_FACTOR) - .group(), - ) - } else { - RcDoc::nil() - } -} - -fn pretty_window_def(def: WindowDefinition) -> RcDoc<'static> { - RcDoc::text(def.name.to_string()) - .append(RcDoc::space()) - .append(RcDoc::text("AS (")) - .append(RcDoc::text(def.spec.to_string())) - .append(RcDoc::text(")")) -} - -pub(crate) fn pretty_table(table: TableReference) -> RcDoc<'static> { - match table { - TableReference::Table { - span: _, - catalog, - database, - table, - alias, - temporal, - with_options, - pivot, - unpivot, - sample, - } => if let Some(catalog) = catalog { - RcDoc::text(catalog.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - } - .append(if let Some(database) = database { - RcDoc::text(database.to_string()).append(RcDoc::text(".")) - } else { - RcDoc::nil() - }) - .append(RcDoc::text(table.to_string())) - .append(if let Some(temporal) = temporal { - RcDoc::text(format!(" {temporal}")) - } else { - RcDoc::nil() - }) - .append(if let Some(with_options) = with_options { - RcDoc::text(format!(" {with_options}")) - } else { - RcDoc::nil() - }) - .append(if let Some(pivot) = pivot { - RcDoc::text(format!(" {pivot}")) - } else { - RcDoc::nil() - }) - .append(if let Some(unpivot) = unpivot { - RcDoc::text(format!(" {unpivot}")) - } else { - RcDoc::nil() - }) - .append(if let Some(sample) = sample { - RcDoc::text(format!(" {sample}")) - } else { - RcDoc::nil() - }) - .append(if let Some(alias) = alias { - RcDoc::text(format!(" AS {alias}")) - } else { - RcDoc::nil() - }), - TableReference::Subquery { - span: _, - lateral, - subquery, - alias, - pivot, - unpivot, - } => (if lateral { - RcDoc::text("LATERAL") - } else { - RcDoc::nil() - }) - .append(parenthesized(pretty_query(*subquery))) - .append(if let Some(alias) = alias { - RcDoc::text(format!(" AS {alias}")) - } else { - RcDoc::nil() - }) - .append(if let Some(pivot) = pivot { - RcDoc::text(format!(" {pivot}")) - } else { - RcDoc::nil() - }) - .append(if let Some(unpivot) = unpivot { - RcDoc::text(format!(" {unpivot}")) - } else { - RcDoc::nil() - }), - TableReference::TableFunction { - span: _, - lateral, - name, - params, - named_params, - alias, - sample, - } => { - let separator = if !named_params.is_empty() && !params.is_empty() { - RcDoc::text(", ") - } else { - RcDoc::nil() - }; - if lateral { - RcDoc::text("LATERAL ") - } else { - RcDoc::nil() - } - .append(RcDoc::text(name.to_string())) - 
.append(RcDoc::text("(")) - .append(inline_comma(params.into_iter().map(pretty_expr))) - .append(separator) - .append(inline_comma(named_params.into_iter().map(|(k, v)| { - RcDoc::text(k.to_string()) - .append(RcDoc::text("=>")) - .append(pretty_expr(v)) - }))) - .append(RcDoc::text(")")) - .append(if let Some(sample) = sample { - RcDoc::text(format!(" {sample}")) - } else { - RcDoc::nil() - }) - .append(if let Some(alias) = alias { - RcDoc::text(format!(" AS {alias}")) - } else { - RcDoc::nil() - }) - } - TableReference::Join { span: _, join } => pretty_table(*join.left) - .append(RcDoc::line()) - .append(if join.condition == JoinCondition::Natural { - RcDoc::text("NATURAL").append(RcDoc::space()) - } else { - RcDoc::nil() - }) - .append(match join.op { - JoinOperator::Inner => RcDoc::text("INNER JOIN"), - JoinOperator::LeftOuter => RcDoc::text("LEFT OUTER JOIN"), - JoinOperator::RightOuter => RcDoc::text("RIGHT OUTER JOIN"), - JoinOperator::FullOuter => RcDoc::text("FULL OUTER JOIN"), - JoinOperator::CrossJoin => RcDoc::text("CROSS JOIN"), - JoinOperator::LeftAnti => RcDoc::text("LEFT ANTI JOIN"), - JoinOperator::RightAnti => RcDoc::text("RIGHT ANTI JOIN"), - JoinOperator::LeftSemi => RcDoc::text("LEFT SEMI JOIN"), - JoinOperator::RightSemi => RcDoc::text("RIGHT SEMI JOIN"), - }) - .append(RcDoc::space().append(pretty_table(*join.right))) - .append(match &join.condition { - JoinCondition::On(expr) => RcDoc::space() - .append(RcDoc::text("ON")) - .append(RcDoc::space()) - .append(pretty_expr(*expr.clone())), - JoinCondition::Using(idents) => RcDoc::space() - .append(RcDoc::text("USING(")) - .append(inline_comma( - idents.iter().map(|ident| RcDoc::text(ident.to_string())), - )) - .append(RcDoc::text(")")), - _ => RcDoc::nil(), - }), - TableReference::Location { - span: _, - location, - options, - alias, - } => RcDoc::text(location.to_string()) - .append(options.to_string()) - .append(if let Some(a) = alias { - RcDoc::text(format!(" AS {a}")) - } else { - RcDoc::nil() - }), - } -} - -fn pretty_order_by(order_by: Vec) -> RcDoc<'static> { - if !order_by.is_empty() { - RcDoc::line() - .append( - RcDoc::text("ORDER BY").append( - if order_by.len() > 1 { - RcDoc::line() - } else { - RcDoc::space() - } - .nest(NEST_FACTOR), - ), - ) - .append( - interweave_comma(order_by.into_iter().map(pretty_order_by_expr)) - .nest(NEST_FACTOR) - .group(), - ) - } else { - RcDoc::nil() - } -} - -fn pretty_limit(limit: Vec) -> RcDoc<'static> { - if !limit.is_empty() { - RcDoc::line() - .append( - RcDoc::text("LIMIT").append( - if limit.len() > 1 { - RcDoc::line() - } else { - RcDoc::space() - } - .nest(NEST_FACTOR), - ), - ) - .append( - interweave_comma(limit.into_iter().map(pretty_expr)) - .nest(NEST_FACTOR) - .group(), - ) - } else { - RcDoc::nil() - } -} - -fn pretty_offset(offset: Option) -> RcDoc<'static> { - if let Some(offset) = offset { - RcDoc::line() - .append(RcDoc::text("OFFSET").append(RcDoc::space().nest(NEST_FACTOR))) - .append(pretty_expr(offset)) - } else { - RcDoc::nil() - } -} - -fn pretty_order_by_expr(order_by_expr: OrderByExpr) -> RcDoc<'static> { - RcDoc::text(order_by_expr.expr.to_string()) - .append(if let Some(asc) = order_by_expr.asc { - if asc { - RcDoc::space().append(RcDoc::text("ASC")) - } else { - RcDoc::space().append(RcDoc::text("DESC")) - } - } else { - RcDoc::nil() - }) - .append(if let Some(nulls_first) = order_by_expr.nulls_first { - if nulls_first { - RcDoc::space().append(RcDoc::text("NULLS FIRST")) - } else { - RcDoc::space().append(RcDoc::text("NULLS LAST")) - } - } 
else { - RcDoc::nil() - }) -} diff --git a/src/query/ast/src/ast/statements/hint.rs b/src/query/ast/src/ast/statements/hint.rs index f08fa6f1a37c7..62b365b3bcf14 100644 --- a/src/query/ast/src/ast/statements/hint.rs +++ b/src/query/ast/src/ast/statements/hint.rs @@ -40,7 +40,7 @@ impl Display for Hint { write!(f, "{}", hint.name)?; write!(f, "=")?; write!(f, "{}", hint.expr)?; - write!(f, ") ")?; + write!(f, ")")?; } write!(f, "*/") } diff --git a/src/query/ast/src/ast/statements/procedure.rs b/src/query/ast/src/ast/statements/procedure.rs index a2d75d52ea2ad..6538b8db4c3b8 100644 --- a/src/query/ast/src/ast/statements/procedure.rs +++ b/src/query/ast/src/ast/statements/procedure.rs @@ -94,47 +94,47 @@ impl Display for CreateProcedureStmt { // LANGUAGE SQL // [ COMMENT = '' ] AS fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { - write!(f, "CREATE ")?; + write!(f, "CREATE")?; if let CreateOption::CreateOrReplace = self.create_option { - write!(f, "OR REPLACE ")?; + write!(f, " OR REPLACE")?; } - write!(f, "PROCEDURE ")?; + write!(f, " PROCEDURE")?; if let CreateOption::CreateIfNotExists = self.create_option { - write!(f, "IF NOT EXISTS ")?; + write!(f, " IF NOT EXISTS")?; } - write!(f, "{}", self.name.name)?; + write!(f, " {}", self.name.name)?; if let Some(args) = &self.args { if args.is_empty() { - write!(f, "() ")?; + write!(f, "()")?; } else { write!(f, "(")?; write_comma_separated_list(f, args.clone())?; - write!(f, ") ")?; + write!(f, ")")?; } } else { - write!(f, "() ")?; + write!(f, "()")?; } if self.return_type.len() == 1 { if let Some(name) = &self.return_type[0].name { write!( f, - "RETURNS TABLE({} {}) ", + " RETURNS TABLE({} {})", name, self.return_type[0].data_type )?; } else { - write!(f, "RETURNS {} ", self.return_type[0].data_type)?; + write!(f, " RETURNS {}", self.return_type[0].data_type)?; } } else { - write!(f, "RETURNS TABLE(")?; + write!(f, " RETURNS TABLE(")?; write_comma_separated_list(f, self.return_type.clone())?; - write!(f, ") ")?; + write!(f, ")")?; } - write!(f, "{}", self.language)?; + write!(f, " {}", self.language)?; if let Some(comment) = &self.comment { - write!(f, "COMMENT='{}' ", comment)?; + write!(f, " COMMENT='{}'", comment)?; } - write!(f, "AS $$\n{}\n$$", self.script)?; + write!(f, " AS $$\n{}\n$$", self.script)?; Ok(()) } } @@ -167,11 +167,11 @@ impl Display for DescProcedureStmt { write!(f, "DESCRIBE PROCEDURE {}", self.name)?; if self.args.is_empty() { - write!(f, "() ")?; + write!(f, "()")?; } else { write!(f, "(")?; write_comma_separated_list(f, self.args.clone())?; - write!(f, ") ")?; + write!(f, ")")?; } Ok(()) diff --git a/src/query/ast/src/ast/statements/table.rs b/src/query/ast/src/ast/statements/table.rs index 4eddfc0c0dfdb..0ce678d93fd60 100644 --- a/src/query/ast/src/ast/statements/table.rs +++ b/src/query/ast/src/ast/statements/table.rs @@ -192,19 +192,20 @@ pub enum TableType { impl Display for CreateTableStmt { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { - write!(f, "CREATE ")?; + write!(f, "CREATE")?; if let CreateOption::CreateOrReplace = self.create_option { - write!(f, "OR REPLACE ")?; + write!(f, " OR REPLACE")?; } match self.table_type { TableType::Normal => {} - TableType::Transient => write!(f, "TRANSIENT ")?, - TableType::Temporary => write!(f, "TEMPORARY ")?, + TableType::Transient => write!(f, " TRANSIENT ")?, + TableType::Temporary => write!(f, " TEMPORARY ")?, }; - write!(f, "TABLE ")?; + write!(f, " TABLE")?; if let CreateOption::CreateIfNotExists = self.create_option { - write!(f, "IF NOT EXISTS ")?; 
+ write!(f, " IF NOT EXISTS")?; } + write!(f, " ")?; write_dot_separated_list( f, self.catalog @@ -380,11 +381,11 @@ pub struct AlterTableStmt { impl Display for AlterTableStmt { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { - write!(f, "ALTER TABLE ")?; + write!(f, "ALTER TABLE")?; if self.if_exists { - write!(f, "IF EXISTS ")?; + write!(f, " IF EXISTS")?; } - write!(f, "{}", self.table_reference)?; + write!(f, " {}", self.table_reference)?; write!(f, " {}", self.action) } } @@ -462,7 +463,7 @@ impl Display for AlterTableAction { write!(f, "DROP COLUMN {column}")?; } AlterTableAction::AlterTableClusterKey { cluster_by } => { - write!(f, " {cluster_by}")?; + write!(f, "{cluster_by}")?; } AlterTableAction::DropTableClusterKey => { write!(f, "DROP CLUSTER KEY")?; diff --git a/src/query/ast/src/lib.rs b/src/query/ast/src/lib.rs index 697125c05768c..f3792a8c9d027 100644 --- a/src/query/ast/src/lib.rs +++ b/src/query/ast/src/lib.rs @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -#![feature(let_chains)] - // TODO(xuanwo): Add crate level documents here. + +#[allow(clippy::collapsible_match)] pub mod ast; mod error; pub mod parser; diff --git a/src/query/ast/src/parser/parser.rs b/src/query/ast/src/parser/parser.rs index 6335c012a632d..8cbc23d969fb1 100644 --- a/src/query/ast/src/parser/parser.rs +++ b/src/query/ast/src/parser/parser.rs @@ -148,6 +148,7 @@ pub fn run_parser( #[allow(dead_code)] fn assert_reparse(sql: &str, stmt: StatementWithFormat) { let stmt = reset_ast(stmt); + let new_sql = stmt.to_string(); let new_tokens = crate::parser::tokenize_sql(&new_sql).unwrap(); let new_stmt = run_parser( @@ -157,8 +158,9 @@ fn assert_reparse(sql: &str, stmt: StatementWithFormat) { false, statement, ) - .map_err(|err| panic!("{}", err.1)) + .map_err(|err| panic!("{} in {}", err.1, new_sql)) .unwrap(); + let new_stmt = reset_ast(new_stmt); assert_eq!(stmt, new_stmt, "\nleft:\n{}\nright:\n{}", sql, new_sql); } diff --git a/src/query/ast/src/parser/statement.rs b/src/query/ast/src/parser/statement.rs index 4939e5f410f59..4b12c78a3fb2c 100644 --- a/src/query/ast/src/parser/statement.rs +++ b/src/query/ast/src/parser/statement.rs @@ -62,10 +62,7 @@ pub fn statement_body(i: Input) -> IResult { Ok(Statement::Explain { kind: match opt_kind.map(|token| token.kind) { Some(TokenKind::SYNTAX) | Some(TokenKind::AST) => { - let pretty_stmt = - pretty_statement(statement.stmt.clone(), 10).map_err(|_| { - nom::Err::Failure(ErrorKind::Other("invalid statement")) - })?; + let pretty_stmt = statement.stmt.to_string(); ExplainKind::Syntax(pretty_stmt) } Some(TokenKind::PIPELINE) => ExplainKind::Pipeline, diff --git a/src/query/ast/tests/it/parser.rs b/src/query/ast/tests/it/parser.rs index 1e30e949194ee..1d8d789906695 100644 --- a/src/query/ast/tests/it/parser.rs +++ b/src/query/ast/tests/it/parser.rs @@ -205,6 +205,7 @@ fn test_statement() { r#"select * from a right semi join b on a.a = b.a;"#, r#"select * from a right anti join b on a.a = b.a;"#, r#"select * from a full outer join b on a.a = b.a;"#, + r#"select * FROM fuse_compat_table ignore_result;"#, r#"select * from a inner join b on a.a = b.a;"#, r#"select * from a left outer join b using(a);"#, r#"select * from a right outer join b using(a);"#, @@ -291,6 +292,7 @@ fn test_statement() { r#"VACUUM DROP TABLE FROM db;"#, r#"VACUUM DROP TABLE FROM db LIMIT 10;"#, r#"CREATE TABLE t (a INT COMMENT 'col comment') COMMENT='table comment';"#, + r#"CREATE TEMPORARY TABLE t (a INT COMMENT 
'col comment')"#, r#"GRANT CREATE, CREATE USER ON * TO 'test-grant';"#, r#"GRANT SELECT, CREATE ON * TO 'test-grant';"#, r#"GRANT SELECT, CREATE ON *.* TO 'test-grant';"#, @@ -765,6 +767,8 @@ fn test_statement() { r#"SELECT first_value(d) respect nulls OVER (w) FROM e;"#, r#"SELECT sum(d) IGNORE NULLS OVER (w) FROM e;"#, r#"SELECT sum(d) OVER w FROM e WINDOW w AS (PARTITION BY f ORDER BY g);"#, + r#"SELECT number, rank() OVER (PARTITION BY number % 3 ORDER BY number) + FROM numbers(10) where number > 10 and number > 9 and number > 8;"#, r#"GRANT OWNERSHIP ON d20_0014.* TO ROLE 'd20_0015_owner';"#, r#"GRANT OWNERSHIP ON d20_0014.t TO ROLE 'd20_0015_owner';"#, r#"GRANT OWNERSHIP ON STAGE s1 TO ROLE 'd20_0015_owner';"#, diff --git a/src/query/ast/tests/it/testdata/expr.txt b/src/query/ast/tests/it/testdata/expr.txt index 0f4d8a38645b4..eb279496999c7 100644 --- a/src/query/ast/tests/it/testdata/expr.txt +++ b/src/query/ast/tests/it/testdata/expr.txt @@ -3490,7 +3490,7 @@ Map { ---------- Input ---------- ROW_NUMBER() OVER (ORDER BY salary DESC) ---------- Output --------- -ROW_NUMBER() OVER ( ORDER BY salary DESC ) +ROW_NUMBER() OVER (ORDER BY salary DESC) ---------- AST ------------ FunctionCall { span: Some( @@ -3555,7 +3555,7 @@ FunctionCall { ---------- Input ---------- SUM(salary) OVER () ---------- Output --------- -SUM(salary) OVER ( ) +SUM(salary) OVER () ---------- AST ------------ FunctionCall { span: Some( @@ -3614,7 +3614,7 @@ FunctionCall { ---------- Input ---------- AVG(salary) OVER (PARTITION BY department) ---------- Output --------- -AVG(salary) OVER ( PARTITION BY department ) +AVG(salary) OVER (PARTITION BY department) ---------- AST ------------ FunctionCall { span: Some( @@ -3693,7 +3693,7 @@ FunctionCall { ---------- Input ---------- SUM(salary) OVER (PARTITION BY department ORDER BY salary DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) ---------- Output --------- -SUM(salary) OVER ( PARTITION BY department ORDER BY salary DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) +SUM(salary) OVER (PARTITION BY department ORDER BY salary DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) ---------- AST ------------ FunctionCall { span: Some( @@ -3806,7 +3806,7 @@ FunctionCall { ---------- Input ---------- AVG(salary) OVER (PARTITION BY department ORDER BY hire_date ROWS BETWEEN 2 PRECEDING AND CURRENT ROW) ---------- Output --------- -AVG(salary) OVER ( PARTITION BY department ORDER BY hire_date ROWS BETWEEN 2 PRECEDING AND CURRENT ROW ) +AVG(salary) OVER (PARTITION BY department ORDER BY hire_date ROWS BETWEEN 2 PRECEDING AND CURRENT ROW) ---------- AST ------------ FunctionCall { span: Some( @@ -3926,7 +3926,7 @@ FunctionCall { ---------- Input ---------- COUNT() OVER (ORDER BY hire_date RANGE BETWEEN INTERVAL '7' DAY PRECEDING AND CURRENT ROW) ---------- Output --------- -COUNT() OVER ( ORDER BY hire_date RANGE BETWEEN INTERVAL '7' DAY PRECEDING AND CURRENT ROW ) +COUNT() OVER (ORDER BY hire_date RANGE BETWEEN INTERVAL '7' DAY PRECEDING AND CURRENT ROW) ---------- AST ------------ FunctionCall { span: Some( @@ -4012,7 +4012,7 @@ FunctionCall { ---------- Input ---------- COUNT() OVER (ORDER BY hire_date ROWS UNBOUNDED PRECEDING) ---------- Output --------- -COUNT() OVER ( ORDER BY hire_date ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) +COUNT() OVER (ORDER BY hire_date ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) ---------- AST ------------ FunctionCall { span: Some( @@ -4083,7 +4083,7 @@ FunctionCall { ---------- Input ---------- COUNT() OVER 
(ORDER BY hire_date ROWS CURRENT ROW) ---------- Output --------- -COUNT() OVER ( ORDER BY hire_date ROWS BETWEEN CURRENT ROW AND CURRENT ROW ) +COUNT() OVER (ORDER BY hire_date ROWS BETWEEN CURRENT ROW AND CURRENT ROW) ---------- AST ------------ FunctionCall { span: Some( @@ -4152,7 +4152,7 @@ FunctionCall { ---------- Input ---------- COUNT() OVER (ORDER BY hire_date ROWS 3 PRECEDING) ---------- Output --------- -COUNT() OVER ( ORDER BY hire_date ROWS BETWEEN 3 PRECEDING AND CURRENT ROW ) +COUNT() OVER (ORDER BY hire_date ROWS BETWEEN 3 PRECEDING AND CURRENT ROW) ---------- AST ------------ FunctionCall { span: Some( diff --git a/src/query/ast/tests/it/testdata/query.txt b/src/query/ast/tests/it/testdata/query.txt index d6d61ba217e16..e4ce72e9a1af6 100644 --- a/src/query/ast/tests/it/testdata/query.txt +++ b/src/query/ast/tests/it/testdata/query.txt @@ -7523,7 +7523,7 @@ Query { ---------- Input ---------- select sum(a) over w from customer window w as (partition by a order by b) ---------- Output --------- -SELECT sum(a) OVER w FROM customer WINDOW w AS ( PARTITION BY a ORDER BY b ) +SELECT sum(a) OVER w FROM customer WINDOW w AS (PARTITION BY a ORDER BY b) ---------- AST ------------ Query { span: Some( @@ -7702,7 +7702,7 @@ Query { ---------- Input ---------- select a, sum(a) over w, sum(a) over w1, sum(a) over w2 from t1 window w as (partition by a), w2 as (w1 rows current row), w1 as (w order by a) order by a ---------- Output --------- -SELECT a, sum(a) OVER w, sum(a) OVER w1, sum(a) OVER w2 FROM t1 WINDOW w AS ( PARTITION BY a ), w2 AS ( w1 ROWS BETWEEN CURRENT ROW AND CURRENT ROW ), w1 AS ( w ORDER BY a ) ORDER BY a +SELECT a, sum(a) OVER w, sum(a) OVER w1, sum(a) OVER w2 FROM t1 WINDOW w AS (PARTITION BY a), w2 AS (w1 ROWS BETWEEN CURRENT ROW AND CURRENT ROW), w1 AS (w ORDER BY a) ORDER BY a ---------- AST ------------ Query { span: Some( diff --git a/src/query/ast/tests/it/testdata/stmt.txt b/src/query/ast/tests/it/testdata/stmt.txt index d7e6697414621..83fa07ddbcfd4 100644 --- a/src/query/ast/tests/it/testdata/stmt.txt +++ b/src/query/ast/tests/it/testdata/stmt.txt @@ -7940,6 +7940,75 @@ Query( ) +---------- Input ---------- +select * FROM fuse_compat_table ignore_result; +---------- Output --------- +SELECT * FROM fuse_compat_table IGNORE_RESULT +---------- AST ------------ +Query( + Query { + span: Some( + 0..31, + ), + with: None, + body: Select( + SelectStmt { + span: Some( + 0..31, + ), + hints: None, + distinct: false, + top_n: None, + select_list: [ + StarColumns { + qualified: [ + Star( + Some( + 7..8, + ), + ), + ], + column_filter: None, + }, + ], + from: [ + Table { + span: Some( + 14..31, + ), + catalog: None, + database: None, + table: Identifier { + span: Some( + 14..31, + ), + name: "fuse_compat_table", + quote: None, + ident_type: None, + }, + alias: None, + temporal: None, + with_options: None, + pivot: None, + unpivot: None, + sample: None, + }, + ], + selection: None, + group_by: None, + having: None, + window_list: None, + qualify: None, + }, + ), + order_by: [], + limit: [], + offset: None, + ignore_result: true, + }, +) + + ---------- Input ---------- select * from a inner join b on a.a = b.a; ---------- Output --------- @@ -11808,7 +11877,7 @@ OptimizeTable( ---------- Input ---------- ALTER TABLE t CLUSTER BY(c1); ---------- Output --------- -ALTER TABLE t CLUSTER BY LINEAR(c1) +ALTER TABLE t CLUSTER BY LINEAR(c1) ---------- AST ------------ AlterTable( AlterTableStmt { @@ -13165,6 +13234,56 @@ CreateTable( ) +---------- Input ---------- +CREATE 
TEMPORARY TABLE t (a INT COMMENT 'col comment') +---------- Output --------- +CREATE TEMPORARY TABLE t (a Int32 COMMENT 'col comment') +---------- AST ------------ +CreateTable( + CreateTableStmt { + create_option: Create, + catalog: None, + database: None, + table: Identifier { + span: Some( + 23..24, + ), + name: "t", + quote: None, + ident_type: None, + }, + source: Some( + Columns( + [ + ColumnDefinition { + name: Identifier { + span: Some( + 26..27, + ), + name: "a", + quote: None, + ident_type: None, + }, + data_type: Int32, + expr: None, + comment: Some( + "col comment", + ), + }, + ], + None, + ), + ), + engine: None, + uri_location: None, + cluster_by: None, + table_options: {}, + as_query: None, + table_type: Temporary, + }, +) + + ---------- Input ---------- GRANT CREATE, CREATE USER ON * TO 'test-grant'; ---------- Output --------- @@ -22160,7 +22279,7 @@ RemoveStage { ---------- Input ---------- SELECT sum(d) OVER (w) FROM e; ---------- Output --------- -SELECT sum(d) OVER ( w ) FROM e +SELECT sum(d) OVER (w) FROM e ---------- AST ------------ Query( Query { @@ -22283,7 +22402,7 @@ Query( ---------- Input ---------- SELECT first_value(d) OVER (w) FROM e; ---------- Output --------- -SELECT first_value(d) OVER ( w ) FROM e +SELECT first_value(d) OVER (w) FROM e ---------- AST ------------ Query( Query { @@ -22406,7 +22525,7 @@ Query( ---------- Input ---------- SELECT first_value(d) ignore nulls OVER (w) FROM e; ---------- Output --------- -SELECT first_value(d) IGNORE NULLS OVER ( w ) FROM e +SELECT first_value(d) IGNORE NULLS OVER (w) FROM e ---------- AST ------------ Query( Query { @@ -22531,7 +22650,7 @@ Query( ---------- Input ---------- SELECT first_value(d) respect nulls OVER (w) FROM e; ---------- Output --------- -SELECT first_value(d) RESPECT NULLS OVER ( w ) FROM e +SELECT first_value(d) RESPECT NULLS OVER (w) FROM e ---------- AST ------------ Query( Query { @@ -22656,7 +22775,7 @@ Query( ---------- Input ---------- SELECT sum(d) IGNORE NULLS OVER (w) FROM e; ---------- Output --------- -SELECT sum(d) IGNORE NULLS OVER ( w ) FROM e +SELECT sum(d) IGNORE NULLS OVER (w) FROM e ---------- AST ------------ Query( Query { @@ -22781,7 +22900,7 @@ Query( ---------- Input ---------- SELECT sum(d) OVER w FROM e WINDOW w AS (PARTITION BY f ORDER BY g); ---------- Output --------- -SELECT sum(d) OVER w FROM e WINDOW w AS ( PARTITION BY f ORDER BY g ) +SELECT sum(d) OVER w FROM e WINDOW w AS (PARTITION BY f ORDER BY g) ---------- AST ------------ Query( Query { @@ -22959,6 +23078,299 @@ Query( ) +---------- Input ---------- +SELECT number, rank() OVER (PARTITION BY number % 3 ORDER BY number) +FROM numbers(10) where number > 10 and number > 9 and number > 8; +---------- Output --------- +SELECT number, rank() OVER (PARTITION BY number % 3 ORDER BY number) FROM numbers(10) WHERE number > 10 AND number > 9 AND number > 8 +---------- AST ------------ +Query( + Query { + span: Some( + 0..133, + ), + with: None, + body: Select( + SelectStmt { + span: Some( + 0..133, + ), + hints: None, + distinct: false, + top_n: None, + select_list: [ + AliasedExpr { + expr: ColumnRef { + span: Some( + 7..13, + ), + column: ColumnRef { + database: None, + table: None, + column: Name( + Identifier { + span: Some( + 7..13, + ), + name: "number", + quote: None, + ident_type: None, + }, + ), + }, + }, + alias: None, + }, + AliasedExpr { + expr: FunctionCall { + span: Some( + 15..68, + ), + func: FunctionCall { + distinct: false, + name: Identifier { + span: Some( + 15..19, + ), + name: "rank", + 
quote: None, + ident_type: None, + }, + args: [], + params: [], + window: Some( + WindowDesc { + ignore_nulls: None, + window: WindowSpec( + WindowSpec { + existing_window_name: None, + partition_by: [ + BinaryOp { + span: Some( + 48..49, + ), + op: Modulo, + left: ColumnRef { + span: Some( + 41..47, + ), + column: ColumnRef { + database: None, + table: None, + column: Name( + Identifier { + span: Some( + 41..47, + ), + name: "number", + quote: None, + ident_type: None, + }, + ), + }, + }, + right: Literal { + span: Some( + 50..51, + ), + value: UInt64( + 3, + ), + }, + }, + ], + order_by: [ + OrderByExpr { + expr: ColumnRef { + span: Some( + 61..67, + ), + column: ColumnRef { + database: None, + table: None, + column: Name( + Identifier { + span: Some( + 61..67, + ), + name: "number", + quote: None, + ident_type: None, + }, + ), + }, + }, + asc: None, + nulls_first: None, + }, + ], + window_frame: None, + }, + ), + }, + ), + lambda: None, + }, + }, + alias: None, + }, + ], + from: [ + TableFunction { + span: Some( + 74..85, + ), + lateral: false, + name: Identifier { + span: Some( + 74..81, + ), + name: "numbers", + quote: None, + ident_type: None, + }, + params: [ + Literal { + span: Some( + 82..84, + ), + value: UInt64( + 10, + ), + }, + ], + named_params: [], + alias: None, + sample: None, + }, + ], + selection: Some( + BinaryOp { + span: Some( + 119..122, + ), + op: And, + left: BinaryOp { + span: Some( + 104..107, + ), + op: And, + left: BinaryOp { + span: Some( + 99..100, + ), + op: Gt, + left: ColumnRef { + span: Some( + 92..98, + ), + column: ColumnRef { + database: None, + table: None, + column: Name( + Identifier { + span: Some( + 92..98, + ), + name: "number", + quote: None, + ident_type: None, + }, + ), + }, + }, + right: Literal { + span: Some( + 101..103, + ), + value: UInt64( + 10, + ), + }, + }, + right: BinaryOp { + span: Some( + 115..116, + ), + op: Gt, + left: ColumnRef { + span: Some( + 108..114, + ), + column: ColumnRef { + database: None, + table: None, + column: Name( + Identifier { + span: Some( + 108..114, + ), + name: "number", + quote: None, + ident_type: None, + }, + ), + }, + }, + right: Literal { + span: Some( + 117..118, + ), + value: UInt64( + 9, + ), + }, + }, + }, + right: BinaryOp { + span: Some( + 130..131, + ), + op: Gt, + left: ColumnRef { + span: Some( + 123..129, + ), + column: ColumnRef { + database: None, + table: None, + column: Name( + Identifier { + span: Some( + 123..129, + ), + name: "number", + quote: None, + ident_type: None, + }, + ), + }, + }, + right: Literal { + span: Some( + 132..133, + ), + value: UInt64( + 8, + ), + }, + }, + }, + ), + group_by: None, + having: None, + window_list: None, + qualify: None, + }, + ), + order_by: [], + limit: [], + offset: None, + ignore_result: false, + }, +) + + ---------- Input ---------- GRANT OWNERSHIP ON d20_0014.* TO ROLE 'd20_0015_owner'; ---------- Output --------- @@ -23716,7 +24128,7 @@ abc as ( ) select * from abc; ---------- Output --------- -WITH abc AS (SELECT id, uid, eid, match_id, created_at, updated_at FROM (SELECT * FROM ddd.ccc WHERE score > 0 LIMIT 10) QUALIFY row_number() OVER ( PARTITION BY uid, eid ORDER BY updated_at DESC ) = 1) SELECT * FROM abc +WITH abc AS (SELECT id, uid, eid, match_id, created_at, updated_at FROM (SELECT * FROM ddd.ccc WHERE score > 0 LIMIT 10) QUALIFY row_number() OVER (PARTITION BY uid, eid ORDER BY updated_at DESC) = 1) SELECT * FROM abc ---------- AST ------------ Query( Query { @@ -24306,7 +24718,7 @@ CreateDictionary( ---------- Input ---------- 
describe PROCEDURE p1() ---------- Output --------- -DESCRIBE PROCEDURE p1() +DESCRIBE PROCEDURE p1() ---------- AST ------------ DescProcedure( DescProcedureStmt { @@ -24319,7 +24731,7 @@ DescProcedure( ---------- Input ---------- describe PROCEDURE p1(string, timestamp) ---------- Output --------- -DESCRIBE PROCEDURE p1(STRING, TIMESTAMP) +DESCRIBE PROCEDURE p1(STRING, TIMESTAMP) ---------- AST ------------ DescProcedure( DescProcedureStmt { @@ -24468,7 +24880,7 @@ BEGIN END; $$; ---------- Output --------- -CREATE OR REPLACE PROCEDURE p1() RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ +CREATE OR REPLACE PROCEDURE p1() RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ BEGIN LET sum := 0; FOR x IN SELECT * FROM numbers(100) DO @@ -24514,7 +24926,7 @@ BEGIN END; $$; ---------- Output --------- -CREATE PROCEDURE IF NOT EXISTS p1() RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ +CREATE PROCEDURE IF NOT EXISTS p1() RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ BEGIN LET sum := 0; FOR x IN SELECT * FROM numbers(100) DO @@ -24560,7 +24972,7 @@ BEGIN END; $$; ---------- Output --------- -CREATE PROCEDURE p1() RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ +CREATE PROCEDURE p1() RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ BEGIN LET sum := 0; FOR x IN SELECT * FROM numbers(100) DO @@ -24606,7 +25018,7 @@ BEGIN END; $$; ---------- Output --------- -CREATE PROCEDURE p1(a Int32, b STRING) RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ +CREATE PROCEDURE p1(a Int32, b STRING) RETURNS STRING NOT NULL LANGUAGE SQL COMMENT='test' AS $$ BEGIN LET sum := 0; FOR x IN SELECT * FROM numbers(100) DO @@ -24667,7 +25079,7 @@ BEGIN END; $$; ---------- Output --------- -CREATE PROCEDURE p1() RETURNS TABLE(a STRING NOT NULL, b Int32 NULL) LANGUAGE SQL COMMENT='test' AS $$ +CREATE PROCEDURE p1() RETURNS TABLE(a STRING NOT NULL, b Int32 NULL) LANGUAGE SQL COMMENT='test' AS $$ BEGIN LET sum := 0; FOR x IN SELECT * FROM numbers(100) DO diff --git a/src/query/sql/src/planner/binder/select.rs b/src/query/sql/src/planner/binder/select.rs index d4b115d3e190f..7f887c2d88a6a 100644 --- a/src/query/sql/src/planner/binder/select.rs +++ b/src/query/sql/src/planner/binder/select.rs @@ -507,6 +507,7 @@ impl Binder { if stmt.group_by.is_some() || stmt.having.is_some() || stmt.distinct + || stmt.qualify.is_some() || !bind_context.aggregate_info.group_items.is_empty() || !bind_context.aggregate_info.aggregate_functions.is_empty() { diff --git a/tests/sqllogictests/suites/base/05_ddl/05_0019_ddl_create_view.test b/tests/sqllogictests/suites/base/05_ddl/05_0019_ddl_create_view.test index 6e42b5271695f..62c26f30d7545 100644 --- a/tests/sqllogictests/suites/base/05_ddl/05_0019_ddl_create_view.test +++ b/tests/sqllogictests/suites/base/05_ddl/05_0019_ddl_create_view.test @@ -111,15 +111,6 @@ create view tmp_view2(d1, d2) as select * from numbers(3) statement ok drop view if exists tmp_view -query T -explain syntax create view test as SELECT * FROM 's3://testbucket/admin/data/tuple.parquet'(files => ('tuple.parquet', 'test.parquet'), pattern => '.*.parquet', FILE_FORMAT => 'parquet', CONNECTION => (aws_key_id = 'minioadmin', aws_secret_key = 'minioadmin', endpoint_url = 'http://127.0.0.1:9900/')) ----- -CREATE VIEW test -AS - SELECT * - FROM - 's3://testbucket/admin/data/tuple.parquet' ( FILES => ('tuple.parquet', 'test.parquet'), FILE_FORMAT => 'parquet', PATTERN => '.*.parquet', CONNECTION => (aws_key_id = 'minioadmin', aws_secret_key = 'minioadmin', 
endpoint_url = 'http://127.0.0.1:9900/' ) ) - statement ok drop view if exists loop_view1; diff --git a/tests/sqllogictests/suites/base/09_fuse_engine/09_0025_lazy_read.test b/tests/sqllogictests/suites/base/09_fuse_engine/09_0025_lazy_read.test index 8e49beef8a1cb..bc730936448a4 100644 --- a/tests/sqllogictests/suites/base/09_fuse_engine/09_0025_lazy_read.test +++ b/tests/sqllogictests/suites/base/09_fuse_engine/09_0025_lazy_read.test @@ -100,4 +100,25 @@ select * from t_11882 where substr(b,1,1)='b' limit 2 3 bbb statement ok -drop table t_11882 \ No newline at end of file +create or replace table t_17146( + data variant, + user_id int, + created_at timestamp +); + +statement ok +SELECT data, + user_id, + created_at +FROM t_17146 +WHERE data::STRING LIKE '%xxx%' QUALIFY row_number() OVER ( + PARTITION BY user_id + ORDER BY created_at DESC + )=1 +LIMIT 1; + +statement ok +drop table t_11882 + +statement ok +drop table t_17146 \ No newline at end of file diff --git a/tests/sqllogictests/suites/mode/cluster/distributed_merge_into.test b/tests/sqllogictests/suites/mode/cluster/distributed_merge_into.test index f387e354e85b6..eb06be236d4c8 100644 --- a/tests/sqllogictests/suites/mode/cluster/distributed_merge_into.test +++ b/tests/sqllogictests/suites/mode/cluster/distributed_merge_into.test @@ -219,7 +219,7 @@ statement ok set disable_join_reorder = 1; query T -MERGE INTO t1 USING (SELECT number FROM t2_s QUALIFY row_number() OVER ( PARTITION BY number ORDER BY number DESC ) = 1) AS t2 ON t1.number = t2.number WHEN MATCHED then UPDATE SET t1.number = t2.number WHEN NOT MATCHED THEN INSERT(number) VALUES(t2.number); +MERGE INTO t1 USING (SELECT number FROM t2_s QUALIFY row_number() OVER (PARTITION BY number ORDER BY number DESC) = 1) AS t2 ON t1.number = t2.number WHEN MATCHED then UPDATE SET t1.number = t2.number WHEN NOT MATCHED THEN INSERT(number) VALUES(t2.number); ---- 216667 250000 diff --git a/tests/sqllogictests/suites/mode/cluster/shuffle.test b/tests/sqllogictests/suites/mode/cluster/shuffle.test index b928aea7da826..de225a52d99ef 100644 --- a/tests/sqllogictests/suites/mode/cluster/shuffle.test +++ b/tests/sqllogictests/suites/mode/cluster/shuffle.test @@ -17,7 +17,7 @@ statement ok set disable_join_reorder = 1; query T -explain select * from (select t1.number, t2.number from t1 right outer join (SELECT number FROM t2 QUALIFY row_number() OVER ( PARTITION BY number ORDER BY number DESC ) = 1) AS t2 ON t1.number = t2.number) as tt(a, b) order by a; +explain select * from (select t1.number, t2.number from t1 right outer join (SELECT number FROM t2 QUALIFY row_number() OVER (PARTITION BY number ORDER BY number DESC ) = 1) AS t2 ON t1.number = t2.number) as tt(a, b) order by a; ---- Sort ├── output columns: [t1.number (#0), t2.number (#1)] @@ -39,10 +39,10 @@ Sort ├── estimated rows: 0.01 ├── Filter(Build) │ ├── output columns: [t2.number (#1)] - │ ├── filters: [row_number() OVER ( PARTITION BY number ORDER BY number DESC ) (#2) = 1] + │ ├── filters: [row_number() OVER (PARTITION BY number ORDER BY number DESC) (#2) = 1] │ ├── estimated rows: 0.01 │ └── Window - │ ├── output columns: [t2.number (#1), row_number() OVER ( PARTITION BY number ORDER BY number DESC ) (#2)] + │ ├── output columns: [t2.number (#1), row_number() OVER (PARTITION BY number ORDER BY number DESC) (#2)] │ ├── aggregate function: [row_number] │ ├── partition by: [number] │ ├── order by: [number] diff --git a/tests/sqllogictests/suites/mode/cluster/window.test b/tests/sqllogictests/suites/mode/cluster/window.test 
index 1781c28748518..3f67d2841a862 100644 --- a/tests/sqllogictests/suites/mode/cluster/window.test +++ b/tests/sqllogictests/suites/mode/cluster/window.test @@ -65,7 +65,7 @@ Charlie Marketing 55000.00 1 David Marketing 50000.00 2 -query +query explain SELECT e.name AS EmployeeName, d.department_name AS DepartmentName, e.salary AS Salary, @@ -76,10 +76,10 @@ JOIN departments d ON e.department_id = d.department_id; ---- Exchange -├── output columns: [e.name (#1), e.salary (#3), d.department_name (#5), d.department_id (#4), ROW_NUMBER() OVER ( PARTITION BY d.department_id ORDER BY e.salary DESC ) (#6)] +├── output columns: [e.name (#1), e.salary (#3), d.department_name (#5), d.department_id (#4), ROW_NUMBER() OVER (PARTITION BY d.department_id ORDER BY e.salary DESC) (#6)] ├── exchange type: Merge └── Window - ├── output columns: [e.name (#1), e.salary (#3), d.department_name (#5), d.department_id (#4), ROW_NUMBER() OVER ( PARTITION BY d.department_id ORDER BY e.salary DESC ) (#6)] + ├── output columns: [e.name (#1), e.salary (#3), d.department_name (#5), d.department_id (#4), ROW_NUMBER() OVER (PARTITION BY d.department_id ORDER BY e.salary DESC) (#6)] ├── aggregate function: [row_number] ├── partition by: [department_id] ├── order by: [salary] @@ -122,22 +122,22 @@ Exchange ├── push downs: [filters: [], limit: NONE] └── estimated rows: 10.00 -query +query explain select number, lead(number,1, 0) over (partition by number % 3 order by number+ 1), lead(number,2, 0) over (partition by number % 3 order by number + 1) from numbers(50); ---- Exchange -├── output columns: [numbers.number (#0), lead_default_value (#1), lead_part_0 (#2), lead_order_0 (#3), lead(number, 1, 0) OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#4), lead(number, 2, 0) OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#5)] +├── output columns: [numbers.number (#0), lead_default_value (#1), lead_part_0 (#2), lead_order_0 (#3), lead(number, 1, 0) OVER (PARTITION BY number % 3 ORDER BY number + 1) (#4), lead(number, 2, 0) OVER (PARTITION BY number % 3 ORDER BY number + 1) (#5)] ├── exchange type: Merge └── Window - ├── output columns: [numbers.number (#0), lead_default_value (#1), lead_part_0 (#2), lead_order_0 (#3), lead(number, 1, 0) OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#4), lead(number, 2, 0) OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#5)] + ├── output columns: [numbers.number (#0), lead_default_value (#1), lead_part_0 (#2), lead_order_0 (#3), lead(number, 1, 0) OVER (PARTITION BY number % 3 ORDER BY number + 1) (#4), lead(number, 2, 0) OVER (PARTITION BY number % 3 ORDER BY number + 1) (#5)] ├── aggregate function: [lead] ├── partition by: [lead_part_0] ├── order by: [lead_order_0] ├── frame: [Rows: Following(Some(Number(2_u64))) ~ Following(Some(Number(2_u64)))] └── Window - ├── output columns: [numbers.number (#0), lead_default_value (#1), lead_part_0 (#2), lead_order_0 (#3), lead(number, 1, 0) OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#4)] + ├── output columns: [numbers.number (#0), lead_default_value (#1), lead_part_0 (#2), lead_order_0 (#3), lead(number, 1, 0) OVER (PARTITION BY number % 3 ORDER BY number + 1) (#4)] ├── aggregate function: [lead] ├── partition by: [lead_part_0] ├── order by: [lead_order_0] @@ -173,7 +173,7 @@ CREATE OR REPLACE TABLE sales ( net_paid DECIMAL(10, 2) NOT NULL ); -query +query explain SELECT customer_id, ROUND(AVG(net_paid) OVER (PARTITION BY customer_id), 3) AS customer_avg, ROUND(AVG(net_paid) OVER () - AVG(net_paid) OVER (PARTITION 
BY customer_id), 3) AS diff_from_overall_avg @@ -194,19 +194,19 @@ Limit ├── estimated rows: 0.00 └── EvalScalar ├── output columns: [sales.customer_id (#2), customer_avg (#8), diff_from_overall_avg (#9)] - ├── expressions: [round(3)(AVG(net_paid) OVER ( PARTITION BY customer_id ) (#6), 3), round(3)(AVG(net_paid) OVER ( ) (#7) - AVG(net_paid) OVER ( PARTITION BY customer_id ) (#6), 3)] + ├── expressions: [round(3)(AVG(net_paid) OVER (PARTITION BY customer_id) (#6), 3), round(3)(AVG(net_paid) OVER () (#7) - AVG(net_paid) OVER (PARTITION BY customer_id) (#6), 3)] ├── estimated rows: 0.00 └── Window - ├── output columns: [sales.customer_id (#2), sales.net_paid (#5), AVG(net_paid) OVER ( PARTITION BY customer_id ) (#6), AVG(net_paid) OVER ( ) (#7)] + ├── output columns: [sales.customer_id (#2), sales.net_paid (#5), AVG(net_paid) OVER (PARTITION BY customer_id) (#6), AVG(net_paid) OVER () (#7)] ├── aggregate function: [avg(net_paid)] ├── partition by: [] ├── order by: [] ├── frame: [Range: Preceding(None) ~ Following(None)] └── Exchange - ├── output columns: [sales.customer_id (#2), sales.net_paid (#5), AVG(net_paid) OVER ( PARTITION BY customer_id ) (#6)] + ├── output columns: [sales.customer_id (#2), sales.net_paid (#5), AVG(net_paid) OVER (PARTITION BY customer_id) (#6)] ├── exchange type: Merge └── Window - ├── output columns: [sales.customer_id (#2), sales.net_paid (#5), AVG(net_paid) OVER ( PARTITION BY customer_id ) (#6)] + ├── output columns: [sales.customer_id (#2), sales.net_paid (#5), AVG(net_paid) OVER (PARTITION BY customer_id) (#6)] ├── aggregate function: [avg(net_paid)] ├── partition by: [customer_id] ├── order by: [] diff --git a/tests/sqllogictests/suites/mode/standalone/explain/explain.test b/tests/sqllogictests/suites/mode/standalone/explain/explain.test index e4f5fe40817bd..15401b3b5a769 100644 --- a/tests/sqllogictests/suites/mode/standalone/explain/explain.test +++ b/tests/sqllogictests/suites/mode/standalone/explain/explain.test @@ -161,247 +161,90 @@ EvalScalar query T explain syntax select 1, 'ab', [1,2,3], (1, 'a') ---- -SELECT - 1, - 'ab', - [1, 2, 3], - (1, 'a') +SELECT 1, 'ab', [1, 2, 3], (1, 'a') query T explain syntax select a, sum(b) as sum from t1 where a in (1, 2) and b > 0 and b < 100 group by a order by a ---- -SELECT - a, - sum(b) AS sum -FROM - t1 -WHERE - ( - ( - a IN (1, 2) - AND (b > 0) - ) - AND (b < 100) - ) -GROUP BY a -ORDER BY a +SELECT a, sum(b) AS sum FROM t1 WHERE a IN(1, 2) AND b > 0 AND b < 100 GROUP BY a ORDER BY a query T explain syntax select * from t1 inner join t2 on t1.a = t2.a and t1.b = t2.b and t1.a > 2 ---- -SELECT * -FROM - t1 - INNER JOIN t2 ON ( - ( - (t1.a = t2.a) - AND (t1.b = t2.b) - ) - AND (t1.a > 2) - ) +SELECT * FROM t1 INNER JOIN t2 ON t1.a = t2.a AND t1.b = t2.b AND t1.a > 2 query T explain syntax delete from t1 where a > 100 and b > 1 and b < 10 ---- -DELETE FROM - t1 -WHERE - ( - ( - (a > 100) - AND (b > 1) - ) - AND (b < 10) - ) +DELETE FROM t1 WHERE a > 100 AND b > 1 AND b < 10 query T explain syntax copy into t1 from 's3://mybucket/data.csv' file_format = ( type = CSV field_delimiter = ',' record_delimiter = '\n' skip_header = 1) size_limit=10 ---- -COPY -INTO t1 -FROM 's3://mybucket/data.csv' -FILE_FORMAT = ( - field_delimiter = ',', - record_delimiter = '\n', - skip_header = 1, - type = CSV -) -SIZE_LIMIT = 10 -PURGE = false -DISABLE_VARIANT_CHECK = false +COPY INTO t1 FROM 's3://mybucket/data.csv' FILE_FORMAT = (field_delimiter = ',', record_delimiter = '\n', skip_header = 1, type = CSV) SIZE_LIMIT = 10 PURGE = 
false FORCE = false DISABLE_VARIANT_CHECK = false ON_ERROR = abort RETURN_FAILED_ONLY = false query T explain syntax copy into 's3://mybucket/data.csv' from t1 file_format = ( type = CSV field_delimiter = ',' record_delimiter = '\n' skip_header = 1) ---- -COPY -INTO Uri(UriLocation { protocol: "s3", name: "mybucket", path: "/data.csv", connection: Connection { visited_keys: {}, conns: {} } }) -FROM t1 -FILE_FORMAT = ( - field_delimiter = ',', - record_delimiter = '\n', - skip_header = 1, - type = CSV -) -SINGLE = false +COPY INTO 's3://mybucket/data.csv' FROM t1 FILE_FORMAT = (field_delimiter = ',', record_delimiter = '\n', skip_header = 1, type = CSV) SINGLE = false MAX_FILE_SIZE = 0 DETAILED_OUTPUT = false INCLUDE_QUERY_ID = true USE_RAW_PATH = false OVERWRITE = false query T explain syntax create OR REPLACE table t3(a int64, b uint64, c float64, d string, e array(int32), f tuple(f1 bool, f2 string)) engine=fuse cluster by (a, b, c) comment='test' compression='LZ4' ---- -CREATE OR REPLACE TABLE t3 ( - a Int64, - b UInt64, - c Float64, - d STRING, - e ARRAY(Int32), - f TUPLE(f1 BOOLEAN, f2 STRING) -) ENGINE = FUSE -CLUSTER BY LINEAR( - a, - b, - c -) -comment = 'test', -compression = 'LZ4' +CREATE OR REPLACE TABLE t3 (a Int64, b UInt64, c Float64, d STRING, e ARRAY(Int32), f TUPLE(f1 BOOLEAN, f2 STRING)) ENGINE = FUSE CLUSTER BY LINEAR(a, b, c) comment = 'test' compression = 'LZ4' query T explain syntax create view v as select number % 3 as a from numbers(100) where number > 10 ---- -CREATE VIEW v -AS - SELECT (number % 3) AS a - FROM - numbers(100) - WHERE - (number > 10) +CREATE VIEW v AS SELECT number % 3 AS a FROM numbers(100) WHERE number > 10 query T explain syntax select 1, 'ab', [1,2,3] as a, (1, 'a') as t ---- -SELECT - 1, - 'ab', - [1, 2, 3] AS a, - (1, 'a') AS t +SELECT 1, 'ab', [1, 2, 3] AS a, (1, 'a') AS t query T explain syntax select case when a > 1 then 'x' when a < 10 then 'y' else 'z' end from t1 ---- -SELECT CASE - WHEN (a > 1) THEN 'x', - WHEN (a < 10) THEN 'y' - ELSE 'z' - END -FROM - t1 +SELECT CASE WHEN a > 1 THEN 'x' WHEN a < 10 THEN 'y' ELSE 'z' END FROM t1 query T explain syntax select a, sum(b) as sum from t1 where a in (1, 2) and b > 0 and b < 100 group by a order by a limit 3 ---- -SELECT - a, - sum(b) AS sum -FROM - t1 -WHERE - ( - ( - a IN (1, 2) - AND (b > 0) - ) - AND (b < 100) - ) -GROUP BY a -ORDER BY a -LIMIT 3 +SELECT a, sum(b) AS sum FROM t1 WHERE a IN(1, 2) AND b > 0 AND b < 100 GROUP BY a ORDER BY a LIMIT 3 query T explain syntax select * from t1 inner join t2 on t1.a = t2.a and t1.b = t2.b and t1.a > 2 ---- -SELECT * -FROM - t1 - INNER JOIN t2 ON ( - ( - (t1.a = t2.a) - AND (t1.b = t2.b) - ) - AND (t1.a > 2) - ) +SELECT * FROM t1 INNER JOIN t2 ON t1.a = t2.a AND t1.b = t2.b AND t1.a > 2 query T explain syntax with cte (a, b) as (select 1, 2 union all select 3, 4) select a, b from cte ---- -WITH - cte(a, b) AS - ( - SELECT - 1, - 2 - UNION ALL - SELECT - 3, - 4 - ) -SELECT - a, b -FROM - cte +WITH cte(a, b) AS (SELECT 1, 2 UNION ALL SELECT 3, 4) SELECT a, b FROM cte query T explain syntax with cte (a, b) as (values(1,2),(3,4)) select a, b from cte ---- -WITH - cte(a, b) AS - ( - VALUES(1, 2), (3, 4) - ) -SELECT - a, b -FROM - cte +WITH cte(a, b) AS (VALUES(1, 2), (3, 4)) SELECT a, b FROM cte query T explain syntax insert into t1 (a, b) values (1, 2),(3, 4) ---- -INSERT INTO - t1 (a, b) -VALUES - (1, 2), - (3, 4) +INSERT INTO t1 (a, b) VALUES (1, 2), (3, 4) query T explain syntax delete from t1 where a > 100 and b > 1 and b < 10 ---- -DELETE FROM - 
t1 -WHERE - ( - ( - (a > 100) - AND (b > 1) - ) - AND (b < 10) - ) +DELETE FROM t1 WHERE a > 100 AND b > 1 AND b < 10 query T explain syntax copy into t1 from 's3://mybucket/data.csv' file_format = ( type = CSV field_delimiter = ',' record_delimiter = '\n' skip_header = 1) size_limit=10 max_files=10 ---- -COPY -INTO t1 -FROM 's3://mybucket/data.csv' -FILE_FORMAT = ( - field_delimiter = ',', - record_delimiter = '\n', - skip_header = 1, - type = CSV -) -SIZE_LIMIT = 10 -MAX_FILES = 10 -PURGE = false -DISABLE_VARIANT_CHECK = false +COPY INTO t1 FROM 's3://mybucket/data.csv' FILE_FORMAT = (field_delimiter = ',', record_delimiter = '\n', skip_header = 1, type = CSV) SIZE_LIMIT = 10 MAX_FILES = 10 PURGE = false FORCE = false DISABLE_VARIANT_CHECK = false ON_ERROR = abort RETURN_FAILED_ONLY = false query T explain syntax create database db1 engine=default @@ -411,32 +254,12 @@ CREATE DATABASE db1 ENGINE = DEFAULT query T explain syntax create OR REPLACE table t3(a int64, b uint64, c float64, d string, e array(int32), f tuple(f1 bool, f2 string)) engine=fuse cluster by (a, b, c) comment='test' compression='LZ4' ---- -CREATE OR REPLACE TABLE t3 ( - a Int64, - b UInt64, - c Float64, - d STRING, - e ARRAY(Int32), - f TUPLE(f1 BOOLEAN, f2 STRING) -) ENGINE = FUSE -CLUSTER BY LINEAR( - a, - b, - c -) -comment = 'test', -compression = 'LZ4' +CREATE OR REPLACE TABLE t3 (a Int64, b UInt64, c Float64, d STRING, e ARRAY(Int32), f TUPLE(f1 BOOLEAN, f2 STRING)) ENGINE = FUSE CLUSTER BY LINEAR(a, b, c) comment = 'test' compression = 'LZ4' query T explain syntax create view v as select number % 3 as a from numbers(100) where number > 10 ---- -CREATE VIEW v -AS - SELECT (number % 3) AS a - FROM - numbers(100) - WHERE - (number > 10) +CREATE VIEW v AS SELECT number % 3 AS a FROM numbers(100) WHERE number > 10 query T explain syntax show create table t1 @@ -451,25 +274,17 @@ CREATE USER 'test'@'%' IDENTIFIED WITH sha256_password BY 'new_password' query T explain syntax select unknown_table.a + 1 from unknown_table1 ---- -SELECT (unknown_table.a + 1) -FROM - unknown_table1 +SELECT unknown_table.a + 1 FROM unknown_table1 query T explain syntax SELECT * FROM monthly_sales PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) ORDER BY EMPID ---- -SELECT * -FROM - monthly_sales PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) -ORDER BY EMPID +SELECT * FROM monthly_sales PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) ORDER BY EMPID query T explain syntax SELECT * FROM monthly_sales_1 UNPIVOT(sales FOR month IN (jan, feb, mar, april)) ORDER BY empid ---- -SELECT * -FROM - monthly_sales_1 UNPIVOT(sales FOR month IN (jan, feb, mar, april)) -ORDER BY empid +SELECT * FROM monthly_sales_1 UNPIVOT(sales FOR month IN (jan, feb, mar, april)) ORDER BY empid query T explain select a from t1 UNION ALL select a from t2 @@ -750,7 +565,7 @@ Limit ├── push downs: [filters: [(t2.a (#2) > 2 OR t2.b (#3) < 4)], limit: NONE] └── estimated rows: 5.00 -query +query explain select * from t1,t2 where (t1.a > 1 or t1.b < 2) and (t1.a > 1 or t1.b < 2) ---- HashJoin @@ -785,7 +600,7 @@ HashJoin ├── push downs: [filters: [], limit: NONE] └── estimated rows: 5.00 -query +query explain select count(distinct a) from t1; ---- AggregateFinal @@ -817,7 +632,7 @@ AggregateFinal ├── push downs: [filters: [], limit: NONE] └── estimated rows: 1.00 -query +query explain select count_distinct(a) from t1; ---- AggregateFinal @@ -849,7 +664,7 @@ AggregateFinal ├── push downs: [filters: [], limit: NONE] └── estimated rows: 1.00 
-query +query explain select * from (values(1, 'a'),(2, 'b')) t(c1,c2) ---- ConstantTableScan @@ -863,33 +678,25 @@ drop table t1 statement ok drop table t2 -query +query explain syntax select * from read_parquet('p1', 'p2', 'p3'); ---- -SELECT * -FROM - read_parquet('p1', 'p2', 'p3') +SELECT * FROM read_parquet('p1', 'p2', 'p3') -query +query explain syntax select * from read_parquet(prune_page=>true, refresh_meta_cache=>true); ---- -SELECT * -FROM - read_parquet(prune_page=>TRUE, refresh_meta_cache=>TRUE) +SELECT * FROM read_parquet(prune_page=>TRUE,refresh_meta_cache=>TRUE) -query +query explain syntax select * from read_parquet('p1', 'p2', 'p3', prune_page=>true, refresh_meta_cache=>true); ---- -SELECT * -FROM - read_parquet('p1', 'p2', 'p3', prune_page=>TRUE, refresh_meta_cache=>TRUE) +SELECT * FROM read_parquet('p1', 'p2', 'p3',prune_page=>TRUE,refresh_meta_cache=>TRUE) -query +query explain syntax select * from read_parquet('p1', 'p2', 'p3', prune_page=>true, refresh_meta_cache=>true); ---- -SELECT * -FROM - read_parquet('p1', 'p2', 'p3', prune_page=>TRUE, refresh_meta_cache=>TRUE) +SELECT * FROM read_parquet('p1', 'p2', 'p3',prune_page=>TRUE,refresh_meta_cache=>TRUE) statement ok drop table if exists t4 @@ -897,7 +704,7 @@ drop table if exists t4 statement ok create OR REPLACE table t4(a int, b string); -query +query explain select * from t4 where a = 1 and try_cast(get(try_parse_json(b),'bb') as varchar) = 'xx'; ---- Filter @@ -920,7 +727,7 @@ drop view if exists v4 statement ok create view v4 as select a as a, try_cast(get(try_parse_json(b), 'bb') as varchar) as b from t4; -query +query explain select * from v4 where b = 'xx'; ---- EvalScalar diff --git a/tests/sqllogictests/suites/mode/standalone/explain/window.test b/tests/sqllogictests/suites/mode/standalone/explain/window.test index 44f4039494174..d548cb11d30d3 100644 --- a/tests/sqllogictests/suites/mode/standalone/explain/window.test +++ b/tests/sqllogictests/suites/mode/standalone/explain/window.test @@ -14,11 +14,11 @@ query T explain SELECT depname, empno, salary, sum(salary) OVER (PARTITION BY depname ORDER BY empno) FROM empsalary ORDER BY depname, empno ---- Sort -├── output columns: [empsalary.depname (#0), empsalary.empno (#1), empsalary.salary (#2), sum(salary) OVER ( PARTITION BY depname ORDER BY empno ) (#4)] +├── output columns: [empsalary.depname (#0), empsalary.empno (#1), empsalary.salary (#2), sum(salary) OVER (PARTITION BY depname ORDER BY empno) (#4)] ├── sort keys: [depname ASC NULLS LAST, empno ASC NULLS LAST] ├── estimated rows: 0.00 └── Window - ├── output columns: [empsalary.depname (#0), empsalary.empno (#1), empsalary.salary (#2), sum(salary) OVER ( PARTITION BY depname ORDER BY empno ) (#4)] + ├── output columns: [empsalary.depname (#0), empsalary.empno (#1), empsalary.salary (#2), sum(salary) OVER (PARTITION BY depname ORDER BY empno) (#4)] ├── aggregate function: [sum(salary)] ├── partition by: [depname] ├── order by: [empno] @@ -101,7 +101,7 @@ Filter ├── filters: [t2.rank (#6) = 1] ├── estimated rows: 0.00 └── Window - ├── output columns: [k (#4), v (#5), rank() OVER ( PARTITION BY k ORDER BY v DESC ) (#6)] + ├── output columns: [k (#4), v (#5), rank() OVER (PARTITION BY k ORDER BY v DESC) (#6)] ├── aggregate function: [rank] ├── partition by: [k] ├── order by: [v] @@ -149,7 +149,7 @@ Filter ├── filters: [t2.rank (#6) = 1, is_true(t2.k (#4) = 12)] ├── estimated rows: 0.00 └── Window - ├── output columns: [k (#4), v (#5), rank() OVER ( PARTITION BY v ORDER BY v DESC ) (#6)] + ├── output columns: 
[k (#4), v (#5), rank() OVER (PARTITION BY v ORDER BY v DESC) (#6)] ├── aggregate function: [rank] ├── partition by: [v] ├── order by: [v] @@ -189,7 +189,7 @@ Filter ├── filters: [t2.rank (#6) = 1, is_true(t2.k (#4) = 12)] ├── estimated rows: 0.00 └── Window - ├── output columns: [k (#4), v (#5), rank() OVER ( ORDER BY v DESC ) (#6)] + ├── output columns: [k (#4), v (#5), rank() OVER (ORDER BY v DESC) (#6)] ├── aggregate function: [rank] ├── partition by: [] ├── order by: [v] @@ -230,11 +230,11 @@ query T explain select max(a) OVER (partition by a) FROM t qualify max(a) OVER (partition by a) > 3; ---- Filter -├── output columns: [max(a) OVER ( PARTITION BY a ) (#1)] -├── filters: [is_true(max(a) OVER ( PARTITION BY a ) (#1) > 3)] +├── output columns: [max(a) OVER (PARTITION BY a) (#1)] +├── filters: [is_true(max(a) OVER (PARTITION BY a) (#1) > 3)] ├── estimated rows: 0.00 └── Window - ├── output columns: [t.a (#0), max(a) OVER ( PARTITION BY a ) (#1)] + ├── output columns: [t.a (#0), max(a) OVER (PARTITION BY a) (#1)] ├── aggregate function: [max(a)] ├── partition by: [a] ├── order by: [] @@ -270,7 +270,7 @@ query T explain select b, row_number() over (order by b) from tbpush where b > 3; ---- Window -├── output columns: [tbpush.b (#0), row_number() OVER ( ORDER BY b ) (#1)] +├── output columns: [tbpush.b (#0), row_number() OVER (ORDER BY b) (#1)] ├── aggregate function: [row_number] ├── partition by: [] ├── order by: [b] @@ -324,11 +324,11 @@ query T explain select * from (select b, row_number() over (order by b) from tbpush) where b > 3; ---- Filter -├── output columns: [tbpush.b (#0), row_number() OVER ( ORDER BY b ) (#1)] +├── output columns: [tbpush.b (#0), row_number() OVER (ORDER BY b) (#1)] ├── filters: [is_true(tbpush.b (#0) > 3)] ├── estimated rows: 0.00 └── Window - ├── output columns: [tbpush.b (#0), row_number() OVER ( ORDER BY b ) (#1)] + ├── output columns: [tbpush.b (#0), row_number() OVER (ORDER BY b) (#1)] ├── aggregate function: [row_number] ├── partition by: [] ├── order by: [b] @@ -484,20 +484,20 @@ query T explain select *, sum(a) over (partition by a order by a desc rows between unbounded preceding and current row) from t where a > 1 order by b limit 3 ---- RowFetch -├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER ( PARTITION BY a ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) (#6), t.c (#2), t.d (#3), t.e (#4), t.f (#5)] +├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER (PARTITION BY a ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) (#6), t.c (#2), t.d (#3), t.e (#4), t.f (#5)] ├── columns to fetch: [c, d, e, f] ├── estimated rows: 0.00 └── Limit - ├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER ( PARTITION BY a ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) (#6)] + ├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER (PARTITION BY a ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) (#6)] ├── limit: 3 ├── offset: 0 ├── estimated rows: 0.00 └── Sort - ├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER ( PARTITION BY a ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) (#6)] + ├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER (PARTITION BY a ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) (#6)] ├── sort keys: [b ASC NULLS LAST] ├── estimated rows: 0.00 └── Window - ├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER ( PARTITION BY a 
ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW ) (#6)] + ├── output columns: [t.a (#0), t.b (#1), t._row_id (#7), sum(a) OVER (PARTITION BY a ORDER BY a DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) (#6)] ├── aggregate function: [sum(a)] ├── partition by: [a] ├── order by: [a] @@ -546,7 +546,7 @@ query T explain optimized select time, rowkey from (select *, row_number() over(partition by rowkey order by time desc) as rn from table43764_orc) a where rn < 1 ---- EvalScalar -├── scalars: [a.rowkey (#0) AS (#0), table43764_orc.rowkey (#0) AS (#0), a.time (#1) AS (#1), table43764_orc.time (#1) AS (#1), table43764_orc.sirc_action (#2) AS (#2), table43764_orc.sirc_operation_count (#3) AS (#3), table43764_orc.akc087 (#4) AS (#4), table43764_orc.aae035 (#5) AS (#5), row_number() OVER ( PARTITION BY rowkey ORDER BY time DESC ) (#6) AS (#6), a.rn (#6) AS (#7)] +├── scalars: [a.rowkey (#0) AS (#0), table43764_orc.rowkey (#0) AS (#0), a.time (#1) AS (#1), table43764_orc.time (#1) AS (#1), table43764_orc.sirc_action (#2) AS (#2), table43764_orc.sirc_operation_count (#3) AS (#3), table43764_orc.akc087 (#4) AS (#4), table43764_orc.aae035 (#5) AS (#5), row_number() OVER (PARTITION BY rowkey ORDER BY time DESC) (#6) AS (#6), a.rn (#6) AS (#7)] └── EmptyResultScan # same order multi window @@ -586,17 +586,17 @@ explain select a, sum(number - 1) over (partition by number % 3) from (select n from numbers(50)); ---- Window -├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#3), sum_arg_0 (#4), rank_part_0 (#1), sum(number - 1) OVER ( PARTITION BY number % 3 ) (#5)] +├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION BY number % 3 ORDER BY number + 1) (#3), sum_arg_0 (#4), rank_part_0 (#1), sum(number - 1) OVER (PARTITION BY number % 3) (#5)] ├── aggregate function: [sum(sum_arg_0)] ├── partition by: [rank_part_0] ├── order by: [] ├── frame: [Range: Preceding(None) ~ Following(None)] └── EvalScalar - ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#3), sum_arg_0 (#4), rank_part_0 (#1)] + ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION BY number % 3 ORDER BY number + 1) (#3), sum_arg_0 (#4), rank_part_0 (#1)] ├── expressions: [numbers.number (#0) - 1, numbers.number (#0) % 3] ├── estimated rows: 50.00 └── Window - ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank_order_0 (#2), rank() OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#3)] + ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank_order_0 (#2), rank() OVER (PARTITION BY number % 3 ORDER BY number + 1) (#3)] ├── aggregate function: [rank] ├── partition by: [rank_part_0] ├── order by: [rank_order_0] @@ -624,19 +624,19 @@ explain select number, avg(number) over (partition by number % 3), rank() over from numbers(50); ---- Window -├── output columns: [numbers.number (#0), avg_part_0 (#1), rank_order_0 (#3), rank() OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#4), avg(number) OVER ( PARTITION BY number % 3 ) (#2), sum(number) OVER ( PARTITION BY number % 3 ) (#5)] +├── output columns: [numbers.number (#0), avg_part_0 (#1), rank_order_0 (#3), rank() OVER (PARTITION BY number % 3 ORDER BY number + 1) (#4), avg(number) OVER (PARTITION BY number % 3) (#2), sum(number) OVER (PARTITION BY number % 3) (#5)] ├── aggregate function: [sum(number)] ├── partition by: [avg_part_0] ├── order by: [] 
├── frame: [Range: Preceding(None) ~ Following(None)] └── Window - ├── output columns: [numbers.number (#0), avg_part_0 (#1), rank_order_0 (#3), rank() OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#4), avg(number) OVER ( PARTITION BY number % 3 ) (#2)] + ├── output columns: [numbers.number (#0), avg_part_0 (#1), rank_order_0 (#3), rank() OVER (PARTITION BY number % 3 ORDER BY number + 1) (#4), avg(number) OVER (PARTITION BY number % 3) (#2)] ├── aggregate function: [avg(number)] ├── partition by: [avg_part_0] ├── order by: [] ├── frame: [Range: Preceding(None) ~ Following(None)] └── Window - ├── output columns: [numbers.number (#0), avg_part_0 (#1), rank_order_0 (#3), rank() OVER ( PARTITION BY number % 3 ORDER BY number + 1 ) (#4)] + ├── output columns: [numbers.number (#0), avg_part_0 (#1), rank_order_0 (#3), rank() OVER (PARTITION BY number % 3 ORDER BY number + 1) (#4)] ├── aggregate function: [rank] ├── partition by: [avg_part_0] ├── order by: [rank_order_0] @@ -663,17 +663,17 @@ query T explain select a, sum(number - 1) over (partition by number % 3) from (select number, rank()over (partition by number % 3 order by number) a from numbers(50)); ---- Window -├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number ) (#2), sum_arg_0 (#3), rank_part_0 (#1), sum(number - 1) OVER ( PARTITION BY number % 3 ) (#4)] +├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION BY number % 3 ORDER BY number) (#2), sum_arg_0 (#3), rank_part_0 (#1), sum(number - 1) OVER (PARTITION BY number % 3) (#4)] ├── aggregate function: [sum(sum_arg_0)] ├── partition by: [rank_part_0] ├── order by: [] ├── frame: [Range: Preceding(None) ~ Following(None)] └── EvalScalar - ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number ) (#2), sum_arg_0 (#3), rank_part_0 (#1)] + ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION BY number % 3 ORDER BY number) (#2), sum_arg_0 (#3), rank_part_0 (#1)] ├── expressions: [numbers.number (#0) - 1, numbers.number (#0) % 3] ├── estimated rows: 50.00 └── Window - ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number ) (#2)] + ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION BY number % 3 ORDER BY number) (#2)] ├── aggregate function: [rank] ├── partition by: [rank_part_0] ├── order by: [number] @@ -700,17 +700,17 @@ query T explain select a, sum(number - 1) over (partition by number % 3) from (select number, rank()over (partition by number % 3 order by number) a from numbers(50)) t(number); ---- Window -├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number ) (#2), sum_arg_0 (#3), rank_part_0 (#1), sum(number - 1) OVER ( PARTITION BY number % 3 ) (#4)] +├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION BY number % 3 ORDER BY number) (#2), sum_arg_0 (#3), rank_part_0 (#1), sum(number - 1) OVER (PARTITION BY number % 3) (#4)] ├── aggregate function: [sum(sum_arg_0)] ├── partition by: [rank_part_0] ├── order by: [] ├── frame: [Range: Preceding(None) ~ Following(None)] └── EvalScalar - ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number ) (#2), sum_arg_0 (#3), rank_part_0 (#1)] + ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION 
BY number % 3 ORDER BY number) (#2), sum_arg_0 (#3), rank_part_0 (#1)]
 ├── expressions: [t.number (#0) - 1, t.number (#0) % 3]
 ├── estimated rows: 50.00
 └── Window
-    ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER ( PARTITION BY number % 3 ORDER BY number ) (#2)]
+    ├── output columns: [numbers.number (#0), rank_part_0 (#1), rank() OVER (PARTITION BY number % 3 ORDER BY number) (#2)]
     ├── aggregate function: [rank]
     ├── partition by: [rank_part_0]
     ├── order by: [number]
diff --git a/tests/sqllogictests/suites/mode/standalone/explain_native/explain.test b/tests/sqllogictests/suites/mode/standalone/explain_native/explain.test
index 325390bb9f2b6..a7bb3f48fa091 100644
--- a/tests/sqllogictests/suites/mode/standalone/explain_native/explain.test
+++ b/tests/sqllogictests/suites/mode/standalone/explain_native/explain.test
@@ -137,247 +137,90 @@ EvalScalar
 query T
 explain syntax select 1, 'ab', [1,2,3], (1, 'a')
 ----
-SELECT
-    1,
-    'ab',
-    [1, 2, 3],
-    (1, 'a')
+SELECT 1, 'ab', [1, 2, 3], (1, 'a')
 query T
 explain syntax select a, sum(b) as sum from t1 where a in (1, 2) and b > 0 and b < 100 group by a order by a
 ----
-SELECT
-    a,
-    sum(b) AS sum
-FROM
-    t1
-WHERE
-    (
-        (
-            a IN (1, 2)
-            AND (b > 0)
-        )
-        AND (b < 100)
-    )
-GROUP BY a
-ORDER BY a
+SELECT a, sum(b) AS sum FROM t1 WHERE a IN(1, 2) AND b > 0 AND b < 100 GROUP BY a ORDER BY a
 query T
 explain syntax select * from t1 inner join t2 on t1.a = t2.a and t1.b = t2.b and t1.a > 2
 ----
-SELECT *
-FROM
-    t1
-    INNER JOIN t2 ON (
-        (
-            (t1.a = t2.a)
-            AND (t1.b = t2.b)
-        )
-        AND (t1.a > 2)
-    )
+SELECT * FROM t1 INNER JOIN t2 ON t1.a = t2.a AND t1.b = t2.b AND t1.a > 2
 query T
 explain syntax delete from t1 where a > 100 and b > 1 and b < 10
 ----
-DELETE FROM
-    t1
-WHERE
-    (
-        (
-            (a > 100)
-            AND (b > 1)
-        )
-        AND (b < 10)
-    )
+DELETE FROM t1 WHERE a > 100 AND b > 1 AND b < 10
 query T
 explain syntax copy into t1 from 's3://mybucket/data.csv' file_format = ( type = CSV field_delimiter = ',' record_delimiter = '\n' skip_header = 1) size_limit=10
 ----
-COPY
-INTO t1
-FROM 's3://mybucket/data.csv'
-FILE_FORMAT = (
-    field_delimiter = ',',
-    record_delimiter = '\n',
-    skip_header = 1,
-    type = CSV
-)
-SIZE_LIMIT = 10
-PURGE = false
-DISABLE_VARIANT_CHECK = false
+COPY INTO t1 FROM 's3://mybucket/data.csv' FILE_FORMAT = (field_delimiter = ',', record_delimiter = '\n', skip_header = 1, type = CSV) SIZE_LIMIT = 10 PURGE = false FORCE = false DISABLE_VARIANT_CHECK = false ON_ERROR = abort RETURN_FAILED_ONLY = false
 query T
 explain syntax copy into 's3://mybucket/data.csv' from t1 file_format = ( type = CSV field_delimiter = ',' record_delimiter = '\n' skip_header = 1)
 ----
-COPY
-INTO Uri(UriLocation { protocol: "s3", name: "mybucket", path: "/data.csv", connection: Connection { visited_keys: {}, conns: {} } })
-FROM t1
-FILE_FORMAT = (
-    field_delimiter = ',',
-    record_delimiter = '\n',
-    skip_header = 1,
-    type = CSV
-)
-SINGLE = false
+COPY INTO 's3://mybucket/data.csv' FROM t1 FILE_FORMAT = (field_delimiter = ',', record_delimiter = '\n', skip_header = 1, type = CSV) SINGLE = false MAX_FILE_SIZE = 0 DETAILED_OUTPUT = false INCLUDE_QUERY_ID = true USE_RAW_PATH = false OVERWRITE = false
 query T
 explain syntax create table t3(a int64, b uint64, c float64, d string, e array(int32), f tuple(f1 bool, f2 string)) engine=fuse cluster by (a, b, c) comment='test' compression='LZ4'
 ----
-CREATE TABLE t3 (
-    a Int64,
-    b UInt64,
-    c Float64,
-    d STRING,
-    e ARRAY(Int32),
-    f TUPLE(f1 BOOLEAN, f2 STRING)
-) ENGINE = FUSE
-CLUSTER BY LINEAR(
-    a,
-    b,
-    c
-)
-comment = 'test',
-compression = 'LZ4'
+CREATE TABLE t3 (a Int64, b UInt64, c Float64, d STRING, e ARRAY(Int32), f TUPLE(f1 BOOLEAN, f2 STRING)) ENGINE = FUSE CLUSTER BY LINEAR(a, b, c) comment = 'test' compression = 'LZ4'
 query T
 explain syntax create view v as select number % 3 as a from numbers(100) where number > 10
 ----
-CREATE VIEW v
-AS
-    SELECT (number % 3) AS a
-    FROM
-        numbers(100)
-    WHERE
-        (number > 10)
+CREATE VIEW v AS SELECT number % 3 AS a FROM numbers(100) WHERE number > 10
 query T
 explain syntax select 1, 'ab', [1,2,3] as a, (1, 'a') as t
 ----
-SELECT
-    1,
-    'ab',
-    [1, 2, 3] AS a,
-    (1, 'a') AS t
+SELECT 1, 'ab', [1, 2, 3] AS a, (1, 'a') AS t
 query T
 explain syntax select case when a > 1 then 'x' when a < 10 then 'y' else 'z' end from t1
 ----
-SELECT CASE
-    WHEN (a > 1) THEN 'x',
-    WHEN (a < 10) THEN 'y'
-    ELSE 'z'
-    END
-FROM
-    t1
+SELECT CASE WHEN a > 1 THEN 'x' WHEN a < 10 THEN 'y' ELSE 'z' END FROM t1
 query T
 explain syntax select a, sum(b) as sum from t1 where a in (1, 2) and b > 0 and b < 100 group by a order by a limit 3
 ----
-SELECT
-    a,
-    sum(b) AS sum
-FROM
-    t1
-WHERE
-    (
-        (
-            a IN (1, 2)
-            AND (b > 0)
-        )
-        AND (b < 100)
-    )
-GROUP BY a
-ORDER BY a
-LIMIT 3
+SELECT a, sum(b) AS sum FROM t1 WHERE a IN(1, 2) AND b > 0 AND b < 100 GROUP BY a ORDER BY a LIMIT 3
 query T
 explain syntax select * from t1 inner join t2 on t1.a = t2.a and t1.b = t2.b and t1.a > 2
 ----
-SELECT *
-FROM
-    t1
-    INNER JOIN t2 ON (
-        (
-            (t1.a = t2.a)
-            AND (t1.b = t2.b)
-        )
-        AND (t1.a > 2)
-    )
+SELECT * FROM t1 INNER JOIN t2 ON t1.a = t2.a AND t1.b = t2.b AND t1.a > 2
 query T
 explain syntax with cte (a, b) as (select 1, 2 union all select 3, 4) select a, b from cte
 ----
-WITH
-    cte(a, b) AS
-    (
-        SELECT
-            1,
-            2
-        UNION ALL
-        SELECT
-            3,
-            4
-    )
-SELECT
-    a, b
-FROM
-    cte
+WITH cte(a, b) AS (SELECT 1, 2 UNION ALL SELECT 3, 4) SELECT a, b FROM cte
 query T
 explain syntax with cte (a, b) as (values(1,2),(3,4)) select a, b from cte
 ----
-WITH
-    cte(a, b) AS
-    (
-        VALUES(1, 2), (3, 4)
-    )
-SELECT
-    a, b
-FROM
-    cte
+WITH cte(a, b) AS (VALUES(1, 2), (3, 4)) SELECT a, b FROM cte
 query T
 explain syntax insert into t1 (a, b) values (1, 2),(3, 4)
 ----
-INSERT INTO
-    t1 (a, b)
-VALUES
-    (1, 2),
-    (3, 4)
+INSERT INTO t1 (a, b) VALUES (1, 2), (3, 4)
 query T
 explain syntax delete from t1 where a > 100 and b > 1 and b < 10
 ----
-DELETE FROM
-    t1
-WHERE
-    (
-        (
-            (a > 100)
-            AND (b > 1)
-        )
-        AND (b < 10)
-    )
+DELETE FROM t1 WHERE a > 100 AND b > 1 AND b < 10
 query T
 explain syntax copy into t1 from 's3://mybucket/data.csv' file_format = ( type = CSV field_delimiter = ',' record_delimiter = '\n' skip_header = 1) size_limit=10 max_files=10
 ----
-COPY
-INTO t1
-FROM 's3://mybucket/data.csv'
-FILE_FORMAT = (
-    field_delimiter = ',',
-    record_delimiter = '\n',
-    skip_header = 1,
-    type = CSV
-)
-SIZE_LIMIT = 10
-MAX_FILES = 10
-PURGE = false
-DISABLE_VARIANT_CHECK = false
+COPY INTO t1 FROM 's3://mybucket/data.csv' FILE_FORMAT = (field_delimiter = ',', record_delimiter = '\n', skip_header = 1, type = CSV) SIZE_LIMIT = 10 MAX_FILES = 10 PURGE = false FORCE = false DISABLE_VARIANT_CHECK = false ON_ERROR = abort RETURN_FAILED_ONLY = false
 query T
 explain syntax create database db1 engine=default
@@ -387,32 +230,12 @@ CREATE DATABASE db1 ENGINE = DEFAULT
 query T
 explain syntax create table t3(a int64, b uint64, c float64, d string, e array(int32), f tuple(f1 bool, f2 string)) engine=fuse cluster by (a, b, c) comment='test' compression='LZ4'
 ----
-CREATE TABLE t3 (
-    a Int64,
-    b UInt64,
-    c Float64,
-    d STRING,
-    e ARRAY(Int32),
-    f TUPLE(f1 BOOLEAN, f2 STRING)
-) ENGINE = FUSE
-CLUSTER BY LINEAR(
-    a,
-    b,
-    c
-)
-comment = 'test',
-compression = 'LZ4'
+CREATE TABLE t3 (a Int64, b UInt64, c Float64, d STRING, e ARRAY(Int32), f TUPLE(f1 BOOLEAN, f2 STRING)) ENGINE = FUSE CLUSTER BY LINEAR(a, b, c) comment = 'test' compression = 'LZ4'
 query T
 explain syntax create view v as select number % 3 as a from numbers(100) where number > 10
 ----
-CREATE VIEW v
-AS
-    SELECT (number % 3) AS a
-    FROM
-        numbers(100)
-    WHERE
-        (number > 10)
+CREATE VIEW v AS SELECT number % 3 AS a FROM numbers(100) WHERE number > 10
 query T
 explain syntax show create table t1
@@ -427,25 +250,17 @@ CREATE USER 'test'@'%' IDENTIFIED WITH sha256_password BY 'new_password'
 query T
 explain syntax select unknown_table.a + 1 from unknown_table1
 ----
-SELECT (unknown_table.a + 1)
-FROM
-    unknown_table1
+SELECT unknown_table.a + 1 FROM unknown_table1
 query T
 explain syntax SELECT * FROM monthly_sales PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) ORDER BY EMPID
 ----
-SELECT *
-FROM
-    monthly_sales PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR'))
-ORDER BY EMPID
+SELECT * FROM monthly_sales PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) ORDER BY EMPID
 query T
 explain syntax SELECT * FROM monthly_sales_1 UNPIVOT(sales FOR month IN (jan, feb, mar, april)) ORDER BY empid
 ----
-SELECT *
-FROM
-    monthly_sales_1 UNPIVOT(sales FOR month IN (jan, feb, mar, april))
-ORDER BY empid
+SELECT * FROM monthly_sales_1 UNPIVOT(sales FOR month IN (jan, feb, mar, april)) ORDER BY empid
 query T
 explain select a from t1 UNION ALL select a from t2
@@ -806,30 +621,22 @@ drop table t2
 query T
 explain syntax select * from read_parquet('p1', 'p2', 'p3');
 ----
-SELECT *
-FROM
-    read_parquet('p1', 'p2', 'p3')
+SELECT * FROM read_parquet('p1', 'p2', 'p3')
 query T
 explain syntax select * from read_parquet(prune_page=>true, refresh_meta_cache=>true);
 ----
-SELECT *
-FROM
-    read_parquet(prune_page=>TRUE, refresh_meta_cache=>TRUE)
+SELECT * FROM read_parquet(prune_page=>TRUE,refresh_meta_cache=>TRUE)
 query T
 explain syntax select * from read_parquet('p1', 'p2', 'p3', prune_page=>true, refresh_meta_cache=>true);
 ----
-SELECT *
-FROM
-    read_parquet('p1', 'p2', 'p3', prune_page=>TRUE, refresh_meta_cache=>TRUE)
+SELECT * FROM read_parquet('p1', 'p2', 'p3',prune_page=>TRUE,refresh_meta_cache=>TRUE)
 query T
 explain syntax select * from read_parquet('p1', 'p2', 'p3', prune_page=>true, refresh_meta_cache=>true);
 ----
-SELECT *
-FROM
-    read_parquet('p1', 'p2', 'p3', prune_page=>TRUE, refresh_meta_cache=>TRUE)
+SELECT * FROM read_parquet('p1', 'p2', 'p3',prune_page=>TRUE,refresh_meta_cache=>TRUE)
 statement ok
 drop table if exists t4
diff --git a/tests/sqllogictests/suites/query/window_function/window_basic.test b/tests/sqllogictests/suites/query/window_function/window_basic.test
index 73b8cd034d5eb..75110235c9de6 100644
--- a/tests/sqllogictests/suites/query/window_function/window_basic.test
+++ b/tests/sqllogictests/suites/query/window_function/window_basic.test
@@ -798,7 +798,7 @@ with results_rollup AS (
 select sum(number) gross_margin, 2 as lochierarchy from numbers(1000000) group by number % 4
 )
-SELECT gross_margin, rank() OVER ( PARTITION BY lochierarchy
+SELECT gross_margin, rank() OVER (PARTITION BY lochierarchy
 ORDER BY gross_margin ASC) AS rank_within_parent FROM results_rollup order by 1,2;
 ----
@@ -812,7 +812,7 @@ with results_rollup AS (
 select sum(number) gross_margin, 2 as lochierarchy from numbers(1000000)
 )
-SELECT gross_margin, rank() OVER ( PARTITION BY lochierarchy
+SELECT gross_margin, rank() OVER (PARTITION BY lochierarchy
 ORDER BY gross_margin ASC) AS rank_within_parent FROM results_rollup order by 1,2;
 ----
diff --git a/tests/sqllogictests/suites/query/window_function/window_ntile.test b/tests/sqllogictests/suites/query/window_function/window_ntile.test
index d9f467c3cd95f..45425f47194ca 100644
--- a/tests/sqllogictests/suites/query/window_function/window_ntile.test
+++ b/tests/sqllogictests/suites/query/window_function/window_ntile.test
@@ -203,7 +203,7 @@ SELECT
 FROM
 (
     SELECT
-        id, kind, ntile(10) OVER ( ORDER BY per ASC ) AS quantile
+        id, kind, ntile(10) OVER (ORDER BY per ASC ) AS quantile
     FROM
     (SELECT * FROM t)
 )
@@ -231,7 +231,7 @@ FROM
     SELECT
         id,
        kind,
-        ntile(10) OVER ( PARTITION BY id, kind ORDER BY per ASC ) AS quantile
+        ntile(10) OVER (PARTITION BY id, kind ORDER BY per ASC) AS quantile
     FROM
     (SELECT * FROM t)
 )
diff --git a/tests/sqllogictests/suites/tpcds/materialized_cte.test b/tests/sqllogictests/suites/tpcds/materialized_cte.test
index cb4c12bff5b31..66df1da5b389b 100644
--- a/tests/sqllogictests/suites/tpcds/materialized_cte.test
+++ b/tests/sqllogictests/suites/tpcds/materialized_cte.test
@@ -1024,7 +1024,7 @@ SELECT gross_margin,
        i_category,
        i_class,
        lochierarchy,
-       rank() OVER ( PARTITION BY lochierarchy,
+       rank() OVER (PARTITION BY lochierarchy,
        CASE
            WHEN t_class = 0 THEN i_category
        END
diff --git a/tests/sqllogictests/suites/tpcds/queries.test b/tests/sqllogictests/suites/tpcds/queries.test
index 2241c5d84a078..9bdd5e2190dc6 100644
--- a/tests/sqllogictests/suites/tpcds/queries.test
+++ b/tests/sqllogictests/suites/tpcds/queries.test
@@ -3292,7 +3292,7 @@ SELECT gross_margin,
        i_category,
        i_class,
        lochierarchy,
-       rank() OVER ( PARTITION BY lochierarchy,
+       rank() OVER (PARTITION BY lochierarchy,
        CASE
            WHEN t_class = 0 THEN i_category
        END
@@ -7317,7 +7317,7 @@ SELECT sum(ws_net_paid) AS total_sum ,
        i_category ,
        i_class ,
        grouping(i_category)+grouping(i_class) AS lochierarchy ,
-       rank() OVER ( PARTITION BY grouping(i_category)+grouping(i_class),
+       rank() OVER (PARTITION BY grouping(i_category)+grouping(i_class),
        CASE
            WHEN grouping(i_class) = 0 THEN i_category
        END