diff --git a/src/frontend/src/instance.rs b/src/frontend/src/instance.rs index e8ae74c7b41a..4dcb28ae06dc 100644 --- a/src/frontend/src/instance.rs +++ b/src/frontend/src/instance.rs @@ -507,7 +507,7 @@ pub fn check_permission( } fn validate_param(name: &ObjectName, query_ctx: &QueryContextRef) -> Result<()> { - let (catalog, schema, _) = table_idents_to_full_name(name, query_ctx.clone()) + let (catalog, schema, _) = table_idents_to_full_name(name, query_ctx) .map_err(BoxedError::new) .context(ExternalSnafu)?; diff --git a/src/frontend/src/instance/grpc.rs b/src/frontend/src/instance/grpc.rs index cd678b78601e..49deda71fd8a 100644 --- a/src/frontend/src/instance/grpc.rs +++ b/src/frontend/src/instance/grpc.rs @@ -105,7 +105,7 @@ impl GrpcQueryHandler for Instance { // TODO(weny): supports to create multiple region table. let _ = self .statement_executor - .create_table_inner(&mut expr, None) + .create_table_inner(&mut expr, None, &ctx) .await?; Output::AffectedRows(0) } diff --git a/src/operator/src/expr_factory.rs b/src/operator/src/expr_factory.rs index 545699c94ff0..55072ddd3c51 100644 --- a/src/operator/src/expr_factory.rs +++ b/src/operator/src/expr_factory.rs @@ -22,6 +22,7 @@ use api::v1::{ }; use common_error::ext::BoxedError; use common_grpc_expr::util::ColumnExpr; +use common_time::Timezone; use datatypes::schema::{ColumnSchema, COMMENT_KEY}; use file_engine::FileOptions; use query::sql::{ @@ -122,7 +123,7 @@ pub(crate) async fn create_external_expr( query_ctx: QueryContextRef, ) -> Result { let (catalog_name, schema_name, table_name) = - table_idents_to_full_name(&create.name, query_ctx) + table_idents_to_full_name(&create.name, &query_ctx) .map_err(BoxedError::new) .context(ExternalSnafu)?; @@ -141,7 +142,8 @@ pub(crate) async fn create_external_expr( // expanded form let time_index = find_time_index(&create.constraints)?; let primary_keys = find_primary_keys(&create.columns, &create.constraints)?; - let column_schemas = columns_to_column_schemas(&create.columns, &time_index)?; + let column_schemas = + columns_to_column_schemas(&create.columns, &time_index, Some(&query_ctx.timezone()))?; (time_index, primary_keys, column_schemas) } else { // inferred form @@ -182,7 +184,7 @@ pub(crate) async fn create_external_expr( /// Convert `CreateTable` statement to `CreateExpr` gRPC request. 
pub fn create_to_expr(create: &CreateTable, query_ctx: QueryContextRef) -> Result { let (catalog_name, schema_name, table_name) = - table_idents_to_full_name(&create.name, query_ctx) + table_idents_to_full_name(&create.name, &query_ctx) .map_err(BoxedError::new) .context(ExternalSnafu)?; @@ -199,7 +201,12 @@ pub fn create_to_expr(create: &CreateTable, query_ctx: QueryContextRef) -> Resul schema_name, table_name, desc: String::default(), - column_defs: columns_to_expr(&create.columns, &time_index, &primary_keys)?, + column_defs: columns_to_expr( + &create.columns, + &time_index, + &primary_keys, + Some(&query_ctx.timezone()), + )?, time_index, primary_keys, create_if_not_exists: create.if_not_exists, @@ -293,18 +300,23 @@ fn columns_to_expr( column_defs: &[ColumnDef], time_index: &str, primary_keys: &[String], + timezone: Option<&Timezone>, ) -> Result> { - let column_schemas = columns_to_column_schemas(column_defs, time_index)?; + let column_schemas = columns_to_column_schemas(column_defs, time_index, timezone)?; column_schemas_to_defs(column_schemas, primary_keys) } fn columns_to_column_schemas( column_defs: &[ColumnDef], time_index: &str, + timezone: Option<&Timezone>, ) -> Result> { column_defs .iter() - .map(|c| column_def_to_schema(c, c.name.to_string() == time_index).context(ParseSqlSnafu)) + .map(|c| { + column_def_to_schema(c, c.name.to_string() == time_index, timezone) + .context(ParseSqlSnafu) + }) .collect::>>() } @@ -365,7 +377,7 @@ pub(crate) fn to_alter_expr( query_ctx: QueryContextRef, ) -> Result { let (catalog_name, schema_name, table_name) = - table_idents_to_full_name(alter_table.table_name(), query_ctx) + table_idents_to_full_name(alter_table.table_name(), &query_ctx) .map_err(BoxedError::new) .context(ExternalSnafu)?; @@ -382,7 +394,7 @@ pub(crate) fn to_alter_expr( } => Kind::AddColumns(AddColumns { add_columns: vec![AddColumn { column_def: Some( - sql_column_def_to_grpc_column_def(column_def) + sql_column_def_to_grpc_column_def(column_def, Some(&query_ctx.timezone())) .map_err(BoxedError::new) .context(ExternalSnafu)?, ), @@ -409,10 +421,12 @@ pub(crate) fn to_alter_expr( #[cfg(test)] mod tests { - use session::context::QueryContext; + use datatypes::value::Value; + use session::context::{QueryContext, QueryContextBuilder}; use sql::dialect::GreptimeDbDialect; use sql::parser::{ParseOptions, ParserContext}; use sql::statements::statement::Statement; + use store_api::storage::ColumnDefaultConstraint; use super::*; @@ -435,4 +449,100 @@ mod tests { expr.table_options.get("write_buffer_size").unwrap() ); } + + #[test] + fn test_create_to_expr_with_default_timestamp_value() { + let sql = "CREATE TABLE monitor (v double,ts TIMESTAMP default '2024-01-30T00:01:01',TIME INDEX (ts)) engine=mito;"; + let stmt = + ParserContext::create_with_dialect(sql, &GreptimeDbDialect {}, ParseOptions::default()) + .unwrap() + .pop() + .unwrap(); + + let Statement::CreateTable(create_table) = stmt else { + unreachable!() + }; + + // query context with system timezone UTC. 
+ let expr = create_to_expr(&create_table, QueryContext::arc()).unwrap(); + let ts_column = &expr.column_defs[1]; + let constraint = assert_ts_column(ts_column); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-30 00:01:01+0000") + ); + + // query context with timezone `+08:00` + let ctx = QueryContextBuilder::default() + .timezone(Timezone::from_tz_string("+08:00").unwrap().into()) + .build(); + let expr = create_to_expr(&create_table, ctx).unwrap(); + let ts_column = &expr.column_defs[1]; + let constraint = assert_ts_column(ts_column); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-29 16:01:01+0000") + ); + } + + fn assert_ts_column(ts_column: &api::v1::ColumnDef) -> ColumnDefaultConstraint { + assert_eq!("ts", ts_column.name); + assert_eq!( + ColumnDataType::TimestampMillisecond as i32, + ts_column.data_type + ); + assert!(!ts_column.default_constraint.is_empty()); + + ColumnDefaultConstraint::try_from(&ts_column.default_constraint[..]).unwrap() + } + + #[test] + fn test_to_alter_expr() { + let sql = "ALTER TABLE monitor add column ts TIMESTAMP default '2024-01-30T00:01:01';"; + let stmt = + ParserContext::create_with_dialect(sql, &GreptimeDbDialect {}, ParseOptions::default()) + .unwrap() + .pop() + .unwrap(); + + let Statement::Alter(alter_table) = stmt else { + unreachable!() + }; + + // query context with system timezone UTC. + let expr = to_alter_expr(alter_table.clone(), QueryContext::arc()).unwrap(); + let kind = expr.kind.unwrap(); + + let Kind::AddColumns(AddColumns { add_columns, .. }) = kind else { + unreachable!() + }; + + assert_eq!(1, add_columns.len()); + let ts_column = add_columns[0].column_def.clone().unwrap(); + let constraint = assert_ts_column(&ts_column); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-30 00:01:01+0000") + ); + + // + // query context with timezone `+08:00` + let ctx = QueryContextBuilder::default() + .timezone(Timezone::from_tz_string("+08:00").unwrap().into()) + .build(); + let expr = to_alter_expr(alter_table, ctx).unwrap(); + let kind = expr.kind.unwrap(); + + let Kind::AddColumns(AddColumns { add_columns, .. }) = kind else { + unreachable!() + }; + + assert_eq!(1, add_columns.len()); + let ts_column = add_columns[0].column_def.clone().unwrap(); + let constraint = assert_ts_column(&ts_column); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-29 16:01:01+0000") + ); + } } diff --git a/src/operator/src/insert.rs b/src/operator/src/insert.rs index 07523c348215..9256d76f74cf 100644 --- a/src/operator/src/insert.rs +++ b/src/operator/src/insert.rs @@ -181,7 +181,7 @@ impl Inserter { ) -> Result { let inserts = StatementToRegion::new(self.catalog_manager.as_ref(), &self.partition_manager, ctx) - .convert(insert) + .convert(insert, ctx) .await?; let affected_rows = self.do_request(inserts, ctx).await?; @@ -334,7 +334,7 @@ impl Inserter { // create physical table let res = statement_executor - .create_table_inner(create_table_expr, None) + .create_table_inner(create_table_expr, None, ctx) .await; match res { @@ -431,7 +431,7 @@ impl Inserter { // TODO(weny): multiple regions table. 
let res = statement_executor - .create_table_inner(create_table_expr, None) + .create_table_inner(create_table_expr, None, ctx) .await; match res { diff --git a/src/operator/src/req_convert/insert/stmt_to_region.rs b/src/operator/src/req_convert/insert/stmt_to_region.rs index eb3b1b2be978..5f40564b2994 100644 --- a/src/operator/src/req_convert/insert/stmt_to_region.rs +++ b/src/operator/src/req_convert/insert/stmt_to_region.rs @@ -16,9 +16,10 @@ use api::helper::{value_to_grpc_value, ColumnDataTypeWrapper}; use api::v1::region::InsertRequests as RegionInsertRequests; use api::v1::{ColumnSchema as GrpcColumnSchema, Row, Rows, Value as GrpcValue}; use catalog::CatalogManager; +use common_time::Timezone; use datatypes::schema::{ColumnSchema, SchemaRef}; use partition::manager::PartitionRuleManager; -use session::context::QueryContext; +use session::context::{QueryContext, QueryContextRef}; use snafu::{ensure, OptionExt, ResultExt}; use sql::statements; use sql::statements::insert::Insert; @@ -54,7 +55,11 @@ impl<'a> StatementToRegion<'a> { } } - pub async fn convert(&self, stmt: &Insert) -> Result { + pub async fn convert( + &self, + stmt: &Insert, + query_ctx: &QueryContextRef, + ) -> Result { let (catalog, schema, table_name) = self.get_full_name(stmt.table_name())?; let table = self.get_table(&catalog, &schema, &table_name).await?; let table_schema = table.schema(); @@ -110,7 +115,11 @@ impl<'a> StatementToRegion<'a> { schema.push(grpc_column_schema); for (sql_row, grpc_row) in sql_rows.iter().zip(rows.iter_mut()) { - let value = sql_value_to_grpc_value(column_schema, &sql_row[i])?; + let value = sql_value_to_grpc_value( + column_schema, + &sql_row[i], + Some(&query_ctx.timezone()), + )?; grpc_row.values.push(value); } } @@ -169,7 +178,11 @@ fn column_names<'a>(stmt: &'a Insert, table_schema: &'a SchemaRef) -> Vec<&'a St } } -fn sql_value_to_grpc_value(column_schema: &ColumnSchema, sql_val: &SqlValue) -> Result { +fn sql_value_to_grpc_value( + column_schema: &ColumnSchema, + sql_val: &SqlValue, + timezone: Option<&Timezone>, +) -> Result { let column = &column_schema.name; let value = if replace_default(sql_val) { let default_value = column_schema @@ -182,7 +195,7 @@ fn sql_value_to_grpc_value(column_schema: &ColumnSchema, sql_val: &SqlValue) -> column: column.clone(), })? } else { - statements::sql_value_to_value(column, &column_schema.data_type, sql_val) + statements::sql_value_to_value(column, &column_schema.data_type, sql_val, timezone) .context(ParseSqlSnafu)? 
}; diff --git a/src/operator/src/statement.rs b/src/operator/src/statement.rs index 86600a8c8b9a..f76823df9694 100644 --- a/src/operator/src/statement.rs +++ b/src/operator/src/statement.rs @@ -160,7 +160,7 @@ impl StatementExecutor { Statement::Alter(alter_table) => self.alter_table(alter_table, query_ctx).await, Statement::DropTable(stmt) => { let (catalog, schema, table) = - table_idents_to_full_name(stmt.table_name(), query_ctx) + table_idents_to_full_name(stmt.table_name(), &query_ctx) .map_err(BoxedError::new) .context(error::ExternalSnafu)?; let table_name = TableName::new(catalog, schema, table); @@ -168,7 +168,7 @@ impl StatementExecutor { } Statement::TruncateTable(stmt) => { let (catalog, schema, table) = - table_idents_to_full_name(stmt.table_name(), query_ctx) + table_idents_to_full_name(stmt.table_name(), &query_ctx) .map_err(BoxedError::new) .context(error::ExternalSnafu)?; let table_name = TableName::new(catalog, schema, table); @@ -186,7 +186,7 @@ impl StatementExecutor { Statement::ShowCreateTable(show) => { let (catalog, schema, table) = - table_idents_to_full_name(&show.table_name, query_ctx.clone()) + table_idents_to_full_name(&show.table_name, &query_ctx) .map_err(BoxedError::new) .context(error::ExternalSnafu)?; @@ -298,9 +298,10 @@ fn to_copy_table_request(stmt: CopyTable, query_ctx: QueryContextRef) -> Result< CopyTable::To(arg) => arg, CopyTable::From(arg) => arg, }; - let (catalog_name, schema_name, table_name) = table_idents_to_full_name(&table_name, query_ctx) - .map_err(BoxedError::new) - .context(ExternalSnafu)?; + let (catalog_name, schema_name, table_name) = + table_idents_to_full_name(&table_name, &query_ctx) + .map_err(BoxedError::new) + .context(ExternalSnafu)?; let pattern = with .get(common_datasource::file_format::FILE_PATTERN) diff --git a/src/operator/src/statement/ddl.rs b/src/operator/src/statement/ddl.rs index 0827d44dbd9d..2a20f34cd667 100644 --- a/src/operator/src/statement/ddl.rs +++ b/src/operator/src/statement/ddl.rs @@ -69,8 +69,9 @@ impl StatementExecutor { #[tracing::instrument(skip_all)] pub async fn create_table(&self, stmt: CreateTable, ctx: QueryContextRef) -> Result { - let create_expr = &mut expr_factory::create_to_expr(&stmt, ctx)?; - self.create_table_inner(create_expr, stmt.partitions).await + let create_expr = &mut expr_factory::create_to_expr(&stmt, ctx.clone())?; + self.create_table_inner(create_expr, stmt.partitions, &ctx) + .await } #[tracing::instrument(skip_all)] @@ -79,14 +80,15 @@ impl StatementExecutor { create_expr: CreateExternalTable, ctx: QueryContextRef, ) -> Result { - let create_expr = &mut expr_factory::create_external_expr(create_expr, ctx).await?; - self.create_table_inner(create_expr, None).await + let create_expr = &mut expr_factory::create_external_expr(create_expr, ctx.clone()).await?; + self.create_table_inner(create_expr, None, &ctx).await } pub async fn create_table_inner( &self, create_table: &mut CreateTableExpr, partitions: Option, + query_ctx: &QueryContextRef, ) -> Result { let _timer = crate::metrics::DIST_CREATE_TABLE.start_timer(); let schema = self @@ -144,7 +146,7 @@ impl StatementExecutor { &create_table.table_name, ); - let (partitions, partition_cols) = parse_partitions(create_table, partitions)?; + let (partitions, partition_cols) = parse_partitions(create_table, partitions, query_ctx)?; validate_partition_columns(create_table, &partition_cols)?; @@ -565,11 +567,13 @@ fn validate_partition_columns( fn parse_partitions( create_table: &CreateTableExpr, partitions: Option, + query_ctx: 
&QueryContextRef, ) -> Result<(Vec, Vec)> { // If partitions are not defined by user, use the timestamp column (which has to be existed) as // the partition column, and create only one partition. let partition_columns = find_partition_columns(&partitions)?; - let partition_entries = find_partition_entries(create_table, &partitions, &partition_columns)?; + let partition_entries = + find_partition_entries(create_table, &partitions, &partition_columns, query_ctx)?; Ok(( partition_entries @@ -686,6 +690,7 @@ fn find_partition_entries( create_table: &CreateTableExpr, partitions: &Option, partition_columns: &[String], + query_ctx: &QueryContextRef, ) -> Result>> { let entries = if let Some(partitions) = partitions { let column_defs = partition_columns @@ -718,7 +723,8 @@ fn find_partition_entries( let v = match v { SqlValue::Number(n, _) if n == MAXVALUE => PartitionBound::MaxValue, _ => PartitionBound::Value( - sql_value_to_value(column_name, data_type, v).context(ParseSqlSnafu)?, + sql_value_to_value(column_name, data_type, v, Some(&query_ctx.timezone())) + .context(ParseSqlSnafu)?, ), }; values.push(v); @@ -739,7 +745,7 @@ fn merge_options(mut table_opts: TableOptions, schema_opts: SchemaNameValue) -> #[cfg(test)] mod test { - use session::context::QueryContext; + use session::context::{QueryContext, QueryContextBuilder}; use sql::dialect::GreptimeDbDialect; use sql::parser::{ParseOptions, ParserContext}; use sql::statements::statement::Statement; @@ -806,6 +812,7 @@ ENGINE=mito", r#"[{"column_list":["b","a"],"value_list":["{\"Value\":{\"String\":\"hz\"}}","{\"Value\":{\"Int32\":10}}"]},{"column_list":["b","a"],"value_list":["{\"Value\":{\"String\":\"sh\"}}","{\"Value\":{\"Int32\":20}}"]},{"column_list":["b","a"],"value_list":["\"MaxValue\"","\"MaxValue\""]}]"#, ), ]; + let ctx = QueryContextBuilder::default().build(); for (sql, expected) in cases { let result = ParserContext::create_with_dialect( sql, @@ -816,7 +823,8 @@ ENGINE=mito", match &result[0] { Statement::CreateTable(c) => { let expr = expr_factory::create_to_expr(c, QueryContext::arc()).unwrap(); - let (partitions, _) = parse_partitions(&expr, c.partitions.clone()).unwrap(); + let (partitions, _) = + parse_partitions(&expr, c.partitions.clone(), &ctx).unwrap(); let json = serde_json::to_string(&partitions).unwrap(); assert_eq!(json, expected); } diff --git a/src/operator/src/statement/describe.rs b/src/operator/src/statement/describe.rs index 7a28aa2d8388..a9162c352655 100644 --- a/src/operator/src/statement/describe.rs +++ b/src/operator/src/statement/describe.rs @@ -33,7 +33,7 @@ impl StatementExecutor { stmt: DescribeTable, query_ctx: QueryContextRef, ) -> Result { - let (catalog, schema, table) = table_idents_to_full_name(stmt.name(), query_ctx) + let (catalog, schema, table) = table_idents_to_full_name(stmt.name(), &query_ctx) .map_err(BoxedError::new) .context(ExternalSnafu)?; diff --git a/src/operator/src/table.rs b/src/operator/src/table.rs index abfc27732bf5..1d42c6e7a394 100644 --- a/src/operator/src/table.rs +++ b/src/operator/src/table.rs @@ -31,16 +31,16 @@ use crate::insert::InserterRef; /// Converts maybe fully-qualified table name (`..`) to tuple. pub fn table_idents_to_full_name( obj_name: &ObjectName, - query_ctx: QueryContextRef, + query_ctx: &QueryContextRef, ) -> Result<(String, String, String)> { match &obj_name.0[..] 
{ [table] => Ok(( - query_ctx.current_catalog().to_owned(), - query_ctx.current_schema().to_owned(), + query_ctx.current_catalog().to_string(), + query_ctx.current_schema().to_string(), table.value.clone(), )), [schema, table] => Ok(( - query_ctx.current_catalog().to_owned(), + query_ctx.current_catalog().to_string(), schema.value.clone(), table.value.clone(), )), diff --git a/src/sql/src/parsers/create_parser.rs b/src/sql/src/parsers/create_parser.rs index 24586701b5c9..cbdb05a985bd 100644 --- a/src/sql/src/parsers/create_parser.rs +++ b/src/sql/src/parsers/create_parser.rs @@ -786,8 +786,10 @@ fn ensure_value_lists_strictly_increased<'a>( } } - let x = sql_value_to_value(column_name, &cdt, x)?; - let y = sql_value_to_value(column_name, &cdt, y)?; + // We only want to compare the `x` and `y` values, + // so the `timezone` can be ignored. + let x = sql_value_to_value(column_name, &cdt, x, None)?; + let y = sql_value_to_value(column_name, &cdt, y, None)?; match x.cmp(&y) { Ordering::Less => break, Ordering::Equal => equal_tuples += 1, diff --git a/src/sql/src/statements.rs b/src/sql/src/statements.rs index df9df9a02b35..eaa6b1480871 100644 --- a/src/sql/src/statements.rs +++ b/src/sql/src/statements.rs @@ -36,6 +36,7 @@ use api::v1::add_column_location::LocationType; use api::v1::{AddColumnLocation as Location, SemanticType}; use common_base::bytes::Bytes; use common_query::AddColumnLocation; +use common_time::timezone::Timezone; use common_time::Timestamp; use datatypes::prelude::ConcreteDataType; use datatypes::schema::constraint::{CURRENT_TIMESTAMP, CURRENT_TIMESTAMP_FN}; @@ -61,6 +62,7 @@ fn parse_string_to_value( column_name: &str, s: String, data_type: &ConcreteDataType, + timezone: Option<&Timezone>, ) -> Result { ensure!( data_type.is_stringifiable(), @@ -74,7 +76,7 @@ match data_type { ConcreteDataType::String(_) => Ok(Value::String(s.into())), ConcreteDataType::Date(_) => { - if let Ok(date) = common_time::date::Date::from_str_utc(&s) { + if let Ok(date) = common_time::date::Date::from_str(&s, timezone) { Ok(Value::Date(date)) } else { ParseSqlValueSnafu { @@ -84,7 +86,7 @@ } } ConcreteDataType::DateTime(_) => { - if let Ok(datetime) = common_time::datetime::DateTime::from_str_system(&s) { + if let Ok(datetime) = common_time::datetime::DateTime::from_str(&s, timezone) { Ok(Value::DateTime(datetime)) } else { ParseSqlValueSnafu { @@ -94,7 +96,7 @@ } } ConcreteDataType::Timestamp(t) => { - if let Ok(ts) = Timestamp::from_str_utc(&s) { + if let Ok(ts) = Timestamp::from_str(&s, timezone) { Ok(Value::Timestamp(ts.convert_to(t.unit()).context( TimestampOverflowSnafu { timestamp: ts, @@ -215,6 +217,7 @@ pub fn sql_value_to_value( column_name: &str, data_type: &ConcreteDataType, sql_val: &SqlValue, + timezone: Option<&Timezone>, ) -> Result { let value = match sql_val { SqlValue::Number(n, _) => sql_number_to_value(data_type, n)?, @@ -232,7 +235,7 @@ (*b).into() } SqlValue::DoubleQuotedString(s) | SqlValue::SingleQuotedString(s) => { - parse_string_to_value(column_name, s.clone(), data_type, timezone)?
} SqlValue::HexStringLiteral(s) => parse_hex_string(s)?, SqlValue::Placeholder(s) => return InvalidSqlValueSnafu { value: s }.fail(), @@ -283,15 +286,16 @@ fn parse_column_default_constraint( column_name: &str, data_type: &ConcreteDataType, opts: &[ColumnOptionDef], + timezone: Option<&Timezone>, ) -> Result> { if let Some(opt) = opts .iter() .find(|o| matches!(o.option, ColumnOption::Default(_))) { let default_constraint = match &opt.option { - ColumnOption::Default(Expr::Value(v)) => { - ColumnDefaultConstraint::Value(sql_value_to_value(column_name, data_type, v)?) - } + ColumnOption::Default(Expr::Value(v)) => ColumnDefaultConstraint::Value( + sql_value_to_value(column_name, data_type, v, timezone)?, + ), ColumnOption::Default(Expr::Function(func)) => { let mut func = format!("{func}").to_lowercase(); // normalize CURRENT_TIMESTAMP to CURRENT_TIMESTAMP() @@ -341,7 +345,11 @@ pub fn has_primary_key_option(column_def: &ColumnDef) -> bool { // TODO(yingwen): Make column nullable by default, and checks invalid case like // a column is not nullable but has a default value null. /// Create a `ColumnSchema` from `ColumnDef`. -pub fn column_def_to_schema(column_def: &ColumnDef, is_time_index: bool) -> Result { +pub fn column_def_to_schema( + column_def: &ColumnDef, + is_time_index: bool, + timezone: Option<&Timezone>, +) -> Result { let is_nullable = column_def .options .iter() @@ -351,7 +359,7 @@ pub fn column_def_to_schema(column_def: &ColumnDef, is_time_index: bool) -> Resu let name = column_def.name.value.clone(); let data_type = sql_data_type_to_concrete_data_type(&column_def.data_type)?; let default_constraint = - parse_column_default_constraint(&name, &data_type, &column_def.options)?; + parse_column_default_constraint(&name, &data_type, &column_def.options, timezone)?; let mut column_schema = ColumnSchema::new(name, data_type, is_nullable) .with_time_index(is_time_index) @@ -376,7 +384,10 @@ pub fn column_def_to_schema(column_def: &ColumnDef, is_time_index: bool) -> Resu } /// Convert `ColumnDef` in sqlparser to `ColumnDef` in gRPC proto. 
-pub fn sql_column_def_to_grpc_column_def(col: &ColumnDef) -> Result { +pub fn sql_column_def_to_grpc_column_def( + col: &ColumnDef, + timezone: Option<&Timezone>, +) -> Result { let name = col.name.value.clone(); let data_type = sql_data_type_to_concrete_data_type(&col.data_type)?; @@ -385,10 +396,11 @@ pub fn sql_column_def_to_grpc_column_def(col: &ColumnDef) -> Result { + assert_eq!(1645459261000000000, ts.value()); + assert_eq!("2022-02-21 16:01:01+0000", ts.to_iso8601_string()); + assert_eq!(TimeUnit::Nanosecond, ts.unit()); + } + _ => { + unreachable!() + } + } } #[test] @@ -822,9 +877,13 @@ mod tests { }, ]; - let constraint = - parse_column_default_constraint("coll", &ConcreteDataType::Boolean(BooleanType), &opts) - .unwrap(); + let constraint = parse_column_default_constraint( + "coll", + &ConcreteDataType::Boolean(BooleanType), + &opts, + None, + ) + .unwrap(); assert_matches!( constraint, @@ -842,7 +901,7 @@ mod tests { options: vec![], }; - let grpc_column_def = sql_column_def_to_grpc_column_def(&column_def).unwrap(); + let grpc_column_def = sql_column_def_to_grpc_column_def(&column_def, None).unwrap(); assert_eq!("col", grpc_column_def.name); assert!(grpc_column_def.is_nullable); // nullable when options are empty @@ -861,7 +920,7 @@ mod tests { }], }; - let grpc_column_def = sql_column_def_to_grpc_column_def(&column_def).unwrap(); + let grpc_column_def = sql_column_def_to_grpc_column_def(&column_def, None).unwrap(); assert!(!grpc_column_def.is_nullable); // test primary key @@ -875,10 +934,64 @@ mod tests { }], }; - let grpc_column_def = sql_column_def_to_grpc_column_def(&column_def).unwrap(); + let grpc_column_def = sql_column_def_to_grpc_column_def(&column_def, None).unwrap(); assert_eq!(grpc_column_def.semantic_type, SemanticType::Tag as i32); } + #[test] + pub fn test_sql_column_def_to_grpc_column_def_with_timezone() { + let column_def = ColumnDef { + name: "col".into(), + // MILLISECOND + data_type: SqlDataType::Timestamp(Some(3), TimezoneInfo::None), + collation: None, + options: vec![ColumnOptionDef { + name: None, + option: ColumnOption::Default(Expr::Value(SqlValue::SingleQuotedString( + "2024-01-30T00:01:01".to_string(), + ))), + }], + }; + + // with timezone "Asia/Shanghai" + let grpc_column_def = sql_column_def_to_grpc_column_def( + &column_def, + Some(&Timezone::from_tz_string("Asia/Shanghai").unwrap()), + ) + .unwrap(); + assert_eq!("col", grpc_column_def.name); + assert!(grpc_column_def.is_nullable); // nullable when options are empty + assert_eq!( + ColumnDataType::TimestampMillisecond as i32, + grpc_column_def.data_type + ); + assert!(!grpc_column_def.default_constraint.is_empty()); + + let constraint = + ColumnDefaultConstraint::try_from(&grpc_column_def.default_constraint[..]).unwrap(); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-29 16:01:01+0000") + ); + + // without timezone + let grpc_column_def = sql_column_def_to_grpc_column_def(&column_def, None).unwrap(); + assert_eq!("col", grpc_column_def.name); + assert!(grpc_column_def.is_nullable); // nullable when options are empty + assert_eq!( + ColumnDataType::TimestampMillisecond as i32, + grpc_column_def.data_type + ); + assert!(!grpc_column_def.default_constraint.is_empty()); + + let constraint = + ColumnDefaultConstraint::try_from(&grpc_column_def.default_constraint[..]).unwrap(); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-30 
00:01:01+0000") + ); + } + #[test] pub fn test_has_primary_key_option() { let column_def = ColumnDef { @@ -910,7 +1023,7 @@ mod tests { options: vec![], }; - let column_schema = column_def_to_schema(&column_def, false).unwrap(); + let column_schema = column_def_to_schema(&column_def, false, None).unwrap(); assert_eq!("col", column_schema.name); assert_eq!( @@ -920,7 +1033,7 @@ mod tests { assert!(column_schema.is_nullable()); assert!(!column_schema.is_time_index()); - let column_schema = column_def_to_schema(&column_def, true).unwrap(); + let column_schema = column_def_to_schema(&column_def, true, None).unwrap(); assert_eq!("col", column_schema.name); assert_eq!( @@ -946,7 +1059,7 @@ mod tests { ], }; - let column_schema = column_def_to_schema(&column_def, false).unwrap(); + let column_schema = column_def_to_schema(&column_def, false, None).unwrap(); assert_eq!("col2", column_schema.name); assert_eq!(ConcreteDataType::string_datatype(), column_schema.data_type); @@ -958,12 +1071,67 @@ mod tests { ); } + #[test] + pub fn test_column_def_to_schema_timestamp_with_timezone() { + let column_def = ColumnDef { + name: "col".into(), + // MILLISECOND + data_type: SqlDataType::Timestamp(Some(3), TimezoneInfo::None), + collation: None, + options: vec![ColumnOptionDef { + name: None, + option: ColumnOption::Default(Expr::Value(SqlValue::SingleQuotedString( + "2024-01-30T00:01:01".to_string(), + ))), + }], + }; + + // with timezone "Asia/Shanghai" + + let column_schema = column_def_to_schema( + &column_def, + false, + Some(&Timezone::from_tz_string("Asia/Shanghai").unwrap()), + ) + .unwrap(); + + assert_eq!("col", column_schema.name); + assert_eq!( + ConcreteDataType::timestamp_millisecond_datatype(), + column_schema.data_type + ); + assert!(column_schema.is_nullable()); + + let constraint = column_schema.default_constraint().unwrap(); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-29 16:01:01+0000") + ); + + // without timezone + let column_schema = column_def_to_schema(&column_def, false, None).unwrap(); + + assert_eq!("col", column_schema.name); + assert_eq!( + ConcreteDataType::timestamp_millisecond_datatype(), + column_schema.data_type + ); + assert!(column_schema.is_nullable()); + + let constraint = column_schema.default_constraint().unwrap(); + assert!( + matches!(constraint, ColumnDefaultConstraint::Value(Value::Timestamp(ts)) + if ts.to_iso8601_string() == "2024-01-30 00:01:01+0000") + ); + } + #[test] pub fn test_parse_placeholder_value() { assert!(sql_value_to_value( "test", &ConcreteDataType::string_datatype(), - &SqlValue::Placeholder("default".into()) + &SqlValue::Placeholder("default".into()), + None, ) .is_err()); } diff --git a/tests/cases/standalone/common/alter/alter_table_default.result b/tests/cases/standalone/common/alter/alter_table_default.result new file mode 100644 index 000000000000..95d0358c8755 --- /dev/null +++ b/tests/cases/standalone/common/alter/alter_table_default.result @@ -0,0 +1,72 @@ +--- alter table to add new column with default timestamp values aware of session timezone test --- +CREATE TABLE test1 (i INTEGER, j TIMESTAMP time index, PRIMARY KEY(i)); + +Affected Rows: 0 + +INSERT INTO test1 values (1, 1), (2, 2); + +Affected Rows: 2 + +SELECT * FROM test1; + ++---+-------------------------+ +| i | j | ++---+-------------------------+ +| 1 | 1970-01-01T00:00:00.001 | +| 2 | 1970-01-01T00:00:00.002 | ++---+-------------------------+ + +--- add ts1 column --- +ALTER TABLE test1 ADD 
COLUMN ts1 TIMESTAMP DEFAULT '2024-01-30 00:01:01' PRIMARY KEY; + +Affected Rows: 0 + +INSERT INTO test1 values (3, 3, DEFAULT), (4, 4, '2024-01-31 00:01:01'); + +Affected Rows: 2 + +SELECT i, ts1 FROM test1; + ++---+---------------------+ +| i | ts1 | ++---+---------------------+ +| 1 | 2024-01-30T00:01:01 | +| 2 | 2024-01-30T00:01:01 | +| 3 | 2024-01-30T00:01:01 | +| 4 | 2024-01-31T00:01:01 | ++---+---------------------+ + +SET time_zone = 'Asia/Shanghai'; + +Affected Rows: 0 + +--- add ts2 column, default value is the same as ts1, but with different session timezone --- +ALTER TABLE test1 ADD COLUMN ts2 TIMESTAMP DEFAULT '2024-01-30 00:01:01' PRIMARY KEY; + +Affected Rows: 0 + +INSERT INTO test1 values (5, 5, DEFAULT, DEFAULT), (6, 6, DEFAULT, '2024-01-31 00:01:01'); + +Affected Rows: 2 + +SELECT i, ts1, ts2 FROM test1; + ++---+---------------------+---------------------+ +| i | ts1 | ts2 | ++---+---------------------+---------------------+ +| 1 | 2024-01-30T00:01:01 | 2024-01-29T16:01:01 | +| 2 | 2024-01-30T00:01:01 | 2024-01-29T16:01:01 | +| 3 | 2024-01-30T00:01:01 | 2024-01-29T16:01:01 | +| 4 | 2024-01-31T00:01:01 | 2024-01-29T16:01:01 | +| 5 | 2024-01-30T00:01:01 | 2024-01-29T16:01:01 | +| 6 | 2024-01-30T00:01:01 | 2024-01-30T16:01:01 | ++---+---------------------+---------------------+ + +SET time_zone = 'UTC'; + +Affected Rows: 0 + +DROP TABLE test1; + +Affected Rows: 0 + diff --git a/tests/cases/standalone/common/alter/alter_table_default.sql b/tests/cases/standalone/common/alter/alter_table_default.sql new file mode 100644 index 000000000000..3169963b7a44 --- /dev/null +++ b/tests/cases/standalone/common/alter/alter_table_default.sql @@ -0,0 +1,27 @@ +--- alter table to add new column with default timestamp values aware of session timezone test --- + +CREATE TABLE test1 (i INTEGER, j TIMESTAMP time index, PRIMARY KEY(i)); + +INSERT INTO test1 values (1, 1), (2, 2); + +SELECT * FROM test1; + +--- add ts1 column --- +ALTER TABLE test1 ADD COLUMN ts1 TIMESTAMP DEFAULT '2024-01-30 00:01:01' PRIMARY KEY; + +INSERT INTO test1 values (3, 3, DEFAULT), (4, 4, '2024-01-31 00:01:01'); + +SELECT i, ts1 FROM test1; + +SET time_zone = 'Asia/Shanghai'; + +--- add ts2 column, default value is the same as ts1, but with different session timezone --- +ALTER TABLE test1 ADD COLUMN ts2 TIMESTAMP DEFAULT '2024-01-30 00:01:01' PRIMARY KEY; + +INSERT INTO test1 values (5, 5, DEFAULT, DEFAULT), (6, 6, DEFAULT, '2024-01-31 00:01:01'); + +SELECT i, ts1, ts2 FROM test1; + +SET time_zone = 'UTC'; + +DROP TABLE test1; diff --git a/tests/cases/standalone/common/insert/insert_default_timezone.result b/tests/cases/standalone/common/insert/insert_default_timezone.result new file mode 100644 index 000000000000..e44e9692ee96 --- /dev/null +++ b/tests/cases/standalone/common/insert/insert_default_timezone.result @@ -0,0 +1,66 @@ +--- insert timestamp with default values aware of session timezone test --- +CREATE TABLE test1 (i INTEGER, j TIMESTAMP default '2024-01-30 00:01:01' TIME INDEX, PRIMARY KEY(i)); + +Affected Rows: 0 + +INSERT INTO test1 VALUES (1, DEFAULT), (2, DEFAULT), (3, '2024-01-31 00:01:01'), (4, '2025-02-01 00:01:01'); + +Affected Rows: 4 + +SELECT * FROM test1; + ++---+---------------------+ +| i | j | ++---+---------------------+ +| 1 | 2024-01-30T00:01:01 | +| 2 | 2024-01-30T00:01:01 | +| 3 | 2024-01-31T00:01:01 | +| 4 | 2025-02-01T00:01:01 | ++---+---------------------+ + +SET time_zone = 'Asia/Shanghai'; + +Affected Rows: 0 + +CREATE TABLE test2 (i INTEGER, j TIMESTAMP default '2024-01-30 
00:01:01' TIME INDEX, PRIMARY KEY(i)); + +Affected Rows: 0 + +INSERT INTO test2 VALUES (1, DEFAULT), (2, DEFAULT), (3, '2024-01-31 00:01:01'), (4, '2025-02-01 00:01:01'); + +Affected Rows: 4 + +SELECT * FROM test2; + ++---+---------------------+ +| i | j | ++---+---------------------+ +| 1 | 2024-01-29T16:01:01 | +| 2 | 2024-01-29T16:01:01 | +| 3 | 2024-01-30T16:01:01 | +| 4 | 2025-01-31T16:01:01 | ++---+---------------------+ + +SELECT * FROM test1; + ++---+---------------------+ +| i | j | ++---+---------------------+ +| 1 | 2024-01-30T00:01:01 | +| 2 | 2024-01-30T00:01:01 | +| 3 | 2024-01-31T00:01:01 | +| 4 | 2025-02-01T00:01:01 | ++---+---------------------+ + +SET time_zone = 'UTC'; + +Affected Rows: 0 + +DROP TABLE test1; + +Affected Rows: 0 + +DROP TABLE test2; + +Affected Rows: 0 + diff --git a/tests/cases/standalone/common/insert/insert_default_timezone.sql b/tests/cases/standalone/common/insert/insert_default_timezone.sql new file mode 100644 index 000000000000..403b534f5a06 --- /dev/null +++ b/tests/cases/standalone/common/insert/insert_default_timezone.sql @@ -0,0 +1,23 @@ +--- insert timestamp with default values aware of session timezone test --- + +CREATE TABLE test1 (i INTEGER, j TIMESTAMP default '2024-01-30 00:01:01' TIME INDEX, PRIMARY KEY(i)); + +INSERT INTO test1 VALUES (1, DEFAULT), (2, DEFAULT), (3, '2024-01-31 00:01:01'), (4, '2025-02-01 00:01:01'); + +SELECT * FROM test1; + +SET time_zone = 'Asia/Shanghai'; + +CREATE TABLE test2 (i INTEGER, j TIMESTAMP default '2024-01-30 00:01:01' TIME INDEX, PRIMARY KEY(i)); + +INSERT INTO test2 VALUES (1, DEFAULT), (2, DEFAULT), (3, '2024-01-31 00:01:01'), (4, '2025-02-01 00:01:01'); + +SELECT * FROM test2; + +SELECT * FROM test1; + +SET time_zone = 'UTC'; + +DROP TABLE test1; + +DROP TABLE test2;
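
A minimal illustration of the behavior this patch introduces (an editor's sketch, not part of the diff): it assumes the `common_time::Timestamp` and `common_time::Timezone` APIs keep the signatures used above (`Timestamp::from_str(&str, Option<&Timezone>)`, `Timezone::from_tz_string`, `to_iso8601_string`), and mirrors the expected values from the tests in `src/sql/src/statements.rs` and `src/operator/src/expr_factory.rs`.

// Sketch only: string timestamp literals are now interpreted in the session
// timezone (when one is supplied) and normalized to UTC for storage.
use common_time::{Timestamp, Timezone};

fn main() {
    // With a "+08:00" session timezone, the literal is read as local time,
    // so the stored UTC value is eight hours earlier.
    let tz = Timezone::from_tz_string("+08:00").unwrap();
    let ts = Timestamp::from_str("2024-01-30T00:01:01", Some(&tz)).unwrap();
    assert_eq!("2024-01-29 16:01:01+0000", ts.to_iso8601_string());

    // Without a timezone (`None`), parsing falls back to UTC, matching the
    // old `Timestamp::from_str_utc` behavior.
    let ts = Timestamp::from_str("2024-01-30T00:01:01", None).unwrap();
    assert_eq!("2024-01-30 00:01:01+0000", ts.to_iso8601_string());
}

The same `Option<&Timezone>` is what `sql_value_to_value`, `parse_string_to_value`, and `column_def_to_schema` now thread through, which is why the `ts2` column added under `time_zone = 'Asia/Shanghai'` stores `2024-01-29T16:01:01` in the `alter_table_default` test above.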