From a84d8bada74c16b38701f05d9f243b0a306bb433 Mon Sep 17 00:00:00 2001 From: Kould Date: Sun, 10 Nov 2024 03:07:27 +0800 Subject: [PATCH] feat: `Update` supports `Set` using expressions fix: - primary key value position when declaring multiple primary keys (update & insert & delete) - decimal type conversion - the `Eq` expression of `PushPredicateIntoScan` must cover all index columns before it can be used - `PrimaryKey` index now supports a composite (multi-column) index type - `Tuple` deserialization could fail when the number of projection columns differs greatly from the table schema --- .gitignore | 1 + src/binder/alter_table.rs | 2 +- src/binder/create_table.rs | 25 ++- src/binder/delete.rs | 26 +-- src/binder/expr.rs | 6 +- src/binder/mod.rs | 8 +- src/binder/update.rs | 49 ++--- src/catalog/column.rs | 18 +- src/catalog/table.rs | 34 ++-- src/db.rs | 10 +- src/errors.rs | 13 +- src/execution/ddl/drop_column.rs | 2 +- src/execution/dml/copy_from_file.rs | 6 +- src/execution/dml/insert.rs | 5 +- src/execution/dml/update.rs | 57 +++--- src/execution/dql/aggregate/hash_agg.rs | 2 +- src/execution/dql/join/hash_join.rs | 2 +- src/execution/dql/join/nested_loop_join.rs | 2 +- src/execution/dql/sort.rs | 6 +- src/execution/mod.rs | 3 +- src/expression/mod.rs | 6 +- src/function/numbers.rs | 2 +- src/lib.rs | 1 - src/optimizer/core/histogram.rs | 2 +- src/optimizer/core/memo.rs | 4 +- src/optimizer/core/statistics_meta.rs | 2 +- .../rule/implementation/dql/table_scan.rs | 2 +- .../normalization/compilation_in_advance.rs | 12 +- .../rule/normalization/pushdown_predicates.rs | 72 ++++--- .../rule/normalization/simplification.rs | 4 +- src/planner/operator/delete.rs | 2 +- src/planner/operator/mod.rs | 2 +- src/planner/operator/update.rs | 12 +- src/serdes/column.rs | 6 +- src/storage/mod.rs | 181 ++++++++++-------- src/storage/rocksdb.rs | 6 +- src/storage/table_codec.rs | 33 ++-- src/types/index.rs | 7 +- src/types/mod.rs | 25 ++- src/types/tuple.rs | 42 ++-- src/types/tuple_builder.rs | 62 ++++-- src/types/value.rs | 3 +- tests/slt/insert.slt | 15 ++ tests/slt/projection.slt | 13 ++ tests/slt/update.slt | 12 ++ 45 files changed, 486 insertions(+), 319 deletions(-) create mode 100644 tests/slt/projection.slt diff --git a/.gitignore b/.gitignore index 3255c698..8f4bce36 100644 --- a/.gitignore +++ b/.gitignore @@ -26,5 +26,6 @@ Cargo.lock fncksql_data fncksql_bench sqlite_bench +fnck_sql_tpcc tests/data/row_20000.csv \ No newline at end of file diff --git a/src/binder/alter_table.rs b/src/binder/alter_table.rs index f30a5063..bf73e5cf 100644 --- a/src/binder/alter_table.rs +++ b/src/binder/alter_table.rs @@ -30,7 +30,7 @@ impl Binder<'_, '_, T> { column_def, } => { let plan = TableScanOperator::build(table_name.clone(), table); - let column = self.bind_column(column_def)?; + let column = self.bind_column(column_def, None)?; if !is_valid_identifier(column.name()) { return Err(DatabaseError::InvalidColumn( diff --git a/src/binder/create_table.rs b/src/binder/create_table.rs index 29af3d5a..14756e64 100644 --- a/src/binder/create_table.rs +++ b/src/binder/create_table.rs @@ -47,7 +47,8 @@ impl Binder<'_, '_, T> { } let mut columns: Vec = columns .iter() - .map(|col| self.bind_column(col)) + .enumerate() + .map(|(i, col)| self.bind_column(col, Some(i))) .try_collect()?; for constraint in constraints { match constraint { @@ -56,13 +57,17 @@ impl Binder<'_, '_, T> { is_primary, ..
} => { - for column_name in column_names.iter().map(|ident| ident.value.to_lowercase()) { + for (i, column_name) in column_names + .iter() + .map(|ident| ident.value.to_lowercase()) + .enumerate() + { if let Some(column) = columns .iter_mut() .find(|column| column.name() == column_name) { if *is_primary { - column.desc_mut().set_primary(true); + column.desc_mut().set_primary(Some(i)); } else { column.desc_mut().set_unique(true); } @@ -89,11 +94,15 @@ impl Binder<'_, '_, T> { )) } - pub fn bind_column(&mut self, column_def: &ColumnDef) -> Result { + pub fn bind_column( + &mut self, + column_def: &ColumnDef, + column_index: Option, + ) -> Result { let column_name = column_def.name.value.to_lowercase(); let mut column_desc = ColumnDesc::new( LogicalType::try_from(column_def.data_type.clone())?, - false, + None, false, None, )?; @@ -106,7 +115,7 @@ impl Binder<'_, '_, T> { ColumnOption::NotNull => nullable = false, ColumnOption::Unique { is_primary, .. } => { if *is_primary { - column_desc.set_primary(true); + column_desc.set_primary(column_index); nullable = false; // Skip other options when using primary key break; @@ -184,7 +193,7 @@ mod tests { debug_assert_eq!(op.columns[0].nullable(), false); debug_assert_eq!( op.columns[0].desc(), - &ColumnDesc::new(LogicalType::Integer, true, false, None)? + &ColumnDesc::new(LogicalType::Integer, Some(0), false, None)? ); debug_assert_eq!(op.columns[1].name(), "name"); debug_assert_eq!(op.columns[1].nullable(), true); @@ -192,7 +201,7 @@ mod tests { op.columns[1].desc(), &ColumnDesc::new( LogicalType::Varchar(Some(10), CharLengthUnits::Characters), - false, + None, false, None )? diff --git a/src/binder/delete.rs b/src/binder/delete.rs index 4e1e1e94..b166c943 100644 --- a/src/binder/delete.rs +++ b/src/binder/delete.rs @@ -5,6 +5,7 @@ use crate::planner::operator::table_scan::TableScanOperator; use crate::planner::operator::Operator; use crate::planner::LogicalPlan; use crate::storage::Transaction; +use itertools::Itertools; use sqlparser::ast::{Expr, TableAlias, TableFactor, TableWithJoins}; use std::sync::Arc; @@ -23,20 +24,19 @@ impl Binder<'_, '_, T> { table_alias = Some(Arc::new(name.value.to_lowercase())); alias_idents = Some(columns); } - let source = self + let Source::Table(table) = self .context - .source_and_bind(table_name.clone(), table_alias.as_ref(), None, false)? - .ok_or(DatabaseError::SourceNotFound)?; - let schema_buf = self.table_schema_buf.entry(table_name.clone()).or_default(); - let primary_key_column = source - .columns(schema_buf) - .find(|column| column.desc().is_primary()) - .cloned() - .unwrap(); - let mut plan = match source { - Source::Table(table) => TableScanOperator::build(table_name.clone(), table), - Source::View(view) => LogicalPlan::clone(&view.plan), + .source_and_bind(table_name.clone(), table_alias.as_ref(), None, true)? + .ok_or(DatabaseError::TableNotFound)? 
+ else { + unreachable!() }; + let primary_keys = table + .primary_keys() + .iter() + .map(|(_, column)| column.clone()) + .collect_vec(); + let mut plan = TableScanOperator::build(table_name.clone(), table); if let Some(alias_idents) = alias_idents { plan = @@ -50,7 +50,7 @@ impl Binder<'_, '_, T> { Ok(LogicalPlan::new( Operator::Delete(DeleteOperator { table_name, - primary_key_column, + primary_keys, }), vec![plan], )) diff --git a/src/binder/expr.rs b/src/binder/expr.rs index 869dccd4..e3c5181e 100644 --- a/src/binder/expr.rs +++ b/src/binder/expr.rs @@ -333,7 +333,7 @@ impl<'a, T: Transaction> Binder<'a, '_, T> { Ok(ScalarExpression::ColumnRef( source .column(&full_name.1, schema_buf) - .ok_or_else(|| DatabaseError::NotFound("column", full_name.1.to_string()))?, + .ok_or_else(|| DatabaseError::ColumnNotFound(full_name.1.to_string()))?, )) } else { let op = @@ -373,7 +373,7 @@ impl<'a, T: Transaction> Binder<'a, '_, T> { if let Some(parent) = self.parent { op(&mut got_column, &parent.context, &mut self.table_schema_buf); } - Ok(got_column.ok_or(DatabaseError::NotFound("column", full_name.1))?) + Ok(got_column.ok_or(DatabaseError::ColumnNotFound(full_name.1))?) } } @@ -621,7 +621,7 @@ impl<'a, T: Transaction> Binder<'a, '_, T> { })); } - Err(DatabaseError::NotFound("function", summary.name)) + Err(DatabaseError::FunctionNotFound(summary.name)) } fn return_type( diff --git a/src/binder/mod.rs b/src/binder/mod.rs index 102e97e0..ed6225bf 100644 --- a/src/binder/mod.rs +++ b/src/binder/mod.rs @@ -569,12 +569,12 @@ pub mod test { ColumnCatalog::new( "c1".to_string(), false, - ColumnDesc::new(Integer, true, false, None)?, + ColumnDesc::new(Integer, Some(0), false, None)?, ), ColumnCatalog::new( "c2".to_string(), false, - ColumnDesc::new(Integer, false, true, None)?, + ColumnDesc::new(Integer, None, true, None)?, ), ], false, @@ -587,12 +587,12 @@ pub mod test { ColumnCatalog::new( "c3".to_string(), false, - ColumnDesc::new(Integer, true, false, None)?, + ColumnDesc::new(Integer, Some(0), false, None)?, ), ColumnCatalog::new( "c4".to_string(), false, - ColumnDesc::new(Integer, false, false, None)?, + ColumnDesc::new(Integer, None, false, None)?, ), ], false, diff --git a/src/binder/update.rs b/src/binder/update.rs index d75e5f98..0cd5f00e 100644 --- a/src/binder/update.rs +++ b/src/binder/update.rs @@ -26,13 +26,13 @@ impl Binder<'_, '_, T> { if let Some(predicate) = selection { plan = self.bind_where(plan, predicate)?; } + let mut value_exprs = Vec::with_capacity(assignments.len()); - let mut schema = Vec::with_capacity(assignments.len()); - let mut row = Vec::with_capacity(assignments.len()); - + if assignments.is_empty() { + return Err(DatabaseError::ColumnsEmpty); + } for Assignment { id, value } in assignments { - let mut expression = self.bind_expr(value)?; - expression.constant_calculation()?; + let expression = self.bind_expr(value)?; for ident in id { match self.bind_column_ref_from_identifiers( @@ -40,38 +40,27 @@ impl Binder<'_, '_, T> { Some(table_name.to_string()), )? { ScalarExpression::ColumnRef(column) => { - match &expression { - ScalarExpression::Constant(value) => { - let ty = column.datatype(); - // Check if the value length is too long - value.check_len(ty)?; - - let mut value = value.clone(); - if value.logical_type() != *ty { - value = value.cast(ty)?; - } - row.push(value); - } - ScalarExpression::Empty => { - let default_value = column - .default_value()? 
- .ok_or(DatabaseError::DefaultNotExist)?; - row.push(default_value); - } - _ => return Err(DatabaseError::UnsupportedStmt(value.to_string())), - } - schema.push(column); + let expr = if matches!(expression, ScalarExpression::Empty) { + let default_value = column + .default_value()? + .ok_or(DatabaseError::DefaultNotExist)?; + ScalarExpression::Constant(default_value) + } else { + expression.clone() + }; + value_exprs.push((column, expr)); } _ => return Err(DatabaseError::InvalidColumn(ident.to_string())), } } } self.context.allow_default = false; - let values_plan = self.bind_values(vec![row], Arc::new(schema)); - Ok(LogicalPlan::new( - Operator::Update(UpdateOperator { table_name }), - vec![plan, values_plan], + Operator::Update(UpdateOperator { + table_name, + value_exprs, + }), + vec![plan], )) } else { unreachable!("only table") diff --git a/src/catalog/column.rs b/src/catalog/column.rs index 555923ea..93a9e967 100644 --- a/src/catalog/column.rs +++ b/src/catalog/column.rs @@ -100,7 +100,7 @@ impl ColumnCatalog { // SAFETY: default expr must not be [`ScalarExpression::ColumnRef`] desc: ColumnDesc::new( LogicalType::Varchar(None, CharLengthUnits::Characters), - false, + None, false, None, ) @@ -187,7 +187,7 @@ impl ColumnCatalog { #[derive(Debug, Clone, PartialEq, Eq, Hash, ReferenceSerialization)] pub struct ColumnDesc { pub(crate) column_datatype: LogicalType, - is_primary: bool, + primary: Option, is_unique: bool, pub(crate) default: Option, } @@ -195,7 +195,7 @@ pub struct ColumnDesc { impl ColumnDesc { pub fn new( column_datatype: LogicalType, - is_primary: bool, + primary: Option, is_unique: bool, default: Option, ) -> Result { @@ -207,18 +207,22 @@ impl ColumnDesc { Ok(ColumnDesc { column_datatype, - is_primary, + primary, is_unique, default, }) } + pub(crate) fn primary(&self) -> Option { + self.primary + } + pub(crate) fn is_primary(&self) -> bool { - self.is_primary + self.primary.is_some() } - pub(crate) fn set_primary(&mut self, is_primary: bool) { - self.is_primary = is_primary + pub(crate) fn set_primary(&mut self, is_primary: Option) { + self.primary = is_primary } pub(crate) fn is_unique(&self) -> bool { diff --git a/src/catalog/table.rs b/src/catalog/table.rs index 5966fc51..0736df82 100644 --- a/src/catalog/table.rs +++ b/src/catalog/table.rs @@ -169,13 +169,7 @@ impl TableCatalog { .add_column(col_catalog, &mut generator) .unwrap(); } - table_catalog.primary_keys = table_catalog - .schema_ref - .iter() - .enumerate() - .filter(|&(_, column)| column.desc().is_primary()) - .map(|(i, column)| (i, column.clone())) - .collect_vec(); + table_catalog.primary_keys = Self::build_primary_keys(&table_catalog.schema_ref); Ok(table_catalog) } @@ -197,12 +191,7 @@ impl TableCatalog { columns.insert(column_id, i); } let schema_ref = Arc::new(column_refs.clone()); - let primary_keys = schema_ref - .iter() - .enumerate() - .filter(|&(_, column)| column.desc().is_primary()) - .map(|(i, column)| (i, column.clone())) - .collect_vec(); + let primary_keys = Self::build_primary_keys(&schema_ref); Ok(TableCatalog { name, @@ -213,6 +202,21 @@ impl TableCatalog { primary_keys, }) } + + fn build_primary_keys(schema_ref: &Arc>) -> Vec<(usize, ColumnRef)> { + schema_ref + .iter() + .enumerate() + .filter_map(|(i, column)| { + column + .desc() + .primary() + .map(|p_i| (p_i, (i, column.clone()))) + }) + .sorted_by_key(|(p_i, _)| *p_i) + .map(|(_, entry)| entry) + .collect_vec() + } } impl TableMeta { @@ -236,12 +240,12 @@ mod tests { let col0 = ColumnCatalog::new( "a".into(), false, - 
ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), ); let col1 = ColumnCatalog::new( "b".into(), false, - ColumnDesc::new(LogicalType::Boolean, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Boolean, None, false, None).unwrap(), ); let col_catalogs = vec![col0, col1]; let table_catalog = TableCatalog::new(Arc::new("test".to_string()), col_catalogs).unwrap(); diff --git a/src/db.rs b/src/db.rs index 394f3bc1..a3372e1b 100644 --- a/src/db.rs +++ b/src/db.rs @@ -342,17 +342,17 @@ pub(crate) mod test { ColumnCatalog::new( "c1".to_string(), false, - ColumnDesc::new(LogicalType::Integer, true, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, Some(0), false, None).unwrap(), ), ColumnCatalog::new( "c2".to_string(), false, - ColumnDesc::new(LogicalType::Boolean, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Boolean, None, false, None).unwrap(), ), ColumnCatalog::new( "c3".to_string(), false, - ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), ), ]; let _ = @@ -389,7 +389,7 @@ pub(crate) mod test { Arc::new(vec![ColumnRef::from(ColumnCatalog::new( "current_date()".to_string(), true, - ColumnDesc::new(LogicalType::Date, false, false, None).unwrap() + ColumnDesc::new(LogicalType::Date, None, false, None).unwrap() ))]) ); debug_assert_eq!( @@ -415,7 +415,7 @@ pub(crate) mod test { let mut column = ColumnCatalog::new( "number".to_string(), true, - ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), ); let number_column_id = schema[0].id().unwrap(); column.set_ref_table(Arc::new("a".to_string()), number_column_id, false); diff --git a/src/errors.rs b/src/errors.rs index 27993afa..86f0f48d 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -1,4 +1,5 @@ use crate::expression::{BinaryOperator, UnaryOperator}; +use crate::types::tuple::TupleId; use crate::types::LogicalType; use chrono::ParseError; use sqlparser::parser::ParserError; @@ -24,6 +25,10 @@ pub enum DatabaseError { ChannelClose, #[error("columns empty")] ColumnsEmpty, + #[error("column id: {0} not found")] + ColumnIdNotFound(String), + #[error("column: {0} not found")] + ColumnNotFound(String), #[error("csv error: {0}")] Csv( #[from] @@ -42,6 +47,8 @@ pub enum DatabaseError { DuplicatePrimaryKey, #[error("the column has been declared unique and the value already exists")] DuplicateUniqueValue, + #[error("function: {0} not found")] + FunctionNotFound(String), #[error("empty plan")] EmptyPlan, #[error("sql statement is empty")] @@ -78,8 +85,6 @@ pub enum DatabaseError { NoTransactionBegin, #[error("cannot be Null")] NotNull, - #[error("{0} not found: {1}")] - NotFound(&'static str, String), #[error("parser bool: {0}")] ParseBool( #[source] @@ -122,7 +127,7 @@ pub enum DatabaseError { ), #[error("the number of caches cannot be divisible by the number of shards")] SharedNotAlign, - #[error("the view not found")] + #[error("the table or view not found")] SourceNotFound, #[error("the table already exists")] TableExists, @@ -144,6 +149,8 @@ pub enum DatabaseError { ), #[error("too long")] TooLong, + #[error("tuple id: {0} not found")] + TupleIdNotFound(TupleId), #[error("there are more buckets: {0} than elements: {1}")] TooManyBuckets(usize, usize), #[error("unsupported unary operator: {0} cannot support {1} for calculations")] diff --git 
a/src/execution/ddl/drop_column.rs b/src/execution/ddl/drop_column.rs index d7f3ff0d..0183ca3d 100644 --- a/src/execution/ddl/drop_column.rs +++ b/src/execution/ddl/drop_column.rs @@ -75,7 +75,7 @@ impl<'a, T: Transaction + 'a> WriteExecutor<'a, T> for DropColumn { } else if if_exists { return; } else { - yield Err(DatabaseError::NotFound("drop column", column_name)); + yield Err(DatabaseError::ColumnNotFound(column_name)); } }, ) diff --git a/src/execution/dml/copy_from_file.rs b/src/execution/dml/copy_from_file.rs index 8e12ea1a..e65986de 100644 --- a/src/execution/dml/copy_from_file.rs +++ b/src/execution/dml/copy_from_file.rs @@ -141,7 +141,7 @@ mod tests { }, }, false, - ColumnDesc::new(LogicalType::Integer, true, false, None)?, + ColumnDesc::new(LogicalType::Integer, Some(0), false, None)?, false, )), ColumnRef::from(ColumnCatalog::direct_new( @@ -154,7 +154,7 @@ mod tests { }, }, false, - ColumnDesc::new(LogicalType::Float, false, false, None)?, + ColumnDesc::new(LogicalType::Float, None, false, None)?, false, )), ColumnRef::from(ColumnCatalog::direct_new( @@ -169,7 +169,7 @@ mod tests { false, ColumnDesc::new( LogicalType::Varchar(Some(10), CharLengthUnits::Characters), - false, + None, false, None, )?, diff --git a/src/execution/dml/insert.rs b/src/execution/dml/insert.rs index 7d74fc82..c5b2b70a 100644 --- a/src/execution/dml/insert.rs +++ b/src/execution/dml/insert.rs @@ -81,8 +81,9 @@ impl<'a, T: Transaction + 'a> WriteExecutor<'a, T> for Insert { let primary_keys = schema .iter() - .filter(|&col| col.desc().is_primary()) - .map(|col| col.key(is_mapping_by_name)) + .filter_map(|column| column.desc().primary().map(|i| (i, column))) + .sorted_by_key(|(i, _)| *i) + .map(|(_, col)| col.key(is_mapping_by_name)) .collect_vec(); if primary_keys.is_empty() { throw!(Err(DatabaseError::NotNull)) diff --git a/src/execution/dml/update.rs b/src/execution/dml/update.rs index 5a14bdcf..30a5a1e3 100644 --- a/src/execution/dml/update.rs +++ b/src/execution/dml/update.rs @@ -1,6 +1,7 @@ -use crate::catalog::TableName; +use crate::catalog::{ColumnRef, TableName}; use crate::execution::dql::projection::Projection; use crate::execution::{build_read, Executor, WriteExecutor}; +use crate::expression::ScalarExpression; use crate::planner::operator::update::UpdateOperator; use crate::planner::LogicalPlan; use crate::storage::{StatisticsMetaCache, TableCache, Transaction, ViewCache}; @@ -8,8 +9,7 @@ use crate::throw; use crate::types::index::Index; use crate::types::tuple::types; use crate::types::tuple::Tuple; -use crate::types::tuple_builder::TupleBuilder; -use crate::types::value::DataValue; +use crate::types::tuple_builder::{TupleBuilder, TupleIdBuilder}; use std::collections::HashMap; use std::ops::Coroutine; use std::ops::CoroutineState; @@ -17,18 +17,24 @@ use std::pin::Pin; pub struct Update { table_name: TableName, + value_exprs: Vec<(ColumnRef, ScalarExpression)>, input: LogicalPlan, - values: LogicalPlan, } -impl From<(UpdateOperator, LogicalPlan, LogicalPlan)> for Update { +impl From<(UpdateOperator, LogicalPlan)> for Update { fn from( - (UpdateOperator { table_name }, input, values): (UpdateOperator, LogicalPlan, LogicalPlan), + ( + UpdateOperator { + table_name, + value_exprs, + }, + input, + ): (UpdateOperator, LogicalPlan), ) -> Self { Update { table_name, + value_exprs, input, - values, } } } @@ -44,30 +50,23 @@ impl<'a, T: Transaction + 'a> WriteExecutor<'a, T> for Update { move || { let Update { table_name, + value_exprs, mut input, - mut values, } = self; - let values_schema = 
values.output_schema().clone(); + let mut exprs_map = HashMap::with_capacity(value_exprs.len()); + for (column, expr) in value_exprs { + exprs_map.insert(column.id(), expr); + } + let input_schema = input.output_schema().clone(); let types = types(&input_schema); if let Some(table_catalog) = throw!(transaction.table(cache.0, table_name.clone())).cloned() { - let mut value_map = HashMap::new(); let mut tuples = Vec::new(); - // only once - let mut coroutine = build_read(values, cache, transaction); - - while let CoroutineState::Yielded(tuple) = Pin::new(&mut coroutine).resume(()) { - let Tuple { values, .. } = throw!(tuple); - for i in 0..values.len() { - value_map.insert(values_schema[i].id(), values[i].clone()); - } - } - drop(coroutine); let mut coroutine = build_read(input, cache, transaction); while let CoroutineState::Yielded(tuple) = Pin::new(&mut coroutine).resume(()) { @@ -92,23 +91,21 @@ impl<'a, T: Transaction + 'a> WriteExecutor<'a, T> for Update { } index_metas.push((index_meta, exprs)); } + let mut id_builder = TupleIdBuilder::new(&input_schema); + for mut tuple in tuples { let mut is_overwrite = true; - let mut primary_keys = Vec::new(); + for (i, column) in input_schema.iter().enumerate() { - if let Some(value) = value_map.get(&column.id()) { + if let Some(expr) = exprs_map.get(&column.id()) { + let value = throw!(expr.eval(&tuple, &input_schema)); if column.desc().is_primary() { - primary_keys.push(value.clone()); + id_builder.append(value.clone()); } - tuple.values[i] = value.clone(); + tuple.values[i] = value; } } - if !primary_keys.is_empty() { - let id = if primary_keys.len() == 1 { - primary_keys.pop().unwrap() - } else { - DataValue::Tuple(Some(primary_keys)) - }; + if let Some(id) = id_builder.build() { if &id != tuple.id.as_ref().unwrap() { let old_key = tuple.id.replace(id).unwrap(); diff --git a/src/execution/dql/aggregate/hash_agg.rs b/src/execution/dql/aggregate/hash_agg.rs index 0666e47a..803c4485 100644 --- a/src/execution/dql/aggregate/hash_agg.rs +++ b/src/execution/dql/aggregate/hash_agg.rs @@ -190,7 +190,7 @@ mod test { let temp_dir = TempDir::new().expect("unable to create temporary working directory"); let storage = RocksStorage::new(temp_dir.path()).unwrap(); let transaction = storage.transaction()?; - let desc = ColumnDesc::new(LogicalType::Integer, false, false, None)?; + let desc = ColumnDesc::new(LogicalType::Integer, None, false, None)?; let t1_schema = Arc::new(vec![ ColumnRef::from(ColumnCatalog::new("c1".to_string(), true, desc.clone())), diff --git a/src/execution/dql/join/hash_join.rs b/src/execution/dql/join/hash_join.rs index 9d812808..62ae7ca3 100644 --- a/src/execution/dql/join/hash_join.rs +++ b/src/execution/dql/join/hash_join.rs @@ -443,7 +443,7 @@ mod test { LogicalPlan, LogicalPlan, ) { - let desc = ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(); + let desc = ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(); let t1_columns = vec![ ColumnRef::from(ColumnCatalog::new("c1".to_string(), true, desc.clone())), diff --git a/src/execution/dql/join/nested_loop_join.rs b/src/execution/dql/join/nested_loop_join.rs index 12e58411..c88a1f22 100644 --- a/src/execution/dql/join/nested_loop_join.rs +++ b/src/execution/dql/join/nested_loop_join.rs @@ -414,7 +414,7 @@ mod test { LogicalPlan, ScalarExpression, ) { - let desc = ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(); + let desc = ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(); let t1_columns = vec![ 
ColumnRef::from(ColumnCatalog::new("c1".to_string(), true, desc.clone())), diff --git a/src/execution/dql/sort.rs b/src/execution/dql/sort.rs index d93e5eec..176bc4f4 100644 --- a/src/execution/dql/sort.rs +++ b/src/execution/dql/sort.rs @@ -307,7 +307,7 @@ mod test { let schema = Arc::new(vec![ColumnRef::from(ColumnCatalog::new( "c1".to_string(), true, - ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), ))]); let tuples = NullableVec(vec![ Some(( @@ -476,12 +476,12 @@ mod test { ColumnRef::from(ColumnCatalog::new( "c1".to_string(), true, - ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c2".to_string(), true, - ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), )), ]); let tuples = NullableVec(vec![ diff --git a/src/execution/mod.rs b/src/execution/mod.rs index bf783963..24fa8195 100644 --- a/src/execution/mod.rs +++ b/src/execution/mod.rs @@ -167,10 +167,9 @@ pub fn build_write<'a, T: Transaction + 'a>( Insert::from((op, input)).execute_mut(cache, transaction) } Operator::Update(op) => { - let values = childrens.pop().unwrap(); let input = childrens.pop().unwrap(); - Update::from((op, input, values)).execute_mut(cache, transaction) + Update::from((op, input)).execute_mut(cache, transaction) } Operator::Delete(op) => { let input = childrens.pop().unwrap(); diff --git a/src/expression/mod.rs b/src/expression/mod.rs index ef8d4ffc..4b1b5249 100644 --- a/src/expression/mod.rs +++ b/src/expression/mod.rs @@ -1169,7 +1169,7 @@ impl ScalarExpression { self.output_name(), true, // SAFETY: default expr must not be [`ScalarExpression::ColumnRef`] - ColumnDesc::new(self.return_type(), false, false, None).unwrap(), + ColumnDesc::new(self.return_type(), None, false, None).unwrap(), )), } } @@ -1392,7 +1392,7 @@ mod test { }, }, false, - ColumnDesc::new(LogicalType::Integer, false, false, None)?, + ColumnDesc::new(LogicalType::Integer, None, false, None)?, false, ))), Some((&transaction, &table_cache)), @@ -1406,7 +1406,7 @@ mod test { relation: ColumnRelation::None, }, false, - ColumnDesc::new(LogicalType::Boolean, false, false, None)?, + ColumnDesc::new(LogicalType::Boolean, None, false, None)?, false, ))), Some((&transaction, &table_cache)), diff --git a/src/function/numbers.rs b/src/function/numbers.rs index 8d475a64..c6b942fa 100644 --- a/src/function/numbers.rs +++ b/src/function/numbers.rs @@ -21,7 +21,7 @@ lazy_static! 
{ vec![ColumnCatalog::new( "number".to_lowercase(), true, - ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), )], ) .unwrap() diff --git a/src/lib.rs b/src/lib.rs index f0d320c8..dc316679 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -92,7 +92,6 @@ #![feature(coroutine_trait)] #![feature(iterator_try_collect)] #![feature(slice_pattern)] -#![feature(is_sorted)] #![feature(stmt_expr_attributes)] extern crate core; diff --git a/src/optimizer/core/histogram.rs b/src/optimizer/core/histogram.rs index f2ca702b..ca3062ca 100644 --- a/src/optimizer/core/histogram.rs +++ b/src/optimizer/core/histogram.rs @@ -475,7 +475,7 @@ mod tests { table_name: Arc::new("t1".to_string()), pk_ty: LogicalType::Integer, name: "pk_c1".to_string(), - ty: IndexType::PrimaryKey, + ty: IndexType::PrimaryKey { is_multiple: false }, } } diff --git a/src/optimizer/core/memo.rs b/src/optimizer/core/memo.rs index c1986ee1..e1fe8202 100644 --- a/src/optimizer/core/memo.rs +++ b/src/optimizer/core/memo.rs @@ -186,8 +186,8 @@ mod tests { column_ids: vec![*c1_column_id], table_name: Arc::new("t1".to_string()), pk_ty: LogicalType::Integer, - name: "pk_c1".to_string(), - ty: IndexType::PrimaryKey, + name: "pk_index".to_string(), + ty: IndexType::PrimaryKey { is_multiple: false }, }), range: Some(Range::SortedRanges(vec![ Range::Eq(DataValue::Int32(Some(2))), diff --git a/src/optimizer/core/statistics_meta.rs b/src/optimizer/core/statistics_meta.rs index 76dc4591..b266ce12 100644 --- a/src/optimizer/core/statistics_meta.rs +++ b/src/optimizer/core/statistics_meta.rs @@ -124,7 +124,7 @@ mod tests { table_name: Arc::new("t1".to_string()), pk_ty: LogicalType::Integer, name: "pk_c1".to_string(), - ty: IndexType::PrimaryKey, + ty: IndexType::PrimaryKey { is_multiple: false }, }; let mut builder = HistogramBuilder::new(&index, Some(15))?; diff --git a/src/optimizer/rule/implementation/dql/table_scan.rs b/src/optimizer/rule/implementation/dql/table_scan.rs index 92802171..c6d7435a 100644 --- a/src/optimizer/rule/implementation/dql/table_scan.rs +++ b/src/optimizer/rule/implementation/dql/table_scan.rs @@ -83,7 +83,7 @@ impl ImplementationRule for IndexScanImplementation { { let mut row_count = statistics_meta.collect_count(range)?; - if !matches!(index_info.meta.ty, IndexType::PrimaryKey) { + if !matches!(index_info.meta.ty, IndexType::PrimaryKey { .. 
}) { // need to return table query(non-covering index) row_count *= 2; } diff --git a/src/optimizer/rule/normalization/compilation_in_advance.rs b/src/optimizer/rule/normalization/compilation_in_advance.rs index 4f4ec4d2..19ab9e98 100644 --- a/src/optimizer/rule/normalization/compilation_in_advance.rs +++ b/src/optimizer/rule/normalization/compilation_in_advance.rs @@ -89,6 +89,11 @@ impl ExpressionRemapper { expr.try_reference(output_exprs); } } + Operator::Update(op) => { + for (_, expr) in op.value_exprs.iter_mut() { + expr.try_reference(output_exprs); + } + } Operator::Dummy | Operator::TableScan(_) | Operator::Limit(_) @@ -97,7 +102,6 @@ impl ExpressionRemapper { | Operator::Explain | Operator::Describe(_) | Operator::Insert(_) - | Operator::Update(_) | Operator::Delete(_) | Operator::Analyze(_) | Operator::AddColumn(_) @@ -191,6 +195,11 @@ impl EvaluatorBind { expr.bind_evaluator()?; } } + Operator::Update(op) => { + for (_, expr) in op.value_exprs.iter_mut() { + expr.bind_evaluator()?; + } + } Operator::Dummy | Operator::TableScan(_) | Operator::Limit(_) @@ -199,7 +208,6 @@ impl EvaluatorBind { | Operator::Explain | Operator::Describe(_) | Operator::Insert(_) - | Operator::Update(_) | Operator::Delete(_) | Operator::Analyze(_) | Operator::AddColumn(_) diff --git a/src/optimizer/rule/normalization/pushdown_predicates.rs b/src/optimizer/rule/normalization/pushdown_predicates.rs index 146ea53d..4dce53ec 100644 --- a/src/optimizer/rule/normalization/pushdown_predicates.rs +++ b/src/optimizer/rule/normalization/pushdown_predicates.rs @@ -1,6 +1,6 @@ use crate::catalog::ColumnRef; use crate::errors::DatabaseError; -use crate::expression::range_detacher::RangeDetacher; +use crate::expression::range_detacher::{Range, RangeDetacher}; use crate::expression::{BinaryOperator, ScalarExpression}; use crate::optimizer::core::pattern::Pattern; use crate::optimizer::core::pattern::PatternChildrenPredicate; @@ -9,7 +9,7 @@ use crate::optimizer::heuristic::graph::{HepGraph, HepNodeId}; use crate::planner::operator::filter::FilterOperator; use crate::planner::operator::join::JoinType; use crate::planner::operator::Operator; -use crate::types::index::{IndexInfo, IndexType}; +use crate::types::index::{IndexInfo, IndexMetaRef, IndexType}; use crate::types::LogicalType; use itertools::Itertools; use lazy_static::lazy_static; @@ -225,35 +225,16 @@ impl NormalizationRule for PushPredicateIntoScan { continue; } *range = match meta.ty { - IndexType::PrimaryKey | IndexType::Unique | IndexType::Normal => { + IndexType::PrimaryKey { is_multiple: false } + | IndexType::Unique + | IndexType::Normal => { RangeDetacher::new(meta.table_name.as_str(), &meta.column_ids[0]) .detach(&op.predicate) } - IndexType::Composite => { - let mut res = None; - let mut eq_ranges = Vec::with_capacity(meta.column_ids.len()); - - for column_id in meta.column_ids.iter() { - if let Some(range) = - RangeDetacher::new(meta.table_name.as_str(), column_id) - .detach(&op.predicate) - { - if range.only_eq() { - eq_ranges.push(range); - continue; - } - res = range.combining_eqs(&eq_ranges); - } - break; - } - if res.is_none() { - if let Some(range) = eq_ranges.pop() { - res = range.combining_eqs(&eq_ranges); - } - } - res + IndexType::PrimaryKey { is_multiple: true } | IndexType::Composite => { + Self::composite_range(&op, meta) } - } + }; } } } @@ -263,6 +244,41 @@ impl NormalizationRule for PushPredicateIntoScan { } } +impl PushPredicateIntoScan { + fn composite_range(op: &FilterOperator, meta: &mut IndexMetaRef) -> Option { + let mut res = 
None; + let mut eq_ranges = Vec::with_capacity(meta.column_ids.len()); + let mut apply_column_count = 0; + + for column_id in meta.column_ids.iter() { + if let Some(range) = + RangeDetacher::new(meta.table_name.as_str(), column_id).detach(&op.predicate) + { + apply_column_count += 1; + + if range.only_eq() { + eq_ranges.push(range); + continue; + } + res = range.combining_eqs(&eq_ranges); + } + break; + } + if res.is_none() { + if let Some(range) = eq_ranges.pop() { + res = range.combining_eqs(&eq_ranges); + } + } + res.and_then(|range| { + if range.only_eq() && apply_column_count != meta.column_ids.len() { + None + } else { + Some(range) + } + }) + } +} + #[cfg(test)] mod tests { use crate::binder::test::build_t1_table; @@ -303,7 +319,7 @@ mod tests { max: Bound::Unbounded, }; - debug_assert_eq!(op.index_infos[1].range, Some(mock_range)); + debug_assert_eq!(op.index_infos[0].range, Some(mock_range)); } else { unreachable!("Should be a filter operator") } diff --git a/src/optimizer/rule/normalization/simplification.rs b/src/optimizer/rule/normalization/simplification.rs index 3202260c..4bfc74b0 100644 --- a/src/optimizer/rule/normalization/simplification.rs +++ b/src/optimizer/rule/normalization/simplification.rs @@ -260,7 +260,7 @@ mod test { }, }, false, - ColumnDesc::new(LogicalType::Integer, true, false, None)?, + ColumnDesc::new(LogicalType::Integer, Some(0), false, None)?, false, ); let c2_col = ColumnCatalog::direct_new( @@ -273,7 +273,7 @@ mod test { }, }, false, - ColumnDesc::new(LogicalType::Integer, false, true, None)?, + ColumnDesc::new(LogicalType::Integer, None, true, None)?, false, ); diff --git a/src/planner/operator/delete.rs b/src/planner/operator/delete.rs index 1d5eabf2..cab2ae16 100644 --- a/src/planner/operator/delete.rs +++ b/src/planner/operator/delete.rs @@ -7,7 +7,7 @@ use std::fmt::Formatter; pub struct DeleteOperator { pub table_name: TableName, // for column pruning - pub primary_key_column: ColumnRef, + pub primary_keys: Vec, } impl fmt::Display for DeleteOperator { diff --git a/src/planner/operator/mod.rs b/src/planner/operator/mod.rs index 4b26bc23..467b3883 100644 --- a/src/planner/operator/mod.rs +++ b/src/planner/operator/mod.rs @@ -232,7 +232,7 @@ impl Operator { .cloned() .collect_vec(), Operator::Analyze(_) => vec![], - Operator::Delete(op) => vec![op.primary_key_column.clone()], + Operator::Delete(op) => op.primary_keys.clone(), Operator::Dummy | Operator::Limit(_) | Operator::Show diff --git a/src/planner/operator/update.rs b/src/planner/operator/update.rs index 37d97626..89a4d817 100644 --- a/src/planner/operator/update.rs +++ b/src/planner/operator/update.rs @@ -1,16 +1,24 @@ -use crate::catalog::TableName; +use crate::catalog::{ColumnRef, TableName}; +use crate::expression::ScalarExpression; use fnck_sql_serde_macros::ReferenceSerialization; +use itertools::Itertools; use std::fmt; use std::fmt::Formatter; #[derive(Debug, PartialEq, Eq, Clone, Hash, ReferenceSerialization)] pub struct UpdateOperator { pub table_name: TableName, + pub value_exprs: Vec<(ColumnRef, ScalarExpression)>, } impl fmt::Display for UpdateOperator { fn fmt(&self, f: &mut Formatter) -> fmt::Result { - write!(f, "Update {}", self.table_name)?; + let values = self + .value_exprs + .iter() + .map(|(column, expr)| format!("{} -> {}", column.full_name(), expr)) + .join(", "); + write!(f, "Update {} set {}", self.table_name, values)?; Ok(()) } diff --git a/src/serdes/column.rs b/src/serdes/column.rs index 45a8446b..7a7670e7 100644 --- a/src/serdes/column.rs +++ 
b/src/serdes/column.rs @@ -190,7 +190,7 @@ pub(crate) mod test { }, }, false, - ColumnDesc::new(LogicalType::Integer, false, false, None)?, + ColumnDesc::new(LogicalType::Integer, None, false, None)?, false, ))); @@ -225,7 +225,7 @@ pub(crate) mod test { false, ColumnDesc::new( LogicalType::Integer, - false, + None, false, Some(ScalarExpression::Constant(DataValue::UInt64(Some(42)))), )?, @@ -313,7 +313,7 @@ pub(crate) mod test { let mut reference_tables = ReferenceTables::new(); let desc = ColumnDesc::new( LogicalType::Integer, - false, + None, false, Some(ScalarExpression::Constant(DataValue::UInt64(Some(42)))), )?; diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 71de5b0c..f4657a1e 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -153,7 +153,7 @@ pub trait Transaction: Sized { index: Index, tuple_id: &TupleId, ) -> Result<(), DatabaseError> { - if matches!(index.ty, IndexType::PrimaryKey) { + if matches!(index.ty, IndexType::PrimaryKey { .. }) { return Ok(()); } let (key, value) = TableCodec::encode_index(table_name, &index, tuple_id)?; @@ -178,7 +178,7 @@ pub trait Transaction: Sized { index: &Index, tuple_id: Option<&TupleId>, ) -> Result<(), DatabaseError> { - if matches!(index.ty, IndexType::PrimaryKey) { + if matches!(index.ty, IndexType::PrimaryKey { .. }) { return Ok(()); } self.remove(&TableCodec::encode_index_key(table_name, index, tuple_id)?)?; @@ -532,32 +532,36 @@ pub trait Transaction: Sized { table: &mut TableCatalog, ) -> Result<(), DatabaseError> { let table_name = table.name.clone(); - let index_column = table - .columns() - .filter(|column| column.desc().is_primary() || column.desc().is_unique()) - .map(|column| (column.id().unwrap(), column.clone())) - .collect_vec(); + let mut primary_keys = Vec::new(); - for (col_id, col) in index_column { - let is_primary = col.desc().is_primary(); - let index_ty = if is_primary { - IndexType::PrimaryKey + // FIXME: no clone + for col in table.columns().cloned().collect_vec() { + let col_id = col.id().unwrap(); + let index_ty = if let Some(i) = col.desc().primary() { + primary_keys.push((i, col_id)); + continue; } else if col.desc().is_unique() { IndexType::Unique } else { continue; }; - // FIXME: composite indexes may exist on future - let prefix = if is_primary { "pk" } else { "uk" }; - - let meta_ref = table.add_index_meta( - format!("{}_{}", prefix, col.name()), - vec![col_id], - index_ty, - )?; + let meta_ref = + table.add_index_meta(format!("uk_{}_index", col.name()), vec![col_id], index_ty)?; let (key, value) = TableCodec::encode_index_meta(&table_name, meta_ref)?; self.set(key, value)?; } + let primary_keys = table + .primary_keys() + .iter() + .map(|(_, column)| column.id().unwrap()) + .collect_vec(); + let pk_index_ty = IndexType::PrimaryKey { + is_multiple: primary_keys.len() != 1, + }; + let meta_ref = table.add_index_meta("pk_index".to_string(), primary_keys, pk_index_ty)?; + let (key, value) = TableCodec::encode_index_meta(&table_name, meta_ref)?; + self.set(key, value)?; + Ok(()) } @@ -607,7 +611,7 @@ enum IndexImplEnum { impl IndexImplEnum { fn instance(index_type: IndexType) -> IndexImplEnum { match index_type { - IndexType::PrimaryKey => IndexImplEnum::PrimaryKey(PrimaryKeyIndexImpl), + IndexType::PrimaryKey { .. 
} => IndexImplEnum::PrimaryKey(PrimaryKeyIndexImpl), IndexType::Unique => IndexImplEnum::Unique(UniqueIndexImpl), IndexType::Normal => IndexImplEnum::Normal(NormalIndexImpl), IndexType::Composite => IndexImplEnum::Composite(CompositeIndexImpl), @@ -631,6 +635,21 @@ struct IndexImplParams<'a, T: Transaction> { } impl IndexImplParams<'_, T> { + pub(crate) fn pk_ty(&self) -> &LogicalType { + &self.index_meta.pk_ty + } + + pub(crate) fn try_cast(&self, mut val: DataValue) -> Result { + let pk_ty = self.pk_ty(); + + if matches!(self.index_meta.ty, IndexType::PrimaryKey { .. }) + && &val.logical_type() != pk_ty + { + val = val.cast(pk_ty)?; + } + Ok(val) + } + fn get_tuple_by_id(&self, tuple_id: &TupleId) -> Result, DatabaseError> { let key = TableCodec::encode_tuple_key(self.table_name, tuple_id)?; @@ -646,7 +665,7 @@ impl IndexImplParams<'_, T> { } enum IndexResult<'a, T: Transaction + 'a> { - Tuple(Tuple), + Tuple(Option), Scope(T::IterType<'a>), } @@ -711,18 +730,17 @@ impl IndexImpl for PrimaryKeyIndexImpl { value: &DataValue, params: &IndexImplParams<'a, T>, ) -> Result, DatabaseError> { - let bytes = params + let tuple = params .tx .get(&TableCodec::encode_tuple_key(params.table_name, value)?)? - .ok_or_else(|| { - DatabaseError::NotFound("secondary index", format!("tuple_id -> {}", value)) - })?; - let tuple = TableCodec::decode_tuple( - ¶ms.table_types, - ¶ms.projections, - ¶ms.tuple_schema_ref, - &bytes, - ); + .map(|bytes| { + TableCodec::decode_tuple( + ¶ms.table_types, + ¶ms.projections, + ¶ms.tuple_schema_ref, + &bytes, + ) + }); Ok(IndexResult::Tuple(tuple)) } @@ -743,7 +761,7 @@ fn secondary_index_lookup( let tuple_id = TableCodec::decode_index(bytes, ¶ms.index_meta.pk_ty)?; params .get_tuple_by_id(&tuple_id)? - .ok_or_else(|| DatabaseError::NotFound("index's tuple_id", tuple_id.to_string())) + .ok_or(DatabaseError::TupleIdNotFound(tuple_id)) } impl IndexImpl for UniqueIndexImpl { @@ -760,17 +778,14 @@ impl IndexImpl for UniqueIndexImpl { value: &DataValue, params: &IndexImplParams<'a, T>, ) -> Result, DatabaseError> { - let bytes = params - .tx - .get(&self.bound_key(params, value, false)?)? - .ok_or_else(|| { - DatabaseError::NotFound("secondary index", format!("index_value -> {}", value)) - })?; + let Some(bytes) = params.tx.get(&self.bound_key(params, value, false)?)? else { + return Ok(IndexResult::Tuple(None)); + }; let tuple_id = TableCodec::decode_index(&bytes, ¶ms.index_meta.pk_ty)?; - let tuple = params.get_tuple_by_id(&tuple_id)?.ok_or_else(|| { - DatabaseError::NotFound("secondary index", format!("tuple_id -> {}", value)) - })?; - Ok(IndexResult::Tuple(tuple)) + let tuple = params + .get_tuple_by_id(&tuple_id)? 
+ .ok_or(DatabaseError::TupleIdNotFound(tuple_id))?; + Ok(IndexResult::Tuple(Some(tuple))) } fn bound_key( @@ -976,24 +991,33 @@ impl Iter for IndexIter<'_, T> { let bound_encode = |bound: Bound, is_upper: bool| -> Result<_, DatabaseError> { match bound { - Bound::Included(val) => Ok(Bound::Included(self.inner.bound_key( - &self.params, - &val, - is_upper, - )?)), - Bound::Excluded(val) => Ok(Bound::Excluded(self.inner.bound_key( - &self.params, - &val, - is_upper, - )?)), + Bound::Included(mut val) => { + val = self.params.try_cast(val)?; + + Ok(Bound::Included(self.inner.bound_key( + &self.params, + &val, + is_upper, + )?)) + } + Bound::Excluded(mut val) => { + val = self.params.try_cast(val)?; + + Ok(Bound::Excluded(self.inner.bound_key( + &self.params, + &val, + is_upper, + )?)) + } Bound::Unbounded => Ok(Bound::Unbounded), } }; - let (bound_min, bound_max) = if matches!(index_meta.ty, IndexType::PrimaryKey) { - TableCodec::tuple_bound(table_name) - } else { - TableCodec::index_bound(table_name, &index_meta.id)? - }; + let (bound_min, bound_max) = + if matches!(index_meta.ty, IndexType::PrimaryKey { .. }) { + TableCodec::tuple_bound(table_name) + } else { + TableCodec::index_bound(table_name, &index_meta.id)? + }; let check_bound = |value: &mut Bound>, bound: Vec| { if matches!(value, Bound::Unbounded) { let _ = mem::replace(value, Bound::Included(bound)); @@ -1012,16 +1036,20 @@ impl Iter for IndexIter<'_, T> { )?; self.scope_iter = Some(iter); } - Range::Eq(val) => match self.inner.eq_to_res(&val, &self.params)? { - IndexResult::Tuple(tuple) => { - if Self::offset_move(&mut self.offset) { - return self.next_tuple(); + Range::Eq(mut val) => { + val = self.params.try_cast(val)?; + + match self.inner.eq_to_res(&val, &self.params)? { + IndexResult::Tuple(tuple) => { + if Self::offset_move(&mut self.offset) { + return self.next_tuple(); + } + Self::limit_sub(&mut self.limit); + return Ok(tuple); } - Self::limit_sub(&mut self.limit); - return Ok(Some(tuple)); + IndexResult::Scope(iter) => self.scope_iter = Some(iter), } - IndexResult::Scope(iter) => self.scope_iter = Some(iter), - }, + } _ => (), } } @@ -1068,7 +1096,7 @@ mod test { ColumnRef::from(ColumnCatalog::new( "c1".to_string(), false, - ColumnDesc::new(LogicalType::Integer, true, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, Some(0), false, None).unwrap(), )), ), ( @@ -1076,7 +1104,7 @@ mod test { ColumnRef::from(ColumnCatalog::new( "c2".to_string(), false, - ColumnDesc::new(LogicalType::Boolean, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Boolean, None, false, None).unwrap(), )), ), ( @@ -1084,7 +1112,7 @@ mod test { ColumnRef::from(ColumnCatalog::new( "c3".to_string(), false, - ColumnDesc::new(LogicalType::Integer, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, None, false, None).unwrap(), )), ), ] @@ -1148,8 +1176,11 @@ mod test { Arc::new("t1".to_string()) ); assert_eq!(primary_key_index_meta.pk_ty, LogicalType::Integer); - assert_eq!(primary_key_index_meta.name, "pk_c1".to_string()); - assert_eq!(primary_key_index_meta.ty, IndexType::PrimaryKey); + assert_eq!(primary_key_index_meta.name, "pk_index".to_string()); + assert_eq!( + primary_key_index_meta.ty, + IndexType::PrimaryKey { is_multiple: false } + ); let mut column_iter = table.columns(); let c1_column = column_iter.next().unwrap(); @@ -1167,7 +1198,7 @@ mod test { ); assert_eq!( c1_column.desc(), - &ColumnDesc::new(LogicalType::Integer, true, false, None)? 
+ &ColumnDesc::new(LogicalType::Integer, Some(0), false, None)? ); let c2_column = column_iter.next().unwrap(); @@ -1185,7 +1216,7 @@ mod test { ); assert_eq!( c2_column.desc(), - &ColumnDesc::new(LogicalType::Boolean, false, false, None)? + &ColumnDesc::new(LogicalType::Boolean, None, false, None)? ); let c3_column = column_iter.next().unwrap(); @@ -1203,7 +1234,7 @@ mod test { ); assert_eq!( c3_column.desc(), - &ColumnDesc::new(LogicalType::Integer, false, false, None)? + &ColumnDesc::new(LogicalType::Integer, None, false, None)? ); Ok(()) @@ -1495,7 +1526,7 @@ mod test { let new_column = ColumnCatalog::new( "c4".to_string(), true, - ColumnDesc::new(LogicalType::Integer, false, false, None)?, + ColumnDesc::new(LogicalType::Integer, None, false, None)?, ); let new_column_id = transaction.add_column(&table_cache, &table_name, &new_column, false)?; @@ -1517,7 +1548,7 @@ mod test { let mut new_column = ColumnCatalog::new( "c4".to_string(), true, - ColumnDesc::new(LogicalType::Integer, false, false, None)?, + ColumnDesc::new(LogicalType::Integer, None, false, None)?, ); new_column.summary_mut().relation = ColumnRelation::Table { column_id: *table.get_column_id_by_name("c4").unwrap(), diff --git a/src/storage/rocksdb.rs b/src/storage/rocksdb.rs index 6bf48cbb..3d9b3693 100644 --- a/src/storage/rocksdb.rs +++ b/src/storage/rocksdb.rs @@ -160,12 +160,12 @@ mod test { ColumnRef::from(ColumnCatalog::new( "c1".to_string(), false, - ColumnDesc::new(LogicalType::Integer, true, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, Some(0), false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c2".to_string(), false, - ColumnDesc::new(LogicalType::Boolean, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Boolean, None, false, None).unwrap(), )), ]); @@ -255,7 +255,7 @@ mod test { table_name, pk_ty: LogicalType::Integer, name: "pk_a".to_string(), - ty: IndexType::PrimaryKey, + ty: IndexType::PrimaryKey { is_multiple: false }, }), table_name: &table.name, table_types: table.types(), diff --git a/src/storage/table_codec.rs b/src/storage/table_codec.rs index 2fe3eb63..29c34564 100644 --- a/src/storage/table_codec.rs +++ b/src/storage/table_codec.rs @@ -342,7 +342,7 @@ impl TableCodec { if let Some(tuple_id) = tuple_id { if matches!(index.ty, IndexType::Normal | IndexType::Composite) { - tuple_id.to_raw(&mut key_prefix)?; + tuple_id.inner_encode(&mut key_prefix, &tuple_id.logical_type())?; } } Ok(key_prefix) @@ -352,10 +352,7 @@ impl TableCodec { bytes: &[u8], primary_key_ty: &LogicalType, ) -> Result { - DataValue::inner_decode( - &mut Cursor::new(bytes), - primary_key_ty, - ) + DataValue::inner_decode(&mut Cursor::new(bytes), primary_key_ty) } /// Key: {TableName}{COLUMN_TAG}{BOUND_MIN_TAG}{ColumnId} @@ -517,12 +514,12 @@ mod tests { ColumnCatalog::new( "c1".into(), false, - ColumnDesc::new(LogicalType::Integer, true, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, Some(0), false, None).unwrap(), ), ColumnCatalog::new( "c2".into(), false, - ColumnDesc::new(LogicalType::Decimal(None, None), false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Decimal(None, None), None, false, None).unwrap(), ), ]; TableCatalog::new(Arc::new("t1".to_string()), columns).unwrap() @@ -584,7 +581,7 @@ mod tests { table_name: Arc::new("T1".to_string()), pk_ty: LogicalType::Integer, name: "index_1".to_string(), - ty: IndexType::PrimaryKey, + ty: IndexType::PrimaryKey { is_multiple: false }, }; let (_, bytes) = TableCodec::encode_index_meta(&"T1".to_string(), &index_meta)?; @@ 
-600,7 +597,11 @@ mod tests { fn test_table_codec_index() -> Result<(), DatabaseError> { let table_catalog = build_table_codec(); let value = Arc::new(DataValue::Int32(Some(0))); - let index = Index::new(0, slice::from_ref(&value), IndexType::PrimaryKey); + let index = Index::new( + 0, + slice::from_ref(&value), + IndexType::PrimaryKey { is_multiple: false }, + ); let tuple_id = DataValue::Int32(Some(0)); let (_, bytes) = TableCodec::encode_index(&table_catalog.name, &index, &tuple_id)?; @@ -617,7 +618,7 @@ mod tests { let mut col: ColumnCatalog = ColumnCatalog::new( "c2".to_string(), false, - ColumnDesc::new(LogicalType::Boolean, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Boolean, None, false, None).unwrap(), ); col.summary_mut().relation = ColumnRelation::Table { column_id: Ulid::new(), @@ -699,7 +700,7 @@ mod tests { let mut col = ColumnCatalog::new( "".to_string(), false, - ColumnDesc::new(LogicalType::SqlNull, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::SqlNull, None, false, None).unwrap(), ); col.summary_mut().relation = ColumnRelation::Table { @@ -751,8 +752,8 @@ mod tests { column_ids: vec![], table_name: Arc::new(table_name.to_string()), pk_ty: LogicalType::Integer, - name: "".to_string(), - ty: IndexType::PrimaryKey, + name: format!("{}_index", index_id), + ty: IndexType::PrimaryKey { is_multiple: false }, }; let (key, _) = @@ -794,7 +795,7 @@ mod tests { let column = ColumnCatalog::new( "".to_string(), false, - ColumnDesc::new(LogicalType::Boolean, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Boolean, None, false, None).unwrap(), ); let table_catalog = TableCatalog::new(Arc::new("T0".to_string()), vec![column]).unwrap(); @@ -803,7 +804,7 @@ mod tests { let index = Index::new( index_id as u32, slice::from_ref(&value), - IndexType::PrimaryKey, + IndexType::PrimaryKey { is_multiple: false }, ); TableCodec::encode_index_key(table_name, &index, None).unwrap() @@ -859,7 +860,7 @@ mod tests { let index = Index::new( index_id as u32, slice::from_ref(&value), - IndexType::PrimaryKey, + IndexType::PrimaryKey { is_multiple: false }, ); TableCodec::encode_index_key(&table_name.to_string(), &index, None).unwrap() diff --git a/src/types/index.rs b/src/types/index.rs index f9e9b83a..bc6b6d38 100644 --- a/src/types/index.rs +++ b/src/types/index.rs @@ -14,7 +14,7 @@ pub type IndexMetaRef = Arc; #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, ReferenceSerialization)] pub enum IndexType { - PrimaryKey, + PrimaryKey { is_multiple: bool }, Unique, Normal, Composite, @@ -47,10 +47,7 @@ impl IndexMeta { if let Some(column) = table.get_column_by_id(column_id) { exprs.push(ScalarExpression::ColumnRef(column.clone())); } else { - return Err(DatabaseError::NotFound( - "Column by id", - column_id.to_string(), - )); + return Err(DatabaseError::ColumnNotFound(column_id.to_string())); } } Ok(exprs) diff --git a/src/types/mod.rs b/src/types/mod.rs index 6be2cd5f..5715aaa9 100644 --- a/src/types/mod.rs +++ b/src/types/mod.rs @@ -152,6 +152,7 @@ impl LogicalType { | LogicalType::UBigint | LogicalType::Float | LogicalType::Double + | LogicalType::Decimal(_, _) ) } @@ -280,6 +281,7 @@ impl LogicalType { | LogicalType::Bigint | LogicalType::Float | LogicalType::Double + | LogicalType::Decimal(_, _) ), LogicalType::UTinyint => matches!( to, @@ -291,6 +293,7 @@ impl LogicalType { | LogicalType::Bigint | LogicalType::Float | LogicalType::Double + | LogicalType::Decimal(_, _) ), LogicalType::Smallint => matches!( to, @@ -298,6 +301,7 @@ impl LogicalType { | 
LogicalType::Bigint | LogicalType::Float | LogicalType::Double + | LogicalType::Decimal(_, _) ), LogicalType::USmallint => matches!( to, @@ -307,10 +311,14 @@ impl LogicalType { | LogicalType::Bigint | LogicalType::Float | LogicalType::Double + | LogicalType::Decimal(_, _) ), LogicalType::Integer => matches!( to, - LogicalType::Bigint | LogicalType::Float | LogicalType::Double + LogicalType::Bigint + | LogicalType::Float + | LogicalType::Double + | LogicalType::Decimal(_, _) ), LogicalType::UInteger => matches!( to, @@ -318,10 +326,17 @@ impl LogicalType { | LogicalType::Bigint | LogicalType::Float | LogicalType::Double + | LogicalType::Decimal(_, _) + ), + LogicalType::Bigint => matches!( + to, + LogicalType::Float | LogicalType::Double | LogicalType::Decimal(_, _) + ), + LogicalType::UBigint => matches!( + to, + LogicalType::Float | LogicalType::Double | LogicalType::Decimal(_, _) ), - LogicalType::Bigint => matches!(to, LogicalType::Float | LogicalType::Double), - LogicalType::UBigint => matches!(to, LogicalType::Float | LogicalType::Double), - LogicalType::Float => matches!(to, LogicalType::Double), + LogicalType::Float => matches!(to, LogicalType::Double | LogicalType::Decimal(_, _)), LogicalType::Double => false, LogicalType::Char(..) => false, LogicalType::Varchar(..) => false, @@ -377,7 +392,7 @@ impl TryFrom for LogicalType { char_unit.unwrap_or(CharLengthUnits::Characters), )) } - sqlparser::ast::DataType::String => { + sqlparser::ast::DataType::String | sqlparser::ast::DataType::Text => { Ok(LogicalType::Varchar(None, CharLengthUnits::Characters)) } sqlparser::ast::DataType::Float(_) => Ok(LogicalType::Float), diff --git a/src/types/tuple.rs b/src/types/tuple.rs index c2bd58f8..6c9ac67f 100644 --- a/src/types/tuple.rs +++ b/src/types/tuple.rs @@ -3,7 +3,6 @@ use crate::errors::DatabaseError; use crate::types::value::DataValue; use crate::types::LogicalType; use comfy_table::{Cell, Table}; -use integer_encoding::FixedInt; use itertools::Itertools; use lazy_static::lazy_static; use std::sync::Arc; @@ -50,7 +49,7 @@ impl Tuple { bits & (1 << (7 - i)) > 0 } - let values_len = schema.len(); + let values_len = table_types.len(); let mut tuple_values = Vec::with_capacity(values_len); let bits_len = (values_len + BITS_MAX_INDEX) / BITS_MAX_INDEX; let mut primary_keys = Vec::new(); @@ -59,7 +58,7 @@ impl Tuple { let mut pos = bits_len; for (i, logic_type) in table_types.iter().enumerate() { - if projection_i >= values_len { + if projection_i >= values_len || projection_i > projections.len() - 1 { break; } if is_none(bytes[i / BITS_MAX_INDEX], i % BITS_MAX_INDEX) { @@ -76,7 +75,8 @@ impl Tuple { pos += len; } else { /// variable length (e.g.: varchar) - let len = u32::decode_fixed(&bytes[pos..pos + 4]) as usize; + let le_bytes: [u8; 4] = bytes[pos..pos + 4].try_into().unwrap(); + let len = u32::from_le_bytes(le_bytes) as usize; pos += 4; if projections[projection_i] == i { tuple_values.push(DataValue::from_raw(&bytes[pos..pos + len], logic_type)); @@ -134,7 +134,7 @@ impl Tuple { if logical_type.raw_len().is_none() { let index = bytes.len() - value_len; - bytes.splice(index..index, (value_len as u32).encode_fixed_vec()); + bytes.splice(index..index, (value_len as u32).to_le_bytes()); } } } @@ -188,19 +188,19 @@ mod tests { ColumnRef::from(ColumnCatalog::new( "c1".to_string(), false, - ColumnDesc::new(LogicalType::Integer, true, false, None).unwrap(), + ColumnDesc::new(LogicalType::Integer, Some(0), false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c2".to_string(), false, - 
ColumnDesc::new(LogicalType::UInteger, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::UInteger, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c3".to_string(), false, ColumnDesc::new( LogicalType::Varchar(Some(2), CharLengthUnits::Characters), - false, + None, false, None, ) @@ -209,59 +209,59 @@ mod tests { ColumnRef::from(ColumnCatalog::new( "c4".to_string(), false, - ColumnDesc::new(LogicalType::Smallint, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Smallint, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c5".to_string(), false, - ColumnDesc::new(LogicalType::USmallint, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::USmallint, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c6".to_string(), false, - ColumnDesc::new(LogicalType::Float, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Float, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c7".to_string(), false, - ColumnDesc::new(LogicalType::Double, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Double, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c8".to_string(), false, - ColumnDesc::new(LogicalType::Tinyint, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Tinyint, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c9".to_string(), false, - ColumnDesc::new(LogicalType::UTinyint, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::UTinyint, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c10".to_string(), false, - ColumnDesc::new(LogicalType::Boolean, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Boolean, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c11".to_string(), false, - ColumnDesc::new(LogicalType::DateTime, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::DateTime, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c12".to_string(), false, - ColumnDesc::new(LogicalType::Date, false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Date, None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c13".to_string(), false, - ColumnDesc::new(LogicalType::Decimal(None, None), false, false, None).unwrap(), + ColumnDesc::new(LogicalType::Decimal(None, None), None, false, None).unwrap(), )), ColumnRef::from(ColumnCatalog::new( "c14".to_string(), false, ColumnDesc::new( LogicalType::Char(1, CharLengthUnits::Characters), - false, + None, false, None, ) @@ -272,7 +272,7 @@ mod tests { false, ColumnDesc::new( LogicalType::Varchar(Some(2), CharLengthUnits::Octets), - false, + None, false, None, ) @@ -283,7 +283,7 @@ mod tests { false, ColumnDesc::new( LogicalType::Char(1, CharLengthUnits::Octets), - false, + None, false, None, ) diff --git a/src/types/tuple_builder.rs b/src/types/tuple_builder.rs index 443b1986..a5dc784b 100644 --- a/src/types/tuple_builder.rs +++ b/src/types/tuple_builder.rs @@ -1,12 +1,56 @@ use crate::errors::DatabaseError; -use crate::types::tuple::{Schema, Tuple}; +use crate::types::tuple::{Schema, Tuple, TupleId}; use crate::types::value::{DataValue, Utf8Type}; +use itertools::Itertools; use sqlparser::ast::CharLengthUnits; +pub(crate) struct TupleIdBuilder { + primary_indexes: Vec, + tmp_keys: Vec>, +} + pub struct TupleBuilder<'a> { schema: &'a Schema, } +impl TupleIdBuilder { + pub(crate) fn new(schema: &Schema) -> Self { + let primary_indexes = schema + .iter() + .filter_map(|column| 
column.desc().primary()) + .enumerate() + .sorted_by_key(|(_, p_i)| *p_i) + .map(|(i, _)| i) + .collect_vec(); + let tmp_keys = Vec::with_capacity(primary_indexes.len()); + Self { + primary_indexes, + tmp_keys, + } + } + + pub(crate) fn append(&mut self, value: DataValue) { + self.tmp_keys.push(Some(value)); + } + + pub(crate) fn build(&mut self) -> Option { + (!self.tmp_keys.is_empty()).then(|| { + if self.tmp_keys.len() == 1 { + self.tmp_keys.pop().unwrap().unwrap() + } else { + let mut primary_keys = Vec::new(); + + for i in self.primary_indexes.iter() { + primary_keys.push(self.tmp_keys[*i].take().unwrap()); + } + self.tmp_keys.clear(); + + DataValue::Tuple(Some(primary_keys)) + } + }) + } +} + impl<'a> TupleBuilder<'a> { pub fn new(schema: &'a Schema) -> Self { TupleBuilder { schema } @@ -27,7 +71,7 @@ impl<'a> TupleBuilder<'a> { row: impl IntoIterator, ) -> Result { let mut values = Vec::with_capacity(self.schema.len()); - let mut primary_keys = Vec::new(); + let mut id_builder = TupleIdBuilder::new(self.schema); for (i, value) in row.into_iter().enumerate() { let data_value = DataValue::Utf8 { @@ -38,21 +82,17 @@ impl<'a> TupleBuilder<'a> { .cast(self.schema[i].datatype())?; if self.schema[i].desc().is_primary() { - primary_keys.push(data_value.clone()); + id_builder.append(data_value.clone()); } values.push(data_value); } if values.len() != self.schema.len() { return Err(DatabaseError::MisMatch("types", "values")); } - let id = (!primary_keys.is_empty()).then(|| { - if primary_keys.len() == 1 { - primary_keys.pop().unwrap() - } else { - DataValue::Tuple(Some(primary_keys)) - } - }); - Ok(Tuple { id, values }) + Ok(Tuple { + id: id_builder.build(), + values, + }) } } diff --git a/src/types/value.rs b/src/types/value.rs index 337b0128..039a6e47 100644 --- a/src/types/value.rs +++ b/src/types/value.rs @@ -355,7 +355,8 @@ impl DataValue { ) => Self::check_string_len(val, *len as usize, CharLengthUnits::Octets), (LogicalType::Decimal(full_len, scale_len), DataValue::Decimal(Some(val))) => { if let Some(len) = full_len { - if val.mantissa().ilog10() + 1 > *len as u32 { + let mantissa = val.mantissa().abs(); + if mantissa != 0 && mantissa.ilog10() + 1 > *len as u32 { return Err(DatabaseError::TooLong); } } diff --git a/tests/slt/insert.slt b/tests/slt/insert.slt index 1c004e13..2382f64c 100644 --- a/tests/slt/insert.slt +++ b/tests/slt/insert.slt @@ -99,3 +99,18 @@ true statement ok drop table t2; + +statement ok +create table t3_decimal(id int primary key, v1 decimal(5, 2)); + +statement ok +insert into t3_decimal (id, v1) values (0, 1); + +statement ok +insert into t3_decimal (id, v1) values (1, 2.2); + +statement ok +insert into t3_decimal (id, v1) values (2, -10); + +statement ok +drop table t3_decimal; diff --git a/tests/slt/projection.slt b/tests/slt/projection.slt new file mode 100644 index 00000000..94a356a7 --- /dev/null +++ b/tests/slt/projection.slt @@ -0,0 +1,13 @@ +statement ok +create table wide_table(id int primary key, v1 int, v2 int, v3 varchar, v4 decimal(5, 2), v5 date, v6 float, v7 datetime, v8 boolean, v9 text, v10 int, v11 int); + +statement ok +insert into wide_table values(0, 1, 2, 'hello', 0.11, '2024-11-10', 2.1, '2024-11-10', true, 'sql best', 3, 3); + +query II +select id, v1 from wide_table; +---- +0 1 + +statement ok +drop table wide_table diff --git a/tests/slt/update.slt b/tests/slt/update.slt index 590c223d..acf234da 100644 --- a/tests/slt/update.slt +++ b/tests/slt/update.slt @@ -43,5 +43,17 @@ select * from t 3 3 9 233 4 4 9 233 +statement ok +update t 
set v3 = v1 + 1 + +query IIII rowsort +select * from t +---- +0 1 9 2 +1 1 9 2 +2 2 9 3 +3 3 9 4 +4 4 9 5 + statement ok drop table t