Commit

Implement COPY TO functionality for exporting data to CSV.
Extended the COPY TO command to export table data to CSV files. Updated the binder, planner, and executor so the operator carries the source table's name and schema and the executor handles the file-writing mechanics, and added tests. Updated the README to mark VIEW support as implemented.
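
For reference, the syntax exercised by the new slt test (tests/slt/copy.slt) is:

    COPY test_copy TO '/tmp/copy.csv' ( DELIMITER ',' );

The statement returns the operator's Display output as its result, e.g. "Copy test_copy -> /tmp/copy.csv [a, b, c]".
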
loloxwg committed Nov 14, 2024
1 parent 6cfb28f commit f35986e
Showing 8 changed files with 256 additions and 11 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -192,7 +192,7 @@ let fnck_sql = DataBaseBuilder::path("./data")
- Drop
- [x] Table
- [ ] Index
- [ ] View
- [x] View
- Alter
- [x] Add Column
- [x] Drop Column
6 changes: 5 additions & 1 deletion src/binder/copy.rs
@@ -93,7 +93,11 @@ impl<T: Transaction> Binder<'_, '_, T> {
if to {
// COPY <source_table> TO <dest_file>
Ok(LogicalPlan::new(
Operator::CopyToFile(CopyToFileOperator { source: ext_source }),
Operator::CopyToFile(CopyToFileOperator {
table: table.name.to_string(),
target: ext_source,
schema_ref,
}),
vec![],
))
} else {
201 changes: 199 additions & 2 deletions src/execution/dml/copy_to_file.rs
@@ -1,6 +1,203 @@
use crate::binder::copy::FileFormat;
use crate::errors::DatabaseError;
use crate::execution::{Executor, ReadExecutor};
use crate::planner::operator::copy_to_file::CopyToFileOperator;
use crate::storage::{Iter, StatisticsMetaCache, TableCache, Transaction, ViewCache};
use crate::throw;
use crate::types::tuple::{column_names, full_columns};
use crate::types::tuple_builder::TupleBuilder;
use std::sync::Arc;

#[allow(dead_code)]
pub struct CopyToFile {
op: CopyToFileOperator,
pub op: CopyToFileOperator,
}

impl From<CopyToFileOperator> for CopyToFile {
fn from(op: CopyToFileOperator) -> Self {
CopyToFile { op }
}
}

impl<'a, T: Transaction + 'a> ReadExecutor<'a, T> for CopyToFile {
fn execute(
self,
cache: (&'a TableCache, &'a ViewCache, &'a StatisticsMetaCache),
transaction: &'a T,
) -> Executor<'a> {
Box::new(
#[coroutine]
move || {
let table_name = Arc::new(self.op.table.clone());

let mut writer = throw!(self.create_writer());

let mut iter = throw!(transaction.read(
cache.0,
table_name.clone(),
(None, None),
full_columns(&*self.op.schema_ref)
));

while let Some(tuple) = throw!(iter.next_tuple()) {
throw!(writer
.write_record(tuple.to_str_vec())
.map_err(|e| DatabaseError::from(e)));
}

throw!(writer.flush().map_err(|e| DatabaseError::from(e)));

let tuple = TupleBuilder::build_result(format!("{}", self.op));
yield Ok(tuple)
},
)
}
}

impl CopyToFile {
fn create_writer(&self) -> Result<csv::Writer<std::fs::File>, DatabaseError> {
let mut writer = match self.op.target.format {
FileFormat::Csv {
delimiter,
quote,
header,
..
} => csv::WriterBuilder::new()
.delimiter(delimiter as u8)
.quote(quote as u8)
.has_headers(header)
.from_path(self.op.target.path.clone())?,
};

if let FileFormat::Csv { header: true, .. } = self.op.target.format {
let headers = column_names(&*self.op.schema_ref);
writer.write_record(headers)?;
}

Ok(writer)
}
}

#[cfg(test)]
mod tests {
use super::*;
use crate::binder::copy::ExtSource;
use crate::catalog::{ColumnCatalog, ColumnDesc, ColumnRef, ColumnRelation, ColumnSummary};
use crate::db::DataBaseBuilder;
use crate::errors::DatabaseError;
use crate::storage::Storage;
use crate::types::LogicalType;
use sqlparser::ast::CharLengthUnits;
use std::ops::{Coroutine, CoroutineState};
use std::pin::Pin;
use std::sync::Arc;
use tempfile::TempDir;
use ulid::Ulid;

#[test]
fn read_csv() -> Result<(), DatabaseError> {
let columns = vec![
ColumnRef::from(ColumnCatalog::direct_new(
ColumnSummary {
name: "a".to_string(),
relation: ColumnRelation::Table {
column_id: Ulid::new(),
table_name: Arc::new("t1".to_string()),
is_temp: false,
},
},
false,
ColumnDesc::new(LogicalType::Integer, Some(0), false, None)?,
false,
)),
ColumnRef::from(ColumnCatalog::direct_new(
ColumnSummary {
name: "b".to_string(),
relation: ColumnRelation::Table {
column_id: Ulid::new(),
table_name: Arc::new("t1".to_string()),
is_temp: false,
},
},
false,
ColumnDesc::new(LogicalType::Float, None, false, None)?,
false,
)),
ColumnRef::from(ColumnCatalog::direct_new(
ColumnSummary {
name: "c".to_string(),
relation: ColumnRelation::Table {
column_id: Ulid::new(),
table_name: Arc::new("t1".to_string()),
is_temp: false,
},
},
false,
ColumnDesc::new(
LogicalType::Varchar(Some(10), CharLengthUnits::Characters),
None,
false,
None,
)?,
false,
)),
];

let tmp_dir = TempDir::new()?;
let file_path = tmp_dir.path().join("test.csv");

let op = CopyToFileOperator {
table: "t1".to_string(),
target: ExtSource {
path: file_path.clone(),
format: FileFormat::Csv {
delimiter: ',',
quote: '"',
escape: None,
header: true,
},
},
schema_ref: Arc::new(columns),
};

let temp_dir = TempDir::new().unwrap();
let db = DataBaseBuilder::path(temp_dir.path()).build()?;
let _ = db.run("create table t1 (a int primary key, b float, c varchar(10))");
let _ = db.run("insert into t1 values (1, 1.1, 'foo')");
let _ = db.run("insert into t1 values (2, 2.0, 'fooo')");
let _ = db.run("insert into t1 values (3, 2.1, 'fnck')");

let storage = db.storage;
let mut transaction = storage.transaction()?;

let executor = CopyToFile { op: op.clone() };
let mut coroutine = executor.execute(
(&db.table_cache, &db.view_cache, &db.meta_cache),
&mut transaction,
);

let tuple = match Pin::new(&mut coroutine).resume(()) {
CoroutineState::Yielded(tuple) => tuple,
CoroutineState::Complete(()) => unreachable!(),
}?;

let mut rdr = csv::Reader::from_path(file_path)?;
let headers = rdr.headers()?.clone();
assert_eq!(headers, vec!["a", "b", "c"]);

let mut records = rdr.records();
let record1 = records.next().unwrap()?;
assert_eq!(record1, vec!["1", "1.1", "foo"]);

let record2 = records.next().unwrap()?;
assert_eq!(record2, vec!["2", "2.0", "fooo"]);

let record3 = records.next().unwrap()?;
assert_eq!(record3, vec!["3", "2.1", "fnck"]);

assert!(records.next().is_none());

assert_eq!(tuple, TupleBuilder::build_result(format!("{}", op)));

Ok(())
}
}
7 changes: 3 additions & 4 deletions src/execution/mod.rs
@@ -15,6 +15,7 @@ use crate::execution::ddl::drop_view::DropView;
use crate::execution::ddl::truncate::Truncate;
use crate::execution::dml::analyze::Analyze;
use crate::execution::dml::copy_from_file::CopyFromFile;
use crate::execution::dml::copy_to_file::CopyToFile;
use crate::execution::dml::delete::Delete;
use crate::execution::dml::insert::Insert;
use crate::execution::dml::update::Update;
@@ -196,10 +197,8 @@ pub fn build_write<'a, T: Transaction + 'a>(
Operator::DropView(op) => DropView::from(op).execute_mut(cache, transaction),
Operator::Truncate(op) => Truncate::from(op).execute_mut(cache, transaction),
Operator::CopyFromFile(op) => CopyFromFile::from(op).execute_mut(cache, transaction),
#[warn(unused_assignments)]
Operator::CopyToFile(_op) => {
todo!()
}
Operator::CopyToFile(op) => CopyToFile::from(op).execute(cache, transaction),

Operator::Analyze(op) => {
let input = childrens.pop().unwrap();

4 changes: 3 additions & 1 deletion src/planner/mod.rs
@@ -165,7 +165,9 @@ impl LogicalPlan {
Operator::CopyFromFile(_) => SchemaOutput::Schema(vec![ColumnRef::from(
ColumnCatalog::new_dummy("COPY FROM SOURCE".to_string()),
)]),
Operator::CopyToFile(_) => todo!(),
Operator::CopyToFile(_) => SchemaOutput::Schema(vec![ColumnRef::from(
ColumnCatalog::new_dummy("COPY TO TARGET".to_string()),
)]),
}
}

27 changes: 26 additions & 1 deletion src/planner/operator/copy_to_file.rs
@@ -1,7 +1,32 @@
use crate::binder::copy::ExtSource;
use crate::types::tuple::SchemaRef;
use fnck_sql_serde_macros::ReferenceSerialization;
use itertools::Itertools;
use std::fmt;
use std::fmt::Formatter;

#[derive(Debug, PartialEq, Eq, Clone, Hash, ReferenceSerialization)]
pub struct CopyToFileOperator {
pub source: ExtSource,
pub table: String,
pub target: ExtSource,
pub schema_ref: SchemaRef,
}

impl fmt::Display for CopyToFileOperator {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
let columns = self
.schema_ref
.iter()
.map(|column| column.name().to_string())
.join(", ");
write!(
f,
"Copy {} -> {} [{}]",
self.table,
self.target.path.display(),
columns
)?;

Ok(())
}
}
13 changes: 13 additions & 0 deletions src/types/tuple.rs
@@ -25,6 +25,16 @@ pub fn types(schema: &Schema) -> Vec<LogicalType> {
.map(|column| column.datatype().clone())
.collect_vec()
}
pub fn full_columns(schema: &Schema) -> Vec<(usize, ColumnRef)> {
schema
.iter()
.enumerate()
.map(|(index, column_ref)| (index, column_ref.clone()))
.collect()
}
pub fn column_names(schema: &Schema) -> Vec<&str> {
schema.iter().map(|c| c.name()).collect_vec()
}

#[derive(Clone, Debug, PartialEq)]
pub struct Tuple {
@@ -33,6 +43,9 @@ pub struct Tuple {
}

impl Tuple {
pub fn to_str_vec(&self) -> Vec<String> {
self.values.iter().map(|v| v.to_string()).collect()
}
pub fn deserialize_from(
table_types: &[LogicalType],
id_builder: &mut TupleIdBuilder,
7 changes: 6 additions & 1 deletion tests/slt/copy.slt
@@ -11,4 +11,9 @@ query I
SELECT * FROM test_copy
----
0 1.5 one
1 2.5 two
1 2.5 two

query I
COPY test_copy TO '/tmp/copy.csv' ( DELIMITER ',' );
----
Copy test_copy -> /tmp/copy.csv [a, b, c]
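
For context, a sketch of the CSV file this statement should produce, assuming the binder defaults the CSV header option to true (the default is not shown in this diff) and that DataValue's Display formatting matches the SELECT output above:

    a,b,c
    0,1.5,one
    1,2.5,two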
