feat(batch): support decimal type for iceberg type #15298

Merged · 4 commits · Feb 28, 2024
28 changes: 28 additions & 0 deletions src/common/src/array/arrow/arrow_impl.rs
@@ -160,6 +160,14 @@ macro_rules! converts_generic {
.unwrap()
.try_into()?,
)),
// This arrow decimal type is used by the iceberg source to read iceberg decimals into RW decimals.
Decimal128(_, _) => Ok(ArrayImpl::Decimal(
array
.as_any()
.downcast_ref::<arrow_array::Decimal128Array>()
.unwrap()
.try_into()?,
)),
t => Err(ArrayError::from_arrow(format!("unsupported data type: {t:?}"))),
}
}
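
For context, here is a minimal standalone sketch of the downcast pattern the new Decimal128(_, _) arm relies on, assuming only the arrow_array and arrow_schema crates; the main function and sample values are illustrative, not part of the PR. It dispatches on the type-erased array's data type and then recovers the concrete Decimal128Array via as_any, just as the match arm does.

use std::sync::Arc;

use arrow_array::{Array, ArrayRef, Decimal128Array};

fn main() {
    // Build a type-erased ArrayRef holding one decimal value: 123.45 stored as 12345 at scale 2.
    let array: ArrayRef = Arc::new(
        Decimal128Array::from(vec![Some(12345_i128)])
            .with_precision_and_scale(38, 2)
            .unwrap(),
    );
    // Same pattern as the new Decimal128(_, _) arm: inspect the data type, then
    // downcast the dyn Array back to the concrete Decimal128Array.
    if let arrow_schema::DataType::Decimal128(precision, scale) = array.data_type() {
        let decimals = array
            .as_any()
            .downcast_ref::<Decimal128Array>()
            .expect("data type says Decimal128, so the downcast cannot fail");
        println!("precision={precision}, scale={scale}, raw value={}", decimals.value(0));
    }
}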
@@ -506,6 +514,26 @@ impl From<&DecimalArray> for arrow_array::LargeBinaryArray {
}
}

// This arrow decimal type is used by the iceberg source to read iceberg decimals into RW decimals.
impl From<&arrow_array::Decimal128Array> for DecimalArray {
fn from(array: &arrow_array::Decimal128Array) -> Self {
assert!(array.scale() >= 0, "todo: support negative scale");
let from_arrow = |value| {
const NAN: i128 = i128::MIN + 1;
match value {
NAN => Decimal::NaN,
i128::MAX => Decimal::PositiveInf,
i128::MIN => Decimal::NegativeInf,
_ => Decimal::Normalized(rust_decimal::Decimal::from_i128_with_scale(
value,
array.scale() as u32,
)),
}
};
array.iter().map(|o| o.map(from_arrow)).collect()
}
}

impl TryFrom<&arrow_array::LargeBinaryArray> for DecimalArray {
type Error = ArrayError;

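A note on the value encoding the From impl above decodes: i128::MIN + 1 is read back as NaN, i128::MAX as positive infinity, i128::MIN as negative infinity, and any other value as a normalized decimal at the array's scale. The following is a minimal standalone sketch of that decoding, assuming the arrow_array and rust_decimal crates; the RwDecimal enum and decode function are hypothetical stand-ins for risingwave_common's Decimal type and the From impl, not the PR code itself.

use arrow_array::Decimal128Array;

// Hypothetical stand-in for risingwave_common::types::Decimal, just for this sketch.
#[derive(Debug)]
enum RwDecimal {
    Normalized(rust_decimal::Decimal),
    NaN,
    PositiveInf,
    NegativeInf,
}

// Mirrors the sentinel mapping in the From impl above.
fn decode(array: &Decimal128Array) -> Vec<Option<RwDecimal>> {
    assert!(array.scale() >= 0, "negative scale not handled in this sketch");
    const NAN: i128 = i128::MIN + 1;
    array
        .iter()
        .map(|slot| {
            slot.map(|value| match value {
                NAN => RwDecimal::NaN,
                i128::MAX => RwDecimal::PositiveInf,
                i128::MIN => RwDecimal::NegativeInf,
                _ => RwDecimal::Normalized(rust_decimal::Decimal::from_i128_with_scale(
                    value,
                    array.scale() as u32,
                )),
            })
        })
        .collect()
}

fn main() {
    // 12345 at scale 2 is 123.45; None is a NULL slot.
    let array = Decimal128Array::from(vec![Some(12345_i128), None])
        .with_precision_and_scale(38, 2)
        .unwrap();
    println!("{:?}", decode(&array)); // [Some(Normalized(123.45)), None]
}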
54 changes: 53 additions & 1 deletion src/frontend/src/handler/create_source.rs
@@ -17,10 +17,12 @@ use std::rc::Rc;
use std::sync::LazyLock;

use anyhow::{anyhow, Context};
use arrow_schema::{DataType as ArrowDataType, Schema as ArrowSchema};
use either::Either;
use itertools::Itertools;
use maplit::{convert_args, hashmap};
use pgwire::pg_response::{PgResponse, StatementType};
use risingwave_common::bail;
use risingwave_common::catalog::{
is_column_ids_dedup, ColumnCatalog, ColumnDesc, Schema, TableId, INITIAL_SOURCE_VERSION_ID,
KAFKA_TIMESTAMP_COLUMN_NAME,
@@ -1188,7 +1190,57 @@ pub async fn check_iceberg_source(
.collect::<Vec<_>>();
let new_iceberg_schema = arrow_schema::Schema::new(new_iceberg_field);

-risingwave_connector::sink::iceberg::try_matches_arrow_schema(&schema, &new_iceberg_schema)?;
+try_matches_arrow_schema_for_iceberg_source(&schema, &new_iceberg_schema)?;

Ok(())
}

/// Try to match our schema with iceberg schema.
pub fn try_matches_arrow_schema_for_iceberg_source(
rw_schema: &Schema,
arrow_schema: &ArrowSchema,
) -> Result<()> {
if rw_schema.fields.len() != arrow_schema.fields().len() {
bail!(
"Schema length not match, ours is {}, and iceberg is {}",
rw_schema.fields.len(),
arrow_schema.fields.len()
);
}

let mut schema_fields = HashMap::new();
rw_schema.fields.iter().for_each(|field| {
let res = schema_fields.insert(&field.name, &field.data_type);
// This assert is to make sure there is no duplicate field name in the schema.
assert!(res.is_none())
});

for arrow_field in &arrow_schema.fields {
let our_field_type = schema_fields
.get(arrow_field.name())
.ok_or_else(|| anyhow!("Field {} not found in our schema", arrow_field.name()))?;

// The iceberg source should be able to read the iceberg decimal type.
// The default arrow type conversion is shared with UDFs, where decimal is converted to
// the large binary type, which is not compatible with the iceberg decimal type,
// so we convert it to an arrow decimal type manually here.
let converted_arrow_data_type = if matches!(our_field_type, DataType::Decimal) {
// The RisingWave decimal type does not carry precision and scale, so we use a default value.
ArrowDataType::Decimal128(38, 0)
} else {
ArrowDataType::try_from(*our_field_type).map_err(|e| anyhow!(e))?
};

let compatible = match (&converted_arrow_data_type, arrow_field.data_type()) {
(ArrowDataType::Decimal128(_, _), ArrowDataType::Decimal128(_, _)) => true,
(left, right) => left == right,
};
if !compatible {
bail!("Field {}'s type not compatible, risingwave converted data type {}, iceberg's data type: {}",
arrow_field.name(), converted_arrow_data_type, arrow_field.data_type()
);
}
}

Ok(())
}
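
For illustration, a minimal standalone sketch of the decimal compatibility rule above (not the RisingWave code itself; decimal_aware_compatible is a made-up name): a RisingWave Decimal column is converted to Decimal128(38, 0), and any iceberg Decimal128(precision, scale) is then accepted regardless of precision and scale, while every other type must match exactly. It assumes only the arrow_schema crate, which the changed file already imports.

use arrow_schema::DataType as ArrowDataType;

// Hypothetical helper mirroring the compatibility match in the function above.
fn decimal_aware_compatible(ours: &ArrowDataType, iceberg: &ArrowDataType) -> bool {
    match (ours, iceberg) {
        // Precision and scale are deliberately ignored: a RisingWave decimal carries
        // neither, so any iceberg decimal column is considered compatible.
        (ArrowDataType::Decimal128(_, _), ArrowDataType::Decimal128(_, _)) => true,
        (left, right) => left == right,
    }
}

fn main() {
    let ours = ArrowDataType::Decimal128(38, 0); // what a RW Decimal converts to above
    let iceberg = ArrowDataType::Decimal128(10, 2); // e.g. an iceberg decimal(10, 2) column
    assert!(decimal_aware_compatible(&ours, &iceberg));
    assert!(!decimal_aware_compatible(&ArrowDataType::Int32, &ArrowDataType::Int64));
}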