Merge branch 'main' into xzhseh/sql-udf-semantic-check
xzhseh authored Jan 22, 2024
2 parents 50e01a0 + 705be19 commit 862a8fc
Showing 237 changed files with 2,354 additions and 1,355 deletions.
311 changes: 178 additions & 133 deletions Cargo.lock

Large diffs are not rendered by default.

24 changes: 13 additions & 11 deletions Cargo.toml
@@ -124,16 +124,17 @@ prost = { version = "0.12" }
icelake = { git = "https://github.com/icelake-io/icelake", rev = "32c0bbf242f5c47b1e743f10577012fe7436c770", features = [
"prometheus",
] }
arrow-array = "49"
arrow-arith = "49"
arrow-cast = "49"
arrow-schema = "49"
arrow-buffer = "49"
arrow-flight = "49"
arrow-select = "49"
arrow-ord = "49"
arrow-row = "49"
arrow-udf-wasm = { git = "https://github.com/risingwavelabs/arrow-udf.git", rev = "f9a9e0d" }
arrow-array = "50"
arrow-arith = "50"
arrow-cast = "50"
arrow-schema = "50"
arrow-buffer = "50"
arrow-flight = "50"
arrow-select = "50"
arrow-ord = "50"
arrow-row = "50"
arrow-udf-js = { git = "https://github.com/risingwavelabs/arrow-udf.git", rev = "7ba1c22" }

arrow-udf-wasm = "0.1"
arrow-array-deltalake = { package = "arrow-array", version = "48.0.1" }
arrow-buffer-deltalake = { package = "arrow-buffer", version = "48.0.1" }
arrow-cast-deltalake = { package = "arrow-cast", version = "48.0.1" }
@@ -143,7 +144,7 @@ arrow-schema-deltalake = { package = "arrow-schema", version = "48.0.1" }
deltalake = { git = "https://github.com/risingwavelabs/delta-rs", rev = "5c2dccd4640490202ffe98adbd13b09cef8e007b", features = [
"s3-no-concurrent-write",
] }
parquet = "49"
parquet = "50"
thiserror-ext = "0.0.11"
tikv-jemalloc-ctl = { git = "https://github.com/risingwavelabs/jemallocator.git", rev = "64a2d9" }
tikv-jemallocator = { git = "https://github.com/risingwavelabs/jemallocator.git", features = [
@@ -239,6 +240,7 @@ lto = "thin"
[profile.ci-release]
inherits = "release"
incremental = false
lto = "off"
debug = "line-tables-only"
split-debuginfo = "off"
debug-assertions = true
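For reference, with the two Cargo.toml hunks above applied, the CI release profile reads roughly as follows (a sketch assembled only from the lines shown in this diff; any keys outside these hunks are assumed unchanged):

[profile.ci-release]
inherits = "release"
incremental = false
lto = "off"                # added by this commit
debug = "line-tables-only"
split-debuginfo = "off"
debug-assertions = true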
4 changes: 2 additions & 2 deletions ci/rust-toolchain
@@ -1,7 +1,7 @@
# To update toolchain, do the following:
# 1. update this file
# 2. update lints/rust-toolchain, lints/Cargo.toml
# 3. update ci/build-ci-image.sh and ci/docker-compose.yml to build a new CI image
# 2. update ci/build-ci-image.sh and ci/docker-compose.yml to build a new CI image
# 3. (optional) **follow the instructions in lints/README.md** to update the toolchain and dependencies for lints

[toolchain]
channel = "nightly-2023-12-26"
3 changes: 2 additions & 1 deletion ci/scripts/e2e-clickhouse-sink-test.sh
@@ -35,7 +35,7 @@ sleep 2

echo "--- testing sinks"
sqllogictest -p 4566 -d dev './e2e_test/sink/clickhouse_sink.slt'
sleep 1
sleep 5
./clickhouse client --host=clickhouse-server --port=9000 --query="select * from demo_test FORMAT CSV;" > ./query_result.csv


@@ -52,6 +52,7 @@ if ($1 == 1 && $2 == 50 && $3 == "\"1-50\"") c1++;
echo "Clickhouse sink check passed"
else
echo "The output is not as expected."
cat ./query_result.csv
exit 1
fi

2 changes: 1 addition & 1 deletion ci/workflows/main-cron.yml
@@ -17,7 +17,7 @@ steps:
run: rw-build-env
config: ci/docker-compose.yml
mount-buildkite-agent: true
timeout_in_minutes: 30
timeout_in_minutes: 20
retry: *auto-retry

- label: "build other components"
8 changes: 4 additions & 4 deletions docker/docker-compose-with-hdfs.yml
@@ -2,7 +2,7 @@
version: "3"
services:
compactor-0:
image: ghcr.io/risingwavelabs/risingwave:RisingWave_v1.5.4_HDFS_2.7-x86_64
image: ghcr.io/risingwavelabs/risingwave:RisingWave_1.6.0_HDFS_2.7-x86_64
command:
- compactor-node
- "--listen-addr"
@@ -42,7 +42,7 @@ services:
reservations:
memory: 1G
compute-node-0:
image: "ghcr.io/risingwavelabs/risingwave:RisingWave_v1.5.4_HDFS_2.7-x86_64"
image: "ghcr.io/risingwavelabs/risingwave:RisingWave_1.6.0_HDFS_2.7-x86_64"
command:
- compute-node
- "--listen-addr"
@@ -132,7 +132,7 @@ services:
retries: 5
restart: always
frontend-node-0:
image: "ghcr.io/risingwavelabs/risingwave:RisingWave_v1.5.4_HDFS_2.7-x86_64"
image: "ghcr.io/risingwavelabs/risingwave:RisingWave_1.6.0_HDFS_2.7-x86_64"
command:
- frontend-node
- "--listen-addr"
@@ -195,7 +195,7 @@ services:
retries: 5
restart: always
meta-node-0:
image: "ghcr.io/risingwavelabs/risingwave:RisingWave_v1.5.4_HDFS_2.7-x86_64"
image: "ghcr.io/risingwavelabs/risingwave:RisingWave_1.6.0_HDFS_2.7-x86_64"
command:
- meta-node
- "--listen-addr"
3 changes: 2 additions & 1 deletion e2e_test/iceberg/start_spark_connect_server.sh
@@ -15,11 +15,12 @@ tar -xzf $SPARK_FILE --no-same-owner
./spark-${SPARK_VERSION}-bin-hadoop3/sbin/start-connect-server.sh --packages $PACKAGES \
--master local[3] \
--conf spark.driver.bindAddress=0.0.0.0 \
--conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \
--conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \
--conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \
--conf spark.sql.catalog.demo.type=hadoop \
--conf spark.sql.catalog.demo.warehouse=s3a://icebergdata/demo \
--conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://127.0.0.1:9301 \
--conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \
--conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=hummockadmin \
--conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=hummockadmin \
--conf spark.sql.defaultCatalog=demo
12 changes: 3 additions & 9 deletions e2e_test/iceberg/test_case/cdc/load.slt
@@ -7,16 +7,16 @@ create source mysql_mydb with (
port = '3306',
username = 'root',
password = '123456',
database.name = 'my@db',
server.id = '2'
database.name = 'mydb',
server.id = '5085'
);

statement ok
create table products ( id INT,
name STRING,
description STRING,
PRIMARY KEY (id)
) FROM mysql_mydb TABLE 'my@db.products';
) FROM mysql_mydb TABLE 'mydb.products';


statement ok
@@ -35,15 +35,9 @@ CREATE SINK s1 AS select * from products WITH (
primary_key = 'id'
);

statement ok
flush;

sleep 20s

query I
select count(*) from products;
----
8

statement ok
flush;
6 changes: 3 additions & 3 deletions e2e_test/iceberg/test_case/cdc/mysql_cdc.sql
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
DROP DATABASE IF EXISTS `my@db`;
CREATE DATABASE `my@db`;
DROP DATABASE IF EXISTS `mydb`;
CREATE DATABASE `mydb`;

USE `my@db`;
USE `mydb`;

CREATE TABLE products (
id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
2 changes: 1 addition & 1 deletion e2e_test/iceberg/test_case/cdc/mysql_cdc_insert.sql
@@ -1,4 +1,4 @@
USE `my@db`;
USE `mydb`;

INSERT INTO products VALUES (default,"109","109"),
(default,"110","110"),
4 changes: 4 additions & 0 deletions e2e_test/iceberg/test_case/iceberg_sink_append_only.slt
@@ -43,13 +43,17 @@ INSERT INTO t6 VALUES
statement ok
FLUSH;

sleep 5s

statement ok
INSERT INTO t6 VALUES
(5, 5, 5000, 5.5, 5.55, '5-5', true, '2022-03-15', '2022-03-15 05:00:00Z'::timestamptz, '2022-03-15 05:00:00');

statement ok
FLUSH;

sleep 5s

statement ok
DROP SINK s6;

14 changes: 14 additions & 0 deletions e2e_test/iceberg/test_case/iceberg_sink_upsert.slt
@@ -29,12 +29,26 @@ INSERT INTO t6 VALUES (1, 1, 2, '1-2'), (1, 2, 2, '2-2'), (1, 3, 2, '3-2'), (1,
statement ok
FLUSH;

sleep 5s

statement ok
INSERT INTO t6 VALUES (1, 1, 50, '1-50');

statement ok
FLUSH;

sleep 10s

query I
select count(*) from t6;
----
7

statement ok
FLUSH;

sleep 10s

statement ok
DROP SINK s6;

4 changes: 4 additions & 0 deletions e2e_test/sink/iceberg_sink.slt
@@ -25,12 +25,16 @@ INSERT INTO t6 VALUES (1, 2, '1-2'), (2, 2, '2-2'), (3, 2, '3-2'), (5, 2, '5-2')
statement ok
FLUSH;

sleep 5s

statement ok
INSERT INTO t6 VALUES (1, 50, '1-50');

statement ok
FLUSH;

sleep 5s

statement ok
DROP SINK s6;

7 changes: 4 additions & 3 deletions e2e_test/sink/kafka/avro.slt
@@ -1,5 +1,5 @@
statement ok
create table from_kafka ( primary key (some_key) )
create table from_kafka ( *, gen_i32_field int as int32_field + 2, primary key (some_key) )
include key as some_key
with (
connector = 'kafka',
@@ -52,6 +52,7 @@ select
float_field,
double_field,
int32_field,
gen_i32_field,
int64_field,
record_field,
array_field,
@@ -61,8 +62,8 @@
time_micros_field,
time_millis_field from from_kafka order by string_field;
----
t Rising \x6130 3.5 4.25 22 23 NULL {{NULL,3},NULL,{7,NULL,2}} 2006-01-02 22:04:05+00:00 NULL NULL 12:34:56.123456 NULL
f Wave \x5a4446 1.5 NULL 11 12 (,foo) NULL NULL 2006-01-02 22:04:05+00:00 2021-04-01 NULL 23:45:16.654
t Rising \x6130 3.5 4.25 22 24 23 NULL {{NULL,3},NULL,{7,NULL,2}} 2006-01-02 22:04:05+00:00 NULL NULL 12:34:56.123456 NULL
f Wave \x5a4446 1.5 NULL 11 13 12 (,foo) NULL NULL 2006-01-02 22:04:05+00:00 2021-04-01 NULL 23:45:16.654

statement error SchemaFetchError
create sink sink_err from into_kafka with (
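The avro.slt change above relies on RisingWave's generated-column syntax (`<name> <type> AS <expr>`), deriving `gen_i32_field` from `int32_field + 2` as rows are ingested. A minimal standalone sketch in the same sqllogictest style (table and column names here are illustrative, not taken from this commit):

statement ok
create table t (v1 int, v2 int as v1 + 2);

statement ok
insert into t (v1) values (10);

statement ok
flush;

query I
select v2 from t;
----
12

statement ok
drop table t;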