Skip to content

Commit

Permalink
add e2e test
Browse files Browse the repository at this point in the history
  • Loading branch information
ZENOTME committed Jan 29, 2024
1 parent 0ee0497 commit 397faa4
Show file tree
Hide file tree
Showing 6 changed files with 100 additions and 20 deletions.
2 changes: 2 additions & 0 deletions ci/scripts/e2e-iceberg-sink-v2-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ bash ./start_spark_connect_server.sh
"$HOME"/.local/bin/poetry run python main.py -t ./test_case/no_partition_upsert.toml
"$HOME"/.local/bin/poetry run python main.py -t ./test_case/partition_append_only.toml
"$HOME"/.local/bin/poetry run python main.py -t ./test_case/partition_upsert.toml
"$HOME"/.local/bin/poetry run python main.py -t ./test_case/range_partition_append_only.toml
"$HOME"/.local/bin/poetry run python main.py -t ./test_case/range_partition_upsert.toml


echo "--- Kill cluster"
Expand Down
8 changes: 4 additions & 4 deletions e2e_test/iceberg/test_case/iceberg_sink_upsert.slt
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,13 @@ statement ok
set streaming_parallelism=4;

statement ok
CREATE TABLE t6 (id int, v1 int primary key, v2 bigint, v3 varchar);
CREATE TABLE t6 (id int, v1 int primary key, v2 bigint, v3 varchar, v4 date);

statement ok
CREATE MATERIALIZED VIEW mv6 AS SELECT * FROM t6;

statement ok
CREATE SINK s6 AS select mv6.id as id, mv6.v1 as v1, mv6.v2 as v2, mv6.v3 as v3 from mv6 WITH (
CREATE SINK s6 AS select mv6.id as id, mv6.v1 as v1, mv6.v2 as v2, mv6.v3 as v3, mv6.v4 as v4 from mv6 WITH (
connector = 'iceberg',
type = 'upsert',
force_append_only = 'false',
Expand All @@ -24,15 +24,15 @@ CREATE SINK s6 AS select mv6.id as id, mv6.v1 as v1, mv6.v2 as v2, mv6.v3 as v3
);

statement ok
INSERT INTO t6 VALUES (1, 1, 2, '1-2'), (1, 2, 2, '2-2'), (1, 3, 2, '3-2'), (1, 5, 2, '5-2'), (1, 8, 2, '8-2'), (1, 13, 2, '13-2'), (1, 21, 2, '21-2');
INSERT INTO t6 VALUES (1, 1, 2, '1-2', '2022-03-11'), (1, 2, 2, '2-2', '2022-03-12'), (1, 3, 2, '3-2', '2022-03-13'), (1, 5, 2, '5-2', '2022-03-15'), (1, 8, 2, '8-2', '2022-03-18'), (1, 13, 2, '13-2', '2022-03-13'), (1, 21, 2, '21-2', '2022-03-21');

statement ok
FLUSH;

sleep 5s

statement ok
INSERT INTO t6 VALUES (1, 1, 50, '1-50');
INSERT INTO t6 VALUES (1, 1, 50, '1-50', '2022-03-11');

statement ok
FLUSH;
Expand Down
17 changes: 9 additions & 8 deletions e2e_test/iceberg/test_case/no_partition_upsert.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@ init_sqls = [
id int,
v1 int,
v2 long,
v3 string
v3 string,
v4 date
) USING iceberg
TBLPROPERTIES ('format-version'='2');
'''
Expand All @@ -19,13 +20,13 @@ verify_schema = ['int','int','long','string']
verify_sql = 'SELECT * FROM demo_db.demo_table ORDER BY id, v1 ASC'

verify_data = """
1,1,50,1-50
1,2,2,2-2
1,3,2,3-2
1,5,2,5-2
1,8,2,8-2
1,13,2,13-2
1,21,2,21-2
1,1,50,1-50,2022-03-11
1,2,2,2-2,2022-03-12
1,3,2,3-2,2022-03-13
1,5,2,5-2,2022-03-15
1,8,2,8-2,2022-03-18
1,13,2,13-2,2022-03-13
1,21,2,21-2,2022-03-21
"""

drop_sqls = [
Expand Down
17 changes: 9 additions & 8 deletions e2e_test/iceberg/test_case/partition_upsert.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@ init_sqls = [
id int,
v1 int,
v2 long,
v3 string
v3 string,
v4 date
) USING iceberg
PARTITIONED BY (v1,v2,truncate(2,v3))
TBLPROPERTIES ('format-version'='2');
Expand All @@ -20,13 +21,13 @@ verify_schema = ['int','int','long','string']
verify_sql = 'SELECT * FROM demo_db.demo_table ORDER BY id, v1 ASC'

verify_data = """
1,1,50,1-50
1,2,2,2-2
1,3,2,3-2
1,5,2,5-2
1,8,2,8-2
1,13,2,13-2
1,21,2,21-2
1,1,50,1-50,2022-03-11
1,2,2,2-2,2022-03-12
1,3,2,3-2,2022-03-13
1,5,2,5-2,2022-03-15
1,8,2,8-2,2022-03-18
1,13,2,13-2,2022-03-13
1,21,2,21-2,2022-03-21
"""

drop_sqls = [
Expand Down
40 changes: 40 additions & 0 deletions e2e_test/iceberg/test_case/range_partition_append_only.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# E2E test case: Iceberg sink, append-only, with range-partition transforms
# (years/months/days) on the temporal columns.
# Executed by e2e_test/iceberg/main.py: run init_sqls via Spark, run the slt
# file against RisingWave, then check the sink output with verify_sql.
init_sqls = [
'CREATE SCHEMA IF NOT EXISTS demo_db',
'DROP TABLE IF EXISTS demo_db.demo_table',
'''
CREATE TABLE demo_db.demo_table (
id long,
v_int int,
v_long long,
v_float float,
v_double double,
v_varchar string,
v_bool boolean,
v_date date,
v_timestamp timestamp,
v_ts_ntz timestamp_ntz
)
PARTITIONED BY (years(v_date),months(v_timestamp),days(v_ts_ntz))
TBLPROPERTIES ('format-version'='2');
'''
]

# sqllogictest script that creates the RisingWave source/sink and inserts rows.
slt = 'test_case/iceberg_sink_append_only.slt'

# Expected type of each column returned by verify_sql, in SELECT order
# (one entry per column of demo_table).
verify_schema = ['long', 'int', 'long', 'float', 'double', 'string', 'boolean', 'date', 'timestamp', 'timestamp_ntz']

# Deterministic ordering so verify_data can be compared row-by-row.
verify_sql = 'SELECT * FROM demo_db.demo_table ORDER BY id ASC'


# Expected rows, CSV-style, one line per row in verify_sql order.
verify_data = """
1,1,1000,1.1,1.11,1-1,true,2022-03-11,2022-03-11 01:00:00+00:00,2022-03-11 01:00:00
2,2,2000,2.2,2.22,2-2,false,2022-03-12,2022-03-12 02:00:00+00:00,2022-03-12 02:00:00
3,3,3000,3.3,3.33,3-3,true,2022-03-13,2022-03-13 03:00:00+00:00,2022-03-13 03:00:00
4,4,4000,4.4,4.44,4-4,false,2022-03-14,2022-03-14 04:00:00+00:00,2022-03-14 04:00:00
5,5,5000,5.5,5.55,5-5,true,2022-03-15,2022-03-15 05:00:00+00:00,2022-03-15 05:00:00
"""

# Cleanup run after verification so reruns start from a clean catalog.
drop_sqls = [
'DROP TABLE IF EXISTS demo_db.demo_table',
'DROP SCHEMA IF EXISTS demo_db'
]
36 changes: 36 additions & 0 deletions e2e_test/iceberg/test_case/range_partition_upsert.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# E2E test case: Iceberg sink, upsert mode, with a days() range-partition
# transform on the date column v4.
# Executed by e2e_test/iceberg/main.py: run init_sqls via Spark, run the slt
# file against RisingWave, then check the sink output with verify_sql.
init_sqls = [
'CREATE SCHEMA IF NOT EXISTS demo_db',
'DROP TABLE IF EXISTS demo_db.demo_table',
'''
CREATE TABLE demo_db.demo_table (
id int,
v1 int,
v2 long,
v3 string,
v4 date
) USING iceberg
PARTITIONED BY (days(v4))
TBLPROPERTIES ('format-version'='2');
'''
]

# sqllogictest script that creates the RisingWave source/sink, inserts and
# upserts rows (t6/mv6/s6).
slt = 'test_case/iceberg_sink_upsert.slt'

# Expected type of each column returned by verify_sql, in SELECT order.
# Fix: the table and verify_data have five columns (id, v1, v2, v3, v4) but
# the original list had only four — 'date' for v4 was missing.
verify_schema = ['int', 'int', 'long', 'string', 'date']

# Deterministic ordering so verify_data can be compared row-by-row.
verify_sql = 'SELECT * FROM demo_db.demo_table ORDER BY id, v1 ASC'

# Expected rows after the upsert (row with v1=1 updated to v2=50).
verify_data = """
1,1,50,1-50,2022-03-11
1,2,2,2-2,2022-03-12
1,3,2,3-2,2022-03-13
1,5,2,5-2,2022-03-15
1,8,2,8-2,2022-03-18
1,13,2,13-2,2022-03-13
1,21,2,21-2,2022-03-21
"""

# Cleanup run after verification so reruns start from a clean catalog.
drop_sqls = [
'DROP TABLE IF EXISTS demo_db.demo_table',
'DROP SCHEMA IF EXISTS demo_db'
]

0 comments on commit 397faa4

Please sign in to comment.