Skip to content

Commit

Permalink
Merge branch 'main' into yiming/iter-log-fix-user-key-range
Browse files Browse the repository at this point in the history
  • Loading branch information
wenym1 authored Jul 30, 2024
2 parents 4724728 + cf30276 commit 4c79e71
Show file tree
Hide file tree
Showing 51 changed files with 653 additions and 155 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

37 changes: 19 additions & 18 deletions ci/workflows/main-cron.yml
Original file line number Diff line number Diff line change
Expand Up @@ -881,24 +881,25 @@ steps:
timeout_in_minutes: 10
retry: *auto-retry

- label: "end-to-end cassandra sink test"
key: "e2e-cassandra-sink-tests"
command: "ci/scripts/e2e-cassandra-sink-test.sh -p ci-release"
if: |
!(build.pull_request.labels includes "ci/main-cron/run-selected") && build.env("CI_STEPS") == null
|| build.pull_request.labels includes "ci/run-e2e-cassandra-sink-tests"
|| build.env("CI_STEPS") =~ /(^|,)e2e-cassandra-sink-tests?(,|$$)/
depends_on:
- "build"
- "build-other"
plugins:
- docker-compose#v5.1.0:
run: sink-test-env
config: ci/docker-compose.yml
mount-buildkite-agent: true
- ./ci/plugins/upload-failure-logs
timeout_in_minutes: 10
retry: *auto-retry
# FIXME(xxhZs): https://github.com/risingwavelabs/risingwave/issues/17855
# - label: "end-to-end cassandra sink test"
# key: "e2e-cassandra-sink-tests"
# command: "ci/scripts/e2e-cassandra-sink-test.sh -p ci-release"
# if: |
# !(build.pull_request.labels includes "ci/main-cron/run-selected") && build.env("CI_STEPS") == null
# || build.pull_request.labels includes "ci/run-e2e-cassandra-sink-tests"
# || build.env("CI_STEPS") =~ /(^|,)e2e-cassandra-sink-tests?(,|$$)/
# depends_on:
# - "build"
# - "build-other"
# plugins:
# - docker-compose#v5.1.0:
# run: sink-test-env
# config: ci/docker-compose.yml
# mount-buildkite-agent: true
# - ./ci/plugins/upload-failure-logs
# timeout_in_minutes: 10
# retry: *auto-retry

- label: "end-to-end clickhouse sink test"
key: "e2e-clickhouse-sink-tests"
Expand Down
1 change: 1 addition & 0 deletions docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ services:
# If ENABLE_TELEMETRY is not set, telemetry will start by default
ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}
RW_TELEMETRY_TYPE: ${RW_TELEMETRY_TYPE:-"docker-compose"}
RW_SECRET_STORE_PRIVATE_KEY_HEX: ${RW_SECRET_STORE_PRIVATE_KEY_HEX:-0123456789abcdef}
container_name: risingwave-standalone
healthcheck:
test:
Expand Down
2 changes: 1 addition & 1 deletion e2e_test/backfill/sink/create_sink.slt
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ statement ok
create table t (v1 int);

statement ok
SET STREAMING_RATE_LIMIT = 500;
SET BACKFILL_RATE_LIMIT = 500;

# Should finish in 20s
statement ok
Expand Down
2 changes: 1 addition & 1 deletion e2e_test/backfill/sink/different_pk_and_dist_key.slt
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ statement ok
create materialized view m1 as select t.v1, t.v2, t.v3 from t join t2 using(v1);

statement ok
set streaming_rate_limit = 1;
set backfill_rate_limit = 1;

statement ok
set background_ddl = true;
Expand Down
2 changes: 1 addition & 1 deletion e2e_test/background_ddl/basic.slt
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ statement ok
FLUSH;

statement ok
SET STREAMING_RATE_LIMIT=10000;
SET BACKFILL_RATE_LIMIT=10000;

statement ok
CREATE MATERIALIZED VIEW m1 as SELECT * FROM t;
Expand Down
2 changes: 1 addition & 1 deletion e2e_test/background_ddl/sim/basic.slt
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ statement ok
FLUSH;

statement ok
SET STREAMING_RATE_LIMIT=4000;
SET BACKFILL_RATE_LIMIT=4000;

statement ok
CREATE MATERIALIZED VIEW m1 as SELECT * FROM t;
Expand Down
3 changes: 2 additions & 1 deletion e2e_test/batch/catalog/pg_settings.slt.part
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ postmaster license_key
postmaster max_concurrent_creating_streaming_jobs
postmaster pause_on_next_bootstrap
user application_name
user backfill_rate_limit
user background_ddl
user batch_enable_distributed_dml
user batch_parallelism
Expand Down Expand Up @@ -52,10 +53,10 @@ user server_encoding
user server_version
user server_version_num
user sink_decouple
user source_rate_limit
user standard_conforming_strings
user statement_timeout
user streaming_parallelism
user streaming_rate_limit
user streaming_use_arrangement_backfill
user synchronize_seqscans
user timezone
Expand Down
4 changes: 2 additions & 2 deletions e2e_test/ddl/drop/drop_creating_mv.slt
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ statement ok
flush;

statement ok
set streaming_rate_limit=1;
set backfill_rate_limit=1;

############## Test drop foreground mv
onlyif can-use-recover
Expand Down Expand Up @@ -61,7 +61,7 @@ drop materialized view m1;

############## Make sure the mv can still be successfully created later.
statement ok
set streaming_rate_limit=default;
set backfill_rate_limit=default;

statement ok
set background_ddl=false;
Expand Down
30 changes: 30 additions & 0 deletions e2e_test/ddl/secret.slt
Original file line number Diff line number Diff line change
@@ -1,3 +1,33 @@
statement ok
ALTER SYSTEM SET license_key TO '';

statement error
create secret secret_1 with (
backend = 'fake-backend'
) as 'demo_secret';
----
db error: ERROR: Failed to run the query

Caused by:
feature SecretManagement is only available for tier Paid and above, while the current tier is Free

Hint: You may want to set a license key with `ALTER SYSTEM SET license_key = '...';` command.


statement error
drop secret secret_1;
----
db error: ERROR: Failed to run the query

Caused by:
feature SecretManagement is only available for tier Paid and above, while the current tier is Free

Hint: You may want to set a license key with `ALTER SYSTEM SET license_key = '...';` command.


statement ok
ALTER SYSTEM SET license_key TO DEFAULT;

statement error secret backend "fake-backend" is not supported
create secret secret_1 with (
backend = 'fake-backend'
Expand Down
14 changes: 8 additions & 6 deletions e2e_test/ddl/throttle.slt
Original file line number Diff line number Diff line change
@@ -1,20 +1,22 @@
# streaming_rate_limit also applies to create sink and create source, please refer to
# e2e_test/source/basic/kafka.slt and e2e_test/sink/kafka/create_sink.slt for this part
# source_rate_limit applies to create source, please refer to
# e2e_test/source/basic/kafka.slt.
# backfill_rate_limit applies to create sink, please refer to
# e2e_test/sink/kafka/create_sink.slt.

statement ok
create table t1 (v1 int);

# tracked in https://github.com/risingwavelabs/risingwave/issues/13474
# create with duplicate streaming_rate_limit
# create with duplicate backfill_rate_limit
statement error Duplicated option
create materialized view mv1 with (streaming_rate_limit = 1000, streaming_rate_limit = 2000) as select * from t1;
create materialized view mv1 with (backfill_rate_limit = 1000, backfill_rate_limit = 2000) as select * from t1;

# create with unknown fields
statement error unexpected options in WITH clause
create materialized view mv1 with (streaming_rate_limit = 1000, unknown_field = 2000) as select * from t1;
create materialized view mv1 with (backfill_rate_limit = 1000, unknown_field = 2000) as select * from t1;

statement ok
create materialized view mv1 with (streaming_rate_limit = 1000) as select * from t1;
create materialized view mv1 with (backfill_rate_limit = 1000) as select * from t1;

statement ok
drop materialized view mv1;
Expand Down
4 changes: 2 additions & 2 deletions e2e_test/sink/kafka/create_sink.slt
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ create sink multiple_pk_throttle from t_kafka with (
topic = 'test-rw-sink-debezium',
type = 'debezium',
primary_key = 'id,v_varchar',
streaming_rate_limit = 200
backfill_rate_limit = 200
);

statement ok
Expand All @@ -165,7 +165,7 @@ create sink multiple_pk_throttle_1
topic = 'test-rw-sink-debezium',
type = 'debezium',
primary_key = 'id,v_varchar',
streaming_rate_limit = 200
backfill_rate_limit = 200
);

statement ok
Expand Down
4 changes: 2 additions & 2 deletions e2e_test/slow_tests/backfill/rate_limit/slow-udf.slt
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ statement ok
insert into t select 2 from generate_series(1, 1000000);

statement ok
set streaming_rate_limit=1;
set backfill_rate_limit=1;

statement ok
set background_ddl=true;
Expand All @@ -25,7 +25,7 @@ statement ok
set background_ddl = false;

statement ok
set streaming_rate_limit=default;
set backfill_rate_limit=default;

statement ok
flush;
Expand Down
4 changes: 2 additions & 2 deletions e2e_test/slow_tests/udf/always_retry_python.slt
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ statement ok
flush;

statement ok
SET STREAMING_RATE_LIMIT=1;
SET BACKFILL_RATE_LIMIT=1;

statement ok
SET BACKGROUND_DDL=true;
Expand Down Expand Up @@ -57,7 +57,7 @@ SELECT count(*) FROM mv_always_retry where s1 is NULL;
# t

statement ok
SET STREAMING_RATE_LIMIT TO DEFAULT;
SET BACKFILL_RATE_LIMIT TO DEFAULT;

statement ok
SET BACKGROUND_DDL=false;
Expand Down
8 changes: 4 additions & 4 deletions e2e_test/source/basic/alter/rate_limit_source_kafka.slt
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ sleep 3s
############## Create MV on source

statement ok
SET STREAMING_RATE_LIMIT=0;
SET SOURCE_RATE_LIMIT=0;

statement ok
create materialized view rl_mv1 as select count(*) from kafka_source;
Expand All @@ -56,7 +56,7 @@ statement ok
create materialized view rl_mv3 as select count(*) from kafka_source;

statement ok
SET STREAMING_RATE_LIMIT=default;
SET SOURCE_RATE_LIMIT=default;

############## MVs should have 0 records, since source has (rate_limit = 0)

Expand All @@ -82,11 +82,11 @@ select * from rl_mv3;

skipif in-memory
query I
alter source kafka_source set streaming_rate_limit to 1000;
alter source kafka_source set source_rate_limit to 1000;

skipif in-memory
query I
alter source kafka_source set streaming_rate_limit to default;
alter source kafka_source set source_rate_limit to default;

skipif in-memory
sleep 3s
Expand Down
6 changes: 3 additions & 3 deletions e2e_test/source/basic/alter/rate_limit_table_kafka.slt
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ create table kafka_source (v1 int) with (
topic = 'kafka_source',
properties.bootstrap.server = 'message_queue:29092',
scan.startup.mode = 'earliest',
streaming_rate_limit = 0
source_rate_limit = 0
) FORMAT PLAIN ENCODE JSON

statement ok
Expand Down Expand Up @@ -61,11 +61,11 @@ select count(*) from kafka_source;

skipif in-memory
query I
alter table kafka_source set streaming_rate_limit to 1000;
alter table kafka_source set source_rate_limit to 1000;

skipif in-memory
query I
alter table kafka_source set streaming_rate_limit to default;
alter table kafka_source set source_rate_limit to default;

skipif in-memory
sleep 3s
Expand Down
2 changes: 1 addition & 1 deletion e2e_test/source/basic/kafka.slt
Original file line number Diff line number Diff line change
Expand Up @@ -391,7 +391,7 @@ create table s29 (id bytea) with (
topic = 'kafka_source_format_bytes',
properties.bootstrap.server = 'message_queue:29092',
scan.startup.mode = 'earliest',
streaming_rate_limit = 200
source_rate_limit = 200
) FORMAT PLAIN ENCODE BYTES

statement ok
Expand Down
Loading

0 comments on commit 4c79e71

Please sign in to comment.