Merge branch 'main' into xzhseh/sql-udf-creation-better-funciton-hint-display
xzhseh authored Feb 26, 2024
2 parents 32314b4 + aedfd66 commit 66cc802
Showing 395 changed files with 7,111 additions and 5,123 deletions.
22 changes: 4 additions & 18 deletions Cargo.lock

4 changes: 2 additions & 2 deletions Makefile.toml
@@ -757,10 +757,10 @@ tmux list-windows -t risedev -F "#{window_name} #{pane_id}" \
if [[ -n $(tmux list-windows -t risedev | grep kafka) ]];
then
echo "kill kafka"
kill_kafka
kill_kafka || true
echo "kill zookeeper"
kill_zookeeper
kill_zookeeper || true
# Kill their tmux sessions
tmux list-windows -t risedev -F "#{pane_id}" | xargs -I {} tmux send-keys -t {} C-c C-d
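
The `|| true` guards keep the cleanup sequence going even when Kafka or ZooKeeper is not actually running: without them, a non-zero exit from either kill helper would abort a shell running under `set -e`. A minimal sketch of the pattern, with a hypothetical helper and process name:

```bash
#!/usr/bin/env bash
set -e

kill_example() {
  # pkill exits non-zero when no matching process exists.
  pkill -f example-daemon   # hypothetical process name
}

kill_example || true   # the script keeps going even if nothing was running
echo "cleanup finished"
```
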
2 changes: 1 addition & 1 deletion README.md
@@ -72,7 +72,7 @@ Don’t have Docker? Learn how to install RisingWave on Mac, Ubuntu, and other e

## Production deployments

For **single-node deployment**, please refer to [Docker Compose](https://docs.risingwave.com/docs/current/risingwave-trial/?method=docker-compose).
For **single-node deployment**, please refer to [Docker Compose](https://docs.risingwave.com/docs/current/risingwave-docker-compose/).

For **distributed deployment**, please refer to [Kubernetes with Helm](https://docs.risingwave.com/docs/current/risingwave-k8s-helm/) or [Kubernetes with Operator](https://docs.risingwave.com/docs/current/risingwave-kubernetes/).

28 changes: 18 additions & 10 deletions backwards-compat-tests/scripts/utils.sh
@@ -103,19 +103,21 @@ insert_json_kafka() {
local JSON=$1
echo "$JSON" | "$KAFKA_PATH"/bin/kafka-console-producer.sh \
--topic backwards_compat_test_kafka_source \
--bootstrap-server localhost:29092
--bootstrap-server localhost:29092 \
--property "parse.key=true" \
--property "key.separator=,"
}

seed_json_kafka() {
insert_json_kafka '{"timestamp": "2023-07-28 07:11:00", "user_id": 1, "page_id": 1, "action": "gtrgretrg"}'
insert_json_kafka '{"timestamp": "2023-07-28 07:11:00", "user_id": 2, "page_id": 1, "action": "fsdfgerrg"}'
insert_json_kafka '{"timestamp": "2023-07-28 07:11:00", "user_id": 3, "page_id": 1, "action": "sdfergtth"}'
insert_json_kafka '{"timestamp": "2023-07-28 06:54:00", "user_id": 4, "page_id": 2, "action": "erwerhghj"}'
insert_json_kafka '{"timestamp": "2023-07-28 06:54:00", "user_id": 5, "page_id": 2, "action": "kiku7ikkk"}'
insert_json_kafka '{"timestamp": "2023-07-28 06:54:00", "user_id": 6, "page_id": 3, "action": "6786745ge"}'
insert_json_kafka '{"timestamp": "2023-07-28 06:54:00", "user_id": 7, "page_id": 3, "action": "fgbgfnyyy"}'
insert_json_kafka '{"timestamp": "2023-07-28 06:54:00", "user_id": 8, "page_id": 4, "action": "werwerwwe"}'
insert_json_kafka '{"timestamp": "2023-07-28 06:54:00", "user_id": 9, "page_id": 4, "action": "yjtyjtyyy"}'
insert_json_kafka '{"user_id": 1},{"timestamp": "2023-07-28 07:11:00", "user_id": 1, "page_id": 1, "action": "gtrgretrg"}'
insert_json_kafka '{"user_id": 2},{"timestamp": "2023-07-28 07:11:00", "user_id": 2, "page_id": 1, "action": "fsdfgerrg"}'
insert_json_kafka '{"user_id": 3},{"timestamp": "2023-07-28 07:11:00", "user_id": 3, "page_id": 1, "action": "sdfergtth"}'
insert_json_kafka '{"user_id": 4},{"timestamp": "2023-07-28 06:54:00", "user_id": 4, "page_id": 2, "action": "erwerhghj"}'
insert_json_kafka '{"user_id": 5},{"timestamp": "2023-07-28 06:54:00", "user_id": 5, "page_id": 2, "action": "kiku7ikkk"}'
insert_json_kafka '{"user_id": 6},{"timestamp": "2023-07-28 06:54:00", "user_id": 6, "page_id": 3, "action": "6786745ge"}'
insert_json_kafka '{"user_id": 7},{"timestamp": "2023-07-28 06:54:00", "user_id": 7, "page_id": 3, "action": "fgbgfnyyy"}'
insert_json_kafka '{"user_id": 8},{"timestamp": "2023-07-28 06:54:00", "user_id": 8, "page_id": 4, "action": "werwerwwe"}'
insert_json_kafka '{"user_id": 9},{"timestamp": "2023-07-28 06:54:00", "user_id": 9, "page_id": 4, "action": "yjtyjtyyy"}'
}
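
With `parse.key=true` and `key.separator=,`, `kafka-console-producer.sh` treats everything before the first comma on each line as the message key and the rest as the value, so every seeded record now carries `{"user_id": N}` as its Kafka key, which is what the upsert tables below deduplicate on. A standalone sketch of the same call (the `KAFKA_PATH` value is an assumption):

```bash
# Publish one keyed record: key = '{"user_id": 1}', value = the full JSON payload.
KAFKA_PATH=/opt/kafka   # hypothetical install location
echo '{"user_id": 1},{"timestamp": "2023-07-28 07:11:00", "user_id": 1, "page_id": 1, "action": "gtrgretrg"}' | \
  "$KAFKA_PATH"/bin/kafka-console-producer.sh \
    --topic backwards_compat_test_kafka_source \
    --bootstrap-server localhost:29092 \
    --property "parse.key=true" \
    --property "key.separator=,"
```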

# https://stackoverflow.com/a/4024263
@@ -225,6 +227,12 @@ seed_old_cluster() {
create_kafka_topic
seed_json_kafka
sqllogictest -d dev -h localhost -p 4566 "$TEST_DIR/kafka/seed.slt"
# use the old syntax for version at most 1.5.4
if version_le "$OLD_VERSION" "1.5.4" ; then
sqllogictest -d dev -h localhost -p 4566 "$TEST_DIR/kafka/upsert/deprecate_upsert.slt"
else
sqllogictest -d dev -h localhost -p 4566 "$TEST_DIR/kafka/upsert/include_key_as.slt"
fi

echo "--- KAFKA TEST: wait 5s for kafka to process data"
sleep 5
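
`version_le` is defined elsewhere in this script; one common way to implement such a less-than-or-equal version check is GNU `sort -V`, sketched here as an assumption rather than the repository's actual code:

```bash
# Hypothetical sketch: succeeds when $1 <= $2 in version-sort order,
# e.g. version_le "1.5.4" "1.6.0" returns 0 (true).
version_le() {
  printf '%s\n%s\n' "$1" "$2" | sort --check=quiet --version-sort
}
```
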
16 changes: 16 additions & 0 deletions backwards-compat-tests/slt/kafka/upsert/deprecate_upsert.slt
@@ -0,0 +1,16 @@
statement ok
CREATE TABLE IF NOT EXISTS kafka_table
(
action varchar,
user_id integer,
obj_id integer,
name varchar,
page_id integer,
age integer
)
WITH (
connector='kafka',
topic='backwards_compat_test_kafka_source',
properties.bootstrap.server='localhost:29092',
scan.startup.mode='earliest',
) FORMAT UPSERT ENCODE JSON;
18 changes: 18 additions & 0 deletions backwards-compat-tests/slt/kafka/upsert/include_key_as.slt
@@ -0,0 +1,18 @@
statement ok
CREATE TABLE IF NOT EXISTS kafka_table
(
action varchar,
user_id integer,
obj_id integer,
name varchar,
page_id integer,
age integer,
primary key (_rw_key)
)
INCLUDE key as _rw_key
WITH (
connector='kafka',
topic='backwards_compat_test_kafka_source',
properties.bootstrap.server='localhost:29092',
scan.startup.mode='earliest',
) FORMAT UPSERT ENCODE JSON;
13 changes: 13 additions & 0 deletions backwards-compat-tests/slt/kafka/validate_restart.slt
@@ -50,3 +50,16 @@ werwerwwe 8 NULL NULL 4 NULL
yjtyjtyyy 9 NULL NULL 4 NULL
yjtyjtyyy 9 NULL NULL 4 NULL

# kafka_table should do the upsert and overwrite the existing records
query I rowsort
SELECT action, user_id, obj_id, name, page_id, age, _rw_key FROM kafka_table;
----
6786745ge 6 NULL NULL 3 NULL \x7b22757365725f6964223a20367d
erwerhghj 4 NULL NULL 2 NULL \x7b22757365725f6964223a20347d
fgbgfnyyy 7 NULL NULL 3 NULL \x7b22757365725f6964223a20377d
fsdfgerrg 2 NULL NULL 1 NULL \x7b22757365725f6964223a20327d
gtrgretrg 1 NULL NULL 1 NULL \x7b22757365725f6964223a20317d
kiku7ikkk 5 NULL NULL 2 NULL \x7b22757365725f6964223a20357d
sdfergtth 3 NULL NULL 1 NULL \x7b22757365725f6964223a20337d
werwerwwe 8 NULL NULL 4 NULL \x7b22757365725f6964223a20387d
yjtyjtyyy 9 NULL NULL 4 NULL \x7b22757365725f6964223a20397d
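
The trailing column is the raw Kafka message key captured by `INCLUDE key AS _rw_key`, rendered as hex-escaped bytes; each value is simply the JSON key that `seed_json_kafka` produced. Decoding one of them (assuming `xxd` is available) confirms the mapping:

```bash
# \x7b22757365725f6964223a20317d is the hex-encoded Kafka key for user_id 1.
echo '7b22757365725f6964223a20317d' | xxd -r -p; echo
# prints: {"user_id": 1}
```
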
123 changes: 116 additions & 7 deletions ci/docker-compose.yml
@@ -88,10 +88,27 @@ services:
- message_queue
- elasticsearch
- clickhouse-server
- pulsar
- redis-server
- pulsar-server
- cassandra-server
- starrocks-fe-server
- starrocks-be-server
volumes:
- ..:/risingwave

sink-doris-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20231109
depends_on:
- doris-fe-server
- doris-be-server
volumes:
- ..:/risingwave
command: >
sh -c "sudo sysctl -w vm.max_map_count=2000000"
networks:
mynetwork:
ipv4_address: 172.121.0.4

rw-build-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20240213
volumes:
@@ -159,10 +176,96 @@ services:
expose:
- 9009

# Temporary workaround for json schema registry test since redpanda only supports
# protobuf/avro schema registry. Should be removed after the support.
# Related tracking issue:
# https://github.com/redpanda-data/redpanda/issues/1878
redis-server:
container_name: redis-server
image: 'redis:latest'
expose:
- 6379
ports:
- 6378:6379
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 5s
timeout: 30s
retries: 50

doris-fe-server:
platform: linux/amd64
image: apache/doris:2.0.0_alpha-fe-x86_64
hostname: doris-fe-server
command: >
sh -c "sudo sysctl -w vm.max_map_count=2000000"
environment:
- FE_SERVERS=fe1:172.121.0.2:9010
- FE_ID=1
ports:
- "8030:8030"
- "9030:9030"
networks:
mynetwork:
ipv4_address: 172.121.0.2

doris-be-server:
platform: linux/amd64
image: apache/doris:2.0.0_alpha-be-x86_64
hostname: doris-be-server
command: >
sh -c "sudo sysctl -w vm.max_map_count=2000000"
environment:
- FE_SERVERS=fe1:172.121.0.2:9010
- BE_ADDR=172.121.0.3:9050
depends_on:
- doris-fe-server
ports:
- "9050:9050"
networks:
mynetwork:
ipv4_address: 172.121.0.3

cassandra-server:
container_name: cassandra-server
image: cassandra:4.0
ports:
- 9042:9042
environment:
- CASSANDRA_CLUSTER_NAME=cloudinfra

starrocks-fe-server:
container_name: starrocks-fe-server
image: starrocks/fe-ubuntu:3.1.7
hostname: starrocks-fe-server
command:
/opt/starrocks/fe/bin/start_fe.sh
ports:
- 28030:8030
- 29020:9020
- 29030:9030
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9030"]
interval: 5s
timeout: 5s
retries: 30

starrocks-be-server:
image: starrocks/be-ubuntu:3.1.7
command:
- /bin/bash
- -c
- |
sleep 15s; mysql --connect-timeout 2 -h starrocks-fe-server -P9030 -uroot -e "alter system add backend \"starrocks-be-server:9050\";"
/opt/starrocks/be/bin/start_be.sh
ports:
- 28040:8040
- 29050:9050
hostname: starrocks-be-server
container_name: starrocks-be-server
depends_on:
- starrocks-fe-server
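
The inline `mysql` statement above registers the BE with the FE before the BE process starts. Once both StarRocks containers are up, the registration can be double-checked from the host through the remapped FE query port (having a MySQL client on the host is an assumption):

```bash
# Host port 29030 maps to the FE MySQL-protocol port 9030 in this compose file.
mysql --connect-timeout 2 -h 127.0.0.1 -P 29030 -uroot -e 'SHOW BACKENDS\G'
```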

# # Temporary workaround for json schema registry test since redpanda only supports
# # protobuf/avro schema registry. Should be removed after the support.
# # Related tracking issue:
# # https://github.com/redpanda-data/redpanda/issues/1878
zookeeper:
container_name: zookeeper
image: confluentinc/cp-zookeeper:latest
@@ -201,8 +304,8 @@ services:
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9093,PLAINTEXT_INTERNAL://localhost:29093
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1

pulsar:
container_name: pulsar
pulsar-server:
container_name: pulsar-server
image: apachepulsar/pulsar:latest
command: bin/pulsar standalone
ports:
@@ -216,3 +319,9 @@
interval: 5s
timeout: 5s
retries: 5
networks:
mynetwork:
ipam:
config:
- subnet: 172.121.80.0/16
default: