
Commit

feat(test): Add starrocks redis doris cassandra e2e test (#14142) (#15202)

Co-authored-by: Xinhao Xu <[email protected]>
github-actions[bot] and xxhZs authored Feb 23, 2024
1 parent aeb00a7 commit e63c2e5
Showing 17 changed files with 660 additions and 39 deletions.
123 changes: 116 additions & 7 deletions ci/docker-compose.yml
@@ -88,10 +88,27 @@ services:
- message_queue
- elasticsearch
- clickhouse-server
- pulsar
- redis-server
- pulsar-server
- cassandra-server
- starrocks-fe-server
- starrocks-be-server
volumes:
- ..:/risingwave

sink-doris-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20231109
depends_on:
- doris-fe-server
- doris-be-server
volumes:
- ..:/risingwave
command: >
sh -c "sudo sysctl -w vm.max_map_count=2000000"
networks:
mynetwork:
ipv4_address: 172.121.0.4

rw-build-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20240124_1
volumes:
@@ -159,10 +176,96 @@ services:
expose:
- 9009

# Temporary workaround for json schema registry test since redpanda only supports
# protobuf/avro schema registry. Should be removed after the support.
# Related tracking issue:
# https://github.com/redpanda-data/redpanda/issues/1878
redis-server:
container_name: redis-server
image: 'redis:latest'
expose:
- 6379
ports:
- 6378:6379
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 5s
timeout: 30s
retries: 50

doris-fe-server:
platform: linux/amd64
image: apache/doris:2.0.0_alpha-fe-x86_64
hostname: doris-fe-server
command: >
sh -c "sudo sysctl -w vm.max_map_count=2000000"
environment:
- FE_SERVERS=fe1:172.121.0.2:9010
- FE_ID=1
ports:
- "8030:8030"
- "9030:9030"
networks:
mynetwork:
ipv4_address: 172.121.0.2

doris-be-server:
platform: linux/amd64
image: apache/doris:2.0.0_alpha-be-x86_64
hostname: doris-be-server
command: >
sh -c "sudo sysctl -w vm.max_map_count=2000000"
environment:
- FE_SERVERS=fe1:172.121.0.2:9010
- BE_ADDR=172.121.0.3:9050
depends_on:
- doris-fe-server
ports:
- "9050:9050"
networks:
mynetwork:
ipv4_address: 172.121.0.3

cassandra-server:
container_name: cassandra-server
image: cassandra:4.0
ports:
- 9042:9042
environment:
- CASSANDRA_CLUSTER_NAME=cloudinfra

starrocks-fe-server:
container_name: starrocks-fe-server
image: starrocks/fe-ubuntu:3.1.7
hostname: starrocks-fe-server
command:
/opt/starrocks/fe/bin/start_fe.sh
ports:
- 28030:8030
- 29020:9020
- 29030:9030
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9030"]
interval: 5s
timeout: 5s
retries: 30

starrocks-be-server:
image: starrocks/be-ubuntu:3.1.7
command:
- /bin/bash
- -c
- |
sleep 15s; mysql --connect-timeout 2 -h starrocks-fe-server -P9030 -uroot -e "alter system add backend \"starrocks-be-server:9050\";"
/opt/starrocks/be/bin/start_be.sh
ports:
- 28040:8040
- 29050:9050
hostname: starrocks-be-server
container_name: starrocks-be-server
depends_on:
- starrocks-fe-server

# # Temporary workaround for json schema registry test since redpanda only supports
# # protobuf/avro schema registry. Should be removed after the support.
# # Related tracking issue:
# # https://github.com/redpanda-data/redpanda/issues/1878
zookeeper:
container_name: zookeeper
image: confluentinc/cp-zookeeper:latest
@@ -201,8 +304,8 @@ services:
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9093,PLAINTEXT_INTERNAL://localhost:29093
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1

pulsar:
container_name: pulsar
pulsar-server:
container_name: pulsar-server
image: apachepulsar/pulsar:latest
command: bin/pulsar standalone
ports:
@@ -216,3 +319,9 @@
interval: 5s
timeout: 5s
retries: 5
networks:
mynetwork:
ipam:
config:
- subnet: 172.121.80.0/16
default:
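The services added above share a dedicated bridge network (mynetwork) so the Doris FE/BE containers can sit at the fixed addresses referenced by FE_SERVERS and BE_ADDR. As a rough illustration, a CI script running on the same network could poll the new sinks before the tests start; the wait_for helper below is a sketch (not part of this commit), with hostnames and ports taken from the compose file:

    # Sketch only: probe each sink endpoint with bash's built-in /dev/tcp.
    wait_for() {
      local host=$1 port=$2
      for _ in $(seq 1 30); do
        (exec 3<>"/dev/tcp/${host}/${port}") 2>/dev/null && return 0
        sleep 2
      done
      echo "timed out waiting for ${host}:${port}" >&2
      return 1
    }

    wait_for redis-server 6379         # Redis
    wait_for cassandra-server 9042     # Cassandra CQL port
    wait_for doris-fe-server 9030      # Doris FE MySQL port
    wait_for starrocks-fe-server 9030  # StarRocks FE MySQL port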
65 changes: 65 additions & 0 deletions ci/scripts/e2e-cassandra-sink-test.sh
@@ -0,0 +1,65 @@
#!/usr/bin/env bash

# Exits as soon as any line fails.
set -euo pipefail

source ci/scripts/common.sh

# prepare environment
export CONNECTOR_LIBS_PATH="./connector-node/libs"

while getopts 'p:' opt; do
case ${opt} in
p )
profile=$OPTARG
;;
\? )
echo "Invalid Option: -$OPTARG" 1>&2
exit 1
;;
: )
echo "Invalid option: $OPTARG requires an argument" 1>&2
;;
esac
done
shift $((OPTIND -1))

download_and_prepare_rw "$profile" source

echo "--- Download connector node package"
buildkite-agent artifact download risingwave-connector.tar.gz ./
mkdir ./connector-node
tar xf ./risingwave-connector.tar.gz -C ./connector-node

echo "--- starting risingwave cluster"
cargo make ci-start ci-sink-test
sleep 1

echo "--- create cassandra table"
curl https://downloads.apache.org/cassandra/4.1.3/apache-cassandra-4.1.3-bin.tar.gz --output apache-cassandra-4.1.3-bin.tar.gz
tar xfvz apache-cassandra-4.1.3-bin.tar.gz
cd apache-cassandra-4.1.3/bin
export CQLSH_HOST=cassandra-server
export CQLSH_PORT=9042
./cqlsh -e "CREATE KEYSPACE demo WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1};use demo;
CREATE table demo_bhv_table(v1 int primary key,v2 smallint,v3 bigint,v4 float,v5 double,v6 text,v7 date,v8 timestamp,v9 boolean);"

echo "--- testing sinks"
cd ../../
sqllogictest -p 4566 -d dev './e2e_test/sink/cassandra_sink.slt'
sleep 1
cd apache-cassandra-4.1.3/bin
./cqlsh -e "COPY demo.demo_bhv_table TO './query_result.csv' WITH HEADER = false AND ENCODING = 'UTF-8';"

if cat ./query_result.csv | awk -F "," '{
exit !($1 == 1 && $2 == 1 && $3 == 1 && $4 == 1.1 && $5 == 1.2 && $6 == "test" && $7 == "2013-01-01" && $8 == "2013-01-01 01:01:01.000+0000" && $9 == "False\r"); }'; then
echo "Cassandra sink check passed"
else
cat ./query_result.csv
echo "The output is not as expected."
exit 1
fi

echo "--- Kill cluster"
cd ../../
cargo make ci-kill
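A note on the check above: awk's exit !(...) makes it return status 0 only when every field of the exported row matches, so the if takes the success branch. A stand-alone sketch of the same pattern with an illustrative CSV line whose values mirror the predicate (the real export also carries a trailing \r on the last field):

    echo '1,1,1,1.1,1.2,test,2013-01-01,2013-01-01 01:01:01.000+0000,False' \
      | awk -F "," '{ exit !($1 == 1 && $6 == "test" && $7 == "2013-01-01") }' \
      && echo "row matches"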
2 changes: 1 addition & 1 deletion ci/scripts/e2e-clickhouse-sink-test.sh
Expand Up @@ -24,7 +24,7 @@ shift $((OPTIND -1))
download_and_prepare_rw "$profile" source

echo "--- starting risingwave cluster"
cargo make ci-start ci-clickhouse-test
cargo make ci-start ci-sink-test
sleep 1


3 changes: 1 addition & 2 deletions ci/scripts/e2e-deltalake-sink-rust-test.sh
@@ -32,8 +32,7 @@ mkdir ./connector-node
tar xf ./risingwave-connector.tar.gz -C ./connector-node

echo "--- starting risingwave cluster"
mkdir -p .risingwave/log
cargo make ci-start ci-deltalake-test
cargo make ci-start ci-sink-test
sleep 1

# prepare minio deltalake sink
59 changes: 59 additions & 0 deletions ci/scripts/e2e-doris-sink-test.sh
@@ -0,0 +1,59 @@
#!/usr/bin/env bash

# Exits as soon as any line fails.
set -euo pipefail

source ci/scripts/common.sh

while getopts 'p:' opt; do
case ${opt} in
p )
profile=$OPTARG
;;
\? )
echo "Invalid Option: -$OPTARG" 1>&2
exit 1
;;
: )
echo "Invalid option: $OPTARG requires an argument" 1>&2
;;
esac
done
shift $((OPTIND -1))

download_and_prepare_rw "$profile" source

echo "--- starting risingwave cluster"
cargo make ci-start ci-sink-test
sleep 1

echo "--- create doris table"
apt-get update -y && apt-get install -y mysql-client
sleep 2
mysql -uroot -P 9030 -h doris-fe-server -e "CREATE database demo;use demo;
CREATE table demo_bhv_table(v1 int,v2 smallint,v3 bigint,v4 float,v5 double,v6 string,v7 datev2,v8 datetime,v9 boolean) UNIQUE KEY(\`v1\`)
DISTRIBUTED BY HASH(\`v1\`) BUCKETS 1
PROPERTIES (
\"replication_allocation\" = \"tag.location.default: 1\"
);
CREATE USER 'users'@'%' IDENTIFIED BY '123456';
GRANT ALL ON *.* TO 'users'@'%';"
sleep 2

echo "--- testing sinks"
sqllogictest -p 4566 -d dev './e2e_test/sink/doris_sink.slt'
sleep 1
mysql -uroot -P 9030 -h doris-fe-server -e "select * from demo.demo_bhv_table" > ./query_result.csv


if cat ./query_result.csv | sed '1d; s/\t/,/g' | awk -F "," '{
exit !($1 == 1 && $2 == 1 && $3 == 1 && $4 == 1.1 && $5 == 1.2 && $6 == "test" && $7 == "2013-01-01" && $8 == "2013-01-01 01:01:01" && $9 == 0); }'; then
echo "Doris sink check passed"
else
cat ./query_result.csv
echo "The output is not as expected."
exit 1
fi

echo "--- Kill cluster"
cargo make ci-kill
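mysql's batch output is tab-separated and begins with a header row, which is why the check pipes it through sed '1d; s/\t/,/g' before the comma-delimited awk comparison. A minimal stand-alone illustration of that transformation (the sample rows are made up):

    printf 'v1\tv2\tv6\n1\t1\ttest\n' | sed '1d; s/\t/,/g'
    # prints: 1,1,test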
2 changes: 1 addition & 1 deletion ci/scripts/e2e-pulsar-sink-test.sh
@@ -21,7 +21,7 @@ shift $((OPTIND -1))
download_and_prepare_rw "$profile" source

echo "--- starting risingwave cluster"
cargo make ci-start ci-pulsar-test
cargo make ci-start ci-sink-test
sleep 1

echo "--- waiting until pulsar is healthy"
48 changes: 48 additions & 0 deletions ci/scripts/e2e-redis-sink-test.sh
@@ -0,0 +1,48 @@
#!/usr/bin/env bash

# Exits as soon as any line fails.
set -euo pipefail

source ci/scripts/common.sh

while getopts 'p:' opt; do
case ${opt} in
p )
profile=$OPTARG
;;
\? )
echo "Invalid Option: -$OPTARG" 1>&2
exit 1
;;
: )
echo "Invalid option: $OPTARG requires an argument" 1>&2
;;
esac
done
shift $((OPTIND -1))

download_and_prepare_rw "$profile" source

echo "--- starting risingwave cluster"
cargo make ci-start ci-sink-test
apt-get update -y && apt-get install -y redis-server
sleep 1

echo "--- testing sinks"
sqllogictest -p 4566 -d dev './e2e_test/sink/redis_sink.slt'
sleep 1

redis-cli -h redis-server -p 6379 get {\"v1\":1} >> ./query_result.txt
redis-cli -h redis-server -p 6379 get V1:1 >> ./query_result.txt

# check sink destination using shell
if cat ./query_result.txt | tr '\n' '\0' | xargs -0 -n1 bash -c '[[ "$0" == "{\"v1\":1,\"v2\":1,\"v3\":1,\"v4\":1.100000023841858,\"v5\":1.2,\"v6\":\"test\",\"v7\":734869,\"v8\":\"2013-01-01T01:01:01.000000Z\",\"v9\":false}" || "$0" == "V2:1,V3:1" ]]'; then
echo "Redis sink check passed"
else
cat ./query_result.txt
echo "The output is not as expected."
exit 1
fi

echo "--- Kill cluster"
cargo make ci-kill
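The verification converts newlines to NUL bytes and runs one bash test per line via xargs -0 -n1 (each line arrives as $0 inside bash -c), so every line of query_result.txt must equal one of the two expected sink values. A minimal sketch of that pattern with illustrative input:

    printf 'foo\nbar\n' | tr '\n' '\0' \
      | xargs -0 -n1 bash -c '[[ "$0" == "foo" || "$0" == "bar" ]]' \
      && echo "all lines match"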