diff --git a/ci/scripts/backwards-compat-test.sh b/ci/scripts/backwards-compat-test.sh index 90ddcf09b740c..d539748a23fd9 100755 --- a/ci/scripts/backwards-compat-test.sh +++ b/ci/scripts/backwards-compat-test.sh @@ -111,26 +111,26 @@ setup_old_cluster() { echo "--- Get RisingWave binary for $OLD_VERSION" OLD_URL=https://github.com/risingwavelabs/risingwave/releases/download/v${OLD_VERSION}/risingwave-v${OLD_VERSION}-x86_64-unknown-linux.tar.gz set +e - wget $OLD_URL + wget "$OLD_URL" if [[ "$?" -ne 0 ]]; then set -e echo "Failed to download ${OLD_VERSION} from github releases, build from source later during \`risedev d\`" configure_rw_build else set -e - tar -xvf risingwave-v${OLD_VERSION}-x86_64-unknown-linux.tar.gz + tar -xvf risingwave-v"${OLD_VERSION}"-x86_64-unknown-linux.tar.gz mv risingwave target/debug/risingwave echo "--- Start cluster on tag $OLD_VERSION" git config --global --add safe.directory /risingwave - configure_rw $OLD_VERSION + configure_rw "$OLD_VERSION" fi } setup_new_cluster() { echo "--- Setup Risingwave @ $RW_COMMIT" git checkout - - download_and_prepare_rw $profile common + download_and_prepare_rw "$profile" common # Make sure we always start w/o old config rm -r .risingwave/config } diff --git a/ci/scripts/build.sh b/ci/scripts/build.sh index ac02b99d1688c..0f4d40d8ff65c 100755 --- a/ci/scripts/build.sh +++ b/ci/scripts/build.sh @@ -41,9 +41,9 @@ cargo fmt --all -- --check echo "--- Build Rust components" if [[ "$profile" == "ci-dev" ]]; then - RISINGWAVE_FEATURE_FLAGS="--features rw-dynamic-link --no-default-features" + RISINGWAVE_FEATURE_FLAGS=(--features rw-dynamic-link --no-default-features) else - RISINGWAVE_FEATURE_FLAGS="--features rw-static-link" + RISINGWAVE_FEATURE_FLAGS=(--features rw-static-link) fi cargo build \ @@ -53,7 +53,7 @@ cargo build \ -p risingwave_sqlsmith \ -p risingwave_compaction_test \ -p risingwave_e2e_extended_mode_test \ - $RISINGWAVE_FEATURE_FLAGS \ + "${RISINGWAVE_FEATURE_FLAGS[@]}" \ --features 
embedded-python-udf \ --profile "$profile" diff --git a/ci/scripts/common.sh b/ci/scripts/common.sh index 5c4ed6e5bc913..d99066cb3d5e2 100755 --- a/ci/scripts/common.sh +++ b/ci/scripts/common.sh @@ -105,11 +105,11 @@ get_latest_kafka_version() { local versions=$(curl -s https://downloads.apache.org/kafka/ | grep -Eo 'href="[0-9]+\.[0-9]+\.[0-9]+/"' | grep -Eo "[0-9]+\.[0-9]+\.[0-9]+") # Sort the version numbers and get the latest one local latest_version=$(echo "$versions" | sort -V | tail -n1) - echo $latest_version + echo "$latest_version" } get_latest_kafka_download_url() { local latest_version=$(get_latest_kafka_version) local download_url="https://downloads.apache.org/kafka/${latest_version}/kafka_2.13-${latest_version}.tgz" - echo $download_url + echo "$download_url" } diff --git a/ci/scripts/connector-node-integration-test.sh b/ci/scripts/connector-node-integration-test.sh index a6b748c5728c7..0ebdd35ea682a 100755 --- a/ci/scripts/connector-node-integration-test.sh +++ b/ci/scripts/connector-node-integration-test.sh @@ -67,8 +67,8 @@ MC_PATH=${PWD}/mc ${MC_PATH} config host add minio http://127.0.0.1:9000 minioadmin minioadmin echo "--- starting connector-node service" -mkdir -p ${RISINGWAVE_ROOT}/java/connector-node/assembly/target/ -cd ${RISINGWAVE_ROOT}/java/connector-node/assembly/target/ +mkdir -p "${RISINGWAVE_ROOT}"/java/connector-node/assembly/target/ +cd "${RISINGWAVE_ROOT}"/java/connector-node/assembly/target/ # tar xvf risingwave-connector-1.0.0.tar.gz > /dev/null buildkite-agent artifact download risingwave-connector.tar.gz ./ tar xvf risingwave-connector.tar.gz > /dev/null @@ -77,7 +77,7 @@ sleep 3 # generate data echo "--- starting generate streamchunk data" -cd ${RISINGWAVE_ROOT}/java/connector-node/python-client +cd "${RISINGWAVE_ROOT}"/java/connector-node/python-client buildkite-agent artifact download java-binding-integration-test.tar.zst ./ tar xf java-binding-integration-test.tar.zst bin ./bin/data-chunk-payload-convert-generator 
data/sink_input.json > ./data/sink_input @@ -85,13 +85,13 @@ tar xf java-binding-integration-test.tar.zst bin ./bin/data-chunk-payload-generator 30 > ./data/stream_chunk_data echo "--- prepare integration tests" -cd ${RISINGWAVE_ROOT}/java/connector-node +cd "${RISINGWAVE_ROOT}"/java/connector-node pip3 install grpcio grpcio-tools psycopg2 psycopg2-binary pyspark==3.3 black cd python-client && bash gen-stub.sh && bash format-python.sh --check export PYTHONPATH=proto echo "--- running streamchunk data format integration tests" -cd ${RISINGWAVE_ROOT}/java/connector-node/python-client +cd "${RISINGWAVE_ROOT}"/java/connector-node/python-client if python3 integration_tests.py --stream_chunk_format_test --input_binary_file="./data/stream_chunk_data" --data_format_use_json=False; then echo "StreamChunk data format test passed" else @@ -106,8 +106,8 @@ type=("StreamChunk format") ${MC_PATH} mb minio/bucket for ((i=0; i<${#type[@]}; i++)); do echo "--- running file ${type[i]} integration tests" - cd ${RISINGWAVE_ROOT}/java/connector-node/python-client - if python3 integration_tests.py --file_sink ${sink_input_feature[i]}; then + cd "${RISINGWAVE_ROOT}"/java/connector-node/python-client + if python3 integration_tests.py --file_sink "${sink_input_feature[i]}"; then echo "File sink ${type[i]} test passed" else echo "File sink ${type[i]} test failed" diff --git a/ci/scripts/deterministic-e2e-test.sh b/ci/scripts/deterministic-e2e-test.sh index 68a41fafe7433..cb23f1cd7f247 100755 --- a/ci/scripts/deterministic-e2e-test.sh +++ b/ci/scripts/deterministic-e2e-test.sh @@ -31,25 +31,25 @@ export LOGDIR=.risingwave/log mkdir -p $LOGDIR echo "--- deterministic simulation e2e, ci-3cn-2fe, ddl" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/ddl/\*\*/\*.slt 2> $LOGDIR/ddl-{}.log && rm $LOGDIR/ddl-{}.log' +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/ddl/\*\*/\*.slt 2> $LOGDIR/ddl-{}.log && rm $LOGDIR/ddl-{}.log' echo 
"--- deterministic simulation e2e, ci-3cn-2fe, streaming" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/streaming-{}.log && rm $LOGDIR/streaming-{}.log' +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/streaming-{}.log && rm $LOGDIR/streaming-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe, batch" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/batch-{}.log && rm $LOGDIR/batch-{}.log' +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/batch-{}.log && rm $LOGDIR/batch-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe, kafka source" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation --kafka-datadir=./scripts/source/test_data ./e2e_test/source/basic/kafka\*.slt 2> $LOGDIR/source-{}.log && rm $LOGDIR/source-{}.log' +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation --kafka-datadir=./scripts/source/test_data ./e2e_test/source/basic/kafka\*.slt 2> $LOGDIR/source-{}.log && rm $LOGDIR/source-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe, parallel, streaming" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/parallel-streaming-{}.log && rm $LOGDIR/parallel-streaming-{}.log' +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/parallel-streaming-{}.log && rm $LOGDIR/parallel-streaming-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe, parallel, batch" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/parallel-batch-{}.log && rm $LOGDIR/parallel-batch-{}.log' +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation 
-j 16 ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/parallel-batch-{}.log && rm $LOGDIR/parallel-batch-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe, fuzzing (pre-generated-queries)" timeout 10m seq 64 | parallel MADSIM_TEST_SEED={} './risingwave_simulation --run-sqlsmith-queries ./src/tests/sqlsmith/tests/sqlsmith-query-snapshots/{} 2> $LOGDIR/fuzzing-{}.log && rm $LOGDIR/fuzzing-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe, e2e extended mode test" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation -e 2> $LOGDIR/extended-{}.log && rm $LOGDIR/extended-{}.log' +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation -e 2> $LOGDIR/extended-{}.log && rm $LOGDIR/extended-{}.log' diff --git a/ci/scripts/deterministic-it-test.sh b/ci/scripts/deterministic-it-test.sh index 1c43bd3faae97..40288f5848b16 100755 --- a/ci/scripts/deterministic-it-test.sh +++ b/ci/scripts/deterministic-it-test.sh @@ -19,7 +19,7 @@ mv target/ci-sim target/sim TEST_PATTERN="$@" echo "--- Run integration tests in deterministic simulation mode" -seq $TEST_NUM | parallel "MADSIM_TEST_SEED={} NEXTEST_PROFILE=ci-sim \ +seq "$TEST_NUM" | parallel "MADSIM_TEST_SEED={} NEXTEST_PROFILE=ci-sim \ cargo nextest run \ --no-fail-fast \ --cargo-metadata target/nextest/cargo-metadata.json \ diff --git a/ci/scripts/deterministic-recovery-test.sh b/ci/scripts/deterministic-recovery-test.sh index b14cbce36cbbe..4dd2c1ec88936 100755 --- a/ci/scripts/deterministic-recovery-test.sh +++ b/ci/scripts/deterministic-recovery-test.sh @@ -36,14 +36,14 @@ mkdir -p $LOGDIR filter_stack_trace_for_all_logs() { # Defined in `common.sh` for log in "${LOGDIR}"/*.log; do - filter_stack_trace $log + filter_stack_trace "$log" done } trap filter_stack_trace_for_all_logs ERR echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, background_ddl" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} 
'./risingwave_simulation \ --kill \ --kill-rate=${KILL_RATE} \ ${USE_ARRANGEMENT_BACKFILL:-} \ @@ -51,7 +51,7 @@ ${USE_ARRANGEMENT_BACKFILL:-} \ 2> $LOGDIR/recovery-background-ddl-{}.log && rm $LOGDIR/recovery-background-ddl-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, ddl" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ --kill \ --kill-rate=${KILL_RATE} \ --background-ddl-rate=${BACKGROUND_DDL_RATE} \ @@ -59,7 +59,7 @@ ${USE_ARRANGEMENT_BACKFILL:-} \ ./e2e_test/ddl/\*\*/\*.slt 2> $LOGDIR/recovery-ddl-{}.log && rm $LOGDIR/recovery-ddl-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, streaming" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ --kill \ --kill-rate=${KILL_RATE} \ --background-ddl-rate=${BACKGROUND_DDL_RATE} \ @@ -67,7 +67,7 @@ ${USE_ARRANGEMENT_BACKFILL:-} \ ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/recovery-streaming-{}.log && rm $LOGDIR/recovery-streaming-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, batch" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ --kill \ --kill-rate=${KILL_RATE} \ --background-ddl-rate=${BACKGROUND_DDL_RATE} \ @@ -75,7 +75,7 @@ ${USE_ARRANGEMENT_BACKFILL:-} \ ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/recovery-batch-{}.log && rm $LOGDIR/recovery-batch-{}.log' echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, kafka source,sink" -seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ +seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \ --kill \ --kill-rate=${KILL_RATE} \ --kafka-datadir=./scripts/source/test_data \ diff --git a/ci/scripts/e2e-pulsar-sink-test.sh 
b/ci/scripts/e2e-pulsar-sink-test.sh index bf9a390c7e9ee..1ffd5cf56b1ce 100755 --- a/ci/scripts/e2e-pulsar-sink-test.sh +++ b/ci/scripts/e2e-pulsar-sink-test.sh @@ -30,7 +30,7 @@ MAX_RETRY=20 while [[ $HTTP_CODE -ne 200 && MAX_RETRY -gt 0 ]] do HTTP_CODE=$(curl --connect-timeout 2 -s -o /dev/null -w ''%{http_code}'' http://pulsar-server:8080/admin/v2/clusters) - echo Got HTTP Code: $HTTP_CODE + echo Got HTTP Code: "$HTTP_CODE" ((MAX_RETRY--)) sleep 5 done diff --git a/ci/scripts/e2e-source-test.sh b/ci/scripts/e2e-source-test.sh index 8785bc50e2b19..f3c99c404f6e5 100755 --- a/ci/scripts/e2e-source-test.sh +++ b/ci/scripts/e2e-source-test.sh @@ -9,9 +9,9 @@ source ci/scripts/common.sh # $1: subject name # $2: schema file path function register_schema_registry() { - curl -X POST http://message_queue:8081/subjects/$1/versions \ + curl -X POST http://message_queue:8081/subjects/"$1"/versions \ -H 'Content-Type: application/vnd.schemaregistry.v1+json' \ - --data-binary @<(jq -n --arg schema "$(cat $2)" '{schemaType: "PROTOBUF", schema: $schema}') + --data-binary @<(jq -n --arg schema "$(cat "$2")" '{schemaType: "PROTOBUF", schema: $schema}') } # prepare environment @@ -93,9 +93,9 @@ sqllogictest -p 4566 -d dev './e2e_test/source/cdc/cdc.share_stream.slt' # create a share source and check whether heartbeat message is received sqllogictest -p 4566 -d dev './e2e_test/source/cdc/cdc.create_source_job.slt' -table_id=`psql -U root -h localhost -p 4566 -d dev -t -c "select id from rw_internal_tables where name like '%mysql_source%';" | xargs`; -table_count=`psql -U root -h localhost -p 4566 -d dev -t -c "select count(*) from rw_table(${table_id}, public);" | xargs`; -if [ $table_count -eq 0 ]; then +table_id=$(psql -U root -h localhost -p 4566 -d dev -t -c "select id from rw_internal_tables where name like '%mysql_source%';" | xargs); +table_count=$(psql -U root -h localhost -p 4566 -d dev -t -c "select count(*) from rw_table(${table_id}, public);" | xargs); +if [ 
"$table_count" -eq 0 ]; then echo "ERROR: internal table of cdc share source is empty!" exit 1 fi diff --git a/ci/scripts/e2e-test-parallel-for-opendal.sh b/ci/scripts/e2e-test-parallel-for-opendal.sh index fcd63c9f0d586..606adcf929cd7 100755 --- a/ci/scripts/e2e-test-parallel-for-opendal.sh +++ b/ci/scripts/e2e-test-parallel-for-opendal.sh @@ -26,12 +26,12 @@ download_and_prepare_rw "$profile" common echo "--- Download artifacts" download-and-decompress-artifact e2e_test_generated ./ -host_args="-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567" +host_args=(-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567) echo "--- e2e, ci-3cn-3fe-opendal-fs-backend, streaming" RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info" \ risedev ci-start ci-3cn-3fe-opendal-fs-backend -sqllogictest ${host_args} -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-opendal-fs-backend-${profile}" +sqllogictest "${host_args[@]}" -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-opendal-fs-backend-${profile}" echo "--- Kill cluster Streaming" risedev ci-kill @@ -41,10 +41,10 @@ rm -rf /tmp/rw_ci echo "--- e2e, ci-3cn-3fe-opendal-fs-backend, batch" RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info" \ risedev ci-start ci-3cn-3fe-opendal-fs-backend -sqllogictest ${host_args} -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-opendal-fs-backend-ddl-${profile}" -sqllogictest ${host_args} -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-opendal-fs-backend-batch-${profile}" +sqllogictest "${host_args[@]}" -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-opendal-fs-backend-ddl-${profile}" +sqllogictest "${host_args[@]}" -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-opendal-fs-backend-batch-${profile}" echo "--- Kill cluster Batch" risedev ci-kill sleep 1 -rm -rf /tmp/rw_ci \ No newline at end of file +rm -rf /tmp/rw_ci diff --git 
a/ci/scripts/e2e-test-parallel-in-memory.sh b/ci/scripts/e2e-test-parallel-in-memory.sh index 4b55320f115d3..fcde15644c2b6 100755 --- a/ci/scripts/e2e-test-parallel-in-memory.sh +++ b/ci/scripts/e2e-test-parallel-in-memory.sh @@ -23,20 +23,20 @@ shift $((OPTIND -1)) download_and_prepare_rw "$profile" common -host_args="-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567" +host_args=(-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567) echo "--- e2e, ci-3cn-3fe-in-memory, streaming" risedev ci-start ci-3cn-3fe-in-memory sqllogictest --version -sqllogictest ${host_args} -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-in-memory-streaming-${profile}" --label in-memory +sqllogictest "${host_args[@]}" -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-in-memory-streaming-${profile}" --label in-memory echo "--- Kill cluster" risedev ci-kill echo "--- e2e, ci-3cn-3fe-in-memory, batch" risedev ci-start ci-3cn-3fe-in-memory -sqllogictest ${host_args} -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-in-memory-batch-ddl-${profile}" --label in-memory -sqllogictest ${host_args} -d dev './e2e_test/batch/**/*.slt' -j 16 --junit "parallel-in-memory-batch-${profile}" --label in-memory +sqllogictest "${host_args[@]}" -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-in-memory-batch-ddl-${profile}" --label in-memory +sqllogictest "${host_args[@]}" -d dev './e2e_test/batch/**/*.slt' -j 16 --junit "parallel-in-memory-batch-${profile}" --label in-memory echo "--- Kill cluster" risedev ci-kill diff --git a/ci/scripts/e2e-test-parallel.sh b/ci/scripts/e2e-test-parallel.sh index ad1f11a75f7b1..5f16a4c817871 100755 --- a/ci/scripts/e2e-test-parallel.sh +++ b/ci/scripts/e2e-test-parallel.sh @@ -31,28 +31,28 @@ kill_cluster() { risedev ci-kill } -host_args="-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567" +host_args=(-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567) 
RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info,risingwave_storage::hummock::compactor::compactor_runner=warn" echo "--- e2e, ci-3streaming-2serving-3fe, streaming" RUST_LOG=$RUST_LOG \ risedev ci-start ci-3streaming-2serving-3fe -sqllogictest ${host_args} -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-streaming-${profile}" +sqllogictest "${host_args[@]}" -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-streaming-${profile}" kill_cluster echo "--- e2e, ci-3streaming-2serving-3fe, batch" RUST_LOG=$RUST_LOG \ risedev ci-start ci-3streaming-2serving-3fe -sqllogictest ${host_args} -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-batch-ddl-${profile}" -sqllogictest ${host_args} -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-batch-${profile}" +sqllogictest "${host_args[@]}" -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-batch-ddl-${profile}" +sqllogictest "${host_args[@]}" -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-batch-${profile}" kill_cluster echo "--- e2e, ci-3streaming-2serving-3fe, generated" RUST_LOG=$RUST_LOG \ risedev ci-start ci-3streaming-2serving-3fe -sqllogictest ${host_args} -d dev './e2e_test/generated/**/*.slt' -j 16 --junit "parallel-generated-${profile}" +sqllogictest "${host_args[@]}" -d dev './e2e_test/generated/**/*.slt' -j 16 --junit "parallel-generated-${profile}" kill_cluster diff --git a/ci/scripts/gen-flamegraph.sh b/ci/scripts/gen-flamegraph.sh index 8fb71a1aee8ae..1ea2777fce8a8 100755 --- a/ci/scripts/gen-flamegraph.sh +++ b/ci/scripts/gen-flamegraph.sh @@ -124,7 +124,7 @@ AUCTION_TOPIC="nexmark-auction" BID_TOPIC="nexmark-bid" PERSON_TOPIC="nexmark-person" NUM_PARTITIONS=8 -# NOTE: Due to https://github.com/risingwavelabs/risingwave/issues/6747, use `SEPARATE_TOPICS=false` +# NOTE: Due to https://github.com/risingwavelabs/risingwave/issues/6747, use SEPARATE_TOPICS=false SEPARATE_TOPICS=false RUST_LOG="nexmark_server=info" @@ 
-180,7 +180,7 @@ configure_all() { # This has minor effect on the flamegraph, so can ignore for now. # could it be related to profiling on Docker? Needs further investigation. start_nperf() { - ./nperf record -p `pidof compute-node` -o perf.data & + ./nperf record -p $(pidof compute-node) -o perf.data & } start_kafka() { @@ -231,7 +231,7 @@ gen_heap_flamegraph() { JEPROF=$(find . -name 'jeprof' | head -1) chmod +x "$JEPROF" COMPUTE_NODE=".risingwave/bin/risingwave/compute-node" - $JEPROF --collapsed $COMPUTE_NODE $LATEST_HEAP_PROFILE > heap.collapsed + $JEPROF --collapsed $COMPUTE_NODE "$LATEST_HEAP_PROFILE" > heap.collapsed ../flamegraph.pl --color=mem --countname=bytes heap.collapsed > perf.svg mv perf.svg .. popd @@ -268,7 +268,7 @@ run_heap_flamegraph() { echo "--- Running benchmark for $QUERY" echo "--- Setting variables" QUERY_LABEL="$1" - QUERY_FILE_NAME="$(echo $QUERY_LABEL | sed 's/nexmark\-\(.*\)/\1.sql/')" + QUERY_FILE_NAME="$(echo "$QUERY_LABEL" | sed 's/nexmark\-\(.*\)/\1.sql/')" QUERY_PATH="$QUERY_DIR/$QUERY_FILE_NAME" FLAMEGRAPH_PATH="perf-$QUERY_LABEL.svg" echo "QUERY_LABEL: $QUERY_LABEL" @@ -328,7 +328,7 @@ run_cpu_flamegraph() { echo "--- Running benchmark for $QUERY" echo "--- Setting variables" QUERY_LABEL="$1" - QUERY_FILE_NAME="$(echo $QUERY_LABEL | sed 's/nexmark\-\(.*\)/\1.sql/')" + QUERY_FILE_NAME="$(echo "$QUERY_LABEL" | sed 's/nexmark\-\(.*\)/\1.sql/')" QUERY_PATH="$QUERY_DIR/$QUERY_FILE_NAME" FLAMEGRAPH_PATH="perf-$QUERY_LABEL.svg" echo "QUERY_LABEL: $QUERY_LABEL" @@ -367,7 +367,7 @@ run_cpu_flamegraph() { echo "--- Generate flamegraph" gen_cpu_flamegraph - mv perf.svg $FLAMEGRAPH_PATH + mv perf.svg "$FLAMEGRAPH_PATH" echo "--- Uploading flamegraph" buildkite-agent artifact upload "./$FLAMEGRAPH_PATH" diff --git a/ci/scripts/integration-tests.sh b/ci/scripts/integration-tests.sh index ef6c024ac9db0..90d23b2d80355 100755 --- a/ci/scripts/integration-tests.sh +++ b/ci/scripts/integration-tests.sh @@ -38,13 +38,13 @@ echo "--- case: 
${case}, format: ${format}" if [[ -n "${RW_IMAGE_TAG+x}" ]]; then export RW_IMAGE="ghcr.io/risingwavelabs/risingwave:${RW_IMAGE_TAG}" - echo Docker image: $RW_IMAGE + echo Docker image: "$RW_IMAGE" fi if [ "${BUILDKITE_SOURCE}" == "schedule" ]; then # Use ghcr nightly image for scheduled build. If not specified, we use dockerhub's 'risingwavelabs/risingwave'. export RW_IMAGE="ghcr.io/risingwavelabs/risingwave:nightly-$(date '+%Y%m%d')" - echo Docker image: $RW_IMAGE + echo Docker image: "$RW_IMAGE" fi if [ "${case}" == "client-library" ]; then @@ -68,7 +68,7 @@ cd integration_tests/scripts echo "--- rewrite docker compose for protobuf" if [ "${format}" == "protobuf" ]; then - python3 gen_pb_compose.py ${case} ${format} + python3 gen_pb_compose.py "${case}" "${format}" fi echo "--- set vm.max_map_count=2000000 for doris" @@ -76,18 +76,18 @@ max_map_count_original_value=$(sysctl -n vm.max_map_count) sudo sysctl -w vm.max_map_count=2000000 echo "--- run Demos" -python3 run_demos.py --case ${case} --format ${format} +python3 run_demos.py --case "${case}" --format "${format}" echo "--- run docker ps" docker ps echo "--- check if the ingestion is successful" # extract the type of upstream source,e.g. 
mysql,postgres,etc -upstream=$(echo ${case} | cut -d'-' -f 1) -python3 check_data.py ${case} ${upstream} +upstream=$(echo "${case}" | cut -d'-' -f 1) +python3 check_data.py "${case}" "${upstream}" echo "--- clean Demos" -python3 clean_demos.py --case ${case} +python3 clean_demos.py --case "${case}" echo "--- reset vm.max_map_count={$max_map_count_original_value}" -sudo sysctl -w vm.max_map_count=$max_map_count_original_value +sudo sysctl -w vm.max_map_count="$max_map_count_original_value" diff --git a/ci/scripts/java-binding-test.sh b/ci/scripts/java-binding-test.sh index 0c8807411a2e9..0391dcdb51283 100755 --- a/ci/scripts/java-binding-test.sh +++ b/ci/scripts/java-binding-test.sh @@ -39,8 +39,8 @@ risedev ci-kill echo "--- run stream chunk java binding" RISINGWAVE_ROOT=$(git rev-parse --show-toplevel) -cd ${RISINGWAVE_ROOT}/java +cd "${RISINGWAVE_ROOT}"/java -(${RISINGWAVE_ROOT}/bin/data-chunk-payload-generator) | \ +("${RISINGWAVE_ROOT}"/bin/data-chunk-payload-generator) | \ java -cp "./java-binding-integration-test/target/dependency/*:./java-binding-integration-test/target/classes" \ com.risingwave.java.binding.StreamChunkDemo diff --git a/ci/scripts/multi-arch-docker.sh b/ci/scripts/multi-arch-docker.sh index dd61fab27eba5..b97b606201d6a 100755 --- a/ci/scripts/multi-arch-docker.sh +++ b/ci/scripts/multi-arch-docker.sh @@ -62,14 +62,14 @@ echo "--- multi arch image create " if [[ "${#BUILDKITE_COMMIT}" = 40 ]]; then # If the commit is 40 characters long, it's probably a SHA. TAG="git-${BUILDKITE_COMMIT}" - pushGchr ${TAG} + pushGchr "${TAG}" fi if [ "${BUILDKITE_SOURCE}" == "schedule" ]; then # If this is a schedule build, tag the image with the date. TAG="nightly-${date}" - pushGchr ${TAG} - pushDockerhub ${TAG} + pushGchr "${TAG}" + pushDockerhub "${TAG}" TAG="latest" pushGchr ${TAG} fi @@ -77,14 +77,14 @@ fi if [[ -n "${IMAGE_TAG+x}" ]]; then # Tag the image with the $IMAGE_TAG. 
TAG="${IMAGE_TAG}" - pushGchr ${TAG} + pushGchr "${TAG}" fi if [[ -n "${BUILDKITE_TAG}" ]]; then # If there's a tag, we tag the image. TAG="${BUILDKITE_TAG}" - pushGchr ${TAG} - pushDockerhub ${TAG} + pushGchr "${TAG}" + pushDockerhub "${TAG}" TAG="latest" pushDockerhub ${TAG} diff --git a/ci/scripts/regress-test.sh b/ci/scripts/regress-test.sh index 0dacff818bade..4d32f8a30ef40 100755 --- a/ci/scripts/regress-test.sh +++ b/ci/scripts/regress-test.sh @@ -51,20 +51,20 @@ RUST_BACKTRACE=1 target/debug/risingwave_regress_test --host db \ -p 5432 \ -u postgres \ --database postgres \ - --input `pwd`/src/tests/regress/data \ - --output `pwd`/src/tests/regress/output \ - --schedule `pwd`/src/tests/regress/data/schedule \ + --input $(pwd)/src/tests/regress/data \ + --output $(pwd)/src/tests/regress/output \ + --schedule $(pwd)/src/tests/regress/data/schedule \ --mode postgres echo "--- ci-3cn-1fe, RisingWave regress test" -rm -rf `pwd`/src/tests/regress/output +rm -rf $(pwd)/src/tests/regress/output risedev ci-start ci-3cn-1fe RUST_BACKTRACE=1 target/debug/risingwave_regress_test --host 127.0.0.1 \ -p 4566 \ -u root \ - --input `pwd`/src/tests/regress/data \ - --output `pwd`/src/tests/regress/output \ - --schedule `pwd`/src/tests/regress/data/schedule \ + --input $(pwd)/src/tests/regress/data \ + --output $(pwd)/src/tests/regress/output \ + --schedule $(pwd)/src/tests/regress/data/schedule \ --mode risingwave echo "--- Kill cluster" diff --git a/ci/scripts/release.sh b/ci/scripts/release.sh index 7fde7eb9b00e9..1c68b8acf9fa6 100755 --- a/ci/scripts/release.sh +++ b/ci/scripts/release.sh @@ -38,12 +38,12 @@ unset RUSTC_WRAPPER # disable sccache echo "--- Install protoc3" PROTOC_ARCH=${ARCH} -if [ ${ARCH} == "aarch64" ]; then +if [ "${ARCH}" == "aarch64" ]; then # shellcheck disable=SC1068 PROTOC_ARCH="aarch_64" fi -curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v3.15.8/protoc-3.15.8-linux-${PROTOC_ARCH}.zip -unzip -o 
protoc-3.15.8-linux-${PROTOC_ARCH}.zip -d protoc +curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v3.15.8/protoc-3.15.8-linux-"${PROTOC_ARCH}".zip +unzip -o protoc-3.15.8-linux-"${PROTOC_ARCH}".zip -d protoc mv ./protoc/bin/protoc /usr/local/bin/ mv ./protoc/include/* /usr/local/include/ @@ -66,7 +66,7 @@ fi echo "--- Build risingwave release binary" export ENABLE_BUILD_DASHBOARD=1 -if [ ${ARCH} == "aarch64" ]; then +if [ "${ARCH}" == "aarch64" ]; then # enable large page size support for jemalloc # see https://github.com/tikv/jemallocator/blob/802969384ae0c581255f3375ee2ba774c8d2a754/jemalloc-sys/build.rs#L218 export JEMALLOC_SYS_WITH_LG_PAGE=16 @@ -77,21 +77,21 @@ cd target/release && chmod +x risingwave risectl echo "--- Upload nightly binary to s3" if [ "${BUILDKITE_SOURCE}" == "schedule" ]; then - tar -czvf risingwave-"$(date '+%Y%m%d')"-${ARCH}-unknown-linux.tar.gz risingwave - aws s3 cp risingwave-"$(date '+%Y%m%d')"-${ARCH}-unknown-linux.tar.gz s3://rw-nightly-pre-built-binary + tar -czvf risingwave-"$(date '+%Y%m%d')"-"${ARCH}"-unknown-linux.tar.gz risingwave + aws s3 cp risingwave-"$(date '+%Y%m%d')"-"${ARCH}"-unknown-linux.tar.gz s3://rw-nightly-pre-built-binary elif [[ -n "${BINARY_NAME+x}" ]]; then - tar -czvf risingwave-${BINARY_NAME}-${ARCH}-unknown-linux.tar.gz risingwave - aws s3 cp risingwave-${BINARY_NAME}-${ARCH}-unknown-linux.tar.gz s3://rw-nightly-pre-built-binary + tar -czvf risingwave-"${BINARY_NAME}"-"${ARCH}"-unknown-linux.tar.gz risingwave + aws s3 cp risingwave-"${BINARY_NAME}"-"${ARCH}"-unknown-linux.tar.gz s3://rw-nightly-pre-built-binary fi echo "--- Build connector node" -cd ${REPO_ROOT}/java && mvn -B package -Dmaven.test.skip=true -Dno-build-rust +cd "${REPO_ROOT}"/java && mvn -B package -Dmaven.test.skip=true -Dno-build-rust if [[ -n "${BUILDKITE_TAG}" ]]; then echo "--- Collect all release assets" - cd ${REPO_ROOT} && mkdir release-assets && cd release-assets - cp -r ${REPO_ROOT}/target/release/* . 
- mv ${REPO_ROOT}/java/connector-node/assembly/target/risingwave-connector-1.0.0.tar.gz risingwave-connector-"${BUILDKITE_TAG}".tar.gz + cd "${REPO_ROOT}" && mkdir release-assets && cd release-assets + cp -r "${REPO_ROOT}"/target/release/* . + mv "${REPO_ROOT}"/java/connector-node/assembly/target/risingwave-connector-1.0.0.tar.gz risingwave-connector-"${BUILDKITE_TAG}".tar.gz tar -zxvf risingwave-connector-"${BUILDKITE_TAG}".tar.gz libs ls -l @@ -102,7 +102,7 @@ if [[ -n "${BUILDKITE_TAG}" ]]; then echo "--- Release create" set +e - response=$(gh api repos/risingwavelabs/risingwave/releases/tags/${BUILDKITE_TAG} 2>&1) + response=$(gh api repos/risingwavelabs/risingwave/releases/tags/"${BUILDKITE_TAG}" 2>&1) set -euo pipefail if [[ $response == *"Not Found"* ]]; then echo "Tag ${BUILDKITE_TAG} does not exist. Creating release..." @@ -112,18 +112,18 @@ if [[ -n "${BUILDKITE_TAG}" ]]; then fi echo "--- Release upload risingwave asset" - tar -czvf risingwave-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux.tar.gz risingwave - gh release upload "${BUILDKITE_TAG}" risingwave-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux.tar.gz + tar -czvf risingwave-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux.tar.gz risingwave + gh release upload "${BUILDKITE_TAG}" risingwave-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux.tar.gz echo "--- Release upload risingwave debug info" - tar -czvf risingwave-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux.dwp.tar.gz risingwave.dwp - gh release upload "${BUILDKITE_TAG}" risingwave-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux.dwp.tar.gz + tar -czvf risingwave-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux.dwp.tar.gz risingwave.dwp + gh release upload "${BUILDKITE_TAG}" risingwave-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux.dwp.tar.gz echo "--- Release upload risectl asset" - tar -czvf risectl-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux.tar.gz risectl - gh release upload "${BUILDKITE_TAG}" risectl-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux.tar.gz + tar -czvf 
risectl-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux.tar.gz risectl + gh release upload "${BUILDKITE_TAG}" risectl-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux.tar.gz echo "--- Release upload risingwave-all-in-one asset" - tar -czvf risingwave-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux-all-in-one.tar.gz risingwave libs - gh release upload "${BUILDKITE_TAG}" risingwave-"${BUILDKITE_TAG}"-${ARCH}-unknown-linux-all-in-one.tar.gz + tar -czvf risingwave-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux-all-in-one.tar.gz risingwave libs + gh release upload "${BUILDKITE_TAG}" risingwave-"${BUILDKITE_TAG}"-"${ARCH}"-unknown-linux-all-in-one.tar.gz fi diff --git a/ci/scripts/run-e2e-test.sh b/ci/scripts/run-e2e-test.sh index 8d9e393b25ef8..84b3363626138 100755 --- a/ci/scripts/run-e2e-test.sh +++ b/ci/scripts/run-e2e-test.sh @@ -169,7 +169,7 @@ if [[ "$RUN_COMPACTION" -eq "1" ]]; then # Poll the current version id until we have around 100 version deltas delta_log_cnt=0 - while [ $delta_log_cnt -le 90 ] + while [ "$delta_log_cnt" -le 90 ] do delta_log_cnt="$(./target/debug/risingwave risectl hummock list-version --verbose | grep -w '^ *id:' | grep -o '[0-9]\+' | head -n 1)" echo "Current version $delta_log_cnt" diff --git a/ci/scripts/run-micro-benchmarks.sh b/ci/scripts/run-micro-benchmarks.sh index 371cc416e7ac5..6094b3b7f284f 100755 --- a/ci/scripts/run-micro-benchmarks.sh +++ b/ci/scripts/run-micro-benchmarks.sh @@ -12,7 +12,7 @@ BENCHMARKS="stream_hash_agg json_parser bench_block_iter bench_compactor bench_l # Reference: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html get_instance_type() { - TOKEN=`curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600"` \ + TOKEN=$(curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600") \ && curl -H "X-aws-ec2-metadata-token: $TOKEN" -v http://169.254.169.254/latest/meta-data/instance-type } @@ -26,8 +26,8 @@ bench() { 
if [[ $REASON == \"benchmark-complete\" ]]; then ID="$(jq ".id" <<< "$LINE")" MEAN="$(jq ".mean" <<< "$LINE")" - EST="$(jq ".estimate" <<< $MEAN)" - UNIT="$(jq ".unit" <<< $MEAN)" + EST="$(jq ".estimate" <<< "$MEAN")" + UNIT="$(jq ".unit" <<< "$MEAN")" echo "Benchmark ID: $ID" echo "Average Time Taken: $EST" @@ -71,7 +71,7 @@ main() { OLD_IFS=$IFS IFS=$'\n' - bench $BENCHMARK + bench "$BENCHMARK" IFS=$OLD_IFS @@ -90,7 +90,7 @@ local_test() { for BENCHMARK in $BENCHMARKS do echo "--- Running $BENCHMARK" - bench $BENCHMARK + bench "$BENCHMARK" done NO_TRAILING_COMMA=$(sed -E '$ s/(.*),$/\1/' ./results.json) echo "$NO_TRAILING_COMMA" > ./results.json diff --git a/ci/scripts/run-unit-test.sh b/ci/scripts/run-unit-test.sh index 8fe4b150bf866..d9a723a34fa19 100755 --- a/ci/scripts/run-unit-test.sh +++ b/ci/scripts/run-unit-test.sh @@ -6,9 +6,9 @@ set -euo pipefail REPO_ROOT=${PWD} echo "+++ Run python UDF SDK unit tests" -cd ${REPO_ROOT}/src/expr/udf/python +cd "${REPO_ROOT}"/src/expr/udf/python python3 -m pytest -cd ${REPO_ROOT} +cd "${REPO_ROOT}" echo "+++ Run unit tests" # use tee to disable progress bar diff --git a/ci/scripts/s3-source-test-for-opendal-fs-engine.sh b/ci/scripts/s3-source-test-for-opendal-fs-engine.sh index 7521eaa97b336..11bdb8ff2ce62 100755 --- a/ci/scripts/s3-source-test-for-opendal-fs-engine.sh +++ b/ci/scripts/s3-source-test-for-opendal-fs-engine.sh @@ -30,7 +30,7 @@ risedev ci-start ci-3cn-3fe-opendal-fs-backend echo "--- Run test" python3 -m pip install minio psycopg2-binary -python3 e2e_test/s3/$script +python3 e2e_test/s3/"$script" echo "--- Kill cluster" rm -rf /tmp/rw_ci diff --git a/ci/scripts/s3-source-test.sh b/ci/scripts/s3-source-test.sh index 80c4f5fa3f16c..62a58ab3242f6 100755 --- a/ci/scripts/s3-source-test.sh +++ b/ci/scripts/s3-source-test.sh @@ -30,7 +30,7 @@ risedev ci-start ci-1cn-1fe echo "--- Run test" python3 -m pip install minio psycopg2-binary opendal -python3 e2e_test/s3/$script +python3 e2e_test/s3/"$script" echo "--- 
Kill cluster" risedev ci-kill