Skip to content

Commit

Permalink
style: apply shellcheck autofix to sh scripts (#16274)
Browse files Browse the repository at this point in the history
Signed-off-by: xxchan <[email protected]>
  • Loading branch information
xxchan authored Apr 12, 2024
1 parent 5fb6f69 commit 4f53f89
Show file tree
Hide file tree
Showing 23 changed files with 110 additions and 110 deletions.
8 changes: 4 additions & 4 deletions ci/scripts/backwards-compat-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -111,26 +111,26 @@ setup_old_cluster() {
echo "--- Get RisingWave binary for $OLD_VERSION"
OLD_URL=https://github.com/risingwavelabs/risingwave/releases/download/v${OLD_VERSION}/risingwave-v${OLD_VERSION}-x86_64-unknown-linux.tar.gz
set +e
wget $OLD_URL
wget "$OLD_URL"
if [[ "$?" -ne 0 ]]; then
set -e
echo "Failed to download ${OLD_VERSION} from github releases, build from source later during \`risedev d\`"
configure_rw_build
else
set -e
tar -xvf risingwave-v${OLD_VERSION}-x86_64-unknown-linux.tar.gz
tar -xvf risingwave-v"${OLD_VERSION}"-x86_64-unknown-linux.tar.gz
mv risingwave target/debug/risingwave

echo "--- Start cluster on tag $OLD_VERSION"
git config --global --add safe.directory /risingwave
configure_rw $OLD_VERSION
configure_rw "$OLD_VERSION"
fi
}

setup_new_cluster() {
echo "--- Setup Risingwave @ $RW_COMMIT"
git checkout -
download_and_prepare_rw $profile common
download_and_prepare_rw "$profile" common
# Make sure we always start w/o old config
rm -r .risingwave/config
}
Expand Down
6 changes: 3 additions & 3 deletions ci/scripts/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,9 @@ cargo fmt --all -- --check
echo "--- Build Rust components"

if [[ "$profile" == "ci-dev" ]]; then
RISINGWAVE_FEATURE_FLAGS="--features rw-dynamic-link --no-default-features"
RISINGWAVE_FEATURE_FLAGS=(--features rw-dynamic-link --no-default-features)
else
RISINGWAVE_FEATURE_FLAGS="--features rw-static-link"
RISINGWAVE_FEATURE_FLAGS=(--features rw-static-link)
fi

cargo build \
Expand All @@ -53,7 +53,7 @@ cargo build \
-p risingwave_sqlsmith \
-p risingwave_compaction_test \
-p risingwave_e2e_extended_mode_test \
$RISINGWAVE_FEATURE_FLAGS \
"${RISINGWAVE_FEATURE_FLAGS[@]}" \
--features embedded-python-udf \
--profile "$profile"

Expand Down
4 changes: 2 additions & 2 deletions ci/scripts/common.sh
Original file line number Diff line number Diff line change
Expand Up @@ -105,11 +105,11 @@ get_latest_kafka_version() {
local versions=$(curl -s https://downloads.apache.org/kafka/ | grep -Eo 'href="[0-9]+\.[0-9]+\.[0-9]+/"' | grep -Eo "[0-9]+\.[0-9]+\.[0-9]+")
# Sort the version numbers and get the latest one
local latest_version=$(echo "$versions" | sort -V | tail -n1)
echo $latest_version
echo "$latest_version"
}

get_latest_kafka_download_url() {
local latest_version=$(get_latest_kafka_version)
local download_url="https://downloads.apache.org/kafka/${latest_version}/kafka_2.13-${latest_version}.tgz"
echo $download_url
echo "$download_url"
}
14 changes: 7 additions & 7 deletions ci/scripts/connector-node-integration-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,8 @@ MC_PATH=${PWD}/mc
${MC_PATH} config host add minio http://127.0.0.1:9000 minioadmin minioadmin

echo "--- starting connector-node service"
mkdir -p ${RISINGWAVE_ROOT}/java/connector-node/assembly/target/
cd ${RISINGWAVE_ROOT}/java/connector-node/assembly/target/
mkdir -p "${RISINGWAVE_ROOT}"/java/connector-node/assembly/target/
cd "${RISINGWAVE_ROOT}"/java/connector-node/assembly/target/
# tar xvf risingwave-connector-1.0.0.tar.gz > /dev/null
buildkite-agent artifact download risingwave-connector.tar.gz ./
tar xvf risingwave-connector.tar.gz > /dev/null
Expand All @@ -77,21 +77,21 @@ sleep 3

# generate data
echo "--- starting generate streamchunk data"
cd ${RISINGWAVE_ROOT}/java/connector-node/python-client
cd "${RISINGWAVE_ROOT}"/java/connector-node/python-client
buildkite-agent artifact download java-binding-integration-test.tar.zst ./
tar xf java-binding-integration-test.tar.zst bin
./bin/data-chunk-payload-convert-generator data/sink_input.json > ./data/sink_input
./bin/data-chunk-payload-convert-generator data/upsert_sink_input.json > ./data/upsert_sink_input
./bin/data-chunk-payload-generator 30 > ./data/stream_chunk_data

echo "--- prepare integration tests"
cd ${RISINGWAVE_ROOT}/java/connector-node
cd "${RISINGWAVE_ROOT}"/java/connector-node
pip3 install grpcio grpcio-tools psycopg2 psycopg2-binary pyspark==3.3 black
cd python-client && bash gen-stub.sh && bash format-python.sh --check
export PYTHONPATH=proto

echo "--- running streamchunk data format integration tests"
cd ${RISINGWAVE_ROOT}/java/connector-node/python-client
cd "${RISINGWAVE_ROOT}"/java/connector-node/python-client
if python3 integration_tests.py --stream_chunk_format_test --input_binary_file="./data/stream_chunk_data" --data_format_use_json=False; then
echo "StreamChunk data format test passed"
else
Expand All @@ -106,8 +106,8 @@ type=("StreamChunk format")
${MC_PATH} mb minio/bucket
for ((i=0; i<${#type[@]}; i++)); do
echo "--- running file ${type[i]} integration tests"
cd ${RISINGWAVE_ROOT}/java/connector-node/python-client
if python3 integration_tests.py --file_sink ${sink_input_feature[i]}; then
cd "${RISINGWAVE_ROOT}"/java/connector-node/python-client
if python3 integration_tests.py --file_sink "${sink_input_feature[i]}"; then
echo "File sink ${type[i]} test passed"
else
echo "File sink ${type[i]} test failed"
Expand Down
14 changes: 7 additions & 7 deletions ci/scripts/deterministic-e2e-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -31,25 +31,25 @@ export LOGDIR=.risingwave/log
mkdir -p $LOGDIR

echo "--- deterministic simulation e2e, ci-3cn-2fe, ddl"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/ddl/\*\*/\*.slt 2> $LOGDIR/ddl-{}.log && rm $LOGDIR/ddl-{}.log'
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/ddl/\*\*/\*.slt 2> $LOGDIR/ddl-{}.log && rm $LOGDIR/ddl-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, streaming"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/streaming-{}.log && rm $LOGDIR/streaming-{}.log'
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/streaming-{}.log && rm $LOGDIR/streaming-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, batch"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/batch-{}.log && rm $LOGDIR/batch-{}.log'
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/batch-{}.log && rm $LOGDIR/batch-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, kafka source"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation --kafka-datadir=./scripts/source/test_data ./e2e_test/source/basic/kafka\*.slt 2> $LOGDIR/source-{}.log && rm $LOGDIR/source-{}.log'
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation --kafka-datadir=./scripts/source/test_data ./e2e_test/source/basic/kafka\*.slt 2> $LOGDIR/source-{}.log && rm $LOGDIR/source-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, parallel, streaming"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/parallel-streaming-{}.log && rm $LOGDIR/parallel-streaming-{}.log'
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/parallel-streaming-{}.log && rm $LOGDIR/parallel-streaming-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, parallel, batch"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/parallel-batch-{}.log && rm $LOGDIR/parallel-batch-{}.log'
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/parallel-batch-{}.log && rm $LOGDIR/parallel-batch-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, fuzzing (pre-generated-queries)"
timeout 10m seq 64 | parallel MADSIM_TEST_SEED={} './risingwave_simulation --run-sqlsmith-queries ./src/tests/sqlsmith/tests/sqlsmith-query-snapshots/{} 2> $LOGDIR/fuzzing-{}.log && rm $LOGDIR/fuzzing-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, e2e extended mode test"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation -e 2> $LOGDIR/extended-{}.log && rm $LOGDIR/extended-{}.log'
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation -e 2> $LOGDIR/extended-{}.log && rm $LOGDIR/extended-{}.log'
2 changes: 1 addition & 1 deletion ci/scripts/deterministic-it-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ mv target/ci-sim target/sim
TEST_PATTERN="$@"

echo "--- Run integration tests in deterministic simulation mode"
seq $TEST_NUM | parallel "MADSIM_TEST_SEED={} NEXTEST_PROFILE=ci-sim \
seq "$TEST_NUM" | parallel "MADSIM_TEST_SEED={} NEXTEST_PROFILE=ci-sim \
cargo nextest run \
--no-fail-fast \
--cargo-metadata target/nextest/cargo-metadata.json \
Expand Down
12 changes: 6 additions & 6 deletions ci/scripts/deterministic-recovery-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -36,46 +36,46 @@ mkdir -p $LOGDIR
filter_stack_trace_for_all_logs() {
# Defined in `common.sh`
for log in "${LOGDIR}"/*.log; do
filter_stack_trace $log
filter_stack_trace "$log"
done
}

trap filter_stack_trace_for_all_logs ERR

echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, background_ddl"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
--kill \
--kill-rate=${KILL_RATE} \
${USE_ARRANGEMENT_BACKFILL:-} \
./e2e_test/background_ddl/sim/basic.slt \
2> $LOGDIR/recovery-background-ddl-{}.log && rm $LOGDIR/recovery-background-ddl-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, ddl"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
--kill \
--kill-rate=${KILL_RATE} \
--background-ddl-rate=${BACKGROUND_DDL_RATE} \
${USE_ARRANGEMENT_BACKFILL:-} \
./e2e_test/ddl/\*\*/\*.slt 2> $LOGDIR/recovery-ddl-{}.log && rm $LOGDIR/recovery-ddl-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, streaming"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
--kill \
--kill-rate=${KILL_RATE} \
--background-ddl-rate=${BACKGROUND_DDL_RATE} \
${USE_ARRANGEMENT_BACKFILL:-} \
./e2e_test/streaming/\*\*/\*.slt 2> $LOGDIR/recovery-streaming-{}.log && rm $LOGDIR/recovery-streaming-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, batch"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
--kill \
--kill-rate=${KILL_RATE} \
--background-ddl-rate=${BACKGROUND_DDL_RATE} \
${USE_ARRANGEMENT_BACKFILL:-} \
./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/recovery-batch-{}.log && rm $LOGDIR/recovery-batch-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe-3meta, recovery, kafka source,sink"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
seq "$TEST_NUM" | parallel MADSIM_TEST_SEED={} './risingwave_simulation \
--kill \
--kill-rate=${KILL_RATE} \
--kafka-datadir=./scripts/source/test_data \
Expand Down
2 changes: 1 addition & 1 deletion ci/scripts/e2e-pulsar-sink-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ MAX_RETRY=20
while [[ $HTTP_CODE -ne 200 && MAX_RETRY -gt 0 ]]
do
HTTP_CODE=$(curl --connect-timeout 2 -s -o /dev/null -w ''%{http_code}'' http://pulsar-server:8080/admin/v2/clusters)
echo Got HTTP Code: $HTTP_CODE
echo Got HTTP Code: "$HTTP_CODE"
((MAX_RETRY--))
sleep 5
done
Expand Down
10 changes: 5 additions & 5 deletions ci/scripts/e2e-source-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@ source ci/scripts/common.sh
# $1: subject name
# $2: schema file path
function register_schema_registry() {
curl -X POST http://message_queue:8081/subjects/$1/versions \
curl -X POST http://message_queue:8081/subjects/"$1"/versions \
-H 'Content-Type: application/vnd.schemaregistry.v1+json' \
--data-binary @<(jq -n --arg schema "$(cat $2)" '{schemaType: "PROTOBUF", schema: $schema}')
--data-binary @<(jq -n --arg schema "$(cat "$2")" '{schemaType: "PROTOBUF", schema: $schema}')
}

# prepare environment
Expand Down Expand Up @@ -93,9 +93,9 @@ sqllogictest -p 4566 -d dev './e2e_test/source/cdc/cdc.share_stream.slt'

# create a share source and check whether heartbeat message is received
sqllogictest -p 4566 -d dev './e2e_test/source/cdc/cdc.create_source_job.slt'
table_id=`psql -U root -h localhost -p 4566 -d dev -t -c "select id from rw_internal_tables where name like '%mysql_source%';" | xargs`;
table_count=`psql -U root -h localhost -p 4566 -d dev -t -c "select count(*) from rw_table(${table_id}, public);" | xargs`;
if [ $table_count -eq 0 ]; then
table_id=$(psql -U root -h localhost -p 4566 -d dev -t -c "select id from rw_internal_tables where name like '%mysql_source%';" | xargs);
table_count=$(psql -U root -h localhost -p 4566 -d dev -t -c "select count(*) from rw_table(${table_id}, public);" | xargs);
if [ "$table_count" -eq 0 ]; then
echo "ERROR: internal table of cdc share source is empty!"
exit 1
fi
Expand Down
10 changes: 5 additions & 5 deletions ci/scripts/e2e-test-parallel-for-opendal.sh
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,12 @@ download_and_prepare_rw "$profile" common
echo "--- Download artifacts"
download-and-decompress-artifact e2e_test_generated ./

host_args="-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567"
host_args=(-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567)

echo "--- e2e, ci-3cn-3fe-opendal-fs-backend, streaming"
RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info" \
risedev ci-start ci-3cn-3fe-opendal-fs-backend
sqllogictest ${host_args} -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-opendal-fs-backend-${profile}"
sqllogictest "${host_args[@]}" -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-opendal-fs-backend-${profile}"

echo "--- Kill cluster Streaming"
risedev ci-kill
Expand All @@ -41,10 +41,10 @@ rm -rf /tmp/rw_ci
echo "--- e2e, ci-3cn-3fe-opendal-fs-backend, batch"
RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info" \
risedev ci-start ci-3cn-3fe-opendal-fs-backend
sqllogictest ${host_args} -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-opendal-fs-backend-ddl-${profile}"
sqllogictest ${host_args} -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-opendal-fs-backend-batch-${profile}"
sqllogictest "${host_args[@]}" -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-opendal-fs-backend-ddl-${profile}"
sqllogictest "${host_args[@]}" -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-opendal-fs-backend-batch-${profile}"

echo "--- Kill cluster Batch"
risedev ci-kill
sleep 1
rm -rf /tmp/rw_ci
rm -rf /tmp/rw_ci
8 changes: 4 additions & 4 deletions ci/scripts/e2e-test-parallel-in-memory.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,20 +23,20 @@ shift $((OPTIND -1))

download_and_prepare_rw "$profile" common

host_args="-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567"
host_args=(-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567)

echo "--- e2e, ci-3cn-3fe-in-memory, streaming"
risedev ci-start ci-3cn-3fe-in-memory
sqllogictest --version
sqllogictest ${host_args} -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-in-memory-streaming-${profile}" --label in-memory
sqllogictest "${host_args[@]}" -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-in-memory-streaming-${profile}" --label in-memory

echo "--- Kill cluster"
risedev ci-kill

echo "--- e2e, ci-3cn-3fe-in-memory, batch"
risedev ci-start ci-3cn-3fe-in-memory
sqllogictest ${host_args} -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-in-memory-batch-ddl-${profile}" --label in-memory
sqllogictest ${host_args} -d dev './e2e_test/batch/**/*.slt' -j 16 --junit "parallel-in-memory-batch-${profile}" --label in-memory
sqllogictest "${host_args[@]}" -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-in-memory-batch-ddl-${profile}" --label in-memory
sqllogictest "${host_args[@]}" -d dev './e2e_test/batch/**/*.slt' -j 16 --junit "parallel-in-memory-batch-${profile}" --label in-memory

echo "--- Kill cluster"
risedev ci-kill
10 changes: 5 additions & 5 deletions ci/scripts/e2e-test-parallel.sh
Original file line number Diff line number Diff line change
Expand Up @@ -31,28 +31,28 @@ kill_cluster() {
risedev ci-kill
}

host_args="-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567"
host_args=(-h localhost -p 4565 -h localhost -p 4566 -h localhost -p 4567)

RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info,risingwave_storage::hummock::compactor::compactor_runner=warn"

echo "--- e2e, ci-3streaming-2serving-3fe, streaming"
RUST_LOG=$RUST_LOG \
risedev ci-start ci-3streaming-2serving-3fe
sqllogictest ${host_args} -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-streaming-${profile}"
sqllogictest "${host_args[@]}" -d dev './e2e_test/streaming/**/*.slt' -j 16 --junit "parallel-streaming-${profile}"

kill_cluster

echo "--- e2e, ci-3streaming-2serving-3fe, batch"
RUST_LOG=$RUST_LOG \
risedev ci-start ci-3streaming-2serving-3fe
sqllogictest ${host_args} -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-batch-ddl-${profile}"
sqllogictest ${host_args} -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-batch-${profile}"
sqllogictest "${host_args[@]}" -d dev './e2e_test/ddl/**/*.slt' --junit "parallel-batch-ddl-${profile}"
sqllogictest "${host_args[@]}" -d dev './e2e_test/visibility_mode/*.slt' -j 16 --junit "parallel-batch-${profile}"

kill_cluster

echo "--- e2e, ci-3streaming-2serving-3fe, generated"
RUST_LOG=$RUST_LOG \
risedev ci-start ci-3streaming-2serving-3fe
sqllogictest ${host_args} -d dev './e2e_test/generated/**/*.slt' -j 16 --junit "parallel-generated-${profile}"
sqllogictest "${host_args[@]}" -d dev './e2e_test/generated/**/*.slt' -j 16 --junit "parallel-generated-${profile}"

kill_cluster
12 changes: 6 additions & 6 deletions ci/scripts/gen-flamegraph.sh
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ AUCTION_TOPIC="nexmark-auction"
BID_TOPIC="nexmark-bid"
PERSON_TOPIC="nexmark-person"
NUM_PARTITIONS=8
# NOTE: Due to https://github.com/risingwavelabs/risingwave/issues/6747, use `SEPARATE_TOPICS=false`
# NOTE: Due to https://github.com/risingwavelabs/risingwave/issues/6747, use SEPARATE_TOPICS=false
SEPARATE_TOPICS=false
RUST_LOG="nexmark_server=info"
Expand Down Expand Up @@ -180,7 +180,7 @@ configure_all() {
# This has minor effect on the flamegraph, so can ignore for now.
# could it be related to profiling on Docker? Needs further investigation.
start_nperf() {
./nperf record -p `pidof compute-node` -o perf.data &
./nperf record -p $(pidof compute-node) -o perf.data &
}

start_kafka() {
Expand Down Expand Up @@ -231,7 +231,7 @@ gen_heap_flamegraph() {
JEPROF=$(find . -name 'jeprof' | head -1)
chmod +x "$JEPROF"
COMPUTE_NODE=".risingwave/bin/risingwave/compute-node"
$JEPROF --collapsed $COMPUTE_NODE $LATEST_HEAP_PROFILE > heap.collapsed
$JEPROF --collapsed $COMPUTE_NODE "$LATEST_HEAP_PROFILE" > heap.collapsed
../flamegraph.pl --color=mem --countname=bytes heap.collapsed > perf.svg
mv perf.svg ..
popd
Expand Down Expand Up @@ -268,7 +268,7 @@ run_heap_flamegraph() {
echo "--- Running benchmark for $QUERY"
echo "--- Setting variables"
QUERY_LABEL="$1"
QUERY_FILE_NAME="$(echo $QUERY_LABEL | sed 's/nexmark\-\(.*\)/\1.sql/')"
QUERY_FILE_NAME="$(echo "$QUERY_LABEL" | sed 's/nexmark\-\(.*\)/\1.sql/')"
QUERY_PATH="$QUERY_DIR/$QUERY_FILE_NAME"
FLAMEGRAPH_PATH="perf-$QUERY_LABEL.svg"
echo "QUERY_LABEL: $QUERY_LABEL"
Expand Down Expand Up @@ -328,7 +328,7 @@ run_cpu_flamegraph() {
echo "--- Running benchmark for $QUERY"
echo "--- Setting variables"
QUERY_LABEL="$1"
QUERY_FILE_NAME="$(echo $QUERY_LABEL | sed 's/nexmark\-\(.*\)/\1.sql/')"
QUERY_FILE_NAME="$(echo "$QUERY_LABEL" | sed 's/nexmark\-\(.*\)/\1.sql/')"
QUERY_PATH="$QUERY_DIR/$QUERY_FILE_NAME"
FLAMEGRAPH_PATH="perf-$QUERY_LABEL.svg"
echo "QUERY_LABEL: $QUERY_LABEL"
Expand Down Expand Up @@ -367,7 +367,7 @@ run_cpu_flamegraph() {

echo "--- Generate flamegraph"
gen_cpu_flamegraph
mv perf.svg $FLAMEGRAPH_PATH
mv perf.svg "$FLAMEGRAPH_PATH"

echo "--- Uploading flamegraph"
buildkite-agent artifact upload "./$FLAMEGRAPH_PATH"
Expand Down
Loading

0 comments on commit 4f53f89

Please sign in to comment.