diff --git a/.github/workflows/tg-demo-tests.yaml b/.github/workflows/tg-demo-tests.yaml
new file mode 100644
index 000000000000..60c7151b5f67
--- /dev/null
+++ b/.github/workflows/tg-demo-tests.yaml
@@ -0,0 +1,55 @@
+name: "[TG] TG demo tests"
+
+on:
+  push:
+    branches:
+      - tpatel/issue-8586
+  workflow_dispatch:
+  schedule:
+    - cron: '0 0 * * 6' # This cron schedule runs the workflow every Saturday at 12am UTC
+
+jobs:
+  build-artifact:
+    uses: ./.github/workflows/build-artifact.yaml
+    with:
+      arch: '["wormhole_b0"]'
+    secrets: inherit
+  tg-demo-tests:
+    needs: build-artifact
+    strategy:
+      fail-fast: false
+      matrix:
+        test-group: [
+          {
+            name: "TG demo tests",
+            arch: wormhole_b0,
+            runs-on: [arch-wormhole_b0, "config-tg", "in-service", "runner-test", "bare-metal", "pipeline-functional"],
+            cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type demos_tg_device --dispatch-mode ""'
+          },
+        ]
+    name: ${{ matrix.test-group.name }}
+    env:
+      TT_METAL_ENV: ${{ vars.TT_METAL_ENV }}
+      ARCH_NAME: ${{ matrix.test-group.arch }}
+      LOGURU_LEVEL: INFO
+      LD_LIBRARY_PATH: ${{ github.workspace }}/build/lib
+    environment: dev
+    runs-on: ${{ matrix.test-group.runs-on }}
+    steps:
+      - uses: tenstorrent-metal/metal-workflows/.github/actions/checkout-with-submodule-lfs@v2.0.0
+      - name: Set up dynamic env vars for build
+        run: |
+          echo "TT_METAL_HOME=$(pwd)" >> $GITHUB_ENV
+      - uses: actions/download-artifact@v4
+        with:
+          name: TTMetal_build_${{ matrix.test-group.arch }}
+      - name: Extract files
+        run: tar -xvf ttm_${{ matrix.test-group.arch }}.tar
+      - uses: ./.github/actions/install-python-deps
+      - name: Run demo regression tests
+        timeout-minutes: 180
+        run: |
+          source ${{ github.workspace }}/python_env/bin/activate
+          cd $TT_METAL_HOME
+          export PYTHONPATH=$TT_METAL_HOME
+          ${{ matrix.test-group.cmd }}
diff --git a/.github/workflows/tg-frequent-tests.yaml b/.github/workflows/tg-frequent-tests.yaml
new file mode 100644
index 000000000000..e42d9a2fb001
--- /dev/null
+++ b/.github/workflows/tg-frequent-tests.yaml
@@ -0,0 +1,55 @@
+name: "[TG] TG frequent tests"
+
+on:
+  push:
+    branches:
+      - tpatel/issue-8586
+  workflow_dispatch:
+  schedule:
+    - cron: "0 */8 * * *" # This cron schedule runs the workflow every 8 hours
+
+jobs:
+  build-artifact:
+    uses: ./.github/workflows/build-artifact.yaml
+    with:
+      arch: '["wormhole_b0"]'
+    secrets: inherit
+  tg-frequent-tests:
+    needs: build-artifact
+    strategy:
+      fail-fast: false
+      matrix:
+        test-group: [
+          {
+            name: "TG frequent tests",
+            arch: wormhole_b0,
+            runs-on: [arch-wormhole_b0, "config-tg", "in-service", "runner-test", "bare-metal", "pipeline-functional"],
+            cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type frequent_tg_device --dispatch-mode ""'
+          },
+        ]
+    name: ${{ matrix.test-group.name }}
+    env:
+      TT_METAL_ENV: ${{ vars.TT_METAL_ENV }}
+      ARCH_NAME: ${{ matrix.test-group.arch }}
+      LOGURU_LEVEL: INFO
+      LD_LIBRARY_PATH: ${{ github.workspace }}/build/lib
+    environment: dev
+    runs-on: ${{ matrix.test-group.runs-on }}
+    steps:
+      - uses: tenstorrent-metal/metal-workflows/.github/actions/checkout-with-submodule-lfs@v2.0.0
+      - name: Set up dynamic env vars for build
+        run: |
+          echo "TT_METAL_HOME=$(pwd)" >> $GITHUB_ENV
+      - uses: actions/download-artifact@v4
+        with:
+          name: TTMetal_build_${{ matrix.test-group.arch }}
+      - name: Extract files
+        run: tar -xvf ttm_${{ matrix.test-group.arch }}.tar
+      - uses: ./.github/actions/install-python-deps
+      - name: Run frequent regression tests
+        timeout-minutes: 60
+        run: |
+          source ${{ github.workspace }}/python_env/bin/activate
+          cd $TT_METAL_HOME
+          export PYTHONPATH=$TT_METAL_HOME
+          ${{ matrix.test-group.cmd }}
diff --git a/.github/workflows/tg-model-perf-tests.yaml b/.github/workflows/tg-model-perf-tests.yaml
new file mode 100644
index 000000000000..f87bc2b3afb2
--- /dev/null
+++ b/.github/workflows/tg-model-perf-tests.yaml
@@ -0,0 +1,90 @@
+name: "[TG] TG model perf tests"
+
+on:
+  push:
+    branches:
+      - tpatel/issue-8586
+  workflow_dispatch:
+  schedule:
+    - cron: "0 */12 * * *" # This cron schedule runs the workflow every 12 hours
+
+jobs:
+  build-artifact:
+    uses: ./.github/workflows/build-artifact.yaml
+    with:
+      arch: '["wormhole_b0"]'
+    secrets: inherit
+  tg-model-perf-tests:
+    needs: build-artifact
+    strategy:
+      fail-fast: false
+      matrix:
+        test-group: [
+          {
+            name: "TG LLM model perf tests",
+            model-type: "LLM",
+            arch: wormhole_b0,
+            runs-on: [arch-wormhole_b0, "config-tg", "in-service", "runner-test", "bare-metal", "pipeline-perf"],
+            cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type llm_model_perf_tg_device --dispatch-mode ""'
+          },
+          {
+            name: "TG CNN model perf tests",
+            model-type: "CNN",
+            arch: wormhole_b0,
+            runs-on: [arch-wormhole_b0, "config-tg", "in-service", "runner-test", "bare-metal", "pipeline-perf"],
+            cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type cnn_model_perf_tg_device --dispatch-mode ""'
+          },
+        ]
+    name: ${{ matrix.test-group.name }}
+    env:
+      TT_METAL_ENV: ${{ vars.TT_METAL_ENV }}
+      ARCH_NAME: ${{ matrix.test-group.arch }}
+      LOGURU_LEVEL: INFO
+      LD_LIBRARY_PATH: ${{ github.workspace }}/build/lib
+    environment: dev
+    runs-on: ${{ matrix.test-group.runs-on }}
+    steps:
+      - uses: tenstorrent-metal/metal-workflows/.github/actions/checkout-with-submodule-lfs@v2.0.0
+      - name: Enable performance mode
+        run: |
+          sudo cpupower frequency-set -g performance
+      - name: Ensure weka mount is active
+        run: |
+          sudo systemctl restart mnt-MLPerf.mount
+          sudo /etc/rc.local
+          ls -al /mnt/MLPerf/bit_error_tests
+      - name: Set up dynamic env vars for build
+        run: |
+          echo "TT_METAL_HOME=$(pwd)" >> $GITHUB_ENV
+          echo "PYTHONPATH=$(pwd)" >> $GITHUB_ENV
+      - uses: actions/download-artifact@v4
+        with:
+          name: TTMetal_build_${{ matrix.test-group.arch }}
+      - name: Extract files
+        run: tar -xvf ttm_${{ matrix.test-group.arch }}.tar
+      - uses: ./.github/actions/install-python-deps
+      - name: Run model perf regression tests
+        timeout-minutes: 60
+        run: |
+          source ${{ github.workspace }}/python_env/bin/activate
+          cd $TT_METAL_HOME
+          export PYTHONPATH=$TT_METAL_HOME
+          ${{ matrix.test-group.cmd }}
+      - name: Check perf report exists
+        id: check-perf-report
+        if: ${{ !cancelled() }}
+        run: |
+          ls -hal
+          export PERF_REPORT_FILENAME=Models_Perf_$(date +%Y_%m_%d).csv
+          ls -hal $PERF_REPORT_FILENAME
+          echo "perf_report_filename=$PERF_REPORT_FILENAME" >> "$GITHUB_OUTPUT"
+      - name: Upload perf report
+        if: ${{ !cancelled() && steps.check-perf-report.conclusion == 'success' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: perf-report-csv-${{ matrix.test-group.model-type }}-${{ matrix.test-group.arch }}-${{ matrix.test-group.machine-type }}
+          path: "${{ steps.check-perf-report.outputs.perf_report_filename }}"
+      - name: Disable performance mode
+        if: always()
+        run: |
+          sudo cpupower frequency-set -g ondemand
diff --git a/.github/workflows/tg-unit-tests.yaml b/.github/workflows/tg-unit-tests.yaml
index 12163a65d0d0..d3edc46d458c 100644
--- a/.github/workflows/tg-unit-tests.yaml
+++ b/.github/workflows/tg-unit-tests.yaml
@@ -1,9 +1,12 @@
name: "[TG] TG unit tests" on: - schedule: - - cron: '0 0 * * *' # Runs every day at 12am UTC + push: + branches: + - tpatel/issue-8586 workflow_dispatch: + schedule: + - cron: "0 */3 * * *" # This cron schedule runs the workflow every 3 hours jobs: build-artifact: diff --git a/.github/workflows/tgg-demo-tests.yaml b/.github/workflows/tgg-demo-tests.yaml new file mode 100644 index 000000000000..9f85249ead2c --- /dev/null +++ b/.github/workflows/tgg-demo-tests.yaml @@ -0,0 +1,55 @@ +name: "[TGG] TGG demo tests" + +on: + push: + branches: + - tpatel/issue-8586 + workflow_dispatch: + schedule: + - cron: '0 0 * * 6' # This cron schedule runs the workflow every Saturday at 12am UTC + +jobs: + build-artifact: + uses: ./.github/workflows/build-artifact.yaml + with: + arch: '["wormhole_b0"]' + secrets: inherit + tgg-demo-tests: + needs: build-artifact + strategy: + fail-fast: false + matrix: + test-group: [ + { + name: "TGG demo tests", + arch: wormhole_b0, + runs-on: [arch-wormhole_b0, "config-tgg", "in-service", "runner-test", "bare-metal", "pipeline-functional"], + cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type demos_tgg_device --dispatch-mode ""' + }, + ] + name: ${{ matrix.test-group.name }} + env: + TT_METAL_ENV: ${{ vars.TT_METAL_ENV }} + ARCH_NAME: ${{ matrix.test-group.arch }} + LOGURU_LEVEL: INFO + LD_LIBRARY_PATH: ${{ github.workspace }}/build/lib + environment: dev + runs-on: ${{ matrix.test-group.runs-on }} + steps: + - uses: tenstorrent-metal/metal-workflows/.github/actions/checkout-with-submodule-lfs@v2.0.0 + - name: Set up dynamic env vars for build + run: | + echo "TT_METAL_HOME=$(pwd)" >> $GITHUB_ENV + - uses: actions/download-artifact@v4 + with: + name: TTMetal_build_${{ matrix.test-group.arch }} + - name: Extract files + run: tar -xvf ttm_${{ matrix.test-group.arch }}.tar + - uses: ./.github/actions/install-python-deps + - name: Run demo regression tests + timeout-minutes: 180 + run: | + source ${{ github.workspace }}/python_env/bin/activate + cd $TT_METAL_HOME + export PYTHONPATH=$TT_METAL_HOME + ${{ matrix.test-group.cmd }} diff --git a/.github/workflows/tgg-frequent-tests.yaml b/.github/workflows/tgg-frequent-tests.yaml new file mode 100644 index 000000000000..4439385ebeb1 --- /dev/null +++ b/.github/workflows/tgg-frequent-tests.yaml @@ -0,0 +1,55 @@ +name: "[TGG] TGG frequent tests" + +on: + push: + branches: + - tpatel/issue-8586 + workflow_dispatch: + schedule: + - cron: "0 */8 * * *" # This cron schedule runs the workflow every 8 hours + +jobs: + build-artifact: + uses: ./.github/workflows/build-artifact.yaml + with: + arch: '["wormhole_b0"]' + secrets: inherit + tgg-frequent-tests: + needs: build-artifact + strategy: + fail-fast: false + matrix: + test-group: [ + { + name: "TGG frequent tests", + arch: wormhole_b0, + runs-on: [arch-wormhole_b0, "config-tgg", "in-service", "runner-test", "bare-metal", "pipeline-functional"], + cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type frequent_tgg_device --dispatch-mode ""' + }, + ] + name: ${{ matrix.test-group.name }} + env: + TT_METAL_ENV: ${{ vars.TT_METAL_ENV }} + ARCH_NAME: ${{ matrix.test-group.arch }} + LOGURU_LEVEL: INFO + LD_LIBRARY_PATH: ${{ github.workspace }}/build/lib + environment: dev + runs-on: ${{ matrix.test-group.runs-on }} + steps: + - uses: tenstorrent-metal/metal-workflows/.github/actions/checkout-with-submodule-lfs@v2.0.0 + - name: Set up dynamic env vars for build + run: | + echo "TT_METAL_HOME=$(pwd)" >> $GITHUB_ENV + - uses: actions/download-artifact@v4 + 
+        with:
+          name: TTMetal_build_${{ matrix.test-group.arch }}
+      - name: Extract files
+        run: tar -xvf ttm_${{ matrix.test-group.arch }}.tar
+      - uses: ./.github/actions/install-python-deps
+      - name: Run frequent regression tests
+        timeout-minutes: 60
+        run: |
+          source ${{ github.workspace }}/python_env/bin/activate
+          cd $TT_METAL_HOME
+          export PYTHONPATH=$TT_METAL_HOME
+          ${{ matrix.test-group.cmd }}
diff --git a/.github/workflows/tgg-model-perf-tests.yaml b/.github/workflows/tgg-model-perf-tests.yaml
new file mode 100644
index 000000000000..204f5cd6addd
--- /dev/null
+++ b/.github/workflows/tgg-model-perf-tests.yaml
@@ -0,0 +1,90 @@
+name: "[TGG] TGG model perf tests"
+
+on:
+  push:
+    branches:
+      - tpatel/issue-8586
+  workflow_dispatch:
+  schedule:
+    - cron: "0 */12 * * *" # This cron schedule runs the workflow every 12 hours
+
+jobs:
+  build-artifact:
+    uses: ./.github/workflows/build-artifact.yaml
+    with:
+      arch: '["wormhole_b0"]'
+    secrets: inherit
+  tgg-model-perf-tests:
+    needs: build-artifact
+    strategy:
+      fail-fast: false
+      matrix:
+        test-group: [
+          {
+            name: "TGG LLM model perf tests",
+            model-type: "LLM",
+            arch: wormhole_b0,
+            runs-on: [arch-wormhole_b0, "config-tgg", "in-service", "runner-test", "bare-metal", "pipeline-perf"],
+            cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type llm_model_perf_tgg_device --dispatch-mode ""'
+          },
+          {
+            name: "TGG CNN model perf tests",
+            model-type: "CNN",
+            arch: wormhole_b0,
+            runs-on: [arch-wormhole_b0, "config-tgg", "in-service", "runner-test", "bare-metal", "pipeline-perf"],
+            cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type cnn_model_perf_tgg_device --dispatch-mode ""'
+          },
+        ]
+    name: ${{ matrix.test-group.name }}
+    env:
+      TT_METAL_ENV: ${{ vars.TT_METAL_ENV }}
+      ARCH_NAME: ${{ matrix.test-group.arch }}
+      LOGURU_LEVEL: INFO
+      LD_LIBRARY_PATH: ${{ github.workspace }}/build/lib
+    environment: dev
+    runs-on: ${{ matrix.test-group.runs-on }}
+    steps:
+      - uses: tenstorrent-metal/metal-workflows/.github/actions/checkout-with-submodule-lfs@v2.0.0
+      - name: Enable performance mode
+        run: |
+          sudo cpupower frequency-set -g performance
+      - name: Ensure weka mount is active
+        run: |
+          sudo systemctl restart mnt-MLPerf.mount
+          sudo /etc/rc.local
+          ls -al /mnt/MLPerf/bit_error_tests
+      - name: Set up dynamic env vars for build
+        run: |
+          echo "TT_METAL_HOME=$(pwd)" >> $GITHUB_ENV
+          echo "PYTHONPATH=$(pwd)" >> $GITHUB_ENV
+      - uses: actions/download-artifact@v4
+        with:
+          name: TTMetal_build_${{ matrix.test-group.arch }}
+      - name: Extract files
+        run: tar -xvf ttm_${{ matrix.test-group.arch }}.tar
+      - uses: ./.github/actions/install-python-deps
+      - name: Run model perf regression tests
+        timeout-minutes: 60
+        run: |
+          source ${{ github.workspace }}/python_env/bin/activate
+          cd $TT_METAL_HOME
+          export PYTHONPATH=$TT_METAL_HOME
+          ${{ matrix.test-group.cmd }}
+      - name: Check perf report exists
+        id: check-perf-report
+        if: ${{ !cancelled() }}
+        run: |
+          ls -hal
+          export PERF_REPORT_FILENAME=Models_Perf_$(date +%Y_%m_%d).csv
+          ls -hal $PERF_REPORT_FILENAME
+          echo "perf_report_filename=$PERF_REPORT_FILENAME" >> "$GITHUB_OUTPUT"
+      - name: Upload perf report
+        if: ${{ !cancelled() && steps.check-perf-report.conclusion == 'success' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: perf-report-csv-${{ matrix.test-group.model-type }}-${{ matrix.test-group.arch }}-${{ matrix.test-group.machine-type }}
+          path: "${{ steps.check-perf-report.outputs.perf_report_filename }}"
+      - name: Disable performance mode
+        if: always()
+        run: |
+          sudo cpupower frequency-set -g ondemand
diff --git a/.github/workflows/tgg-unit-tests.yaml b/.github/workflows/tgg-unit-tests.yaml
index 5351b9ae824b..804daead89b9 100644
--- a/.github/workflows/tgg-unit-tests.yaml
+++ b/.github/workflows/tgg-unit-tests.yaml
@@ -1,9 +1,12 @@
 name: "[TGG] TGG unit tests"
 
 on:
+  push:
+    branches:
+      - tpatel/issue-8586
   workflow_dispatch:
   schedule:
-    - cron: '0 0 * * *' # This cron schedule runs the workflow every day at 12am UTC
+    - cron: "0 */3 * * *" # This cron schedule runs the workflow every 3 hours
 
 jobs:
   build-artifact:
diff --git a/CODEOWNERS b/CODEOWNERS
index 147b0e9a2463..e4e6c841c956 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -2,13 +2,25 @@
 # precedence.
 .github/ @tt-rkim
+
+# T3000 workflows
 .github/t3000-unit-tests.yaml @tapspatel
 .github/t3000-profiler-tests.yaml @tapspatel
 .github/t3000-model-perf-tests.yaml @tapspatel
 .github/t3000-frequent-tests.yaml @tapspatel
 .github/t3000-demo-tests.yaml @tapspatel
+
+# TG workflows
 .github/tg-unit-tests.yaml @tapspatel
+.github/tg-model-perf-tests.yaml @tapspatel
+.github/tg-frequent-tests.yaml @tapspatel
+.github/tg-demo-tests.yaml @tapspatel
+
+# TGG workflows
 .github/tgg-unit-tests.yaml @tapspatel
+.github/tgg-model-perf-tests.yaml @tapspatel
+.github/tgg-frequent-tests.yaml @tapspatel
+.github/tgg-demo-tests.yaml @tapspatel
 
 /infra/ @tt-rkim
diff --git a/tests/scripts/run_tests.sh b/tests/scripts/run_tests.sh
index 37580d883098..fe6a4e5276a8 100755
--- a/tests/scripts/run_tests.sh
+++ b/tests/scripts/run_tests.sh
@@ -226,7 +226,7 @@ model_perf_t3000_device() {
 ##########################T3000##########################
 
 ##########################TG##########################
-# Run TG unit tests
+# Run tg unit tests
 unit_tg_device() {
   local tt_arch=$1
   local pipeline_type=$2
@@ -234,10 +234,37 @@ unit_tg_device() {
 
   ./tests/scripts/tg/run_tg_unit_tests.sh
 }
+
+# Run tg frequent tests
+frequent_tg_device() {
+  local tt_arch=$1
+  local pipeline_type=$2
+  local dispatch_mode=$3
+
+  ./tests/scripts/tg/run_tg_frequent_tests.sh
+}
+
+# Run tg demo tests
+demos_tg_device() {
+  local tt_arch=$1
+  local pipeline_type=$2
+  local dispatch_mode=$3
+
+  ./tests/scripts/tg/run_tg_demo_tests.sh
+}
+
+# Run tg model perf tests
+model_perf_tg_device() {
+  local tt_arch=$1
+  local pipeline_type=$2
+  local dispatch_mode=$3
+
+  ./tests/scripts/tg/run_tg_model_perf_tests.sh --pipeline-type "$pipeline_type"
+}
 ##########################TG##########################
 
 ##########################TGG##########################
-# Run TGG unit tests
+# Run tgg unit tests
 unit_tgg_device() {
   local tt_arch=$1
   local pipeline_type=$2
@@ -245,6 +272,33 @@ unit_tgg_device() {
 
   ./tests/scripts/tgg/run_tgg_unit_tests.sh
 }
+
+# Run tgg frequent tests
+frequent_tgg_device() {
+  local tt_arch=$1
+  local pipeline_type=$2
+  local dispatch_mode=$3
+
+  ./tests/scripts/tgg/run_tgg_frequent_tests.sh
+}
+
+# Run tgg demo tests
+demos_tgg_device() {
+  local tt_arch=$1
+  local pipeline_type=$2
+  local dispatch_mode=$3
+
+  ./tests/scripts/tgg/run_tgg_demo_tests.sh
+}
+
+# Run tgg model perf tests
+model_perf_tgg_device() {
+  local tt_arch=$1
+  local pipeline_type=$2
+  local dispatch_mode=$3
+
+  ./tests/scripts/tgg/run_tgg_model_perf_tests.sh --pipeline-type "$pipeline_type"
+}
 ##########################TGG##########################
 
 run_pipeline_tests() {
@@ -285,9 +339,21 @@ run_pipeline_tests() {
   # TG pipelines
   elif [[ $pipeline_type == "unit_tg_device" ]]; then
     unit_tg_device "$tt_arch" "$pipeline_type" "$dispatch_mode"
"frequent_tg_device" ]]; then + frequent_tg_device "$tt_arch" "$pipeline_type" "$dispatch_mode" + elif [[ $pipeline_type == "demos_tg_device" ]]; then + demos_tg_device "$tt_arch" "$pipeline_type" "$dispatch_mode" + elif [[ $pipeline_type == *"model_perf_tg_device" ]]; then + model_perf_tg_device "$tt_arch" "$pipeline_type" "$dispatch_mode" # TGG pipelines elif [[ $pipeline_type == "unit_tgg_device" ]]; then unit_tgg_device "$tt_arch" "$pipeline_type" "$dispatch_mode" + elif [[ $pipeline_type == "frequent_tgg_device" ]]; then + frequent_tgg_device "$tt_arch" "$pipeline_type" "$dispatch_mode" + elif [[ $pipeline_type == "demos_tgg_device" ]]; then + demos_tgg_device "$tt_arch" "$pipeline_type" "$dispatch_mode" + elif [[ $pipeline_type == *"model_perf_tgg_device" ]]; then + model_perf_tgg_device "$tt_arch" "$pipeline_type" "$dispatch_mode" else echo "Unknown pipeline: $pipeline_type" exit 1 diff --git a/tests/scripts/t3000/run_t3000_model_perf_tests.sh b/tests/scripts/t3000/run_t3000_model_perf_tests.sh index 5e26d9c7de29..5f35be30f2f7 100755 --- a/tests/scripts/t3000/run_t3000_model_perf_tests.sh +++ b/tests/scripts/t3000/run_t3000_model_perf_tests.sh @@ -2,63 +2,12 @@ #/bin/bash set -eo pipefail -run_t3000_falcon7b_tests() { - # Record the start time - start_time=$(date +%s) - - echo "LOG_METAL: Running run_t3000_falcon7b_tests" - - env pytest models/demos/falcon7b/tests -m "model_perf_t3000" - - # Record the end time - end_time=$(date +%s) - duration=$((end_time - start_time)) - echo "LOG_METAL: run_t3000_falcon7b_tests $duration seconds to complete" -} - -run_t3000_mixtral_tests() { - # Record the start time - start_time=$(date +%s) - - echo "LOG_METAL: Running run_t3000_mixtral_tests" - - env pytest models/demos/t3000/mixtral8x7b/tests -m "model_perf_t3000" - - # Record the end time - end_time=$(date +%s) - duration=$((end_time - start_time)) - echo "LOG_METAL: run_t3000_mixtral_tests $duration seconds to complete" -} - -run_t3000_llama2_70b_tests() { - # Record the start time - start_time=$(date +%s) - - echo "LOG_METAL: Running run_t3000_llama2_70b_tests" - - env WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/experimental/llama2_70b/tests/test_llama_perf_decode.py -m "model_perf_t3000" - - # Record the end time - end_time=$(date +%s) - duration=$((end_time - start_time)) - echo "LOG_METAL: run_t3000_llama2_70b_tests $duration seconds to complete" -} - -run_t3000_llm_tests() { - # Run falcon7b tests - run_t3000_falcon7b_tests - - # Run mixtral tests - run_t3000_mixtral_tests - - # Run llama2-70b tests - #run_t3000_llama2_70b_tests - +run_tg_llm_tests() { # Merge all the generated reports env python models/perf/merge_perf_results.py } -run_t3000_cnn_tests() { +run_tg_cnn_tests() { # Merge all the generated reports env python models/perf/merge_perf_results.py } @@ -98,14 +47,14 @@ main() { cd $TT_METAL_HOME export PYTHONPATH=$TT_METAL_HOME - if [[ "$pipeline_type" == "llm_model_perf_t3000_device" ]]; then - run_t3000_llm_tests - elif [[ "$pipeline_type" == "cnn_model_perf_t3000_device" ]]; then - run_t3000_cnn_tests + if [[ "$pipeline_type" == "llm_model_perf_tg_device" ]]; then + run_tg_llm_tests + elif [[ "$pipeline_type" == "cnn_model_perf_tg_device" ]]; then + run_tg_cnn_tests else - echo "$pipeline_type is invalid (supported: [cnn_model_perf_t3000_device, cnn_model_perf_t3000_device])" 2>&1 + echo "$pipeline_type is invalid (supported: [cnn_model_perf_tg_device, cnn_model_perf_tg_device])" 2>&1 exit 1 fi } -main "$@" +main "$@" \ No newline at end of file diff 
diff --git a/tests/scripts/t3000/run_t3000_unit_tests.sh b/tests/scripts/t3000/run_t3000_unit_tests.sh
index 677b9d7cdf12..dbcc07780683 100755
--- a/tests/scripts/t3000/run_t3000_unit_tests.sh
+++ b/tests/scripts/t3000/run_t3000_unit_tests.sh
@@ -2,108 +2,9 @@
 #/bin/bash
 set -eo pipefail
 
-run_t3000_ttmetal_tests() {
-  # Record the start time
-  start_time=$(date +%s)
-
-  echo "LOG_METAL: Running run_t3000_ttmetal_tests"
-
-  TT_METAL_SLOW_DISPATCH_MODE=1 ./build/test/tt_metal/unit_tests --gtest_filter="DeviceFixture.EthKernelsDirectSendAllConnectedChips"
-  TT_METAL_SLOW_DISPATCH_MODE=1 ./build/test/tt_metal/unit_tests --gtest_filter="DeviceFixture.EthKernelsSendInterleavedBufferAllConnectedChips"
-  TT_METAL_SLOW_DISPATCH_MODE=1 ./build/test/tt_metal/unit_tests --gtest_filter="DeviceFixture.EthKernelsDirectRingGatherAllChips"
-  TT_METAL_SLOW_DISPATCH_MODE=1 ./build/test/tt_metal/unit_tests --gtest_filter="DeviceFixture.EthKernelsInterleavedRingGatherAllChips"
-  TT_METAL_ENABLE_REMOTE_CHIP=1 ./build/test/tt_metal/unit_tests_fast_dispatch --gtest_filter="CommandQueueSingleCardFixture.*"
-  ./build/test/tt_metal/unit_tests_fast_dispatch --gtest_filter="CommandQueueMultiDeviceFixture.*"
-  ./build/test/tt_metal/unit_tests_fast_dispatch --gtest_filter="DPrintFixture.*:WatcherFixture.*"
-
-  # Record the end time
-  end_time=$(date +%s)
-  duration=$((end_time - start_time))
-  echo "LOG_METAL: run_t3000_ttmetal_tests $duration seconds to complete"
-}
-
-run_t3000_ttnn_tests() {
-  # Record the start time
-  start_time=$(date +%s)
-
-  echo "LOG_METAL: Running run_t3000_ttnn_tests"
-
-  pytest tests/ttnn/unit_tests/test_multi_device.py
-
-  # Record the end time
-  end_time=$(date +%s)
-  duration=$((end_time - start_time))
-  echo "LOG_METAL: run_t3000_ttnn_tests $duration seconds to complete"
-}
-
-run_t3000_falcon7b_tests() {
-  # Record the start time
-  start_time=$(date +%s)
-
-  echo "LOG_METAL: Running run_t3000_falcon7b_tests"
-
-  pytest models/demos/ttnn_falcon7b/tests/multi_chip/test_falcon_mlp.py
-  pytest models/demos/ttnn_falcon7b/tests/multi_chip/test_falcon_attention.py
-  pytest models/demos/ttnn_falcon7b/tests/multi_chip/test_falcon_decoder.py
-  #pytest models/demos/ttnn_falcon7b/tests/multi_chip/test_falcon_causallm.py
-
-  # Record the end time
-  end_time=$(date +%s)
-  duration=$((end_time - start_time))
-  echo "LOG_METAL: run_t3000_falcon7b_tests $duration seconds to complete"
-}
-
-run_t3000_falcon40b_tests() {
-  # Record the start time
-  start_time=$(date +%s)
-
-  echo "LOG_METAL: Running run_t3000_falcon40b_tests"
-
-  WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/t3000/falcon40b/tests/test_falcon_mlp.py
-  WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/t3000/falcon40b/tests/test_falcon_attention.py
-  WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/t3000/falcon40b/tests/test_falcon_decoder.py
-
-  # Record the end time
-  end_time=$(date +%s)
-  duration=$((end_time - start_time))
-  echo "LOG_METAL: run_t3000_falcon40b_tests $duration seconds to complete"
-}
-
-run_t3000_mixtral_tests() {
-  # Record the start time
-  start_time=$(date +%s)
-
-  echo "LOG_METAL: Running run_t3000_mixtral_tests"
-
-  pytest models/demos/t3000/mixtral8x7b/tests/test_mixtral_attention.py
-  pytest models/demos/t3000/mixtral8x7b/tests/test_mixtral_mlp.py
-  pytest models/demos/t3000/mixtral8x7b/tests/test_mixtral_rms_norm.py
-  pytest models/demos/t3000/mixtral8x7b/tests/test_mixtral_embedding.py
-  pytest models/demos/t3000/mixtral8x7b/tests/test_mixtral_moe.py
-  pytest models/demos/t3000/mixtral8x7b/tests/test_mixtral_decoder.py
-  pytest models/demos/t3000/mixtral8x7b/tests/test_mixtral_model.py::test_mixtral_model_inference[1-1-pcc]
-
-  # Record the end time
-  end_time=$(date +%s)
-  duration=$((end_time - start_time))
-  echo "LOG_METAL: run_t3000_mixtral_tests $duration seconds to complete"
-}
-
-run_t3000_tests() {
-  # Run ttmetal tests
-  run_t3000_ttmetal_tests
-
-  # Run ttnn tests
-  run_t3000_ttnn_tests
-
-  # Run falcon7b tests
-  run_t3000_falcon7b_tests
-
-  # Run falcon40b tests
-  run_t3000_falcon40b_tests
-
-  # Run mixtral tests
-  run_t3000_mixtral_tests
+run_tg_tests() {
+  # Write tests here
+  echo "LOG_METAL: Fill me!"
 }
 
 main() {
@@ -120,8 +21,8 @@ main() {
   # Run all tests
   cd $TT_METAL_HOME
   export PYTHONPATH=$TT_METAL_HOME
-
-  run_t3000_tests
+
+  run_tg_tests
 }
 
-main "$@"
+main "$@"
\ No newline at end of file
diff --git a/tests/scripts/tg/run_tg_demo_tests.sh b/tests/scripts/tg/run_tg_demo_tests.sh
new file mode 100755
index 000000000000..c10fb083f002
--- /dev/null
+++ b/tests/scripts/tg/run_tg_demo_tests.sh
@@ -0,0 +1,28 @@
+
+#/bin/bash
+set -eo pipefail
+
+run_tg_tests() {
+  # Add tests here
+  echo "Fill me!"
+}
+
+main() {
+  if [[ -z "$TT_METAL_HOME" ]]; then
+    echo "Must provide TT_METAL_HOME in environment" 1>&2
+    exit 1
+  fi
+
+  if [[ -z "$ARCH_NAME" ]]; then
+    echo "Must provide ARCH_NAME in environment" 1>&2
+    exit 1
+  fi
+
+  # Run all tests
+  cd $TT_METAL_HOME
+  export PYTHONPATH=$TT_METAL_HOME
+
+  run_tg_tests
+}
+
+main "$@"
\ No newline at end of file
diff --git a/tests/scripts/tg/run_tg_frequent_tests.sh b/tests/scripts/tg/run_tg_frequent_tests.sh
new file mode 100755
index 000000000000..c10fb083f002
--- /dev/null
+++ b/tests/scripts/tg/run_tg_frequent_tests.sh
@@ -0,0 +1,28 @@
+
+#/bin/bash
+set -eo pipefail
+
+run_tg_tests() {
+  # Add tests here
+  echo "Fill me!"
+}
+
+main() {
+  if [[ -z "$TT_METAL_HOME" ]]; then
+    echo "Must provide TT_METAL_HOME in environment" 1>&2
+    exit 1
+  fi
+
+  if [[ -z "$ARCH_NAME" ]]; then
+    echo "Must provide ARCH_NAME in environment" 1>&2
+    exit 1
+  fi
+
+  # Run all tests
+  cd $TT_METAL_HOME
+  export PYTHONPATH=$TT_METAL_HOME
+
+  run_tg_tests
+}
+
+main "$@"
\ No newline at end of file
diff --git a/tests/scripts/tg/run_tg_model_perf_tests.sh b/tests/scripts/tg/run_tg_model_perf_tests.sh
new file mode 100755
index 000000000000..5a5c93de2aef
--- /dev/null
+++ b/tests/scripts/tg/run_tg_model_perf_tests.sh
@@ -0,0 +1,28 @@
+
+#/bin/bash
+set -eo pipefail
+
+run_tg_tests() {
+  # Write tests here
+  echo "LOG_METAL: Fill me!"
+}
+
+main() {
+  if [[ -z "$TT_METAL_HOME" ]]; then
+    echo "Must provide TT_METAL_HOME in environment" 1>&2
+    exit 1
+  fi
+
+  if [[ -z "$ARCH_NAME" ]]; then
+    echo "Must provide ARCH_NAME in environment" 1>&2
+    exit 1
+  fi
+
+  # Run all tests
+  cd $TT_METAL_HOME
+  export PYTHONPATH=$TT_METAL_HOME
+
+  run_tg_tests
+}
+
+main "$@"
diff --git a/tests/scripts/tgg/run_tgg_demo_tests.sh b/tests/scripts/tgg/run_tgg_demo_tests.sh
new file mode 100755
index 000000000000..26c2f890f9cd
--- /dev/null
+++ b/tests/scripts/tgg/run_tgg_demo_tests.sh
@@ -0,0 +1,28 @@
+
+#/bin/bash
+set -eo pipefail
+
+run_tgg_tests() {
+  # Add tests here
+  echo "Fill me!"
+}
+
+main() {
+  if [[ -z "$TT_METAL_HOME" ]]; then
+    echo "Must provide TT_METAL_HOME in environment" 1>&2
+    exit 1
+  fi
+
+  if [[ -z "$ARCH_NAME" ]]; then
+    echo "Must provide ARCH_NAME in environment" 1>&2
+    exit 1
+  fi
+
+  # Run all tests
+  cd $TT_METAL_HOME
+  export PYTHONPATH=$TT_METAL_HOME
+
+  run_tgg_tests
+}
+
+main "$@"
\ No newline at end of file
diff --git a/tests/scripts/tgg/run_tgg_frequent_tests.sh b/tests/scripts/tgg/run_tgg_frequent_tests.sh
new file mode 100755
index 000000000000..26c2f890f9cd
--- /dev/null
+++ b/tests/scripts/tgg/run_tgg_frequent_tests.sh
@@ -0,0 +1,28 @@
+
+#/bin/bash
+set -eo pipefail
+
+run_tgg_tests() {
+  # Add tests here
+  echo "Fill me!"
+}
+
+main() {
+  if [[ -z "$TT_METAL_HOME" ]]; then
+    echo "Must provide TT_METAL_HOME in environment" 1>&2
+    exit 1
+  fi
+
+  if [[ -z "$ARCH_NAME" ]]; then
+    echo "Must provide ARCH_NAME in environment" 1>&2
+    exit 1
+  fi
+
+  # Run all tests
+  cd $TT_METAL_HOME
+  export PYTHONPATH=$TT_METAL_HOME
+
+  run_tgg_tests
+}
+
+main "$@"
\ No newline at end of file
diff --git a/tests/scripts/tgg/run_tgg_model_perf_tests.sh b/tests/scripts/tgg/run_tgg_model_perf_tests.sh
new file mode 100755
index 000000000000..ebf6aa946dba
--- /dev/null
+++ b/tests/scripts/tgg/run_tgg_model_perf_tests.sh
@@ -0,0 +1,60 @@
+
+#/bin/bash
+set -eo pipefail
+
+run_tgg_llm_tests() {
+  # Merge all the generated reports
+  env python models/perf/merge_perf_results.py
+}
+
+run_tgg_cnn_tests() {
+  # Merge all the generated reports
+  env python models/perf/merge_perf_results.py
+}
+
+main() {
+  # Parse the arguments
+  while [[ $# -gt 0 ]]; do
+    case $1 in
+      --pipeline-type)
+        pipeline_type=$2
+        shift
+        ;;
+      *)
+        echo "Unknown option: $1"
+        exit 1
+        ;;
+    esac
+    shift
+  done
+
+  if [[ -z "$TT_METAL_HOME" ]]; then
+    echo "Must provide TT_METAL_HOME in environment" 1>&2
+    exit 1
+  fi
+
+  if [[ -z "$ARCH_NAME" ]]; then
+    echo "Must provide ARCH_NAME in environment" 1>&2
+    exit 1
+  fi
+
+  if [[ -z "$pipeline_type" ]]; then
+    echo "--pipeline-type cannot be empty" 1>&2
+    exit 1
+  fi
+
+  # Run all tests
+  cd $TT_METAL_HOME
+  export PYTHONPATH=$TT_METAL_HOME
+
+  if [[ "$pipeline_type" == "llm_model_perf_tgg_device" ]]; then
+    run_tgg_llm_tests
+  elif [[ "$pipeline_type" == "cnn_model_perf_tgg_device" ]]; then
+    run_tgg_cnn_tests
+  else
+    echo "$pipeline_type is invalid (supported: [llm_model_perf_tgg_device, cnn_model_perf_tgg_device])" 2>&1
+    exit 1
+  fi
+}
+
+main "$@"
\ No newline at end of file
diff --git a/tests/scripts/tgg/run_tgg_unit_tests.sh b/tests/scripts/tgg/run_tgg_unit_tests.sh
index b8c209a22cb0..8f5130ea8d03 100755
--- a/tests/scripts/tgg/run_tgg_unit_tests.sh
+++ b/tests/scripts/tgg/run_tgg_unit_tests.sh
@@ -25,4 +25,4 @@ main() {
   run_tgg_tests
 }
 
-main "$@"
+main "$@"
\ No newline at end of file