Skip to content

Commit

Permalink
DO NOT MERGE: Skip other perf tests
Browse files — Browse the repository at this point in the history
  • Loading branch information
tt-aho committed Dec 4, 2024
1 parent 02f1659 commit 540946f
Show file tree
Hide file tree
Showing 5 changed files with 38 additions and 38 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/perf-models-impl.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ jobs:
{name: "GS", arch: grayskull, runs-on: ["E150", "pipeline-perf", "bare-metal", "in-service"], machine-type: "bare_metal"},
{name: "N300 WH B0", arch: wormhole_b0, runs-on: ["N300", "pipeline-perf", "bare-metal", "in-service"], machine-type: "bare_metal"},
]
model-type: [llm_javelin, cnn_javelin, other]
model-type: [other]
name: "${{ matrix.model-type }} ${{ matrix.test-info.name }}"
env:
TT_METAL_ENV: ${{ vars.TT_METAL_ENV }}
Expand Down
10 changes: 5 additions & 5 deletions .github/workflows/t3000-model-perf-tests-impl.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,12 @@ jobs:
fail-fast: false
matrix:
test-group: [
{ name: "t3k LLM falcon7b model perf tests", model: "falcon7b", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_falcon7b_tests, timeout: 75, owner_id: U05RWH3QUPM}, # Salar Hosseini
{ name: "t3k LLM mixtral model perf tests", model: "mixtral", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_mixtral_tests, timeout: 75, owner_id: U03PUAKE719}, # Miguel Tairum
{ name: "t3k LLM llama2-70B model perf tests", model: "llama2-70b", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_llama2_70b_tests, timeout: 75, owner_id: U03FJB5TM5Y}, # Colman Glagovich
{ name: "t3k LLM falcon40b model perf tests", model: "falcon40b", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_falcon40b_tests, timeout: 75, owner_id: U053W15B6JF}, # Djordje Ivanovic
# { name: "t3k LLM falcon7b model perf tests", model: "falcon7b", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_falcon7b_tests, timeout: 75, owner_id: U05RWH3QUPM}, # Salar Hosseini
# { name: "t3k LLM mixtral model perf tests", model: "mixtral", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_mixtral_tests, timeout: 75, owner_id: U03PUAKE719}, # Miguel Tairum
# { name: "t3k LLM llama2-70B model perf tests", model: "llama2-70b", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_llama2_70b_tests, timeout: 75, owner_id: U03FJB5TM5Y}, # Colman Glagovich
# { name: "t3k LLM falcon40b model perf tests", model: "falcon40b", model-type: "LLM", arch: wormhole_b0, cmd: run_t3000_falcon40b_tests, timeout: 75, owner_id: U053W15B6JF}, # Djordje Ivanovic
{ name: "t3k CNN resnet50 model perf tests", model: "resnet50", model-type: "CNN", arch: wormhole_b0, cmd: run_t3000_resnet50_tests, timeout: 75, owner_id: U013121KDH9}, # Austin Ho
{ name: "t3k CCL perf tests", arch: wormhole_b0, cmd: run_t3000_ccl_all_gather_perf_tests && run_t3000_ccl_reduce_scatter_perf_tests, timeout: 75, tracy: true, owner_id: ULMEPM2MA}, # Sean Nijjar
# { name: "t3k CCL perf tests", arch: wormhole_b0, cmd: run_t3000_ccl_all_gather_perf_tests && run_t3000_ccl_reduce_scatter_perf_tests, timeout: 75, tracy: true, owner_id: ULMEPM2MA}, # Sean Nijjar
#{ name: "t3k CNN model perf tests ", model-type: "CNN", arch: wormhole_b0, cmd: run_t3000_cnn_tests, timeout: 120, owner_id: }, #No tests are being run?
]
name: ${{ matrix.test-group.name }}
Expand Down
28 changes: 14 additions & 14 deletions .github/workflows/tg-model-perf-tests-impl.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,27 +9,27 @@ jobs:
fail-fast: false
matrix:
test-group: [
{
name: "TG LLM model perf tests",
model-type: "LLM",
arch: wormhole_b0,
runs-on: ["arch-wormhole_b0", "config-tg", "in-service", "bare-metal", "pipeline-perf"],
cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type llm_model_perf_tg_device --dispatch-mode ""'
},
# {
# name: "TG LLM model perf tests",
# model-type: "LLM",
# arch: wormhole_b0,
# runs-on: ["arch-wormhole_b0", "config-tg", "in-service", "bare-metal", "pipeline-perf"],
# cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type llm_model_perf_tg_device --dispatch-mode ""'
# },
{
name: "TG CNN model perf tests",
model-type: "CNN",
arch: wormhole_b0,
runs-on: ["arch-wormhole_b0", "config-tg", "in-service", "bare-metal", "pipeline-perf"],
cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type cnn_model_perf_tg_device --dispatch-mode ""'
},
{ name: "TG CCL perf tests",
arch: wormhole_b0,
cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type ccl_perf_tg_device --dispatch-mode ""',
timeout: 75,
tracy: true,
runs-on: ["arch-wormhole_b0", "config-tg", "in-service", "bare-metal", "pipeline-perf"],
owner_id: ULMEPM2MA}, # Sean Nijjar
# { name: "TG CCL perf tests",
# arch: wormhole_b0,
# cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type ccl_perf_tg_device --dispatch-mode ""',
# timeout: 75,
# tracy: true,
# runs-on: ["arch-wormhole_b0", "config-tg", "in-service", "bare-metal", "pipeline-perf"],
# owner_id: ULMEPM2MA}, # Sean Nijjar
]
name: ${{ matrix.test-group.name }}
env:
Expand Down
14 changes: 7 additions & 7 deletions .github/workflows/tgg-model-perf-tests-impl.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ jobs:
fail-fast: false
matrix:
test-group: [
{
name: "TGG LLM model perf tests",
model-type: "LLM",
arch: wormhole_b0,
runs-on: ["arch-wormhole_b0", "config-tgg", "in-service", "bare-metal", "pipeline-perf"],
cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type llm_model_perf_tgg_device --dispatch-mode ""'
},
# {
# name: "TGG LLM model perf tests",
# model-type: "LLM",
# arch: wormhole_b0,
# runs-on: ["arch-wormhole_b0", "config-tgg", "in-service", "bare-metal", "pipeline-perf"],
# cmd: './tests/scripts/run_tests.sh --tt-arch wormhole_b0 --pipeline-type llm_model_perf_tgg_device --dispatch-mode ""'
# },
{
name: "TGG CNN model perf tests",
model-type: "CNN",
Expand Down
22 changes: 11 additions & 11 deletions tests/scripts/run_performance.sh
Original file line number Diff line number Diff line change
Expand Up @@ -18,28 +18,28 @@ run_perf_models_other() {
if [ "$tt_arch" == "wormhole_b0" ]; then
env WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/wormhole/resnet50/tests/test_perf_e2e_resnet50.py -m $test_marker

env WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/wormhole/bert_tiny/tests/test_performance.py -m $test_marker
# env WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/wormhole/bert_tiny/tests/test_performance.py -m $test_marker

env WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/yolov4/tests/test_perf_yolo.py -m $test_marker
# env WH_ARCH_YAML=wormhole_b0_80_arch_eth_dispatch.yaml pytest models/demos/yolov4/tests/test_perf_yolo.py -m $test_marker
fi

env pytest -n auto tests/ttnn/integration_tests/bert/test_performance.py -m $test_marker
# env pytest -n auto tests/ttnn/integration_tests/bert/test_performance.py -m $test_marker

env pytest -n auto models/demos/ttnn_falcon7b/tests -m $test_marker
# env pytest -n auto models/demos/ttnn_falcon7b/tests -m $test_marker

env pytest models/demos/distilbert/tests/test_perf_distilbert.py -m $test_marker
# env pytest models/demos/distilbert/tests/test_perf_distilbert.py -m $test_marker

env pytest -n auto tests/ttnn/integration_tests/whisper/test_performance.py -m $test_marker
# env pytest -n auto tests/ttnn/integration_tests/whisper/test_performance.py -m $test_marker

env pytest -n auto models/demos/metal_BERT_large_11/tests -m $test_marker
# env pytest -n auto models/demos/metal_BERT_large_11/tests -m $test_marker

env pytest -n auto models/demos/vgg/tests/test_perf_vgg.py -m $test_marker
# env pytest -n auto models/demos/vgg/tests/test_perf_vgg.py -m $test_marker

env pytest -n auto models/demos/convnet_mnist/tests -m $test_marker
# env pytest -n auto models/demos/convnet_mnist/tests -m $test_marker

env pytest -n auto models/demos/bert_tiny/tests/test_performance.py -m $test_marker
# env pytest -n auto models/demos/bert_tiny/tests/test_performance.py -m $test_marker

env pytest -n auto models/demos/mnist/tests -m $test_marker
# env pytest -n auto models/demos/mnist/tests -m $test_marker

## Merge all the generated reports
env python models/perf/merge_perf_results.py
Expand Down

0 comments on commit 540946f

Please sign in to comment.