diff --git a/.github/workflows/test-mlperf-inference-abtf-poc.yml b/.github/workflows/test-mlperf-inference-abtf-poc.yml
index 9e57daa..dc9e00b 100644
--- a/.github/workflows/test-mlperf-inference-abtf-poc.yml
+++ b/.github/workflows/test-mlperf-inference-abtf-poc.yml
@@ -44,13 +44,12 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [macos-latest, macos-13]
-        python-version: [ "3.8", "3.12" ]
+        os: [macos-latest, macos-13, windows-latest]
+        python-version: [ "3.9", "3.11", "3.12" ]
         backend: [ "pytorch" ]
         implementation: [ "python" ]
         exclude:
-        - os: ubuntu-24.04
-          python-version: "3.8"
+        - os: windows-latest

     steps:
     - uses: actions/checkout@v3
@@ -60,10 +59,10 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
+        #git config --system core.longpaths true
         pip install cm4mlops
         cm pull repo --url=${{ github.event.pull_request.head.repo.html_url }} --checkout=${{ github.event.pull_request.head.ref }}
-        cm run script --quiet --tags=get,sys-utils-cm

     - name: Test MLPerf Inference ABTF POC using ${{ matrix.backend }} natively
       run: |
-        cm run script --tags=run-abtf,inference,_poc-demo --adr.compiler.tags=gcc --quiet -v --gh_token=${{ secrets.ABTF_ACCESS_TOKEN }}
+        cm run script --tags=run-abtf,inference,_poc-demo --adr.compiler.tags=gcc --quiet --env.CM_MLPERF_LOADGEN_BUILD_FROM_SRC=off -v --gh_token=${{ secrets.ABTF_ACCESS_TOKEN }}
diff --git a/script/demo-ml-model-abtf-cognata-pytorch-loadgen/_cm.yaml b/script/demo-ml-model-abtf-cognata-pytorch-loadgen/_cm.yaml
index 3eae2c2..da5d215 100644
--- a/script/demo-ml-model-abtf-cognata-pytorch-loadgen/_cm.yaml
+++ b/script/demo-ml-model-abtf-cognata-pytorch-loadgen/_cm.yaml
@@ -19,6 +19,7 @@ tags:
 default_env:
   CM_MLPERF_LOADGEN_MODE: accuracy
   CM_MLPERF_LOADGEN_SCENARIO: Offline
+  CM_MLPERF_LOADGEN_BUILD_FROM_SRC: 'on'
   CM_OUTPUT_FOLDER_NAME: test_results
   CM_MLPERF_RUN_STYLE: test
   CM_TEST_QUERY_COUNT: '10'
@@ -242,16 +243,6 @@ deps:
     - ray

-  ## Transformers
-  - tags: get,generic-python-lib,_transformers
-    names:
-    - ml-engine-transformers
-    enable_if_env:
-      CM_MODEL:
-      - bert-99
-      - bert-99.9
-      - gptj-99
-      - gptj-99.9


   ## Tensorflow
   - tags: get,generic-python-lib,_tensorflow
@@ -274,10 +265,31 @@ deps:


   # Install MLPerf loadgen
+  - tags: get,generic-python-lib,_package.mlcommons-loadgen
+    enable_if_env:
+      CM_MLPERF_LOADGEN_BUILD_FROM_SRC:
+      - "off"
+      CM_HOST_OS_FLAVOR:
+      - linux
+      CM_HOST_PLATFORM_FLAVOR:
+      - x86_64
+    names:
+    - loadgen
+    - mlperf-inference-loadgen
+
   - tags: get,loadgen
+    enable_if_any_env:
+      CM_MLPERF_LOADGEN_BUILD_FROM_SRC:
+      - "on"
+      CM_HOST_OS_FLAVOR:
+      - darwin
+      - windows
+      CM_HOST_PLATFORM_FLAVOR:
+      - aarch64
     names:
     - loadgen
     - mlperf-inference-loadgen
+    - mlperf-inference-loadgen-from-src


  #
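
For reviewers, a minimal local sketch of the new toggle, using only commands that appear in the workflow above; <REPO_URL> and <BRANCH> are placeholders for the pull request's repository URL and branch, and the CI-only --gh_token argument is omitted.

    # Install the CM client and pull the scripts repo, as the CI job does
    pip install cm4mlops
    cm pull repo --url=<REPO_URL> --checkout=<BRANCH>

    # Run the ABTF POC demo with loadgen taken from the prebuilt mlcommons-loadgen
    # pip package: CM_MLPERF_LOADGEN_BUILD_FROM_SRC=off enables the
    # get,generic-python-lib,_package.mlcommons-loadgen dependency added in _cm.yaml
    # (gated there to Linux/x86_64). Leaving the variable at its default 'on' keeps
    # the get,loadgen build-from-source path instead.
    cm run script --tags=run-abtf,inference,_poc-demo --adr.compiler.tags=gcc \
        --quiet -v --env.CM_MLPERF_LOADGEN_BUILD_FROM_SRC=off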