diff --git a/.github/actions/setup_base/action.yml b/.github/actions/setup_base/action.yml index 939339aa92..0153a54326 100644 --- a/.github/actions/setup_base/action.yml +++ b/.github/actions/setup_base/action.yml @@ -67,7 +67,7 @@ runs: - uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.10' - name: Install Ninja uses: llvm/actions/install-ninja@6a57890d0e3f9f35dfc72e7e48bc5e1e527cdd6c # Jan 17 diff --git a/.github/workflows/lintAndFormat.yml b/.github/workflows/lintAndFormat.yml index 91829ba809..538d142b2e 100644 --- a/.github/workflows/lintAndFormat.yml +++ b/.github/workflows/lintAndFormat.yml @@ -126,7 +126,7 @@ jobs: - name: Setup Python env uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.10' - name: Install black run: pip install black[jupyter] @@ -196,7 +196,7 @@ jobs: - uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.10' - name: Install Python and other packages run: | diff --git a/.github/workflows/mlirAIEDistro.yml b/.github/workflows/mlirAIEDistro.yml index f6d177fff7..dda79fa177 100644 --- a/.github/workflows/mlirAIEDistro.yml +++ b/.github/workflows/mlirAIEDistro.yml @@ -33,12 +33,14 @@ on: # comment it out when you're not working on these yamls pull_request: -concurrency: - # A PR number if a pull request and otherwise the commit hash. This cancels - # queued and in-progress runs for the same PR (presubmit) or commit - # (postsubmit). - group: ci-build-mlir-aie-distro-${{ github.event.number || github.sha }} - cancel-in-progress: true + # conversely you want to comment this out so that you can have multiple runs going concurrently + # which is useful here for flushing all bugs out +#concurrency: +# # A PR number if a pull request and otherwise the commit hash. This cancels +# # queued and in-progress runs for the same PR (presubmit) or commit +# # (postsubmit). +# group: ci-build-mlir-aie-distro-${{ github.event.number || github.sha }} +# cancel-in-progress: true jobs: @@ -71,7 +73,7 @@ jobs: needs: get_aie_project_commit - name: ${{ matrix.OS }} ${{ matrix.ARCH }} + name: ${{ matrix.OS }} ${{ matrix.ARCH }} rtti=${{ matrix.ENABLE_RTTI }} continue-on-error: true @@ -86,18 +88,24 @@ jobs: include: - OS: ubuntu-20.04 ARCH: x86_64 + ENABLE_RTTI: ON - OS: windows-2019 ARCH: AMD64 + ENABLE_RTTI: ON +# Can't figure it out but cmake segfault when building on mac on the runners - OS: macos-11 ARCH: x86_64 - - - OS: macos-11 - ARCH: arm64 + ENABLE_RTTI: ON +# +# - OS: macos-11 +# ARCH: arm64 +# ENABLE_RTTI: ON - OS: ubuntu-20.04 ARCH: aarch64 + ENABLE_RTTI: ON steps: @@ -125,7 +133,7 @@ jobs: with: MATRIX_OS: ${{ matrix.OS }} MATRIX_ARCH: ${{ matrix.ARCH }} - EXTRA_KEY: mlir-aie-distro + EXTRA_KEY: mlir-aie-distro-rtti-${{ matrix.ENABLE_RTTI }} - name: Shift workspace root id: workspace_root @@ -216,7 +224,7 @@ jobs: shell: bash run: | pip install pkginfo - WHL=$(ls wheelhouse/mlir_aie_*whl) + WHL=$(ls wheelhouse/mlir_aie*whl) echo "MLIR_AIE_WHEEL_VERSION=$(python -c "import pkginfo; w = pkginfo.Wheel('$WHL'); print(w.version.split('+')[0] + '+' + w.version.split('+')[1].rsplit('.', 1)[-1])")" | tee -a $GITHUB_OUTPUT - name: Download cache from container ubuntu @@ -248,26 +256,24 @@ jobs: # The wheels important parts of the wheels (all the LLVM/MLIR archives) have nothing to do with the # python version. With py3-none you can pip install them in any python venv. 
Unfortunately though this does - # mean that the python bindings themselves will confusingly not work in other envs (!=3.11) + # mean that the python bindings themselves will confusingly not work in other envs (!=3.10) - name: rename non-windows if: ${{ matrix.OS == 'ubuntu-20.04' || matrix.OS == 'macos-11' }} working-directory: ${{ steps.workspace_root.outputs.WORKSPACE_ROOT }} shell: bash run: | - rename 's/cp310-cp310/py3-none/' wheelhouse/mlir_aie_*whl - rename 's/cp311-cp311/py3-none/' wheelhouse/mlir_aie_*whl + rename 's/cp310-cp310/py3-none/' wheelhouse/mlir_aie*whl if [ x"${{ matrix.OS }}" == x"ubuntu-20.04" ] && [ x"${{ matrix.ARCH }}" == x"aarch64" ]; then - rename 's/x86_64/aarch64/' wheelhouse/mlir_aie_*whl + rename 's/x86_64/aarch64/' wheelhouse/mlir_aie*whl fi - name: rename windows if: ${{ matrix.OS == 'windows-2019' }} working-directory: ${{ steps.workspace_root.outputs.WORKSPACE_ROOT }} run: | - ls wheelhouse/mlir_aie_*whl | Rename-Item -NewName {$_ -replace 'cp310-cp310', 'py3-none' } - ls wheelhouse/mlir_aie_*whl | Rename-Item -NewName {$_ -replace 'cp311-cp311', 'py3-none' } + ls wheelhouse/mlir_aie*whl | Rename-Item -NewName {$_ -replace 'cp310-cp310', 'py3-none' } - name: build python bindings shell: bash @@ -278,11 +284,12 @@ jobs: export PIP_NO_BUILD_ISOLATION=false cp requirements.txt python_bindings - cp wheelhouse/mlir-aie-*.whl python_bindings/ + cp wheelhouse/mlir_aie*.whl python_bindings/ cp -r scripts python_bindings/scripts pushd python_bindings - unzip -q mlir-aie-*.whl + unzip -q mlir_aie*.whl + rm -rf mlir_aie*.whl CIBW_ARCHS=${{ matrix.ARCH }} \ CMAKE_GENERATOR="Ninja" \ @@ -388,7 +395,7 @@ jobs: cp requirements.txt python_bindings cp -R scripts python_bindings/scripts pushd python_bindings - unzip -q ../wheelhouse/mlir-aie-*-linux_aarch64.whl + unzip -q ../wheelhouse/mlir_aie*-linux_aarch64.whl CIBW_ARCHS=${{ matrix.ARCH }} \ CIBW_BUILD=${{ matrix.PY_VERSION }}-manylinux_aarch64 \ @@ -427,10 +434,15 @@ jobs: - OS: windows-2019 ARCH: AMD64 - - OS: macos-11 - ARCH: x86_64 +# - OS: macos-11 +# ARCH: x86_64 steps: + - name: Checkout reqs + uses: actions/checkout@v3 + with: + sparse-checkout: python/requirements.txt + - uses: actions/download-artifact@v3 with: name: build_artifact @@ -438,12 +450,12 @@ jobs: - uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.10' - name: test shell: bash run: | - pip install numpy PyYAML + pip install -r python/requirements.txt pip install aie -f dist --no-index python -c 'import aie.dialects.aie' diff --git a/.github/workflows/mlirDistro.yml b/.github/workflows/mlirDistro.yml index 7d2023e668..4988cef56a 100644 --- a/.github/workflows/mlirDistro.yml +++ b/.github/workflows/mlirDistro.yml @@ -43,12 +43,14 @@ on: # At minute 0 past every 4th hour. (see https://crontab.guru) - cron: '0 */4 * * *' -concurrency: - # A PR number if a pull request and otherwise the commit hash. This cancels - # queued and in-progress runs for the same PR (presubmit) or commit - # (postsubmit). - group: ci-build-mlir-distro-${{ github.event.number || github.sha }} - cancel-in-progress: true + # conversely you want to comment this out so that you can have multiple runs going concurrently + # which is useful here for flushing all bugs out +#concurrency: +# # A PR number if a pull request and otherwise the commit hash. This cancels +# # queued and in-progress runs for the same PR (presubmit) or commit +# # (postsubmit). 
+# group: ci-build-mlir-distro-${{ github.event.number || github.sha }} +# cancel-in-progress: true jobs: @@ -280,7 +282,7 @@ jobs: # The wheels important parts of the wheels (all the LLVM/MLIR archives) have nothing to do with the # python version. With py3-none you can pip install them in any python venv. Unfortunately though this does - # mean that the python bindings themselves will confusingly not work in other envs (!=3.11) + # mean that the python bindings themselves will confusingly not work in other envs (!=3.10) - name: rename non-windows if: ${{ matrix.OS == 'ubuntu-20.04' || matrix.OS == 'macos-11' }} working-directory: ${{ steps.workspace_root.outputs.WORKSPACE_ROOT }} @@ -288,7 +290,6 @@ jobs: run: | rename 's/cp310-cp310/py3-none/' wheelhouse/mlir*whl - rename 's/cp311-cp311/py3-none/' wheelhouse/mlir*whl if [ x"${{ matrix.OS }}" == x"ubuntu-20.04" ] && [ x"${{ matrix.ARCH }}" == x"aarch64" ]; then rename 's/x86_64/aarch64/' wheelhouse/mlir*whl @@ -299,7 +300,6 @@ jobs: working-directory: ${{ steps.workspace_root.outputs.WORKSPACE_ROOT }} run: | ls wheelhouse/mlir*whl | Rename-Item -NewName {$_ -replace 'cp310-cp310', 'py3-none' } - ls wheelhouse/mlir*whl | Rename-Item -NewName {$_ -replace 'cp311-cp311', 'py3-none' } # The "native tools" MLIR utilities that are necessary for cross-compiling MLIR - basically just tblgen. # Now if you build a whole distro you naturally do get those utilities but it's easier to just bundle them @@ -338,16 +338,14 @@ jobs: # done - name: Upload wheels -# TODO(max): remove -# if: github.event_name == 'workflow_dispatch' && (success() || failure()) uses: actions/upload-artifact@v3 with: path: ${{ steps.workspace_root.outputs.WORKSPACE_ROOT }}/wheelhouse/*.whl - name: build_artifact + name: build_artifact_${{ matrix.OS }}_${{ matrix.ARCH }}_rtti_${{ matrix.ENABLE_RTTI }} smoke_test_wheels: - name: Smoketest ${{ matrix.OS }} ${{ matrix.ARCH }} rtti=${{ matrix.NO_RTTI }} + name: test ${{ matrix.OS }} ${{ matrix.ARCH }} rtti=${{ matrix.ENABLE_RTTI }} needs: [build] @@ -358,54 +356,100 @@ jobs: include: - OS: ubuntu-20.04 ARCH: x86_64 - NO_RTTI: '' + ENABLE_RTTI: ON - OS: windows-2019 ARCH: AMD64 - NO_RTTI: '' + ENABLE_RTTI: ON - OS: macos-11 ARCH: x86_64 - NO_RTTI: '' + ENABLE_RTTI: ON - OS: ubuntu-20.04 ARCH: x86_64 - NO_RTTI: '_no_rtti' + ENABLE_RTTI: OFF - OS: windows-2019 ARCH: AMD64 - NO_RTTI: '_no_rtti' + ENABLE_RTTI: OFF - OS: macos-11 ARCH: x86_64 - NO_RTTI: '_no_rtti' + ENABLE_RTTI: OFF steps: - uses: actions/download-artifact@v3 with: - name: build_artifact + name: build_artifact_${{ matrix.OS }}_${{ matrix.ARCH }}_rtti_${{ matrix.ENABLE_RTTI }} path: dist - uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.10' - name: test shell: bash run: | pip install numpy PyYAML - unzip mlir${{ matrix.NO_RTTI }}*.whl + unzip -o -q dist/mlir\*.whl - PYTHONPATH=$(find . -name mlir_core) python -c 'import mlir${{ matrix.NO_RTTI }}.ir' + PYTHONPATH=$(find . 
-name mlir_core) python -c 'import mlir.ir' upload_distro_wheels: + name: upload ${{ matrix.OS }} ${{ matrix.ARCH }} rtti=${{ matrix.ENABLE_RTTI }} + if: github.event_name == 'workflow_dispatch' needs: smoke_test_wheels runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - OS: ubuntu-20.04 + ARCH: x86_64 + ENABLE_RTTI: ON + + - OS: ubuntu-20.04 + ARCH: aarch64 + ENABLE_RTTI: ON + + - OS: windows-2019 + ARCH: AMD64 + ENABLE_RTTI: ON + + - OS: macos-11 + ARCH: x86_64 + ENABLE_RTTI: ON + + - OS: macos-11 + ARCH: arm64 + ENABLE_RTTI: ON + + - OS: ubuntu-20.04 + ARCH: x86_64 + ENABLE_RTTI: OFF + + - OS: ubuntu-20.04 + ARCH: aarch64 + ENABLE_RTTI: OFF + + - OS: windows-2019 + ARCH: AMD64 + ENABLE_RTTI: OFF + + - OS: macos-11 + ARCH: x86_64 + ENABLE_RTTI: OFF + + - OS: macos-11 + ARCH: arm64 + ENABLE_RTTI: OFF + permissions: id-token: write contents: write @@ -415,7 +459,7 @@ jobs: with: # unpacks default artifact into dist/ # if `name: artifact` is omitted, the action will create extra parent dir - name: build_artifact + name: build_artifact_${{ matrix.OS }}_${{ matrix.ARCH }}_rtti_${{ matrix.ENABLE_RTTI }} path: dist - name: Release current commit diff --git a/docs/Dev.md b/docs/Dev.md new file mode 100644 index 0000000000..e4d04a851e --- /dev/null +++ b/docs/Dev.md @@ -0,0 +1,75 @@ +# Dev + +## Wheels + +There are CI/GHA workflows that build + +1. a distribution of LLVM+MLIR + 1. [mlirDistro.yml](..%2F.github%2Fworkflows%2FmlirDistro.yml) + 2. [Accompanying scripts](..%2Futils%2Fmlir_wheels) +2. a distribution of MLIR-AIE + 1. [mlirAIEDistro.yml](..%2F.github%2Fworkflows%2FmlirAIEDistro.yml) + 2. [Accompanying scripts](..%2Futils%2Fmlir_aie_wheels) + +The builds are packaged as [Python wheels](https://packaging.python.org/en/latest/specifications/binary-distribution-format/). +Why package binaries + C++ source as Python wheels? Because doing so enables this: + +```shell +$ pip download mlir -f https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro + +Looking in links: https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro +Collecting mlir + Downloading https://github.com/Xilinx/mlir-aie/releases/download/mlir-distro/mlir-18.0.0.2023121201+d36b483... + ╸━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 18.7/792.9 MB 14.6 MB/s eta 0:00:54 + +Saved ./mlir-18.0.0.2023121201+d36b483... +Successfully downloaded mlir + +$ unzip mlir-18.0.0.2023121201+d36b483... + +Archive: mlir-18.0.0.2023121201+d36b483... + creating: mlir/ + creating: mlir.libs/ + creating: mlir/src/ + creating: mlir/share/ + creating: mlir/include/ + creating: mlir/bin/ +``` + +**and this will work for all platforms that the wheels are being built for**. +I.e., no need to specify arch or platform or whatever (pip takes care of it). +And also, of course, `pip download mlir==18.0.0.2023121201+d36b483` works (`18.0.0.2023121201+d36b483` is the "version" of wheel). + +Currently we are building for + +* Linux + * x86_64 ([manylinux_2_27](https://github.com/pypa/manylinux)) + * aarch64 +* Windows + * AMD64 +* MacOS + * x86_64 + * arm64 + +Why Mac? Because some people do dev on a Mac. + +## How to cut a new wheel + +1. Go to the [actions tab](https://github.com/Xilinx/mlir-aie/actions) @ github.com/Xilinx/mlir-aie; +2. Select **MLIR Distro** in the left-most column (under **Actions**) +

+ image +

+3. Select **Run workflow** at the far right +

+ image +

+4. Finally (ignoring all of the options) hit the green `Run workflow` button

+ image +

+5. A **MLIR Distro** job will appear under the same actions tab (where you can monitor progress). + +## Gotchas + +1. In many places you will see `PIP_NO_BUILD_ISOLATION=false` - this means the opposite of what it says i.e., this actually turns off build isolation (i.e., equivalent to passing `--no-build-isolation` to `pip wheel`). [Don't ask me why](https://github.com/pypa/pip/issues/5229#issuecomment-387301397). \ No newline at end of file diff --git a/utils/mlir_aie_wheels/python_bindings/CMakeLists.txt b/utils/mlir_aie_wheels/python_bindings/CMakeLists.txt index 77a958d1a0..3cb0e28c2a 100644 --- a/utils/mlir_aie_wheels/python_bindings/CMakeLists.txt +++ b/utils/mlir_aie_wheels/python_bindings/CMakeLists.txt @@ -89,8 +89,10 @@ add_mlir_python_common_capi_library(AIEAggregateCAPI # Custom targets. # ############################################################################## -_flatten_mlir_python_targets(aie_python_sources_deps AIEPythonSources - AIECompilerPythonSources) +_flatten_mlir_python_targets(aie_python_sources_deps + AIEPythonSources + AIEPythonSources.Compiler +) add_custom_target("aie-python-sources" DEPENDS ${aie_python_sources_deps}) if(NOT LLVM_ENABLE_IDE) add_llvm_install_targets(install-aie-python-sources DEPENDS @@ -123,20 +125,3 @@ add_mlir_python_modules(AIEPythonModules COMMON_CAPI_LINK_LIBS AIEAggregateCAPI ) - -configure_file( - ${AIE_DIR}/../../../python/aie/compiler/aiecc/configure.py - ${CMAKE_CURRENT_SOURCE_DIR}/aiecc/configure.py -) - -declare_mlir_python_sources(AIECompilerPythonSources.Configure - ADD_TO_PARENT AIECompilerPythonSources - SOURCES aiecc/configure.py -) - -add_mlir_python_modules(AIECompilerPythonModules - ROOT_PREFIX ${CMAKE_BINARY_DIR}/aie/compiler - INSTALL_PREFIX aie/compiler - DECLARED_SOURCES - AIECompilerPythonSources -) diff --git a/utils/mlir_aie_wheels/python_bindings/setup.py b/utils/mlir_aie_wheels/python_bindings/setup.py index dc3b483245..b11dc16122 100644 --- a/utils/mlir_aie_wheels/python_bindings/setup.py +++ b/utils/mlir_aie_wheels/python_bindings/setup.py @@ -6,17 +6,18 @@ import sys from pathlib import Path from pprint import pprint +from typing import Union from setuptools import Extension, setup from setuptools.command.build_ext import build_ext def check_env(build, default=0): - return os.environ.get(build, default) in {"1", "true", "True", "ON", "YES"} + return os.environ.get(build, str(default)) in {"1", "true", "True", "ON", "YES"} class CMakeExtension(Extension): - def __init__(self, name: str, sourcedir: str = "") -> None: + def __init__(self, name: str, sourcedir: Union[str, Path] = "") -> None: super().__init__(name, sources=[]) self.sourcedir = os.fspath(Path(sourcedir).resolve()) @@ -80,7 +81,7 @@ def build_extension(self, ext: CMakeExtension) -> None: cmake_args = [ f"-G {cmake_generator}", - f"-DMLIR_DIR={MLIR_INSTALL_ABS_PATH / 'lib' / 'cmake' / 'mlir' if check_env('ENABLE_RTTI') else 'mlir_no_rtti'}", + f"-DMLIR_DIR={MLIR_INSTALL_ABS_PATH / 'lib' / 'cmake' / 'mlir'}", f"-DAIE_DIR={MLIR_AIE_INSTALL_ABS_PATH / 'lib' / 'cmake' / 'aie'}", f"-DCMAKE_INSTALL_PREFIX={install_dir}", f"-DPython3_EXECUTABLE={sys.executable}", @@ -153,29 +154,21 @@ def build_extension(self, ext: CMakeExtension) -> None: build_temp.mkdir(parents=True) print("ENV", pprint(os.environ), file=sys.stderr) - print("CMAKE_ARGS", cmake_args, file=sys.stderr) - - subprocess.run( - ["cmake", ext.sourcedir, *cmake_args], - cwd=build_temp, - check=True, - capture_output=True, - ) - subprocess.run( - ["cmake", "--build", ".", "--target", 
"install", *build_args], - cwd=build_temp, - check=True, - capture_output=True, - ) - shutil.copy( - MLIR_AIE_INSTALL_ABS_PATH - / "python" - / "aie" - / "compiler" - / "aiecc" - / "configure.py", - install_dir / "aie" / "compiler" / "aiecc" / "configure.py", - ) + print("cmake", " ".join(cmake_args), file=sys.stderr) + + try: + subprocess.run( + ["cmake", ext.sourcedir, *cmake_args], cwd=build_temp, check=True + ) + subprocess.run( + ["cmake", "--build", ".", "--target", "install", *build_args], + cwd=build_temp, + check=True, + ) + except subprocess.CalledProcessError as e: + print(f"{e.stdout=}") + print(f"{e.stderr=}") + raise e setup( @@ -184,7 +177,7 @@ def build_extension(self, ext: CMakeExtension) -> None: name="aie", include_package_data=True, long_description_content_type="text/markdown", - ext_modules=[CMakeExtension("_aie", sourcedir=".")], + ext_modules=[CMakeExtension("_aie", sourcedir=Path(__file__).parent.absolute())], cmdclass={"build_ext": CMakeBuild}, zip_safe=False, python_requires=">=3.10", diff --git a/utils/mlir_aie_wheels/scripts/build_local.sh b/utils/mlir_aie_wheels/scripts/build_local.sh new file mode 100755 index 0000000000..3dd148ba23 --- /dev/null +++ b/utils/mlir_aie_wheels/scripts/build_local.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash +set -xe +HERE=$(dirname "$(realpath "$0")") + +unameOut="$(uname -s)" +case "${unameOut}" in + Linux*) machine=linux;; + Darwin*) machine=macos;; + CYGWIN*) machine=windows;; + MINGW*) machine=windows;; + MSYS_NT*) machine=windows;; + *) machine="UNKNOWN:${unameOut}" +esac +echo "${machine}" + +# rsync -avpP --exclude .git --exclude cmake-build-debug --exclude cmake-build-release ../../llvm/* llvm-project/ + +export APPLY_PATCHES=true + +if [ "$machine" == "linux" ]; then + export MATRIX_OS=ubuntu-20.04 + export CIBW_ARCHS=x86_64 + export CIBW_BUILD=cp311-manylinux_x86_64 + export ARCH=x86_64 + export PARALLEL_LEVEL=15 +elif [ "$machine" == "macos" ]; then + export MATRIX_OS=macos-11 + export CIBW_ARCHS=arm64 + export CIBW_BUILD=cp311-macosx_arm64 + export ARCH=arm64 + export PARALLEL_LEVEL=32 +else + export MATRIX_OS=windows-2019 + export CIBW_ARCHS=AMD64 + export CIBW_BUILD=cp311-win_amd64 + export ARCH=AMD64 +fi + +ccache --show-stats +ccache --print-stats +ccache --show-config + +export HOST_CCACHE_DIR="$(ccache --get-config cache_dir)" +cibuildwheel "$HERE"/.. 
--platform "$machine" + +rename 's/cp311-cp311/py3-none/' "$HERE/../wheelhouse/"mlir*whl + +if [ -d "$HERE/../wheelhouse/.ccache" ]; then + cp -R "$HERE/../wheelhouse/.ccache/"* "$HOST_CCACHE_DIR/" +fi + +cp -R "$HERE/../requirements.txt" "$HERE/../python_bindings" +cp -R "$HERE/../scripts" "$HERE/../python_bindings" +cp -R "$HERE/../wheelhouse/"mlir_aie*.whl "$HERE/../python_bindings" + +pushd "$HERE/../python_bindings" +# escape to prevent 'Filename not matched' when both the py3-none whl and the cp311 wheel +unzip -o -q mlir_aie\*.whl +rm -rf mlir_aie*.whl + +cibuildwheel --platform "$machine" --output-dir ../wheelhouse \ No newline at end of file diff --git a/utils/mlir_aie_wheels/scripts/download_mlir.sh b/utils/mlir_aie_wheels/scripts/download_mlir.sh index cb5cf21ffc..49e96861ca 100755 --- a/utils/mlir_aie_wheels/scripts/download_mlir.sh +++ b/utils/mlir_aie_wheels/scripts/download_mlir.sh @@ -1,12 +1,12 @@ #!/usr/bin/env bash set -xe -rm -rf mlir* || true +rm -rf mlir || true pip install mlir-native-tools --force -U if [ x"$ENABLE_RTTI" == x"OFF" ]; then - NO_RTTI="_no_rtti" + NO_RTTI="-no-rtti" fi if [ x"$CIBW_ARCHS" == x"arm64" ] || [ x"$CIBW_ARCHS" == x"aarch64" ]; then @@ -20,4 +20,8 @@ else pip -q download mlir$NO_RTTI fi -unzip -q mlir*whl +# overwrite files WITHOUT prompting +unzip -o -q mlir*whl + +echo $PWD +ls -l mlir* \ No newline at end of file diff --git a/utils/mlir_aie_wheels/setup.py b/utils/mlir_aie_wheels/setup.py index 9da60f1dec..d5accef63d 100644 --- a/utils/mlir_aie_wheels/setup.py +++ b/utils/mlir_aie_wheels/setup.py @@ -15,7 +15,7 @@ def check_env(build, default=0): - return os.environ.get(build, default) in {"1", "true", "True", "ON", "YES"} + return os.environ.get(build, str(default)) in {"1", "true", "True", "ON", "YES"} class CMakeExtension(Extension): @@ -213,22 +213,21 @@ def build_extension(self, ext: CMakeExtension) -> None: build_temp.mkdir(parents=True) print("ENV", pprint(os.environ), file=sys.stderr) - print("CMAKE_ARGS", cmake_args, file=sys.stderr) - - subprocess.run( - ["cmake", ext.sourcedir, *cmake_args], - cwd=build_temp, - check=True, - stdout=sys.stderr, - stderr=sys.stderr - ) - subprocess.run( - ["cmake", "--build", ".", "--target", "install", *build_args], - cwd=build_temp, - check=True, - stdout=sys.stderr, - stderr=sys.stderr - ) + print("cmake", " ".join(cmake_args), file=sys.stderr) + + try: + subprocess.run( + ["cmake", ext.sourcedir, "--debug-trycompile", "--debug-output", *cmake_args], cwd=build_temp, check=True + ) + subprocess.run( + ["cmake", "--build", ".", "--target", "install", *build_args], + cwd=build_temp, + check=True, + ) + except subprocess.CalledProcessError as e: + print(f"{e.stdout=}") + print(f"{e.stderr=}") + raise e # cibuildwheel containers are in the future? 
and this messes with ninja which checks timestamps # when configuring cmake @@ -270,5 +269,5 @@ def build_extension(self, ext: CMakeExtension) -> None: ext_modules=[CMakeExtension("_mlir_aie", sourcedir="mlir-aie")], cmdclass={"build_ext": CMakeBuild}, zip_safe=False, - python_requires=">=3.8", + python_requires=">=3.10", ) diff --git a/utils/mlir_wheels/pyproject.toml b/utils/mlir_wheels/pyproject.toml index a09a8f0e8e..29cdc1efe2 100644 --- a/utils/mlir_wheels/pyproject.toml +++ b/utils/mlir_wheels/pyproject.toml @@ -40,8 +40,9 @@ environment-pass = [ ] [tool.cibuildwheel.macos] -environment = { PATH = "/usr/local/opt/ccache/libexec:$PATH", PIP_FIND_LINKS = "https://makslevental.github.io/wheels" } +environment = { PATH = "/usr/local/opt/ccache/libexec:$PATH", PIP_FIND_LINKS = "https://makslevental.github.io/wheels", PIP_NO_BUILD_ISOLATION = "false" } before-build = [ + "pip install -r llvm-project/mlir/python/requirements.txt", "{project}/scripts/apply_patches.sh", ] repair-wheel-command = [ diff --git a/utils/mlir_wheels/setup.py b/utils/mlir_wheels/setup.py index 813910823b..db742c09f3 100644 --- a/utils/mlir_wheels/setup.py +++ b/utils/mlir_wheels/setup.py @@ -12,7 +12,7 @@ def check_env(build, default=0): - return os.environ.get(build, default) in {"1", "true", "True", "ON", "YES"} + return os.environ.get(build, str(default)) in {"1", "true", "True", "ON", "YES"} class CMakeExtension(Extension): @@ -196,23 +196,21 @@ def build_extension(self, ext: CMakeExtension) -> None: build_temp.mkdir(parents=True) print("ENV", pprint(os.environ), file=sys.stderr) - print("CMAKE_ARGS", cmake_args, file=sys.stderr) - - subprocess.run( - ["cmake", ext.sourcedir, *cmake_args], - cwd=build_temp, - check=True, - # cibuildwheel swallows stdout - stdout=sys.stderr, - stderr=sys.stderr - ) - subprocess.run( - ["cmake", "--build", ".", "--target", "install", *build_args], - cwd=build_temp, - check=True, - stdout=sys.stderr, - stderr=sys.stderr - ) + print("cmake", " ".join(cmake_args), file=sys.stderr) + + try: + subprocess.run( + ["cmake", ext.sourcedir, *cmake_args], cwd=build_temp, check=True + ) + subprocess.run( + ["cmake", "--build", ".", "--target", "install", *build_args], + cwd=build_temp, + check=True, + ) + except subprocess.CalledProcessError as e: + print(f"{e.stdout=}") + print(f"{e.stderr=}") + raise e # cibuildwheel containers are in the future? and this messes with ninja which checks timestamps # when configuring cmake
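A recurring change in the `setup.py` files in this patch (including the last hunk above) is passing the default through `str()` in `check_env`. A minimal standalone sketch of why that matters — `MY_FLAG` is a hypothetical stand-in for variables like `ENABLE_RTTI`:

```python
import os

def check_env(build, default=0):
    # str(default) lets a non-string default (e.g. 1) match the string values
    # below; with the old `os.environ.get(build, default)` an int default could
    # never be a member of the set, so the flag silently read as "off".
    return os.environ.get(build, str(default)) in {"1", "true", "True", "ON", "YES"}

os.environ.pop("MY_FLAG", None)           # hypothetical flag, unset
print(check_env("MY_FLAG"))               # False: default 0 -> "0"
print(check_env("MY_FLAG", default=1))    # True:  default 1 -> "1"
os.environ["MY_FLAG"] = "ON"              # e.g. how CI passes ENABLE_RTTI=ON
print(check_env("MY_FLAG"))               # True
```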
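Further up, the mlirAIEDistro workflow derives `MLIR_AIE_WHEEL_VERSION` from `pkginfo.Wheel(whl).version` with a fairly dense one-liner. Here is the same string surgery sketched with a made-up version string in the shape the distro builds appear to produce (base version plus a local segment ending in the commit hash); only the one-liner itself comes from the workflow, the variable names are illustrative:

```python
# Hypothetical wheel version: base version + local segment ending in a commit hash.
version = "18.0.0.2023121201+cibw.d36b483"

base, local = version.split("+")
# Keep only the last dot-separated piece of the local segment (the commit hash),
# matching: version.split('+')[0] + '+' + version.split('+')[1].rsplit('.', 1)[-1]
normalized = base + "+" + local.rsplit(".", 1)[-1]
print(normalized)  # -> 18.0.0.2023121201+d36b483
```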