diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index 3efc702b4..25d86546d 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -21,24 +21,25 @@ jobs:
   upload-doc:
     runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v4
+      - name: Check out the repository
+        uses: actions/checkout@v4

-      - name: Setup Python
+      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: pip

-      - name: Install Python dependencies
+      - name: Install Python dependencies
        run: |
          pip install -r requirements.txt

-      - name: Make documents
+      - name: Make documents
        run: |
          PYTHONPATH=. python build_util/make_docs.py

-      - name: Deploy to GitHub Pages
+      - name: Deploy documents to GitHub Pages
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/build-engine-container.yml b/.github/workflows/build-engine-container.yml
index c169b8358..b4a7e0cec 100644
--- a/.github/workflows/build-engine-container.yml
+++ b/.github/workflows/build-engine-container.yml
@@ -81,30 +81,30 @@ jobs:
            platforms: linux/amd64

    steps:
-      - uses: actions/checkout@v4
+      - name: Check out the repository
+        uses: actions/checkout@v4

-      - name: Setup QEMU
+      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

-      - name: Setup Docker Buildx
+      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2

-      - name: Login to DockerHub
+      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ vars.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

-      # Download VOICEVOX RESOURCE
-      - name: Prepare VOICEVOX RESOURCE cache
+      - name: Prepare VOICEVOX RESOURCE cache
        uses: actions/cache@v3
        id: voicevox-resource-cache
        with:
          key: voicevox-resource-${{ env.VOICEVOX_RESOURCE_VERSION }}
          path: download/resource

-      - name: Checkout VOICEVOX RESOURCE
+      - name: Check out the VOICEVOX RESOURCE repository
        if: steps.voicevox-resource-cache.outputs.cache-hit != 'true'
        uses: actions/checkout@v4
        with:
@@ -112,13 +112,12 @@ jobs:
          ref: ${{ env.VOICEVOX_RESOURCE_VERSION }}
          path: download/resource

-      # Merge VOICEVOX RESOURCE
-      - name: Merge VOICEVOX RESOURCE
+      - name: Merge VOICEVOX RESOURCE
        env:
          DOWNLOAD_RESOURCE_PATH: download/resource
        run: bash build_util/process_voicevox_resource.bash

-      - name: Build and Deploy Docker image
+      - name: Build and Deploy Docker image
        uses: docker/build-push-action@v3
        env:
          IMAGE_TAG:
diff --git a/.github/workflows/build-engine-package.yml b/.github/workflows/build-engine-package.yml
index d9246db46..e8f97c73b 100644
--- a/.github/workflows/build-engine-package.yml
+++ b/.github/workflows/build-engine-package.yml
@@ -40,7 +40,7 @@ jobs:
      version: ${{ steps.vars.outputs.version }}
      version_or_latest: ${{ steps.vars.outputs.version_or_latest }}
    steps:
-      - name: declare variables
+      - name: Declare variables
        id: vars
        run: |
          : # release タグ名, または workflow_dispatch でのバージョン名. リリースでない (push event) 場合は空文字列
@@ -105,30 +105,31 @@ jobs:
          split: ${{ startsWith(matrix.os, 'macos-') && 'gsplit' || 'split' }}

    steps:
-      - name: declare variables
+      - name: Declare variables
        id: vars
        run: |
          echo "package_name=voicevox_engine-${{ matrix.target }}-${{ needs.config.outputs.version }}" >> "$GITHUB_OUTPUT"

-      - uses: actions/checkout@v4
+      - name: Check out the repository
+        uses: actions/checkout@v4

      # NOTE: The default 'sed' and 'split' of macOS is BSD 'sed' and 'split'.
      #       There is a difference in specification between BSD 'sed' and 'split' and GNU 'sed' and 'split',
      #       so you need to install GNU 'sed' and 'split'.
-      - name: Install GNU sed on macOS
+      - name: Install dependencies (macOS)
        if: startsWith(matrix.os, 'macos-')
        run: |
          brew install gnu-sed coreutils

      # ONNX Runtime providersとCUDA周りをリンクするために使う
-      - name: Install patchelf
+      - name: Install ONNX Runtime dependencies (Linux)
        if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia')
        run: |
          sudo apt-get update
          sudo apt-get install -y patchelf

-      # Download CUDA
-      - name: Restore cached CUDA
+      # CUDA
+      - name: Restore cached CUDA
        if: matrix.cuda_version != ''
        uses: actions/cache/restore@v3
        id: cuda-dll-cache-restore
        with:
@@ -137,7 +138,7 @@ jobs:
          key: ${{ matrix.os }}-cuda-dll-${{ matrix.cuda_version }}-v1
          path: download/cuda

-      - name: Setup CUDA
+      - name: Set up CUDA toolkit
        if: matrix.cuda_version != '' && steps.cuda-dll-cache-restore.outputs.cache-hit != 'true'
        uses: Jimver/cuda-toolkit@v0.2.10
        id: cuda-toolkit
@@ -145,7 +146,7 @@ jobs:
        with:
          method: network
          cuda: ${{ matrix.cuda_version }}

-      - name: Extract CUDA Dynamic Libraries
+      - name: Extract CUDA Dynamic Libraries
        if: matrix.cuda_version != '' && steps.cuda-dll-cache-restore.outputs.cache-hit != 'true'
        run: |
          set -eux
@@ -179,19 +180,19 @@ jobs:
            sudo rm -rf "${CUDA_ROOT}"
          fi

-      - name: Save CUDA cache
+      - name: Save CUDA cache
        if: matrix.cuda_version != ''
        uses: actions/cache/save@v3
        with:
          key: ${{ steps.cuda-dll-cache-restore.outputs.cache-primary-key }}
          path: download/cuda

-      # Download cuDNN
-      - name: Export cuDNN url to calc hash
+      # cuDNN
+      - name: Export cuDNN url to calc hash
        if: matrix.cudnn_url != ''
        run: echo "${{ matrix.cudnn_url }}" > download/cudnn_url.txt

-      - name: Restore cached cuDNN
+      - name: Restore cached cuDNN
        if: matrix.cudnn_url != ''
        uses: actions/cache/restore@v3
        id: cudnn-dll-cache-restore
        with:
@@ -200,7 +201,7 @@ jobs:
          key: ${{ matrix.os }}-cudnn-dll-${{ hashFiles('download/cudnn_url.txt') }}-v1
          path: download/cudnn

-      - name: Download and extract cuDNN Dynamic Libraries
+      - name: Download and extract cuDNN Dynamic Libraries
        if: matrix.cudnn_url != '' && steps.cudnn-dll-cache-restore.outputs.cache-hit != 'true'
        run: |
          set -eux
@@ -233,19 +234,19 @@ jobs:
            rm download/cudnn.tar.xz
          fi

-      - name: Save cuDNN cache
+      - name: Save cuDNN cache
        if: matrix.cudnn_url != ''
        uses: actions/cache/save@v3
        with:
          key: ${{ steps.cudnn-dll-cache-restore.outputs.cache-primary-key }}
          path: download/cudnn

-      # Donwload zlib
-      - name: Export zlib url to calc hash
+      # zlib
+      - name: Export zlib url to calc hash
        if: matrix.zlib_url != ''
        run: echo "${{ matrix.zlib_url }}" >> download/zlib_url.txt

-      - name: Restore cached zlib
+      - name: Restore cached zlib
        if: matrix.zlib_url != ''
        uses: actions/cache/restore@v3
        id: zlib-cache-restore
        with:
@@ -253,7 +254,7 @@ jobs:
          key: zlib-cache-v1-${{ hashFiles('download/zlib_url.txt') }}
          path: download/zlib

-      - name: Download zlib
+      - name: Download zlib dynamic Library
        if: steps.zlib-cache-restore.outputs.cache-hit != 'true' && matrix.zlib_url != ''
        run: |
          curl -L "${{ matrix.zlib_url }}" -o download/zlib.zip
@@ -265,19 +266,19 @@ jobs:
          mv download/zlib/dll_${{ matrix.architecture }}/zlibwapi.dll download/zlib/zlibwapi.dll
          rm -r download/zlib/dll_${{ matrix.architecture }}

-      - name: Save zlib cache
+      - name: Save zlib cache
        if: matrix.zlib_url != ''
        uses: actions/cache/save@v3
        with:
          key: ${{ steps.zlib-cache-restore.outputs.cache-primary-key }}
          path: download/zlib

-      - name: Setup MSVC
+      - name: Set up MSVC
        if: startsWith(matrix.os, 'windows-')
        uses: ilammy/msvc-dev-cmd@v1

      # Python install path of windows: C:/hostedtoolcache/windows/Python
-      - name: Setup Python
+      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
@@ -285,7 +286,7 @@ jobs:
          architecture: ${{ matrix.architecture }}
          cache: pip

-      - name: Install Python dependencies
+      - name: Install Python dependencies
        run: |
          python -m pip install -r requirements-dev.txt
@@ -311,15 +312,15 @@ jobs:
            exit "$EXIT_CODE"
          fi

-      - name: Create download directory
+      - name: Create download directory
        run: mkdir -p download/

-      # Donwload DirectML
-      - name: Export DirectML url to calc hash
+      # DirectML
+      - name: Export DirectML url to calc hash
        if: endswith(matrix.target, '-directml')
        run: echo "${{ matrix.directml_url }}" >> download/directml_url.txt

-      - name: Restore cached DirectML
+      - name: Restore cached DirectML
        if: endswith(matrix.target, '-directml')
        uses: actions/cache/restore@v3
        id: directml-cache-restore
        with:
@@ -327,7 +328,7 @@ jobs:
          key: directml-cache-v1-${{ hashFiles('download/directml_url.txt') }}
          path: download/directml

-      - name: Download DirectML
+      - name: Set up DirectML dynamic Library
        if: steps.directml-cache-restore.outputs.cache-hit != 'true' && endswith(matrix.target, '-directml')
        run: |
          curl -L "${{ matrix.directml_url }}" -o download/directml.zip
@@ -339,25 +340,25 @@ jobs:
          mv download/directml/bin/${{ matrix.architecture }}-win/DirectML.dll download/directml/DirectML.dll
          rm -r download/directml/bin

-      - name: Save DirectML cache
+      - name: Save DirectML cache
        if: endswith(matrix.target, '-directml')
        uses: actions/cache/save@v3
        with:
          key: ${{ steps.directml-cache-restore.outputs.cache-primary-key }}
          path: download/directml

-      # Download ONNX Runtime
-      - name: Export ONNX Runtime url to calc hash
+      # ONNX Runtime
+      - name: Export ONNX Runtime url to calc hash
        run: echo "${{ matrix.onnxruntime_url }}" > download/onnxruntime_url.txt

-      - name: Restore cached ONNX Runtime
+      - name: Restore cached ONNX Runtime
        uses: actions/cache/restore@v3
        id: onnxruntime-cache-restore
        with:
          key: ${{ matrix.os }}-onnxruntime-${{ hashFiles('download/onnxruntime_url.txt') }}-v1
          path: download/onnxruntime

-      - name: Download ONNX Runtime (Windows)
+      - name: Download ONNX Runtime (Windows)
        if: steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-')
        run: |
          curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.zip
@@ -377,7 +378,7 @@ jobs:
          rm download/onnxruntime.zip

-      - name: Download ONNX Runtime (Mac/Linux)
+      - name: Download ONNX Runtime (Mac/Linux)
        if: steps.onnxruntime-cache-restore.outputs.cache-hit != 'true' && startsWith(matrix.os, 'windows-') != true
        run: |
          curl -L "${{ matrix.onnxruntime_url }}" > download/onnxruntime.tgz
          mkdir -p download/onnxruntime
          tar xf "download/onnxruntime.tgz" -C "download/onnxruntime" --strip-components 1
          rm download/onnxruntime.tgz
@@ -385,21 +386,21 @@ jobs:

-      - name: Save ONNX Runtime cache
+      - name: Save ONNX Runtime cache
        uses: actions/cache/save@v3
        with:
          key: ${{ steps.onnxruntime-cache-restore.outputs.cache-primary-key }}
          path: download/onnxruntime

-      # Download VOICEVOX RESOURCE
-      - name: Prepare VOICEVOX RESOURCE cache
+      # VOICEVOX RESOURCE
+      - name: Prepare RESOURCE cache
        uses: actions/cache@v3
        id: voicevox-resource-cache
        with:
          key: voicevox-resource-${{ env.VOICEVOX_RESOURCE_VERSION }}
          path: download/resource

-      - name: Checkout VOICEVOX RESOURCE
+      - name: Check out RESOURCE repository
        if: steps.voicevox-resource-cache.outputs.cache-hit != 'true'
        uses: actions/checkout@v4
        with:
@@ -407,21 +408,20 @@ jobs:
          ref: ${{ env.VOICEVOX_RESOURCE_VERSION }}
          path: download/resource

-      # Merge VOICEVOX RESOURCE
-      - name: Merge VOICEVOX RESOURCE
+      - name: Merge RESOURCE
        env:
          DOWNLOAD_RESOURCE_PATH: download/resource
        run: bash build_util/process_voicevox_resource.bash

-      # Download VOICEVOX Core
-      - name: Prepare VOICEVOX Core cache
+      # VOICEVOX CORE
+      - name: Prepare CORE cache
        uses: actions/cache@v3
        id: voicevox-core-cache
        with:
          key: ${{ matrix.os }}-voicevox-core-${{ matrix.voicevox_core_asset_prefix }}-${{ env.VOICEVOX_CORE_VERSION }}
          path: download/core

-      - name: Download VOICEVOX Core
+      - name: Download CORE
        if: steps.voicevox-core-cache.outputs.cache-hit != 'true'
        env:
          VOICEVOX_CORE_ASSET_NAME: ${{ matrix.voicevox_core_asset_prefix }}-${{ env.VOICEVOX_CORE_VERSION }}
@@ -439,14 +439,15 @@ jobs:
          rm -rf download/${{ env.VOICEVOX_CORE_ASSET_NAME }}
          rm download/${{ env.VOICEVOX_CORE_ASSET_NAME }}.zip

-      - name: Generate licenses.json
+      # Build
+      - name: Generate licenses.json
        run: |
          OUTPUT_LICENSE_JSON_PATH=engine_manifest_assets/dependency_licenses.json \
          bash build_util/create_venv_and_generate_licenses.bash
          # FIXME: VOICEVOX (editor) cannot build without licenses.json
          cp engine_manifest_assets/dependency_licenses.json licenses.json

-      - name: Build run.py with PyInstaller
+      - name: Build VOICEVOX ENGINE run.py
        run: |
          set -eux
@@ -473,15 +474,18 @@ jobs:
          LIBONNXRUNTIME_PATH="$LIBONNXRUNTIME_PATH" \
          pyinstaller --noconfirm run.spec

-      - name: Gather DLL dependencies to dist/run/ (Windows)
+      # Because PyInstaller does not copy dynamic loaded libraries,
+      # manually move DLL dependencies into `dist/run/` (cache already saved)
+
+      - name: Gather DLL dependencies (Windows)
        if: startsWith(matrix.os, 'windows-')
        run: |
          set -eux

-          # Move DLL dependencies (cache already saved)
-
+          # Windows CUDA
          if [ -f "download/onnxruntime/lib/onnxruntime_providers_cuda.dll" ]; then
-            # ONNX Runtime providers (PyInstaller does not copy dynamic loaded libraries)
+
+            # ONNX Runtime providers
            mv download/onnxruntime/lib/onnxruntime_*.dll dist/run/

            # CUDA
@@ -498,31 +502,30 @@ jobs:
            # zlib
            mv download/zlib/zlibwapi.dll dist/run/

-            # Remove source directories to reduce disk usage (already cached)
+            # Clean source directories to reduce disk usage (already cached)
            rm -rf download/onnxruntime
            rm -rf download/cuda
            rm -rf download/cudnn
            rm -rf download/zlib
          fi

+          # Windows DirectML
          # 一度代入して actionlint のエラー回避 (詳細: NOTE 1)
          TARGET=${{ matrix.target }}
          if [[ $TARGET == *-directml ]]; then
            # DirectML
            mv download/directml/DirectML.dll dist/run/

-            # Remove source directory (already cached)
+            # Clean source directories (already cached)
            rm -rf download/directml
          fi

-      - name: Gather DLL dependencies to dist/run/ (Linux CUDA)
+      - name: Gather DLL dependencies (Linux CUDA)
        if: startsWith(matrix.os, 'ubuntu-') && endsWith(matrix.target, 'nvidia')
        run: |
          set -eux

-          # Move DLL dependencies (cache already saved)
-
-          # ONNX Runtime providers (PyInstaller does not copy dynamic loaded libraries)
+          # ONNX Runtime providers
          patchelf --set-rpath '$ORIGIN' "$(pwd)/download/onnxruntime/lib"/libonnxruntime_providers_*.so
          mv download/onnxruntime/lib/libonnxruntime_*.so dist/run/
@@ -537,17 +540,17 @@ jobs:
          mv download/cudnn/bin/libcudnn.so.* dist/run/
          mv download/cudnn/bin/libcudnn_*_infer.so.* dist/run/

-          # Remove source directories to reduce disk usage (already cached)
+          # Clean source directories to reduce disk usage (already cached)
          rm -rf download/onnxruntime
          rm -rf download/cuda
          rm -rf download/cudnn

-      - name: Set @rpath to @executable_path
+      - name: Set @rpath to @executable_path
        if: startsWith(matrix.os, 'macos-')
        run: |
          install_name_tool -add_rpath @executable_path/. dist/run/run

-      - name: Code signing
+      - name: Code signing
        if: github.event.inputs.code_signing == 'true' && startsWith(matrix.os, 'windows-')
        run: |
          bash build_util/codesign.bash "dist/run/run.exe"
@@ -556,12 +559,12 @@ jobs:
          ESIGNERCKA_PASSWORD: ${{ secrets.ESIGNERCKA_PASSWORD }}
          ESIGNERCKA_TOTP_SECRET: ${{ secrets.ESIGNERCKA_TOTP_SECRET }}

-      - name: Rename artifact directory to archive
+      - name: Rename artifact directory to archive
        run: |
          mv dist/run/ "${{ matrix.target }}/"

      # 7z archives
-      - name: Create 7z archives
+      - name: Create 7z archives
        run: |
          # Compress to artifact.7z.001, artifact.7z.002, ...
          7z -r -v1900m a "${{ steps.vars.outputs.package_name }}.7z" "${{ matrix.target }}/"
@@ -570,7 +573,7 @@ jobs:
          ls ${{ steps.vars.outputs.package_name }}.7z.* > archives_7z.txt
          mv archives_7z.txt "${{ steps.vars.outputs.package_name }}.7z.txt"

-      - name: Upload 7z archives to artifact
+      - name: Upload 7z archives to artifact
        if: github.event.inputs.upload_artifact == 'true'
        uses: actions/upload-artifact@v3
        with:
@@ -578,7 +581,7 @@ jobs:
          path: |
            ${{ steps.vars.outputs.package_name }}.7z.*

-      - name: Upload 7z archives to Release assets
+      - name: Upload 7z archives to Release assets
        if: needs.config.outputs.version != ''
        uses: ncipollo/release-action@v1
        with:
@@ -590,12 +593,12 @@ jobs:
            ${{ steps.vars.outputs.package_name }}.7z.*
          commit: ${{ github.sha }}

-      - name: Clean 7z archives to reduce disk usage
+      - name: Clean 7z archives to reduce disk usage
        run: |
          rm -f ${{ steps.vars.outputs.package_name }}.7z.*

      # VVPP archives
-      - name: Create VVPP archives
+      - name: Create VVPP archives
        run: |
          # Compress to compressed.zip.001, compressed.zip.002, ...
          # NOTE: 1000th archive will be "compressed.zip.1000" after "compressed.zip.999". This is unconsidered as an extreme case.
@@ -616,7 +619,7 @@ jobs:
          ls ${{ steps.vars.outputs.package_name }}*.vvppp ${{ steps.vars.outputs.package_name }}.vvpp > archives_vvpp.txt || true
          mv archives_vvpp.txt "${{ steps.vars.outputs.package_name }}.vvpp.txt"

-      - name: Upload VVPP archives to artifact
+      - name: Upload VVPP archives to artifact
        if: github.event.inputs.upload_artifact == 'true'
        uses: actions/upload-artifact@v3
        with:
@@ -626,7 +629,7 @@ jobs:
            ${{ steps.vars.outputs.package_name }}*.vvppp
            ${{ steps.vars.outputs.package_name }}.vvpp.txt

-      - name: Upload VVPP archives to Release assets
+      - name: Upload VVPP archives to Release assets
        if: needs.config.outputs.version != ''
        uses: ncipollo/release-action@v1
        with:
diff --git a/.github/workflows/test-engine-container.yml b/.github/workflows/test-engine-container.yml
index d30337798..29f86e31d 100644
--- a/.github/workflows/test-engine-container.yml
+++ b/.github/workflows/test-engine-container.yml
@@ -37,24 +37,20 @@ jobs:
          - cpu-ubuntu20.04

    steps:
-      - uses: actions/checkout@v4
+      - name: Check out the repository
+        uses: actions/checkout@v4

-      #
-      # Setup Python Environment
-      #
-      - uses: actions/setup-python@v5
+      - name: Set up Python
+        uses: actions/setup-python@v5
        with:
          python-version: "3.11.3"
          cache: pip

-      - name: Install requirements
+      - name: Install Python dependencies
        run: |
          pip install -r requirements-test.txt

-      #
-      # Setup Docker Environment
-      #
-      - name: Declare variables
+      - name: Declare variables
        id: docker_vars
        run: |
          if [ "${{ matrix.tag }}" != "" ]; then
@@ -63,17 +59,17 @@ jobs:
            echo "image_tag=${{ env.IMAGE_NAME }}:${{ env.VERSION }}" >> "$GITHUB_OUTPUT"
          fi

-      - name: Docker pull
+      - name: Pull ENGINE application docker image
        run: docker pull "${{ steps.docker_vars.outputs.image_tag }}"

-      - name: Docker run
+      - name: Run ENGINE application docker container
        run: docker run -d -p 50021:50021 "${{ steps.docker_vars.outputs.image_tag }}"

      # Docker コンテナが起動してから、レスポンスが返ってくるまで待機する
      # リトライは10回まで `/version` にアクセスしてレスポンスのステータスコードをチェック
      # - ステータスコードが `200` の場合は正常終了します
      # - ステータスコードが `200` 以外の場合は、5秒間スリープしてリトライします
-      - name: Wait for container to start
+      - name: Warm up ENGINE server by waiting
        run: |
          set +e # curlのエラーを無視する
@@ -93,5 +89,5 @@ jobs:
          done
          exit 1

-      - name: Test
+      - name: Test ENGINE application docker container
        run: python build_util/check_release_build.py --skip_run_process --dist_dir dist/
diff --git a/.github/workflows/test-engine-package.yml b/.github/workflows/test-engine-package.yml
index ec680ab10..6e052f25c 100644
--- a/.github/workflows/test-engine-package.yml
+++ b/.github/workflows/test-engine-package.yml
@@ -53,20 +53,22 @@ jobs:
    runs-on: ${{ matrix.os }}
    steps:
-      - name: declare variables
+      - name: Declare variables
        id: vars
        run: |
          echo "release_url=${{ env.REPO_URL }}/releases/download/${{ env.VERSION }}" >> "$GITHUB_OUTPUT"
          echo "package_name=voicevox_engine-${{ matrix.target }}-${{ env.VERSION }}" >> "$GITHUB_OUTPUT"

-      - uses: actions/checkout@v4
+      - name: Check out the repository
+        uses: actions/checkout@v4

-      - uses: actions/setup-python@v5
+      - name: Set up Python
+        uses: actions/setup-python@v5
        with:
          python-version: "3.11.3"
          cache: pip

-      - name: Download
+      - name: Download ENGINE package
        run: |
          mkdir -p download
          curl -L -o "download/list.txt" "${{ steps.vars.outputs.release_url }}/${{ steps.vars.outputs.package_name }}.7z.txt"
@@ -74,13 +76,13 @@ jobs:
          7z x "download/$(head -n1 download/list.txt)"
          mv "${{ matrix.target }}" dist/

-      - name: chmod +x
+      - name: Set up permission
        if: startsWith(matrix.target, 'linux') || startsWith(matrix.target, 'macos')
        run: chmod +x dist/run

-      - name: Install requirements
+      - name: Install Python test dependencies
        run: |
          pip install -r requirements-test.txt

-      - name: Test
+      - name: Test ENGINE package
        run: python build_util/check_release_build.py --dist_dir dist/
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 8c374b53b..2c2eb387b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -20,31 +20,32 @@ jobs:
        python: ["3.11.3"]

    steps:
-      - uses: actions/checkout@v4
+      - name: Check out the repository
+        uses: actions/checkout@v4

-      - name: Set up Python ${{ matrix.python }}
+      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}
          cache: pip

-      - name: Install dependencies
+      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip setuptools wheel
          python -m pip install -r requirements-test.txt

-      - name: Validate poetry.lock
+      - name: Validate poetry.lock
        run: |
          poetry lock --no-update
          git diff --exit-code

-      - name: Run poetry and check
+      - name: Check dependency lists
        run: |
          poetry export --without-hashes -o requirements.txt.check
          poetry export --without-hashes --with dev -o requirements-dev.txt.check
          poetry export --without-hashes --with test -o requirements-test.txt.check
          poetry export --without-hashes --with license -o requirements-license.txt.check
-
+
          diff -q requirements.txt requirements.txt.check || \
          diff -q requirements-dev.txt requirements-dev.txt.check || \
          diff -q requirements-test.txt requirements-test.txt.check || \
@@ -54,11 +55,12 @@ jobs:
            exit 1
          fi

-      - run: pysen run lint
+      - name: Check format
+        run: pysen run lint

-      - name: Run pytest and get coverage
+      - name: Test codes and coverage
        run: coverage run --omit=test/* -m pytest

-      - name: Submit coverage to Coveralls
+      - name: Submit coverage results to Coveralls
        if: matrix.os == 'ubuntu-20.04'
        run: coveralls --service=github
        env: