From 790499d9e556c7879deefc8670973f394eda2237 Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Thu, 25 Jul 2024 08:05:44 +0900 Subject: [PATCH 1/9] change: rework GPU features --- .github/workflows/build_and_deploy.yml | 104 +-- .github/workflows/download_test.yml | 8 +- .github/workflows/test.yml | 28 +- Cargo.lock | 688 +++++++++++++++++- Cargo.toml | 3 + crates/downloader/Cargo.toml | 4 + crates/downloader/src/main.rs | 209 ++++-- crates/voicevox_core/Cargo.toml | 7 +- crates/voicevox_core/src/devices.rs | 192 ++++- crates/voicevox_core/src/error.rs | 7 +- crates/voicevox_core/src/infer.rs | 15 +- .../src/infer/runtimes/onnxruntime.rs | 40 +- crates/voicevox_core/src/lib.rs | 23 +- crates/voicevox_core/src/status.rs | 25 +- crates/voicevox_core/src/synthesizer.rs | 69 +- crates/voicevox_core_c_api/Cargo.toml | 2 - .../include/voicevox_core.h | 4 +- crates/voicevox_core_c_api/src/lib.rs | 4 +- .../voicevox_core_c_api/tests/e2e/log_mask.rs | 2 +- .../tests/e2e/snapshots.toml | 10 + crates/voicevox_core_java_api/Cargo.toml | 4 - .../jp/hiroshiba/voicevoxcore/GlobalInfo.java | 6 +- crates/voicevox_core_python_api/Cargo.toml | 4 - .../python/voicevox_core/_models.py | 4 +- docs/downloader.md | 6 +- docs/usage.md | 4 +- example/cpp/windows/README.md | 2 +- example/python/README.md | 2 + 28 files changed, 1201 insertions(+), 275 deletions(-) diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml index e9b5e9eb5..9283b273d 100644 --- a/.github/workflows/build_and_deploy.yml +++ b/.github/workflows/build_and_deploy.yml @@ -61,123 +61,90 @@ jobs: includes='[ { "os": "windows-2019", - "features": "", "target": "x86_64-pc-windows-msvc", - "artifact_name": "windows-x64-cpu", + "artifact_name": "windows-x64", "c_release_format": "plain-cdylib", - "whl_local_version": "cpu", - "can_skip_in_simple_test": true - }, - { - "os": "windows-2019", - "features": "directml", - "target": "x86_64-pc-windows-msvc", - "artifact_name": "windows-x64-directml", - "c_release_format": "plain-cdylib", - "whl_local_version": "directml", + "python_whl": true, "can_skip_in_simple_test": false }, { "os": "windows-2019", - "features": "cuda", - "target": "x86_64-pc-windows-msvc", - "artifact_name": "windows-x64-cuda", - "c_release_format": "plain-cdylib", - "whl_local_version": "cuda", - "can_skip_in_simple_test": true - }, - { - "os": "windows-2019", - "features": "", "target": "i686-pc-windows-msvc", - "artifact_name": "windows-x86-cpu", - "c_release_format": "plain-cdylib", - "whl_local_version": "cpu", - "can_skip_in_simple_test": true - }, - { - "os": "ubuntu-20.04", - "features": "", - "target": "x86_64-unknown-linux-gnu", - "artifact_name": "linux-x64-cpu", + "artifact_name": "windows-x86", "c_release_format": "plain-cdylib", - "whl_local_version": "cpu", + "python_whl": true, "can_skip_in_simple_test": true }, { "os": "ubuntu-20.04", - "features": "cuda", "target": "x86_64-unknown-linux-gnu", - "artifact_name": "linux-x64-gpu", + "artifact_name": "linux-x64", "c_release_format": "plain-cdylib", - "whl_local_version": "cuda", + "python_whl": true, "can_skip_in_simple_test": false }, { "os": "ubuntu-20.04", - "features": "", "target": "aarch64-unknown-linux-gnu", - "artifact_name": "linux-arm64-cpu", + "artifact_name": "linux-arm64", "c_release_format": "plain-cdylib", - "whl_local_version": "cpu", + "python_whl": true, "can_skip_in_simple_test": true }, { "os": "ubuntu-20.04", - "features": "", "target": "aarch64-linux-android", - "artifact_name": "android-arm64-cpu", + 
"artifact_name": "android-arm64", "c_release_format": "plain-cdylib", + "python_whl": false, "can_skip_in_simple_test": true }, { "os": "ubuntu-20.04", - "features": "", "target": "x86_64-linux-android", - "artifact_name": "android-x86_64-cpu", + "artifact_name": "android-x86_64", "c_release_format": "plain-cdylib", + "python_whl": false, "can_skip_in_simple_test": true }, { "os": "macos-12", - "features": "", "target": "aarch64-apple-darwin", - "artifact_name": "osx-arm64-cpu", + "artifact_name": "osx-arm64", "c_release_format": "plain-cdylib", - "whl_local_version": "cpu", + "python_whl": true, "can_skip_in_simple_test": false }, { "os": "macos-12", - "features": "", "target": "x86_64-apple-darwin", - "artifact_name": "osx-x64-cpu", + "artifact_name": "osx-x64", "c_release_format": "plain-cdylib", - "whl_local_version": "cpu", + "python_whl": true, "can_skip_in_simple_test": true }, { "os": "macos-12", - "features": "", "target": "aarch64-apple-ios", "artifact_name": "ios-arm64-cpu", "c_release_format": "ios-xcframework", + "python_whl": false, "can_skip_in_simple_test": true }, { "os": "macos-12", - "features": "", "target": "aarch64-apple-ios-sim", "artifact_name": "ios-arm64-cpu-sim", "c_release_format": "ios-xcframework", + "python_whl": false, "can_skip_in_simple_test": true }, { "os": "macos-12", - "features": "", "target": "x86_64-apple-ios", "artifact_name": "ios-x64-cpu", "c_release_format": "ios-xcframework", + "python_whl": false, "can_skip_in_simple_test": true } ]' @@ -216,7 +183,7 @@ jobs: git -c user.name=dummy -c user.email=dummy@dummy.dummy merge FETCH_HEAD ) > /dev/null 2>&1 - name: Set up Python 3.8 - if: matrix.whl_local_version + if: matrix.python_whl uses: actions/setup-python@v5 with: python-version: "3.8" @@ -258,7 +225,7 @@ jobs: - name: set cargo version run: | cargo set-version "$VERSION" --exclude voicevox_core_python_api --exclude downloader --exclude xtask - if ${{ !!matrix.whl_local_version }}; then cargo set-version "$VERSION+"${{ matrix.whl_local_version }} -p voicevox_core_python_api; fi + if ${{ matrix.python_whl }}; then cargo set-version "$VERSION" -p voicevox_core_python_api; fi - name: cache target uses: Swatinem/rust-cache@v2 if: ${{ !inputs.is_production }} @@ -270,7 +237,7 @@ jobs: ios-xcframework) linking=link-onnxruntime ;; esac function build() { - cargo build -p voicevox_core_c_api -vv --features "$linking",${{ matrix.features }} --target ${{ matrix.target }} --release + cargo build -p voicevox_core_c_api -vv --features "$linking" --target ${{ matrix.target }} --release } if ${{ !inputs.is_production }}; then build @@ -280,7 +247,7 @@ jobs: env: RUSTFLAGS: -C panic=abort - name: build voicevox_core_python_api - if: matrix.whl_local_version + if: matrix.python_whl id: build-voicevox-core-python-api run: | rm -rf ./target/wheels @@ -288,7 +255,7 @@ jobs: poetry config virtualenvs.create false (cd crates/voicevox_core_python_api && poetry install --with dev) function build() { - maturin build --manifest-path ./crates/voicevox_core_python_api/Cargo.toml --features ${{ matrix.features }}, --target ${{ matrix.target }} --release + maturin build --manifest-path ./crates/voicevox_core_python_api/Cargo.toml --target ${{ matrix.target }} --release } if ${{ !inputs.is_production }}; then build @@ -300,7 +267,7 @@ jobs: if: contains(matrix.target, 'android') run: | function build() { - cargo build -p voicevox_core_java_api -vv --features ${{ matrix.features }}, --target ${{ matrix.target }} --release + cargo build -p voicevox_core_java_api -vv --target 
${{ matrix.target }} --release } if ${{ !inputs.is_production }}; then build @@ -318,9 +285,6 @@ jobs: > "artifact/${{ env.ASSET_NAME }}/voicevox_core.h" cp -v target/${{ matrix.target }}/release/*voicevox_core.{dll,so,dylib} "artifact/${{ env.ASSET_NAME }}" || true cp -v target/${{ matrix.target }}/release/voicevox_core.dll.lib "artifact/${{ env.ASSET_NAME }}/voicevox_core.lib" || true - cp -v -n target/${{ matrix.target }}/release/{,lib}onnxruntime*.{dll,so.*,so,dylib} "artifact/${{ env.ASSET_NAME }}" || true - # libonnxruntimeについてはバージョン付のshared libraryを使用するためバージョンがついてないものを削除する - rm -f artifact/${{ env.ASSET_NAME }}/libonnxruntime.{so,dylib} cp -v README.md "artifact/${{ env.ASSET_NAME }}/README.txt" echo "${{ env.VERSION }}" > "artifact/${{ env.ASSET_NAME }}/VERSION" @@ -354,7 +318,7 @@ jobs: ${{ env.ASSET_NAME }}.zip target_commitish: ${{ github.sha }} - name: Upload Python whl to Release - if: fromJson(needs.config.outputs.deploy) && matrix.whl_local_version + if: fromJson(needs.config.outputs.deploy) && matrix.python_whl uses: softprops/action-gh-release@v2 with: prerelease: true @@ -475,17 +439,17 @@ jobs: run: cargo set-version "$VERSION" -p voicevox_core_java_api - - name: "Download artifact (android-arm64-cpu)" + - name: "Download artifact (android-arm64)" uses: actions/download-artifact@v4 with: - name: voicevox_core_java_api-android-arm64-cpu - path: artifact/android-arm64-cpu + name: voicevox_core_java_api-android-arm64 + path: artifact/android-arm64 - - name: "Download artifact (android-x86_64-cpu)" + - name: "Download artifact (android-x86_64)" uses: actions/download-artifact@v4 with: - name: voicevox_core_java_api-android-x86_64-cpu - path: artifact/android-x86_64-cpu + name: voicevox_core_java_api-android-x86_64 + path: artifact/android-x86_64 - name: Print tree run: tree artifact @@ -494,8 +458,8 @@ jobs: run: | rm -rf crates/voicevox_core_java_api/lib/src/main/resources/dll cat < = Lazy::new(|| { @@ -74,13 +76,17 @@ struct Args { #[arg(short, long, value_name("GIT_TAG_OR_LATEST"), default_value("latest"))] version: String, + /// ダウンロードするONNX Runtimeのバージョンの指定 + #[arg(long, value_name("GIT_TAG_OR_LATEST"), default_value("latest"))] + onnxruntime_version: String, + /// 追加でダウンロードするライブラリのバージョン #[arg(long, value_name("GIT_TAG_OR_LATEST"), default_value("latest"))] additional_libraries_version: String, /// ダウンロードするデバイスを指定する(cudaはlinuxのみ) - #[arg(value_enum, long, default_value(<&str>::from(Device::default())))] - device: Device, + #[arg(value_enum, long, num_args(1..), default_value(<&str>::from(Device::default())))] + devices: Vec, /// ダウンロードするcpuのアーキテクチャを指定する #[arg(value_enum, long, default_value(CpuArch::default_opt().map(<&str>::from)))] @@ -93,6 +99,13 @@ struct Args { #[arg(long, value_name("REPOSITORY"), default_value(DEFAULT_CORE_REPO))] core_repo: RepoName, + #[arg( + long, + value_name("REPOSITORY"), + default_value(DEFAULT_ONNXRUNTIME_BUILDER_REPO) + )] + onnxruntime_builder_repo: RepoName, + #[arg( long, value_name("REPOSITORY"), @@ -105,11 +118,14 @@ struct Args { enum DownloadTarget { Core, Models, + Onnxruntime, AdditionalLibraries, Dict, } -#[derive(Default, ValueEnum, Display, IntoStaticStr, Clone, Copy, PartialEq)] +#[derive( + Default, ValueEnum, Display, IntoStaticStr, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, +)] #[strum(serialize_all = "kebab-case")] enum Device { #[default] @@ -156,7 +172,7 @@ impl Os { } #[derive(parse_display::FromStr, parse_display::Display, Clone)] -#[from_str(regex = "(?[a-zA-Z0-9_]+)/(?[a-zA-Z0-9_]+)")] +#[from_str(regex = 
"(?[a-zA-Z0-9_-]+)/(?[a-zA-Z0-9_-]+)")] #[display("{owner}/{repo}")] struct RepoName { owner: String, @@ -173,13 +189,16 @@ async fn main() -> anyhow::Result<()> { min, output, version, + onnxruntime_version, additional_libraries_version, - device, + devices, cpu_arch, os, core_repo, + onnxruntime_builder_repo, additional_libraries_repo, } = Args::parse(); + let devices = devices.into_iter().collect::>(); let targets: HashSet<_> = if !only.is_empty() { assert!(exclude.is_empty() && !min); @@ -224,9 +243,9 @@ async fn main() -> anyhow::Result<()> { `additional-libraries-version`はダウンロード対象から除外されています", ); } - if device == Device::Cpu { + if devices == [Device::Cpu].into() { warn!( - "`--device`が指定されていない、もしくは`--device=cpu`が指定されていますが、\ + "`--devices`が指定されていない、もしくは`--devices=cpu`が指定されていますが、\ `additional-libraries-version`はダウンロード対象から除外されています", ); } @@ -234,44 +253,67 @@ async fn main() -> anyhow::Result<()> { let octocrab = &octocrab()?; - let core = find_gh_asset(octocrab, &core_repo, &version, |tag| { - let device = match (os, device) { - (Os::Linux, Device::Cuda) => "gpu", - (_, device) => device.into(), - }; - format!("{LIB_NAME}-{os}-{cpu_arch}-{device}-{tag}.zip") + let core = find_gh_asset(octocrab, &core_repo, &version, |tag, _| { + Ok(format!("{LIB_NAME}-{os}-{cpu_arch}-{tag}.zip")) }) .await?; - let model = find_gh_asset(octocrab, &core_repo, &version, |tag| { - format!("model-{tag}.zip") + let model = find_gh_asset(octocrab, &core_repo, &version, |tag, _| { + Ok(format!("model-{tag}.zip")) }) .await?; - let additional_libraries = OptionFuture::from((device != Device::Cpu).then(|| { - find_gh_asset( - octocrab, - &additional_libraries_repo, - &additional_libraries_version, - |_| { - let device = match device { - Device::Cpu => unreachable!(), - Device::Cuda => "CUDA", - Device::Directml => "DirectML", - }; - format!("{device}-{os}-{cpu_arch}.zip") - }, - ) - })) - .await - .transpose()?; + let onnxruntime = find_gh_asset( + octocrab, + &onnxruntime_builder_repo, + &onnxruntime_version, + |tag, body| { + let body = body.with_context(|| "リリースノートがありません")?; + find_onnxruntime(tag, body, os, cpu_arch, &devices) + }, + ) + .await?; + + let additional_libraries = devices + .iter() + .filter(|&&device| device != Device::Cpu) + .map(|&device| { + find_gh_asset( + octocrab, + &additional_libraries_repo, + &additional_libraries_version, + move |_, _| { + Ok({ + let device = match device { + Device::Cpu => unreachable!(), + Device::Cuda => "CUDA", + Device::Directml => "DirectML", + }; + format!("{device}-{os}-{cpu_arch}.zip") + }) + }, + ) + }) + .collect::>() + .try_collect::>() + .await?; info!("対象OS: {os}"); info!("対象CPUアーキテクチャ: {cpu_arch}"); - info!("ダウンロードデバイスタイプ: {device}"); + info!( + "ダウンロードデバイスタイプ: {}", + devices.iter().format(", "), + ); info!("ダウンロード{LIB_NAME}バージョン: {}", core.tag); - if let Some(GhAsset { tag, .. }) = &additional_libraries { - info!("ダウンロード追加ライブラリバージョン: {tag}"); + info!("ダウンロードONNX Runtimeバージョン: {}", onnxruntime.tag); + if !additional_libraries.is_empty() { + info!( + "ダウンロード追加ライブラリバージョン: {}", + additional_libraries + .iter() + .map(|GhAsset { tag, .. 
}| tag) + .format(", "), + ); } let progresses = MultiProgress::new(); @@ -294,8 +336,16 @@ async fn main() -> anyhow::Result<()> { &progresses, )?); } + if targets.contains(&DownloadTarget::Onnxruntime) { + tasks.spawn(download_and_extract_from_gh( + onnxruntime, + Stripping::FirstDir, + &output.join("onnxruntime"), + &progresses, + )?); + } if targets.contains(&DownloadTarget::AdditionalLibraries) { - if let Some(additional_libraries) = additional_libraries { + for additional_libraries in additional_libraries { tasks.spawn(download_and_extract_from_gh( additional_libraries, Stripping::FirstDir, @@ -348,11 +398,12 @@ async fn find_gh_asset( octocrab: &Arc, repo: &RepoName, git_tag_or_latest: &str, - asset_name: impl FnOnce(&str) -> String, + asset_name: impl FnOnce(&str, Option<&str>) -> anyhow::Result, ) -> anyhow::Result { let Release { html_url, tag_name, + body, assets, .. } = { @@ -364,7 +415,11 @@ async fn find_gh_asset( }? }; - let asset_name = asset_name(&tag_name); + let asset_name = asset_name(&tag_name, body.as_deref()).with_context(|| { + format!( + "`{repo}`の`{tag_name}`の中から条件に合致するビルドが見つけることができませんでした", + ) + })?; let Asset { id, name, size, .. } = assets .into_iter() .find(|Asset { name, .. }| *name == asset_name) @@ -380,6 +435,78 @@ async fn find_gh_asset( }) } +/// `find_gh_asset`に用いる。 +/// +/// 候補が複数あった場合、「デバイス」の数が最も小さいもののうち最初のものを選ぶ。 +fn find_onnxruntime( + tag: &str, + body: &str, + os: Os, + cpu_arch: CpuArch, + devices: &BTreeSet, +) -> anyhow::Result { + let id = &format!( + "voicevox-onnxruntime-specs-v1format-v{}-dylibs", + tag.replace('.', "-"), + ); + let id = html_escape::encode_double_quoted_attribute(id); + + comrak::parse_document(&Default::default(), body, &Default::default()) + .descendants() + .flat_map(|node| match &node.data.borrow().value { + comrak::nodes::NodeValue::HtmlBlock(comrak::nodes::NodeHtmlBlock { + literal, .. + }) => Some(scraper::Html::parse_fragment(literal)), + _ => None, + }) + .collect::>() + .iter() + .find_map(|html_block| { + html_block + .select( + &scraper::Selector::parse(&format!("[id=\"{id}\"]")).expect("should be valid"), + ) + .next() + }) + .with_context(|| format!("リリースノートの中に`#{id}`が見つかりませんでした"))? + .select(&scraper::Selector::parse("tbody > tr").expect("should be valid")) + .map(|tr| { + tr.text() + .collect::>() + .try_into() + .map_err(|_| anyhow!("リリースノート中の`#{id}`をパースできませんでした")) + }) + .collect::, _>>()? 
+ .into_iter() + .filter(|&[spec_os, spec_cpu_arch, spec_devices, _]| { + spec_os + == match os { + Os::Windows => "Windows", + Os::Linux => "Linux", + Os::Osx => "macOS", + } + && spec_cpu_arch + == match cpu_arch { + CpuArch::X86 => "x86", + CpuArch::X64 => "x86_64", + CpuArch::Arm64 => "AArch64", + } + && devices.iter().all(|device| { + spec_devices.split('/').any(|spec_device| { + spec_device + == match device { + Device::Cpu => "CPU", + Device::Cuda => "CUDA", + Device::Directml => "DirectML", + } + }) + }) + }) + .min_by_key(|&[.., spec_devices, _]| spec_devices.split('/').count()) + .map(|[.., name]| name.to_owned()) + .with_context(|| "指定されたOS, アーキテクチャ, デバイスを含むものが見つかりませんでした") +} + fn download_and_extract_from_gh( GhAsset { octocrab, diff --git a/crates/voicevox_core/Cargo.toml b/crates/voicevox_core/Cargo.toml index 527fa7494..c9ed52725 100644 --- a/crates/voicevox_core/Cargo.toml +++ b/crates/voicevox_core/Cargo.toml @@ -8,17 +8,12 @@ publish.workspace = true features = ["load-onnxruntime", "link-onnxruntime"] rustdoc-args = ["--cfg", "docsrs"] +# rustdocを参照 [features] default = [] - -# ONNX Runtimeのリンク方法を決めるフィーチャ(rustdocを参照)。 load-onnxruntime = ["voicevox-ort/load-dynamic"] link-onnxruntime = [] -# GPUを利用可能にするフィーチャ(rustdocを参照)。 -cuda = ["voicevox-ort/cuda"] -directml = ["voicevox-ort/directml"] - [dependencies] anyhow.workspace = true async_zip = { workspace = true, features = ["deflate"] } diff --git a/crates/voicevox_core/src/devices.rs b/crates/voicevox_core/src/devices.rs index dfe8d7e64..6adc377ac 100644 --- a/crates/voicevox_core/src/devices.rs +++ b/crates/voicevox_core/src/devices.rs @@ -1,10 +1,76 @@ +use std::{ + collections::BTreeMap, + fmt::{self, Display}, + ops::Index, +}; + +use derive_more::BitAnd; use serde::{Deserialize, Serialize}; -/// このライブラリで利用可能なデバイスの情報。 +pub(crate) fn test_gpus( + gpus: impl IntoIterator, + inference_rt_name: &'static str, + devices_supported_by_inference_rt: SupportedDevices, + test: impl Fn(GpuSpec) -> anyhow::Result<()>, +) -> DeviceAvailabilities { + DeviceAvailabilities( + gpus.into_iter() + .map(|gpu| { + let availability = test_gpu( + gpu, + inference_rt_name, + devices_supported_by_inference_rt, + &test, + ); + (gpu, availability) + }) + .collect(), + ) +} + +fn test_gpu( + gpu: GpuSpec, + inference_rt_name: &'static str, + devices_supported_by_inference_rt: SupportedDevices, + test: impl Fn(GpuSpec) -> anyhow::Result<()>, +) -> DeviceAvailability { + if !SupportedDevices::THIS[gpu] { + DeviceAvailability::NotSupportedByThisLib + } else if !devices_supported_by_inference_rt[gpu] { + DeviceAvailability::NotSupportedByCurrentLoadedInferenceRuntime(inference_rt_name) + } else { + match test(gpu) { + Ok(()) => DeviceAvailability::Ok, + Err(err) => DeviceAvailability::Err(err), + } + } +} + +/// 利用可能なデバイスの情報。 /// -/// あくまで本ライブラリが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったと +/// あくまで本ライブラリもしくはONNX Runtimeが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったと /// しても`cuda`や`dml`は`true`を示しうる。 -#[derive(Debug, Serialize, Deserialize)] +/// +/// ``` +/// # #[tokio::main] +/// # async fn main() -> anyhow::Result<()> { +/// use voicevox_core::{tokio::Onnxruntime, SupportedDevices}; +/// +/// # voicevox_core::blocking::Onnxruntime::load_once() +/// # .filename(if cfg!(windows) { +/// # // Windows\System32\onnxruntime.dllを回避 +/// # test_util::ONNXRUNTIME_DYLIB_PATH +/// # } else { +/// # voicevox_core::blocking::Onnxruntime::LIB_VERSIONED_FILENAME +/// # }) +/// # .exec()?; +/// # +/// let onnxruntime = Onnxruntime::get().unwrap(); +/// dbg!(SupportedDevices::THIS & 
onnxruntime.supported_devices()?); +/// # Ok(()) +/// # } +/// ``` +#[derive(Clone, Copy, PartialEq, Eq, Debug, BitAnd, Serialize, Deserialize)] pub struct SupportedDevices { /// CPUが利用可能。 /// @@ -27,7 +93,125 @@ pub struct SupportedDevices { } impl SupportedDevices { - pub fn to_json(&self) -> serde_json::Value { + /// このライブラリで利用可能なデバイスの情報。 + /// + /// `load-onnxruntime`のフィーチャが有効化されているときはすべて`true`となる。 + /// + #[cfg_attr(feature = "load-onnxruntime", doc = "```")] + #[cfg_attr(not(feature = "load-onnxruntime"), doc = "```no_run")] + /// # use voicevox_core::SupportedDevices; + /// assert!(SupportedDevices::THIS.cuda); + /// assert!(SupportedDevices::THIS.dml); + /// ``` + /// + /// `link-onnxruntime`のフィーチャが有効化されているときは`cpu`を除き`false`となる。 + /// + #[cfg_attr(feature = "link-onnxruntime", doc = "```")] + #[cfg_attr(not(feature = "link-onnxruntime"), doc = "```no_run")] + /// # use voicevox_core::SupportedDevices; + /// assert!(!SupportedDevices::THIS.cuda); + /// assert!(!SupportedDevices::THIS.dml); + /// ``` + pub const THIS: Self = { + #[cfg(feature = "load-onnxruntime")] + { + Self { + cpu: true, + cuda: true, + dml: true, + } + } + + #[cfg(all(not(doc), feature = "link-onnxruntime"))] + Self { + cpu: true, + cuda: false, + dml: false, + } + }; + + pub fn to_json(self) -> serde_json::Value { serde_json::to_value(self).expect("should not fail") } } + +#[derive(Debug)] +pub(crate) struct DeviceAvailabilities(BTreeMap); + +impl DeviceAvailabilities { + pub(crate) fn oks(&self) -> Vec { + self.0 + .iter() + .filter(|(_, result)| matches!(result, DeviceAvailability::Ok)) + .map(|(&gpu, _)| gpu) + .collect() + } +} + +impl Display for DeviceAvailabilities { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for (gpu, availability) in &self.0 { + match availability { + DeviceAvailability::Ok => writeln!(f, "* {gpu}: OK"), + DeviceAvailability::Err(err) => { + writeln!(f, "* {gpu}: {err}", err = err.to_string().trim_end()) + } + DeviceAvailability::NotSupportedByThisLib => { + writeln!( + f, + "* {gpu}: Not supported by this `{name}` build", + name = env!("CARGO_PKG_NAME"), + ) + } + DeviceAvailability::NotSupportedByCurrentLoadedInferenceRuntime(name) => { + writeln!(f, "* {gpu}: Not supported by the current loaded {name}") + } + }?; + } + Ok(()) + } +} + +#[derive(Debug)] +enum DeviceAvailability { + Ok, + Err(anyhow::Error), + NotSupportedByThisLib, + NotSupportedByCurrentLoadedInferenceRuntime(&'static str), +} + +#[derive(Clone, Copy, PartialEq, Debug, derive_more::Display)] +pub(crate) enum DeviceSpec { + #[display(fmt = "CPU")] + Cpu, + + #[display(fmt = "{_0}")] + Gpu(GpuSpec), +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, derive_more::Display)] +pub(crate) enum GpuSpec { + #[display(fmt = "CUDA (device_id=0)")] + Cuda, + + #[display(fmt = "DirectML (device_id=0)")] + Dml, +} + +impl GpuSpec { + pub(crate) fn defaults() -> Vec { + // TODO: 網羅性 + vec![Self::Cuda, Self::Dml] + } +} + +impl Index for SupportedDevices { + type Output = bool; + + fn index(&self, gpu: GpuSpec) -> &Self::Output { + match gpu { + GpuSpec::Cuda => &self.cuda, + GpuSpec::Dml => &self.dml, + } + } +} diff --git a/crates/voicevox_core/src/error.rs b/crates/voicevox_core/src/error.rs index d0e7fced0..0125b1cc6 100644 --- a/crates/voicevox_core/src/error.rs +++ b/crates/voicevox_core/src/error.rs @@ -1,4 +1,5 @@ use crate::{ + devices::DeviceAvailabilities, engine::{FullContextLabelError, KanaParseError}, user_dict::InvalidWordError, StyleId, StyleType, VoiceModelId, @@ -33,7 +34,7 @@ 
impl Error { pub fn kind(&self) -> ErrorKind { match &self.0 { ErrorRepr::NotLoadedOpenjtalkDict => ErrorKind::NotLoadedOpenjtalkDict, - ErrorRepr::GpuSupport => ErrorKind::GpuSupport, + ErrorRepr::GpuSupport(_) => ErrorKind::GpuSupport, ErrorRepr::InitInferenceRuntime { .. } => ErrorKind::InitInferenceRuntime, ErrorRepr::LoadModel(LoadModelError { context, .. }) => match context { LoadModelErrorKind::OpenZipFile => ErrorKind::OpenZipFile, @@ -63,8 +64,8 @@ pub(crate) enum ErrorRepr { #[error("OpenJTalkの辞書が読み込まれていません")] NotLoadedOpenjtalkDict, - #[error("GPU機能をサポートすることができません")] - GpuSupport, + #[error("GPU機能をサポートすることができません:\n{_0}")] + GpuSupport(DeviceAvailabilities), #[error("{runtime_display_name}のロードまたは初期化ができませんでした")] InitInferenceRuntime { diff --git a/crates/voicevox_core/src/infer.rs b/crates/voicevox_core/src/infer.rs index cffd0d524..112ca6b53 100644 --- a/crates/voicevox_core/src/infer.rs +++ b/crates/voicevox_core/src/infer.rs @@ -11,16 +11,25 @@ use enum_map::{Enum, EnumMap}; use ndarray::{Array, ArrayD, Dimension, ShapeError}; use thiserror::Error; -use crate::{StyleType, SupportedDevices}; +use crate::{ + devices::{DeviceSpec, GpuSpec}, + StyleType, SupportedDevices, +}; pub(crate) trait InferenceRuntime: 'static { // TODO: "session"とは何なのかを定め、ドキュメントを書く。`InferenceSessionSet`も同様。 type Session: Sized + Send + 'static; type RunContext<'a>: From<&'a mut Self::Session> + PushInputTensor; - /// このライブラリで利用可能なデバイスの情報を取得する。 + /// 名前。 + const DISPLAY_NAME: &'static str; + + /// このランタイムで利用可能なデバイスの情報を取得する。 fn supported_devices(&self) -> crate::Result; + /// GPUが実際に利用できそうかどうか判定する。 + fn test_gpu(&self, gpu: GpuSpec) -> anyhow::Result<()>; + #[allow(clippy::type_complexity)] fn new_session( &self, @@ -187,7 +196,7 @@ impl ParamInfo { #[derive(new, Clone, Copy, PartialEq, Debug)] pub(crate) struct InferenceSessionOptions { pub(crate) cpu_num_threads: u16, - pub(crate) use_gpu: bool, + pub(crate) device: DeviceSpec, } #[derive(Error, Debug)] diff --git a/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs b/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs index 74dc8a601..5c897e082 100644 --- a/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs +++ b/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs @@ -8,7 +8,10 @@ use ort::{ GraphOptimizationLevel, IntoTensorElementType, TensorElementType, ValueType, }; -use crate::{devices::SupportedDevices, error::ErrorRepr}; +use crate::{ + devices::{DeviceSpec, GpuSpec, SupportedDevices}, + error::ErrorRepr, +}; use super::super::{ DecryptModelError, InferenceRuntime, InferenceSessionOptions, InputScalarKind, @@ -22,6 +25,8 @@ impl InferenceRuntime for self::blocking::Onnxruntime { type Session = ort::Session; type RunContext<'a> = OnnxruntimeRunContext<'a>; + const DISPLAY_NAME: &'static str = "ONNX Runtime"; + fn supported_devices(&self) -> crate::Result { (|| { let cpu = CPUExecutionProvider::default().is_available()?; @@ -40,6 +45,15 @@ impl InferenceRuntime for self::blocking::Onnxruntime { .map_err(Into::into) } + fn test_gpu(&self, gpu: GpuSpec) -> anyhow::Result<()> { + let sess_builder = &ort::SessionBuilder::new()?; + match gpu { + GpuSpec::Cuda => CUDAExecutionProvider::default().register(sess_builder), + GpuSpec::Dml => DirectMLExecutionProvider::default().register(sess_builder), + } + .map_err(Into::into) + } + fn new_session( &self, model: impl FnOnce() -> std::result::Result, DecryptModelError>, @@ -53,14 +67,18 @@ impl InferenceRuntime for self::blocking::Onnxruntime { 
.with_optimization_level(GraphOptimizationLevel::Level1)? .with_intra_threads(options.cpu_num_threads.into())?; - if options.use_gpu && cfg!(feature = "directml") { - builder = builder - .with_parallel_execution(false)? - .with_memory_pattern(false)?; - DirectMLExecutionProvider::default().register(&builder)?; - } else if options.use_gpu && cfg!(feature = "cuda") { - CUDAExecutionProvider::default().register(&builder)?; - } + match options.device { + DeviceSpec::Cpu => {} + DeviceSpec::Gpu(GpuSpec::Cuda) => { + CUDAExecutionProvider::default().register(&builder)?; + } + DeviceSpec::Gpu(GpuSpec::Dml) => { + builder = builder + .with_parallel_execution(false)? + .with_memory_pattern(false)?; + DirectMLExecutionProvider::default().register(&builder)?; + } + }; let model = model()?; let sess = builder.commit_from_memory(&{ model })?; @@ -365,7 +383,7 @@ pub(crate) mod blocking { } } - /// このライブラリで利用可能なデバイスの情報を取得する。 + /// ONNX Runtimeとして利用可能なデバイスの情報を取得する。 pub fn supported_devices(&self) -> crate::Result { ::supported_devices(self) } @@ -517,7 +535,7 @@ pub(crate) mod tokio { .map(Self::from_blocking) } - /// このライブラリで利用可能なデバイスの情報を取得する。 + /// ONNX Runtimeとして利用可能なデバイスの情報を取得する。 pub fn supported_devices(&self) -> crate::Result { self.0.supported_devices() } diff --git a/crates/voicevox_core/src/lib.rs b/crates/voicevox_core/src/lib.rs index 25ff64f9e..f0c6e2847 100644 --- a/crates/voicevox_core/src/lib.rs +++ b/crates/voicevox_core/src/lib.rs @@ -2,8 +2,6 @@ //! //! # Feature flags //! -//! ## ONNX Runtimeのリンク方法を決めるフィーチャ -//! //! このクレートの利用にあたっては以下の二つの[Cargoフィーチャ]のうちどちらかを有効にしなければなり //! ません。両方の有効化はコンパイルエラーとなります。[`Onnxruntime`]の初期化方法はこれらの //! フィーチャによって決まります。 @@ -12,29 +10,14 @@ //! - **`link-onnxruntime`**: ONNX Runtimeをロード時動的リンクします。iOSのような`dlopen`の利用が //! 困難な環境でのみこちらを利用するべきです。_Note_: //! [動的リンク対象のライブラリ名]は`onnxruntime`で固定です。変更 -//! は`patchelf(1)`や`install_name_tool(1)`で行ってください。 -//! -//! ## GPUを利用可能にするフィーチャ -//! -//! - **`cuda`** -//! - **`directml`** -// TODO: こんな感じ(↓)で書く -////! - **`cuda`**: [CUDAを用いた機械学習推論]を可能にします。 -////! - ❗ [acceleration\_mode]={Gpu,Auto}のときの挙動が変化します。`directml`と共に -////! 有効化したときの挙動は未規定です。 -////! - **`directml`**: [DirectMLを用いた機械学習推論]を可能にします。 -////! - ❗ 〃 -////! -////! [CUDAを用いた機械学習推論]: -////! https://onnxruntime.ai/docs/execution-providers/CUDA-ExecutionProvider.html -////! [DirectMLを用いた機械学習推論]: -////! https://onnxruntime.ai/docs/execution-providers/DirectML-ExecutionProvider.html -////! [acceleration\_mode]: InitializeOptions::acceleration_mode +//! は`patchelf(1)`や`install_name_tool(1)`で行ってください。また、[ONNX RuntimeのGPU機能]を使う +//! ことはできません。 //! //! [Cargoフィーチャ]: https://doc.rust-lang.org/stable/cargo/reference/features.html //! [動的リンク対象のライブラリ名]: //! https://doc.rust-lang.org/cargo/reference/build-scripts.html#rustc-link-lib //! [`Onnxruntime`]: blocking::Onnxruntime +//! 
[ONNX RuntimeのGPU機能]: https://onnxruntime.ai/docs/execution-providers/ #![cfg_attr(docsrs, feature(doc_cfg))] diff --git a/crates/voicevox_core/src/status.rs b/crates/voicevox_core/src/status.rs index 8c75d64d9..419be52f5 100644 --- a/crates/voicevox_core/src/status.rs +++ b/crates/voicevox_core/src/status.rs @@ -350,6 +350,7 @@ mod tests { use rstest::rstest; use crate::{ + devices::{DeviceSpec, GpuSpec}, infer::{ domains::{InferenceDomainMap, TalkOperation}, InferenceSessionOptions, @@ -360,16 +361,16 @@ mod tests { use super::Status; #[rstest] - #[case(true, 0)] - #[case(true, 1)] - #[case(true, 8)] - #[case(false, 2)] - #[case(false, 4)] - #[case(false, 8)] - #[case(false, 0)] - fn status_new_works(#[case] use_gpu: bool, #[case] cpu_num_threads: u16) { - let light_session_options = InferenceSessionOptions::new(cpu_num_threads, false); - let heavy_session_options = InferenceSessionOptions::new(cpu_num_threads, use_gpu); + #[case(DeviceSpec::Gpu(GpuSpec::Cuda), 0)] + #[case(DeviceSpec::Gpu(GpuSpec::Cuda), 1)] + #[case(DeviceSpec::Gpu(GpuSpec::Cuda), 8)] + #[case(DeviceSpec::Cpu, 2)] + #[case(DeviceSpec::Cpu, 4)] + #[case(DeviceSpec::Cpu, 8)] + #[case(DeviceSpec::Cpu, 0)] + fn status_new_works(#[case] device_for_heavy: DeviceSpec, #[case] cpu_num_threads: u16) { + let light_session_options = InferenceSessionOptions::new(cpu_num_threads, DeviceSpec::Cpu); + let heavy_session_options = InferenceSessionOptions::new(cpu_num_threads, device_for_heavy); let session_options = InferenceDomainMap { talk: enum_map! { TalkOperation::PredictDuration @@ -404,7 +405,7 @@ mod tests { let status = Status::new( crate::blocking::Onnxruntime::from_test_util_data().unwrap(), InferenceDomainMap { - talk: enum_map!(_ => InferenceSessionOptions::new(0, false)), + talk: enum_map!(_ => InferenceSessionOptions::new(0, DeviceSpec::Cpu)), }, ); let model = &crate::tokio::VoiceModel::sample().await.unwrap(); @@ -420,7 +421,7 @@ mod tests { let status = Status::new( crate::blocking::Onnxruntime::from_test_util_data().unwrap(), InferenceDomainMap { - talk: enum_map!(_ => InferenceSessionOptions::new(0, false)), + talk: enum_map!(_ => InferenceSessionOptions::new(0, DeviceSpec::Cpu)), }, ); let vvm = &crate::tokio::VoiceModel::sample().await.unwrap(); diff --git a/crates/voicevox_core/src/synthesizer.rs b/crates/voicevox_core/src/synthesizer.rs index a11af2d5b..adae7c4b5 100644 --- a/crates/voicevox_core/src/synthesizer.rs +++ b/crates/voicevox_core/src/synthesizer.rs @@ -45,7 +45,7 @@ impl Default for TtsOptions { } /// ハードウェアアクセラレーションモードを設定する設定値。 -#[derive(Default, Debug, PartialEq, Eq)] +#[derive(Default, Clone, Copy, Debug, PartialEq, Eq)] pub enum AccelerationMode { /// 実行環境に合った適切なハードウェアアクセラレーションモードを選択する。 #[default] @@ -74,8 +74,10 @@ pub(crate) mod blocking { use std::io::{Cursor, Write as _}; use enum_map::enum_map; + use tracing::info; use crate::{ + devices::{DeviceSpec, GpuSpec}, engine::{create_kana, mora_to_text, Mora, OjtPhoneme}, error::ErrorRepr, infer::{ @@ -84,7 +86,7 @@ pub(crate) mod blocking { PredictDurationOutput, PredictIntonationInput, PredictIntonationOutput, TalkDomain, TalkOperation, }, - InferenceSessionOptions, + InferenceRuntime as _, InferenceSessionOptions, }, status::Status, text_analyzer::{KanaAnalyzer, OpenJTalkAnalyzer, TextAnalyzer}, @@ -150,31 +152,44 @@ pub(crate) mod blocking { #[cfg(windows)] list_windows_video_cards(); - let use_gpu = match options.acceleration_mode { - AccelerationMode::Auto => { - let supported_devices = onnxruntime.supported_devices()?; + let test_gpus = || { 
+ info!("GPUをテストします:"); + let availabilities = crate::devices::test_gpus( + GpuSpec::defaults(), + crate::blocking::Onnxruntime::DISPLAY_NAME, + onnxruntime.supported_devices()?, + |gpu| onnxruntime.test_gpu(gpu), + ); + for line in availabilities.to_string().lines() { + info!(" {line}"); + } + crate::Result::Ok(availabilities) + }; - if cfg!(feature = "directml") { - supported_devices.dml - } else { - supported_devices.cuda + let device_for_heavy = match options.acceleration_mode { + AccelerationMode::Auto => match *test_gpus()?.oks() { + [] => DeviceSpec::Cpu, + [gpu, ..] => DeviceSpec::Gpu(gpu), + }, + AccelerationMode::Cpu => DeviceSpec::Cpu, + AccelerationMode::Gpu => { + let availabilities = test_gpus()?; + match *availabilities.oks() { + [] => return Err(ErrorRepr::GpuSupport(availabilities).into()), + [gpu, ..] => DeviceSpec::Gpu(gpu), } } - AccelerationMode::Cpu => false, - AccelerationMode::Gpu => true, }; - if use_gpu && !can_support_gpu_feature(onnxruntime)? { - return Err(ErrorRepr::GpuSupport.into()); - } + info!("{device_for_heavy}を利用します"); // 軽いモデルはこちらを使う let light_session_options = - InferenceSessionOptions::new(options.cpu_num_threads, false); + InferenceSessionOptions::new(options.cpu_num_threads, DeviceSpec::Cpu); // 重いモデルはこちらを使う let heavy_session_options = - InferenceSessionOptions::new(options.cpu_num_threads, use_gpu); + InferenceSessionOptions::new(options.cpu_num_threads, device_for_heavy); let status = Status::new( onnxruntime, @@ -187,22 +202,14 @@ pub(crate) mod blocking { }, ); - return Ok(Self { + let use_gpu = matches!(device_for_heavy, DeviceSpec::Gpu(_)); + + Ok(Self { status, open_jtalk_analyzer: OpenJTalkAnalyzer::new(open_jtalk), kana_analyzer: KanaAnalyzer, use_gpu, - }); - - fn can_support_gpu_feature(onnxruntime: &crate::blocking::Onnxruntime) -> Result { - let supported_devices = onnxruntime.supported_devices()?; - - if cfg!(feature = "directml") { - Ok(supported_devices.dml) - } else { - Ok(supported_devices.cuda) - } - } + }) } pub fn onnxruntime(&self) -> &'static crate::blocking::Onnxruntime { @@ -993,13 +1000,13 @@ pub(crate) mod blocking { CreateDXGIFactory, IDXGIFactory, DXGI_ADAPTER_DESC, DXGI_ERROR_NOT_FOUND, }; - info!("検出されたGPU (DirectMLには1番目のGPUが使われます):"); + info!("検出されたGPU (DirectMLにはGPU 0が使われます):"); match list_windows_video_cards() { Ok(descs) => { - for desc in descs { + for (device_id, desc) in descs.into_iter().enumerate() { let description = OsString::from_wide(trim_nul(&desc.Description)); let vram = humansize::format_size(desc.DedicatedVideoMemory, BINARY); - info!(" - {description:?} ({vram})"); + info!(" GPU {device_id}: {description:?} ({vram})"); } } Err(err) => error!("{err}"), diff --git a/crates/voicevox_core_c_api/Cargo.toml b/crates/voicevox_core_c_api/Cargo.toml index 29b66e55a..1b86521d1 100644 --- a/crates/voicevox_core_c_api/Cargo.toml +++ b/crates/voicevox_core_c_api/Cargo.toml @@ -15,8 +15,6 @@ name = "e2e" [features] load-onnxruntime = ["voicevox_core/load-onnxruntime"] link-onnxruntime = ["voicevox_core/link-onnxruntime"] -cuda = ["voicevox_core/cuda"] -directml = ["voicevox_core/directml"] [dependencies] anstream = { workspace = true, default-features = false, features = ["auto"] } diff --git a/crates/voicevox_core_c_api/include/voicevox_core.h b/crates/voicevox_core_c_api/include/voicevox_core.h index fe19a4c2e..fe514dfa9 100644 --- a/crates/voicevox_core_c_api/include/voicevox_core.h +++ b/crates/voicevox_core_c_api/include/voicevox_core.h @@ -806,11 +806,11 @@ __declspec(dllimport) char 
*voicevox_synthesizer_create_metas_json(const struct VoicevoxSynthesizer *synthesizer); /** - * このライブラリで利用可能なデバイスの情報を、JSONで取得する。 + * ONNX Runtimeとして利用可能なデバイスの情報を、JSONで取得する。 * * JSONの解放は ::voicevox_json_free で行う。 * - * あくまで本ライブラリが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても`cuda`や`dml`は`true`を示しうる。 + * あくまでONNX Runtimeが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても`cuda`や`dml`は`true`を示しうる。 * * @param [in] onnxruntime * @param [out] output_supported_devices_json サポートデバイス情報のJSON文字列 diff --git a/crates/voicevox_core_c_api/src/lib.rs b/crates/voicevox_core_c_api/src/lib.rs index 38c06cd4f..255bbd4a5 100644 --- a/crates/voicevox_core_c_api/src/lib.rs +++ b/crates/voicevox_core_c_api/src/lib.rs @@ -648,11 +648,11 @@ pub extern "C" fn voicevox_synthesizer_create_metas_json( C_STRING_DROP_CHECKER.whitelist(metas).into_raw() } -/// このライブラリで利用可能なデバイスの情報を、JSONで取得する。 +/// ONNX Runtimeとして利用可能なデバイスの情報を、JSONで取得する。 /// /// JSONの解放は ::voicevox_json_free で行う。 /// -/// あくまで本ライブラリが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても`cuda`や`dml`は`true`を示しうる。 +/// あくまでONNX Runtimeが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても`cuda`や`dml`は`true`を示しうる。 /// /// @param [in] onnxruntime /// @param [out] output_supported_devices_json サポートデバイス情報のJSON文字列 diff --git a/crates/voicevox_core_c_api/tests/e2e/log_mask.rs b/crates/voicevox_core_c_api/tests/e2e/log_mask.rs index 9b08c9af7..93114976e 100644 --- a/crates/voicevox_core_c_api/tests/e2e/log_mask.rs +++ b/crates/voicevox_core_c_api/tests/e2e/log_mask.rs @@ -30,7 +30,7 @@ impl Utf8Output { pub(crate) fn mask_windows_video_cards(self) -> Self { self.mask_stderr( static_regex!( - r#"(?m)^\{timestamp\} INFO voicevox_core::synthesizer::blocking: 検出されたGPU \(DirectMLには1番目のGPUが使われます\):(\n\{timestamp\} INFO voicevox_core::synthesizer::blocking: - "[^"]+" \([0-9.]+ [a-zA-Z]+\))+"#, + r#"(?m)^\{timestamp\} INFO voicevox_core::synthesizer::blocking: 検出されたGPU \(DirectMLにはGPU 0が使われます\):(\n\{timestamp\} INFO voicevox_core::synthesizer::blocking: GPU [0-9]+: "[^"]+" \([0-9.]+ [a-zA-Z]+\))+"#, ), "{windows-video-cards}", ) diff --git a/crates/voicevox_core_c_api/tests/e2e/snapshots.toml b/crates/voicevox_core_c_api/tests/e2e/snapshots.toml index b623119dd..17ccd61f8 100644 --- a/crates/voicevox_core_c_api/tests/e2e/snapshots.toml +++ b/crates/voicevox_core_c_api/tests/e2e/snapshots.toml @@ -53,9 +53,11 @@ metas = ''' stderr.windows = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' {windows-video-cards} +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' stderr.unix = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' [compatible_engine_load_model_before_initialize] @@ -97,9 +99,11 @@ output."こんにちは、音声合成の世界へようこそ".wav_length = 176 stderr.windows = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' {windows-video-cards} +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' stderr.unix = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' [synthesizer_new_output_json] @@ -157,9 +161,11 @@ metas = ''' stderr.windows = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' {windows-video-cards} +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' stderr.unix = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version 
'{onnxruntime_version}' +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' [tts_via_audio_query] @@ -167,9 +173,11 @@ output."こんにちは、音声合成の世界へようこそ".wav_length = 176 stderr.windows = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' {windows-video-cards} +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' stderr.unix = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' # FIXME: "user_dict_load"のはず @@ -177,9 +185,11 @@ stderr.unix = ''' stderr.windows = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' {windows-video-cards} +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' stderr.unix = ''' {timestamp} INFO ort: Loaded ONNX Runtime dylib with version '{onnxruntime_version}' +{timestamp} INFO voicevox_core::synthesizer::blocking: CPUを利用します ''' [user_dict_manipulate] diff --git a/crates/voicevox_core_java_api/Cargo.toml b/crates/voicevox_core_java_api/Cargo.toml index e9cced8b2..ec4e2d616 100644 --- a/crates/voicevox_core_java_api/Cargo.toml +++ b/crates/voicevox_core_java_api/Cargo.toml @@ -7,10 +7,6 @@ publish.workspace = true [lib] crate-type = ["cdylib"] -[features] -cuda = ["voicevox_core/cuda"] -directml = ["voicevox_core/directml"] - [dependencies] android_logger.workspace = true chrono = { workspace = true, default-features = false, features = ["clock"] } diff --git a/crates/voicevox_core_java_api/lib/src/main/java/jp/hiroshiba/voicevoxcore/GlobalInfo.java b/crates/voicevox_core_java_api/lib/src/main/java/jp/hiroshiba/voicevoxcore/GlobalInfo.java index 010e69073..496c2ccc4 100644 --- a/crates/voicevox_core_java_api/lib/src/main/java/jp/hiroshiba/voicevoxcore/GlobalInfo.java +++ b/crates/voicevox_core_java_api/lib/src/main/java/jp/hiroshiba/voicevoxcore/GlobalInfo.java @@ -24,10 +24,10 @@ public static String getVersion() { // FIXME: `Onnxruntime`に移すか、独立させる /** - * このライブラリで利用可能なデバイスの情報。 + * ONNX Runtime利用可能なデバイスの情報。 * - *

あくまで本ライブラリが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても {@link #cuda} や {@link #dml} は {@code - * true} を示しうる。 + *

あくまでONNX Runtimeが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても {@link #cuda} や {@link #dml} は + * {@code true} を示しうる。 */ public static class SupportedDevices { /** diff --git a/crates/voicevox_core_python_api/Cargo.toml b/crates/voicevox_core_python_api/Cargo.toml index 48c92dfb4..e0877b623 100644 --- a/crates/voicevox_core_python_api/Cargo.toml +++ b/crates/voicevox_core_python_api/Cargo.toml @@ -7,10 +7,6 @@ publish.workspace = true [lib] crate-type = ["cdylib"] -[features] -cuda = ["voicevox_core/cuda"] -directml = ["voicevox_core/directml"] - [dependencies] camino.workspace = true easy-ext.workspace = true diff --git a/crates/voicevox_core_python_api/python/voicevox_core/_models.py b/crates/voicevox_core_python_api/python/voicevox_core/_models.py index f7929fae2..941ed84fc 100644 --- a/crates/voicevox_core_python_api/python/voicevox_core/_models.py +++ b/crates/voicevox_core_python_api/python/voicevox_core/_models.py @@ -90,9 +90,9 @@ class SpeakerMeta: @pydantic.dataclasses.dataclass class SupportedDevices: """ - このライブラリで利用可能なデバイスの情報。 + ONNX Runtimeとして利用可能なデバイスの情報。 - あくまで本ライブラリが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても + あくまでONNX Runtimeが対応しているデバイスの情報であることに注意。GPUが使える環境ではなかったとしても ``cuda`` や ``dml`` は ``True`` を示しうる。 """ diff --git a/docs/downloader.md b/docs/downloader.md index aeff5b4a8..76148197f 100644 --- a/docs/downloader.md +++ b/docs/downloader.md @@ -49,7 +49,7 @@ download または ``` -download --device cpu +download --devices cpu ``` @@ -57,7 +57,7 @@ download --device cpu ## DirectML 版をダウンロードする場合 ``` -download --device directml +download --devices directml ``` @@ -65,7 +65,7 @@ download --device directml ## CUDA 版をダウンロードする場合 ``` -download --device cuda +download --devices cuda ``` diff --git a/docs/usage.md b/docs/usage.md index e828ae220..067250126 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -31,10 +31,10 @@ chmod +x download ./download # DirectML版を利用する場合 -./download --device directml +./download --devices directml # CUDA版を利用する場合 -./download --device cuda +./download --devices cuda ``` `voicevox_core`ディレクトリにファイル一式がダウンロードされています。以降の説明ではこのディレクトリで作業を行います。 diff --git a/example/cpp/windows/README.md b/example/cpp/windows/README.md index 660d4190c..4012acdf9 100644 --- a/example/cpp/windows/README.md +++ b/example/cpp/windows/README.md @@ -14,7 +14,7 @@ Visual Studio Installerを使用しインストールしてください。 出力フォルダを作成するために、一度ビルドします。「windows_example.sln」をVisual Studioで開き、メニューの「ビルド」→「ソリューションのビルド」を押します。 この段階では、ビルドは失敗します。「bin」フォルダと「lib」フォルダが生成されていればOKです。 -[Releases](https://github.com/VOICEVOX/voicevox_core/releases/latest)から「voicevox_core-windows-x64-cpu-{バージョン名}.zip」をダウンロードし、展開します。[ダウンローダー](https://github.com/VOICEVOX/voicevox_core/blob/main/docs/downloader.md)を使うと便利です。 +[Releases](https://github.com/VOICEVOX/voicevox_core/releases/latest)から「voicevox_core-windows-x64-{バージョン名}.zip」をダウンロードし、展開します。[ダウンローダー](https://github.com/VOICEVOX/voicevox_core/blob/main/docs/downloader.md)を使うと便利です。 展開してできたファイル・フォルダをそれぞれ下記のフォルダへ配置します。 - simple_tts に配置 diff --git a/example/python/README.md b/example/python/README.md index 97303eb81..48678edb2 100644 --- a/example/python/README.md +++ b/example/python/README.md @@ -18,6 +18,8 @@ https://github.com/VOICEVOX/voicevox_core/releases/latest 2. 
ダウンローダーを使って環境構築します。 +FIXME: 今は`--exclude core`がある + linux/mac の場合 download-linux-x64 のところはアーキテクチャや OS によって適宜読み替えてください。 From 55878c52640b3c7c02759e7366ad79bac6743f7d Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Wed, 31 Jul 2024 02:40:40 +0900 Subject: [PATCH 2/9] =?UTF-8?q?`body`=E3=81=AB=E5=AF=BE=E3=81=97=E3=81=A6?= =?UTF-8?q?=E3=82=B3=E3=83=A1=E3=83=B3=E3=83=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/downloader/src/main.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/downloader/src/main.rs b/crates/downloader/src/main.rs index 980264c51..ec980b8c4 100644 --- a/crates/downloader/src/main.rs +++ b/crates/downloader/src/main.rs @@ -398,7 +398,10 @@ async fn find_gh_asset( octocrab: &Arc, repo: &RepoName, git_tag_or_latest: &str, - asset_name: impl FnOnce(&str, Option<&str>) -> anyhow::Result, + asset_name: impl FnOnce( + &str, // タグ名 + Option<&str>, // リリースノートの内容 + ) -> anyhow::Result, ) -> anyhow::Result { let Release { html_url, @@ -440,7 +443,7 @@ async fn find_gh_asset( /// 候補が複数あった場合、「デバイス」の数が最も小さいもののうち最初のものを選ぶ。 fn find_onnxruntime( tag: &str, - body: &str, + body: &str, // リリースの"body" (i.e. リリースノートの内容) os: Os, cpu_arch: CpuArch, devices: &BTreeSet, From a7981715227d548a9af33ea2476ed9d2a649af82 Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Fri, 2 Aug 2024 23:40:48 +0900 Subject: [PATCH 3/9] =?UTF-8?q?``=E3=81=AE=E7=B5=84=E3=81=BF?= =?UTF-8?q?=E7=AB=8B=E3=81=A6=E3=82=92`build-spec-table`=E3=81=AB=E9=9B=86?= =?UTF-8?q?=E7=B4=84=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cargo.lock | 16 ------------ Cargo.toml | 1 - crates/downloader/Cargo.toml | 1 - crates/downloader/src/main.rs | 46 +++++++++++++++++++---------------- 4 files changed, 25 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index acdb44aeb..9fdf8f695 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1059,7 +1059,6 @@ dependencies = [ "fs-err", "futures-core", "futures-util", - "html-escape", "indicatif", "itertools 0.10.5", "octocrab", @@ -1604,15 +1603,6 @@ dependencies = [ "digest", ] -[[package]] -name = "html-escape" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476" -dependencies = [ - "utf8-width", -] - [[package]] name = "html5ever" version = "0.27.0" @@ -4181,12 +4171,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" -[[package]] -name = "utf8-width" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" - [[package]] name = "utf8parse" version = "0.2.1" diff --git a/Cargo.toml b/Cargo.toml index 414262da7..2ada5cbb5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -38,7 +38,6 @@ futures-core = "0.3.25" futures-util = "0.3.25" futures-lite = "2.2.0" heck = "0.4.1" -html-escape = "0.2.13" humansize = "2.1.2" indexmap = "2.0.0" indicatif = "0.17.3" diff --git a/crates/downloader/Cargo.toml b/crates/downloader/Cargo.toml index 036297ef5..c60afd3b6 100644 --- a/crates/downloader/Cargo.toml +++ b/crates/downloader/Cargo.toml @@ -16,7 +16,6 @@ flate2.workspace = true fs-err = { workspace = true, features = ["tokio"] } futures-core.workspace = true futures-util.workspace = true -html-escape.workspace = true 
indicatif.workspace = true itertools.workspace = true octocrab = { workspace = true, default-features = false, features = ["rustls-tls", "stream"] } diff --git a/crates/downloader/src/main.rs b/crates/downloader/src/main.rs index ec980b8c4..2a44fc768 100644 --- a/crates/downloader/src/main.rs +++ b/crates/downloader/src/main.rs @@ -267,9 +267,9 @@ async fn main() -> anyhow::Result<()> { octocrab, &onnxruntime_builder_repo, &onnxruntime_version, - |tag, body| { + |_, body| { let body = body.with_context(|| "リリースノートがありません")?; - find_onnxruntime(tag, body, os, cpu_arch, &devices) + find_onnxruntime(body, os, cpu_arch, &devices) }, ) .await?; @@ -442,17 +442,22 @@ async fn find_gh_asset( /// /// 候補が複数あった場合、「デバイス」の数が最も小さいもののうち最初のものを選ぶ。 fn find_onnxruntime( - tag: &str, body: &str, // リリースの"body" (i.e. リリースノートの内容) os: Os, cpu_arch: CpuArch, devices: &BTreeSet, ) -> anyhow::Result { - let id = &format!( - "voicevox-onnxruntime-specs-v1format-v{}-dylibs", - tag.replace('.', "-"), - ); - let id = html_escape::encode_double_quoted_attribute(id); + macro_rules! selector { + ($expr:expr $(,)?) => {{ + static SELECTOR: Lazy = + Lazy::new(|| scraper::Selector::parse($expr).expect("should be valid")); + &SELECTOR + }}; + } + + const TARGET: &str = "table\ + [data-voicevox-onnxruntime-specs-format-version=\"1\"]\ + [data-voicevox-onnxruntime-specs-type=\"dylibs\"]"; comrak::parse_document(&Default::default(), body, &Default::default()) .descendants() @@ -464,20 +469,19 @@ fn find_onnxruntime( }) .collect::>() .iter() - .find_map(|html_block| { - html_block - .select( - &scraper::Selector::parse(&format!("[id=\"{id}\"]")).expect("should be valid"), - ) - .next() - }) - .with_context(|| format!("リリースノートの中に`#{id}`が見つかりませんでした"))? - .select(&scraper::Selector::parse("tbody > tr").expect("should be valid")) + .flat_map(|html_block| html_block.select(selector!(TARGET))) + .exactly_one() + .map_err(|err| match err.count() { + 0 => anyhow!("リリースノートの中に`{TARGET}`が見つかりませんでした"), + _ => anyhow!("リリースノートの中に`{TARGET}`が複数ありました"), + })? + .select(selector!("tbody > tr")) .map(|tr| { - tr.text() - .collect::>() - .try_into() - .map_err(|_| anyhow!("リリースノート中の`#{id}`をパースできませんでした")) + tr.select(selector!("td")) + .map(|td| td.text().exactly_one().ok()) + .collect::>>() + .and_then(|text| text.try_into().ok()) + .with_context(|| anyhow!("リリースノート中の`{TARGET}`をパースできませんでした")) }) .collect::, _>>()? .into_iter() From d4fca5e0b99a63e330342c57392839402f83b859 Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Sat, 3 Aug 2024 03:19:25 +0900 Subject: [PATCH 4/9] =?UTF-8?q?fixup!=20`
`=E3=81=AE=E7=B5=84?= =?UTF-8?q?=E3=81=BF=E7=AB=8B=E3=81=A6=E3=82=92`build-spec-table`=E3=81=AB?= =?UTF-8?q?=E9=9B=86=E7=B4=84=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/downloader/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/downloader/src/main.rs b/crates/downloader/src/main.rs index 2a44fc768..883a343ad 100644 --- a/crates/downloader/src/main.rs +++ b/crates/downloader/src/main.rs @@ -481,7 +481,7 @@ fn find_onnxruntime( .map(|td| td.text().exactly_one().ok()) .collect::>>() .and_then(|text| text.try_into().ok()) - .with_context(|| anyhow!("リリースノート中の`{TARGET}`をパースできませんでした")) + .with_context(|| format!("リリースノート中の`{TARGET}`をパースできませんでした")) }) .collect::, _>>()? .into_iter() From 317af0feca9b01104083044f7823145b2c2cfbcd Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Sat, 3 Aug 2024 03:15:00 +0900 Subject: [PATCH 5/9] =?UTF-8?q?`GpuSpec::defaults`=E3=81=AE=E7=B6=B2?= =?UTF-8?q?=E7=BE=85=E6=80=A7=E3=82=92=E3=83=86=E3=82=B9=E3=83=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/voicevox_core/src/devices.rs | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/crates/voicevox_core/src/devices.rs b/crates/voicevox_core/src/devices.rs index 6adc377ac..95e9d1dcc 100644 --- a/crates/voicevox_core/src/devices.rs +++ b/crates/voicevox_core/src/devices.rs @@ -200,7 +200,6 @@ pub(crate) enum GpuSpec { impl GpuSpec { pub(crate) fn defaults() -> Vec { - // TODO: 網羅性 vec![Self::Cuda, Self::Dml] } } @@ -215,3 +214,27 @@ impl Index for SupportedDevices { } } } + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::{GpuSpec, SupportedDevices}; + + #[test] + fn gpu_spec_defaults_is_exhaustive() { + static SUPPORTED_DEVICES: SupportedDevices = SupportedDevices::THIS; // whatever + + assert_eq!( + { + #[forbid(unused_variables)] + let SupportedDevices { cpu: _, cuda, dml } = &SUPPORTED_DEVICES; + [cuda as *const _, dml as *const _] + }, + *GpuSpec::defaults() + .into_iter() + .map(|gpu| &SUPPORTED_DEVICES[gpu] as *const _) + .collect::>(), + ); + } +} From 6a29b207b326fe1494594c9904496b8351071dbb Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Sat, 3 Aug 2024 03:59:06 +0900 Subject: [PATCH 6/9] =?UTF-8?q?`DeviceAvailabilities`=E3=81=AE=E8=A1=A8?= =?UTF-8?q?=E7=A4=BA=E3=82=92=E6=97=A5=E6=9C=AC=E8=AA=9E=E3=81=AB=E3=81=99?= =?UTF-8?q?=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/voicevox_core/src/devices.rs | 4 ++-- crates/voicevox_core/src/infer/runtimes/onnxruntime.rs | 10 +++++++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/crates/voicevox_core/src/devices.rs b/crates/voicevox_core/src/devices.rs index 95e9d1dcc..c08c3cd6b 100644 --- a/crates/voicevox_core/src/devices.rs +++ b/crates/voicevox_core/src/devices.rs @@ -159,12 +159,12 @@ impl Display for DeviceAvailabilities { DeviceAvailability::NotSupportedByThisLib => { writeln!( f, - "* {gpu}: Not supported by this `{name}` build", + "* {gpu}: この`{name}`のビルドでは利用できません", name = env!("CARGO_PKG_NAME"), ) } DeviceAvailability::NotSupportedByCurrentLoadedInferenceRuntime(name) => { - writeln!(f, "* {gpu}: Not supported by the current loaded {name}") + writeln!(f, "* {gpu}: {name}では利用できません") } }?; } diff --git a/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs b/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs index 
5c897e082..f77a7cc41 100644 --- a/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs +++ b/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs @@ -25,7 +25,15 @@ impl InferenceRuntime for self::blocking::Onnxruntime { type Session = ort::Session; type RunContext<'a> = OnnxruntimeRunContext<'a>; - const DISPLAY_NAME: &'static str = "ONNX Runtime"; + const DISPLAY_NAME: &'static str = { + #[cfg(feature = "load-onnxruntime")] + { + "現在ロードされているONNX Runtime" + } + + #[cfg(feature = "link-onnxruntime")] + "現在リンクされているONNX Runtime" + }; fn supported_devices(&self) -> crate::Result { (|| { From 6f569a3bc4991a184a573bd08ec55918a4308a5d Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Sat, 3 Aug 2024 16:20:15 +0900 Subject: [PATCH 7/9] =?UTF-8?q?=E3=83=87=E3=83=95=E3=82=A9=E3=83=AB?= =?UTF-8?q?=E3=83=88=E3=82=92`VOICEVOX/onnxruntime-builder`=E5=AE=9B?= =?UTF-8?q?=E3=81=A6=E3=81=AB=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/downloader/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/downloader/src/main.rs b/crates/downloader/src/main.rs index 883a343ad..0432ea0a9 100644 --- a/crates/downloader/src/main.rs +++ b/crates/downloader/src/main.rs @@ -40,7 +40,7 @@ const DEFAULT_OUTPUT: &str = if cfg!(windows) { const LIB_NAME: &str = "voicevox_core"; const DEFAULT_CORE_REPO: &str = "VOICEVOX/voicevox_core"; -const DEFAULT_ONNXRUNTIME_BUILDER_REPO: &str = "qryxip/onnxruntime-builder"; // FIXME +const DEFAULT_ONNXRUNTIME_BUILDER_REPO: &str = "VOICEVOX/onnxruntime-builder"; const DEFAULT_ADDITIONAL_LIBRARIES_REPO: &str = "VOICEVOX/voicevox_additional_libraries"; static OPEN_JTALK_DIC_URL: Lazy = Lazy::new(|| { From 96ad3cbfee707000a64d27adb2bfdfddab9bd3ad Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Sun, 4 Aug 2024 19:31:05 +0900 Subject: [PATCH 8/9] =?UTF-8?q?`if=20cfg!(=E2=80=A6)`=E3=81=AE=E5=BD=A2?= =?UTF-8?q?=E3=81=AB=E3=81=99=E3=82=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/voicevox_core/src/devices.rs | 18 ++++++++---------- .../src/infer/runtimes/onnxruntime.rs | 12 +++++------- 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/crates/voicevox_core/src/devices.rs b/crates/voicevox_core/src/devices.rs index c08c3cd6b..f3027e741 100644 --- a/crates/voicevox_core/src/devices.rs +++ b/crates/voicevox_core/src/devices.rs @@ -112,22 +112,20 @@ impl SupportedDevices { /// assert!(!SupportedDevices::THIS.cuda); /// assert!(!SupportedDevices::THIS.dml); /// ``` - pub const THIS: Self = { - #[cfg(feature = "load-onnxruntime")] - { - Self { - cpu: true, - cuda: true, - dml: true, - } + pub const THIS: Self = if cfg!(feature = "load-onnxruntime") { + Self { + cpu: true, + cuda: true, + dml: true, } - - #[cfg(all(not(doc), feature = "link-onnxruntime"))] + } else if cfg!(feature = "link-onnxruntime") { Self { cpu: true, cuda: false, dml: false, } + } else { + panic!("either `load-onnxruntime` or `link-onnxruntime` must be enabled"); }; pub fn to_json(self) -> serde_json::Value { diff --git a/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs b/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs index f77a7cc41..15ba963eb 100644 --- a/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs +++ b/crates/voicevox_core/src/infer/runtimes/onnxruntime.rs @@ -25,14 +25,12 @@ impl InferenceRuntime for self::blocking::Onnxruntime { type Session = ort::Session; type RunContext<'a> = OnnxruntimeRunContext<'a>; - const 
DISPLAY_NAME: &'static str = { - #[cfg(feature = "load-onnxruntime")] - { - "現在ロードされているONNX Runtime" - } - - #[cfg(feature = "link-onnxruntime")] + const DISPLAY_NAME: &'static str = if cfg!(feature = "load-onnxruntime") { + "現在ロードされているONNX Runtime" + } else if cfg!(feature = "link-onnxruntime") { "現在リンクされているONNX Runtime" + } else { + panic!("either `load-onnxruntime` or `link-onnxruntime` must be enabled"); }; fn supported_devices(&self) -> crate::Result { From 669625a9ce0a81feb6ab4cc8f71522d603a67ab9 Mon Sep 17 00:00:00 2001 From: Ryo Yamashita Date: Sun, 4 Aug 2024 19:44:53 +0900 Subject: [PATCH 9/9] =?UTF-8?q?`load-onnxruntime`=E3=81=A7CUDA=E3=81=A8Dir?= =?UTF-8?q?ectML=E3=81=8C=E4=BD=BF=E3=81=88=E3=82=8B=E3=81=93=E3=81=A8?= =?UTF-8?q?=E3=81=AB=E8=A8=80=E5=8F=8A?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/voicevox_core/src/lib.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/voicevox_core/src/lib.rs b/crates/voicevox_core/src/lib.rs index f0c6e2847..fedf538cf 100644 --- a/crates/voicevox_core/src/lib.rs +++ b/crates/voicevox_core/src/lib.rs @@ -6,7 +6,8 @@ //! ません。両方の有効化はコンパイルエラーとなります。[`Onnxruntime`]の初期化方法はこれらの //! フィーチャによって決まります。 //! -//! - **`load-onnxruntime`**: ONNX Runtimeを`dlopen`/`LoadLibraryExW`で開きます。 +//! - **`load-onnxruntime`**: ONNX Runtimeを`dlopen`/`LoadLibraryExW`で +//! 開きます。[CUDA]と[DirectML]が利用できます。 //! - **`link-onnxruntime`**: ONNX Runtimeをロード時動的リンクします。iOSのような`dlopen`の利用が //! 困難な環境でのみこちらを利用するべきです。_Note_: //! [動的リンク対象のライブラリ名]は`onnxruntime`で固定です。変更 @@ -14,6 +15,8 @@ //! ことはできません。 //! //! [Cargoフィーチャ]: https://doc.rust-lang.org/stable/cargo/reference/features.html +//! [CUDA]: https://onnxruntime.ai/docs/execution-providers/CUDA-ExecutionProvider.html +//! [DirectML]: https://onnxruntime.ai/docs/execution-providers/DirectML-ExecutionProvider.html //! [動的リンク対象のライブラリ名]: //! https://doc.rust-lang.org/cargo/reference/build-scripts.html#rustc-link-lib //! [`Onnxruntime`]: blocking::Onnxruntime
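
Editor's note (not part of the patch): a minimal downstream sketch of the device-probing surface this series introduces. It assumes the `load-onnxruntime` feature and the `anyhow` crate for error handling; the intent is only to illustrate how `SupportedDevices::THIS` (what this build of `voicevox_core` can use) combines with `Onnxruntime::supported_devices()` (what the loaded ONNX Runtime supports), mirroring the doctest added in `devices.rs`.

```rust
use voicevox_core::{blocking::Onnxruntime, SupportedDevices};

fn main() -> anyhow::Result<()> {
    // Load the ONNX Runtime dylib once (only available with `load-onnxruntime`).
    let onnxruntime = Onnxruntime::load_once()
        .filename(Onnxruntime::LIB_VERSIONED_FILENAME)
        .exec()?;

    // Intersection of this build's capabilities and the loaded runtime's capabilities;
    // GPUs outside this set are what `DeviceAvailabilities` reports as unavailable.
    let usable = SupportedDevices::THIS & onnxruntime.supported_devices()?;
    println!("{}", usable.to_json());
    Ok(())
}
```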