From 4f7e1c8f1571a4bd1615309ac0dd5d50498a4b74 Mon Sep 17 00:00:00 2001 From: Luke Date: Tue, 4 Jun 2024 16:02:17 -0700 Subject: [PATCH] Fix CI/CD for automated deployments (#278) --- .github/workflows/cd.yml | 126 +++- .github/workflows/ci.yml | 94 ++- .github/workflows/outdated.yml | 10 +- .github/workflows/release-pr.yml | 28 + .github/workflows/security.yml | 21 + .github/workflows/triage.yml | 3 +- .gitignore | 1 - Cargo.toml | 45 +- Cross.toml | 0 Makefile.toml | 37 +- README.md | 9 + .../{TetaNES.desktop => tetanes.desktop} | 0 deny.toml | 254 ++++++++ tetanes-core/Cargo.toml | 20 +- tetanes-core/src/lib.rs | 4 +- tetanes-core/src/sys/fs/wasm.rs | 2 +- tetanes-utils/Cargo.toml | 4 +- tetanes/Cargo.toml | 52 +- tetanes/assets/main.css | 162 +++++ tetanes/index.html | 166 +---- tetanes/src/bin/build_artifacts.rs | 607 +++++++++++------- tetanes/src/bin/update_homebrew_formula.rs | 55 -- tetanes/src/lib.rs | 4 +- tetanes/src/nes/renderer/gui.rs | 79 ++- tetanes/src/sys/platform/wasm.rs | 515 +++++++++++---- 25 files changed, 1627 insertions(+), 671 deletions(-) create mode 100644 .github/workflows/release-pr.yml create mode 100644 .github/workflows/security.yml create mode 100644 Cross.toml rename assets/linux/{TetaNES.desktop => tetanes.desktop} (100%) create mode 100644 deny.toml create mode 100644 tetanes/assets/main.css delete mode 100644 tetanes/src/bin/update_homebrew_formula.rs diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index e353754f..cad08d10 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -1,5 +1,5 @@ --- -name: Continuous Deployment +name: CD # yamllint disable-line rule:truthy on: @@ -11,19 +11,119 @@ permissions: contents: write jobs: - cd: - runs-on: ubuntu-latest + build-and-upload-artifacts: + name: Build & Upload Artifacts (${{ matrix.target }}) + if: startsWith(github.event.release.name, 'tetanes') + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - target: x86_64-pc-windows-msvc + os: windows-latest + # TODO: windows aarch64 + # - target: aarch64-pc-windows-msvc + # os: windows-latest + - target: x86_64-unknown-linux-gnu + os: ubuntu-latest + # TODO: aarch64 linux having trouble with docker in CI + # - target: aarch64-unknown-linux-gnu + # os: ubuntu-latest + - target: x86_64-apple-darwin + os: macos-latest + - target: aarch64-apple-darwin + os: macos-latest + - target: wasm32-unknown-unknown + os: ubuntu-latest + defaults: + run: + shell: bash + outputs: + release_tag: ${{ steps.upload.outputs.release_tag }} steps: - uses: actions/checkout@v4 - with: - fetch-depth: 0 - token: ${{ secrets.RELEASE_PLZ_TOKEN }} - uses: dtolnay/rust-toolchain@master with: - toolchain: stable - - uses: Swatinem/rust-cache@v2 - - name: Run release - uses: MarcoIeni/release-plz-action@v0.5 - env: - GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN }} - CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} + toolchain: nightly + # wasm32-unknown-unknown doesn't need additional toolchains and rustup + # target is managed by build-artifacts already + # + # Windows/macOS just run `rustup add` since the CI runners support cross-compiling + # Linux relies on `cross` + - if: ${{ !startsWith(matrix.os, 'ubuntu') }} + uses: taiki-e/setup-cross-toolchain-action@v1 + with: + target: ${{ matrix.target }} + - if: startsWith(matrix.os, 'ubuntu') + uses: baptiste0928/cargo-install@v3 + with: + crate: cross + git: https://github.com/cross-rs/cross + commit: 19be834 + - uses: taiki-e/install-action@v2 + with: + tool: cargo-make + # 
Build `.deb` + - if: startsWith(matrix.os, 'ubuntu') + uses: taiki-e/install-action@v2 + with: + tool: cargo-deb + # Build `.msi` + - if: startsWith(matrix.os, 'windows') + uses: taiki-e/install-action@v2 + with: + tool: cargo-wix + # Build `.wasm` + - if: startsWith(matrix.target, 'wasm32') + uses: taiki-e/install-action@v2 + with: + tool: trunk + # Install linux dependencies + - if: startsWith(matrix.os, 'ubuntu') + run: | + sudo apt update + sudo apt install -y libudev-dev libasound2-dev libssl-dev libfuse2 + # Windows/macOS/wasm32/ubuntu x86_64 can all build/cross build normally + - if: startsWith(matrix.os, 'macos') || startsWith(matrix.target, 'x86_64') || startsWith(matrix.target, 'wasm32') + run: | + cargo make build-artifacts -- --target ${{ matrix.target }} + # ubuntu aarch64 requires cross building + - if: startsWith(matrix.os, 'ubuntu') && startsWith(matrix.target, 'aarch64') + run: | + export CROSS_CONTAINER_IN_CONTAINER=true + cargo make build-artifacts -- --target ${{ matrix.target }} --cross + - uses: actions/upload-artifact@v4 + name: "Upload artifacts" + with: + name: ${{ matrix.target }}-artifacts + path: dist/ + - id: upload + run: | + gh release upload ${{ github.event.release.tag_name }} dist/* --clobber + echo "release_tag=${{ github.event.release.tag_name }}" >> "$GITHUB_OUTPUT" + update-homebrew-formula: + needs: build-and-upload-artifacts + runs-on: ubuntu + permissions: + contents: write + env: + RELEASE_TAG: ${{ needs.build-and-upload-artifacts.outputs.release_tag }} + steps: + - uses: actions/checkout@v4 + with: + repository: "lukexor/homebrew-formulae" + - id: commit + run: | + gh release -R lukexor/tetanes download $RELEASE_TAG --pattern '*-apple-darwin.tar.gz*' + x86_64_SHA=$(cat *x86_64*txt | awk '{ print $1 }') + aarch64_SHA=$(cat *aarch64*txt | awk '{ print $1 }') + VERSION=${RELEASE_TAG#"tetanes-v"} + cat tetanes.rb.tmpl | \ + sed "s/%VERSION%/${VERSION}/g" | \ + sed "s/%x86_64_SHA%/${x86_64_SHA}/g" | \ + sed "s/%aarch64_SHA%/${aarch64_SHA}/g" \ + > Casks/tetanes.rb + echo "version=${VERSION}" >> "$GITHUB_OUTPUT" + - uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: Version Bump v${{ steps.commit.outputs.version }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 98308c7c..86a9e218 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,8 +5,12 @@ name: CI on: push: branches: [main] + paths-ignore: + - "**.md" pull_request: branches: [main] + paths-ignore: + - "**.md" env: CARGO_TERM_COLOR: always @@ -19,34 +23,98 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + targets: wasm32-unknown-unknown + components: clippy + - uses: Swatinem/rust-cache@v2 - run: cargo fmt --all --check - build-and-test: + lint-web: + name: Lint Web + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + targets: wasm32-unknown-unknown + components: clippy + - uses: Swatinem/rust-cache@v2 + - run: | + cargo clippy --lib --bin tetanes --target wasm32-unknown-unknown --all-features --keep-going -- -D warnings + + lint-tetanes: + name: Lint TetaNES (${{ matrix.os }}) runs-on: ${{ matrix.os }} strategy: - fail-fast: false matrix: os: [macos-latest, ubuntu-latest, windows-latest] - toolchain: [stable, 1.78.0] steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: dtolnay/rust-toolchain@master with: - toolchain: ${{ matrix.toolchain }} 
- targets: wasm32-unknown-unknown + toolchain: nightly components: clippy - uses: Swatinem/rust-cache@v2 - - if: matrix.os == 'ubuntu-latest' + - if: startsWith(matrix.os, 'ubuntu') run: | sudo apt update - sudo apt install libudev-dev libasound2-dev + sudo apt install -y libudev-dev libasound2-dev - run: | - cargo clippy --all-targets - cargo clippy --target wasm32-unknown-unknown + cargo clippy -p tetanes --all-features --keep-going -- -D warnings + + lint-tetanes-core: + name: Lint TetaNES Core (${{ matrix.os }}) + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + toolchain: [nightly, stable, 1.78] + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ matrix.toolchain }} + components: clippy + - uses: Swatinem/rust-cache@v2 + - run: | + cargo clippy -p tetanes-core --all-features --keep-going -- -D warnings + + test: + name: Test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + - uses: Swatinem/rust-cache@v2 - run: | - cargo build -p tetanes - cargo build -p tetanes --target wasm32-unknown-unknown - - run: cargo doc - - run: cargo test + sudo apt update + sudo apt install -y libudev-dev libasound2-dev + - run: | + cargo test --all-targets --all-features --no-fail-fast + + docs: + name: Docs + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + components: clippy + - uses: Swatinem/rust-cache@v2 + - env: + RUSTDOCFLAGS: -D warnings + run: cargo doc --no-deps --document-private-items --all-features --workspace --examples --keep-going diff --git a/.github/workflows/outdated.yml b/.github/workflows/outdated.yml index 455b15df..e710b1ae 100644 --- a/.github/workflows/outdated.yml +++ b/.github/workflows/outdated.yml @@ -1,22 +1,22 @@ --- -name: Outdated +name: Check Outdated # yamllint disable-line rule:truthy on: schedule: - # At 08:00 on day-of-month 2 and 16 - - cron: "0 8 2,16 * *" + # At 06:00 on day-of-month 2 and 16 + - cron: "0 6 2,16 * *" env: CARGO_TERM_COLOR: always jobs: outdated: - name: Outdated + name: Check Outdated runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: dtolnay/install@cargo-outdated - - run: cargo outdated --exit-code 1 + - run: cargo outdated -e --exit-code 1 diff --git a/.github/workflows/release-pr.yml b/.github/workflows/release-pr.yml new file mode 100644 index 00000000..5cb95d6a --- /dev/null +++ b/.github/workflows/release-pr.yml @@ -0,0 +1,28 @@ +--- +name: Release PR + +# yamllint disable-line rule:truthy +on: + push: + branches: [main] + +permissions: + pull-requests: write + contents: write + +jobs: + release-pr: + name: Release PR + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.RELEASE_PLZ_TOKEN }} + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + - name: Run release + uses: MarcoIeni/release-plz-action@v0.5 + env: + GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN }} + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml new file mode 100644 index 00000000..8867562c --- /dev/null +++ b/.github/workflows/security.yml @@ -0,0 +1,21 @@ +--- +name: Security Audit + +on: + schedule: + # At 06:00 once a week on Sunday + - cron: "0 6 * * 0" + 
push: + paths: + - "**/Cargo.toml" + - "**/Cargo.lock" + pull_request: + +jobs: + audit: + name: Security Audit + runs-on: ubuntu-22.04 + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - uses: EmbarkStudios/cargo-deny-action@v1 diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml index 59b5392d..2ecf1536 100644 --- a/.github/workflows/triage.yml +++ b/.github/workflows/triage.yml @@ -1,5 +1,5 @@ --- -name: Triage +name: Triage Issues # yamllint disable-line rule:truthy on: @@ -8,6 +8,7 @@ on: jobs: triage: + name: Triage Issue runs-on: ubuntu-latest steps: - name: add needs-triage label diff --git a/.gitignore b/.gitignore index 307b9a8d..2af51332 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ -/roms tmp/ test_results* target/ diff --git a/Cargo.toml b/Cargo.toml index 9787be90..c669d79d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,19 @@ documentation = "https://docs.rs/tetanes" repository = "https://github.com/lukexor/tetanes.git" homepage = "https://lukeworks.tech/tetanes" +[workspace.lints.clippy] +all = { level = "warn", priority = -1 } +missing_const_for_fn = "warn" +print_literal = "warn" + +[workspace.lints.rust] +future_incompatible = "warn" +nonstandard_style = "warn" +rust_2018_compatibility = "warn" +rust_2018_idioms = "warn" +rust_2021_compatibility = "warn" +unused = "warn" + [workspace.dependencies] anyhow = "1.0" bincode = "1.3" @@ -51,22 +64,38 @@ opt-level = 1 codegen-units = 1 lto = true # See: https://smallcultfollowing.com/babysteps/blog/2024/05/02/unwind-considered-harmful/ -panic = 'abort' +panic = "abort" [profile.flamegraph] inherits = "release" debug = true [profile.dist] -strip = true inherits = "release" +strip = true -[profile.dev.package.serde] -inherits = "release" -[profile.dev.package.bincode] -inherits = "release" -[profile.dev.build-override] -inherits = "release" +[profile.dev.package."*"] +opt-level = 3 + +[workspace.metadata.cross.target.x86_64-unknown-linux-gnu] +pre-build = [ + "dpkg --add-architecture $CROSS_DEB_ARCH", + """apt update && apt install -y \ + libudev-dev:$CROSS_DEB_ARCH \ + libssl-dev:$CROSS_DEB_ARCH \ + libasound2-dev:$CROSS_DEB_ARCH + """, +] + +[workspace.metadata.cross.target.aarch64-unknown-linux-gnu] +pre-build = [ + "dpkg --add-architecture $CROSS_DEB_ARCH", + """apt update && apt install -y \ + libudev-dev:$CROSS_DEB_ARCH \ + libssl-dev:$CROSS_DEB_ARCH \ + libasound2-dev:$CROSS_DEB_ARCH + """, +] [workspace.metadata.wix] upgrade-guid = "DB76CEB0-15B8-4727-9C3E-55819AB5E7B9" diff --git a/Cross.toml b/Cross.toml new file mode 100644 index 00000000..e69de29b diff --git a/Makefile.toml b/Makefile.toml index 94deaa0c..3dc4df71 100644 --- a/Makefile.toml +++ b/Makefile.toml @@ -32,6 +32,18 @@ category = "Build" command = "cargo" args = ["build", "-p", "tetanes", "--profile", "dist", "${@}"] +[tasks.build-artifacts] +description = "Build TetaNES Artifacts for a given target_arch" +category = "Build" +command = "cargo" +args = ["run", "--bin", "build_artifacts", "${@}"] + +[tasks.build-cross] +description = "Cross-Build TetaNES for a given target_arch" +category = "Build" +command = "cross" +args = ["build", "-p", "tetanes", "--profile", "dist", "${@}"] + [tasks.bench] description = "Benchmark TetaNES" category = "Development" @@ -62,19 +74,6 @@ category = "Development" command = "rustup" args = ["target", "add", "wasm32-unknown-unknown"] -[tasks.install-binstall] -description = "Install cargo binstall" -category = "Development" -command = "cargo" -args = ["install", 
"cargo-binstall"] - -[tasks.install-cargo-tools] -description = "Install cargo tools" -category = "Development" -command = "cargo" -args = ["binstall", "-y", "trunk", "cargo-deb"] -dependencies = ["install-binstall"] - [tasks.create-dist] description = "Creates dist directory" category = "Development" @@ -88,18 +87,6 @@ command = "trunk" args = ["build", "--config", "tetanes/Cargo.toml", "--release", "--dist", "dist/web", "--public-url", "./"] dependencies = ["create-dist", "add-wasm-target"] -[tasks.build-all] -run_task = { name = ["build", "build-web"], parallel = true } - -[tasks.build-artifacts] -command = "cargo" -args = ["run", "--bin", "build_artifacts"] -dependencies = ["install-cargo-tools"] - -[tasks.update-homebrew] -command = "cargo" -args = ["run", "--bin", "update_homebrew_formula"] - [tasks.run-web] description = "Run TetaNES Web in release mode" category = "Development" diff --git a/README.md b/README.md index eb5a87e3..fa3d6c3d 100644 --- a/README.md +++ b/README.md @@ -153,6 +153,15 @@ This will install the latest released version of the `TetaNES` binary to your `cargo` bin directory located at either `$HOME/.cargo/bin/` on a Unix-like platform or `%USERPROFILE%\.cargo\bin` on Windows. +Alternatively, if you have [`cargo binstall`](https://crates.io/crates/cargo-binstall/) installed: + +```sh +cargo binstall tetanes +``` + +This will try to find the target binary for your platform from the latest +[Release][] or install from source, similar to above. + ### Usage ```text diff --git a/assets/linux/TetaNES.desktop b/assets/linux/tetanes.desktop similarity index 100% rename from assets/linux/TetaNES.desktop rename to assets/linux/tetanes.desktop diff --git a/deny.toml b/deny.toml new file mode 100644 index 00000000..9862b30f --- /dev/null +++ b/deny.toml @@ -0,0 +1,254 @@ +# This template contains all of the possible sections and their default values + +# Note that all fields that take a lint level have these possible values: +# * deny - An error will be produced and the check will fail +# * warn - A warning will be produced, but the check will not fail +# * allow - No warning or error will be produced, though in some cases a note +# will be + +# The values provided in this template are the default values that will be used +# when any section or field is not specified in your own configuration + +# Root options + +[graph] +# If 1 or more target triples (and optionally, target_features) are specified, +# only the specified targets will be checked when running `cargo deny check`. +# This means, if a particular package is only ever used as a target specific +# dependency, such as, for example, the `nix` crate only being used via the +# `target_family = "unix"` configuration, that only having windows targets in +# this list would mean the nix crate, as well as any of its exclusive +# dependencies not shared by any other crates, would be ignored, as the target +# list here is effectively saying which targets you are building for. +targets = [ + # The triple can be any string, but only the target triples built in to + # rustc (as of 1.40) can be checked against actual config expressions + #{ triple = "x86_64-unknown-linux-musl" }, + # You can also specify which target_features you promise are enabled for a + # particular target. target_features are currently not validated against + # the actual valid features supported by the target architecture. 
+ #{ triple = "wasm32-unknown-unknown", features = ["atomics"] }, +] +# When creating the dependency graph used as the source of truth when checks are +# executed, this field can be used to prune crates from the graph, removing them +# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate +# is pruned from the graph, all of its dependencies will also be pruned unless +# they are connected to another crate in the graph that hasn't been pruned, +# so it should be used with care. The identifiers are [Package ID Specifications] +# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) +#exclude = [] +# If true, metadata will be collected with `--all-features`. Note that this can't +# be toggled off if true, if you want to conditionally enable `--all-features` it +# is recommended to pass `--all-features` on the cmd line instead +all-features = true +# If true, metadata will be collected with `--no-default-features`. The same +# caveat with `all-features` applies +no-default-features = false + +[output] +# If set, these feature will be enabled when collecting metadata. If `--features` +# is specified on the cmd line they will take precedence over this option. +#features = [] +# When outputting inclusion graphs in diagnostics that include features, this +# option can be used to specify the depth at which feature edges will be added. +# This option is included since the graphs can be quite large and the addition +# of features from the crate(s) to all of the graph roots can be far too verbose. +# This option can be overridden via `--feature-depth` on the cmd line +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +# More documentation for the advisories section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html +[advisories] +version = 2 +# The path where the advisory database is cloned/fetched into +db-path = "~/.cargo/advisory-db" +# The url(s) of the advisory databases to use +db-urls = ["https://github.com/rustsec/advisory-db"] +# The lint level for crates that have been yanked from their source registry +yanked = "warn" +# A list of advisory IDs to ignore. Note that ignored advisories will still +# output a note when they are encountered. +ignore = [ + #"RUSTSEC-0000-0000", +] +# Threshold for security vulnerabilities, any vulnerability with a CVSS score +# lower than the range specified will be ignored. Note that ignored advisories +# will still output a note when they are encountered. +# * None - CVSS Score 0.0 +# * Low - CVSS Score 0.1 - 3.9 +# * Medium - CVSS Score 4.0 - 6.9 +# * High - CVSS Score 7.0 - 8.9 +# * Critical - CVSS Score 9.0 - 10.0 +#severity-threshold = + +# If this is true, then cargo deny will use the git executable to fetch advisory database. +# If this is false, then it uses a built-in git library. +# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. +# See Git Authentication for more information about setting up git authentication. +#git-fetch-with-cli = true + +# This section is considered when running `cargo deny check licenses` +# More documentation for the licenses section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html +[licenses] +version = 2 +# List of explicitly allowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 
+allow = [ + "Apache-2.0", # https://tldrlegal.com/license/apache-license-2.0-(apache-2.0) + "Apache-2.0 WITH LLVM-exception", # https://spdx.org/licenses/LLVM-exception.html + "BSD-2-Clause", # https://tldrlegal.com/license/bsd-2-clause-license-(freebsd) + "BSD-3-Clause", # https://tldrlegal.com/license/bsd-3-clause-license-(revised) + "BSL-1.0", # https://tldrlegal.com/license/boost-software-license-1.0-explained + "CC0-1.0", # https://creativecommons.org/publicdomain/zero/1.0/ + "ISC", # https://www.tldrlegal.com/license/isc-license + "LicenseRef-UFL-1.0", # no official SPDX, see https://github.com/emilk/egui/issues/2321 + "MIT", # https://tldrlegal.com/license/mit-license + "MPL-2.0", # https://www.mozilla.org/en-US/MPL/2.0/FAQ/ - see Q11. Used by webpki-roots on Linux. + "OFL-1.1", # https://spdx.org/licenses/OFL-1.1.html + "Unicode-DFS-2016", # https://spdx.org/licenses/Unicode-DFS-2016.html + "Zlib", # https://tldrlegal.com/license/zlib-libpng-license-(zlib) +] +# The confidence threshold for detecting a license from license text. +# The higher the value, the more closely the license text must be to the +# canonical license text of a valid SPDX license file. +# [possible values: any between 0.0 and 1.0]. +confidence-threshold = 0.8 +# Allow 1 or more licenses on a per-crate basis, so that particular licenses +# aren't accepted for every possible crate as with the normal allow list +exceptions = [ + # Each entry is the crate and version constraint, and its specific allow + # list + #{ allow = ["Zlib"], name = "adler32", version = "*" }, +] + +# Some crates don't have (easily) machine readable licensing information, +# adding a clarification entry for it allows you to manually specify the +# licensing information +#[[licenses.clarify]] +# The name of the crate the clarification applies to +#name = "ring" +# The optional version constraint for the crate +#version = "*" +# The SPDX expression for the license requirements of the crate +#expression = "MIT AND ISC AND OpenSSL" +# One or more files in the crate's source used as the "source of truth" for +# the license expression. If the contents match, the clarification will be used +# when running the license check, otherwise the clarification will be ignored +# and the crate will be checked normally, which may produce warnings or errors +# depending on the rest of your configuration +#license-files = [ +# Each entry is a crate relative path, and the (opaque) hash of its contents +#{ path = "LICENSE", hash = 0xbd0eed23 } +#] + +[licenses.private] +# If true, ignores workspace crates that aren't published, or are only +# published to private registries. +# To see how to mark a crate as unpublished (to the official registry), +# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. +ignore = false +# One or more private registries that you might publish crates to, if a crate +# is only published to private registries, and ignore is true, the crate will +# not have its license(s) checked +registries = [ + #"https://sekretz.com/registry +] + +# This section is considered when running `cargo deny check bans`. 
+# More documentation about the 'bans' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html +[bans] +# Lint level for when multiple versions of the same crate are detected +multiple-versions = "allow" +# Lint level for when a crate version requirement is `*` +wildcards = "allow" +# The graph highlighting used when creating dotgraphs for crates +# with multiple versions +# * lowest-version - The path to the lowest versioned duplicate is highlighted +# * simplest-path - The path to the version with the fewest edges is highlighted +# * all - Both lowest-version and simplest-path are used +highlight = "all" +# The default lint level for `default` features for crates that are members of +# the workspace that is being checked. This can be overridden by allowing/denying +# `default` on a crate-by-crate basis if desired. +workspace-default-features = "allow" +# The default lint level for `default` features for external crates that are not +# members of the workspace. This can be overridden by allowing/denying `default` +# on a crate-by-crate basis if desired. +external-default-features = "allow" +# List of crates that are allowed. Use with care! +allow = [ + #{ name = "ansi_term", version = "=0.11.0" }, +] +# List of crates to deny +deny = [ + # Each entry the name of a crate and a version range. If version is + # not specified, all versions will be matched. + #{ name = "ansi_term", version = "=0.11.0" }, + # + # Wrapper crates can optionally be specified to allow the crate when it + # is a direct dependency of the otherwise banned crate + #{ name = "ansi_term", version = "=0.11.0", wrappers = [] }, +] + +# List of features to allow/deny +# Each entry the name of a crate and a version range. If version is +# not specified, all versions will be matched. +#[[bans.features]] +#name = "reqwest" +# Features to not allow +#deny = ["json"] +# Features to allow +#allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +#] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +#exact = true + +# Certain crates/versions that will be skipped when doing duplicate detection. +skip = [ + #{ name = "ansi_term", version = "=0.11.0" }, +] +# Similarly to `skip` allows you to skip certain crates during duplicate +# detection. Unlike skip, it also includes the entire tree of transitive +# dependencies starting at the specified crate, up to a certain depth, which is +# by default infinite. +skip-tree = [ + #{ name = "ansi_term", version = "=0.11.0", depth = 20 }, +] + +# This section is considered when running `cargo deny check sources`. +# More documentation about the 'sources' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html +[sources] +# Lint level for what to happen when a crate from a crate registry that is not +# in the allow list is encountered +unknown-registry = "warn" +# Lint level for what to happen when a crate from a git repository that is not +# in the allow list is encountered +unknown-git = "warn" +# List of URLs for allowed crate registries. Defaults to the crates.io index +# if not specified. If it is specified but empty, no registries are allowed. 
+allow-registry = ["https://github.com/rust-lang/crates.io-index"] +# List of URLs for allowed Git repositories +allow-git = [] + +[sources.allow-org] +# 1 or more github.com organizations to allow git sources for +github = [] +# 1 or more gitlab.com organizations to allow git sources for +gitlab = [] +# 1 or more bitbucket.org organizations to allow git sources for +bitbucket = [] diff --git a/tetanes-core/Cargo.toml b/tetanes-core/Cargo.toml index 41589ba0..07736a07 100644 --- a/tetanes-core/Cargo.toml +++ b/tetanes-core/Cargo.toml @@ -16,18 +16,12 @@ keywords = ["nes", "emulator"] [lib] crate-type = ["cdylib", "rlib"] -[lints.clippy] -all = "warn" -missing_const_for_fn = "warn" -print_literal = "warn" +[[bench]] +name = "clock_frame" +harness = false -[lints.rust] -future_incompatible = "warn" -nonstandard_style = "warn" -rust_2018_compatibility = "warn" -rust_2018_idioms = "warn" -rust_2021_compatibility = "warn" -unused = "warn" +[lints] +workspace = true [features] default = ["cycle-accurate"] @@ -63,7 +57,3 @@ tracing-subscriber.workspace = true [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] criterion = { version = "0.5", features = ["html_reports"] } - -[[bench]] -name = "clock_frame" -harness = false diff --git a/tetanes-core/src/lib.rs b/tetanes-core/src/lib.rs index 495dfbfa..d01d8581 100644 --- a/tetanes-core/src/lib.rs +++ b/tetanes-core/src/lib.rs @@ -1,7 +1,7 @@ #![doc = include_str!("../README.md")] #![doc( - html_favicon_url = "https://github.com/lukexor/tetanes/blob/main/assets/tetanes_icon.png?raw=true", - html_logo_url = "https://github.com/lukexor/tetanes/blob/main/assets/tetanes_icon.png?raw=true" + html_favicon_url = "https://github.com/lukexor/tetanes/blob/main/assets/linux/icon.png?raw=true", + html_logo_url = "https://github.com/lukexor/tetanes/blob/main/assets/linux/icon.png?raw=true" )] pub mod action; diff --git a/tetanes-core/src/sys/fs/wasm.rs b/tetanes-core/src/sys/fs/wasm.rs index fb665663..72908ffd 100644 --- a/tetanes-core/src/sys/fs/wasm.rs +++ b/tetanes-core/src/sys/fs/wasm.rs @@ -25,7 +25,7 @@ fn local_storage() -> Result { .local_storage() .map_err(|err| { tracing::error!("failed to get local storage: {err:?}"); - Error::custom(format!("failed to get storage")) + Error::custom("failed to get storage") })? 
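// A successful call can still return Ok(None) (e.g. when the browser disables storage), so map None to an error as well.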
.ok_or_else(|| Error::custom("no storage available")) } diff --git a/tetanes-utils/Cargo.toml b/tetanes-utils/Cargo.toml index 54b7f997..8b291130 100644 --- a/tetanes-utils/Cargo.toml +++ b/tetanes-utils/Cargo.toml @@ -24,5 +24,5 @@ bench = false [dependencies] anyhow.workspace = true clap.workspace = true -tetanes-core = { version = "0.10.0", path = "../tetanes-core" } -tetanes = { version = "0.10.0", path = "../tetanes" } +tetanes-core = { version = "0.10", path = "../tetanes-core" } +tetanes = { version = "0.10", path = "../tetanes" } diff --git a/tetanes/Cargo.toml b/tetanes/Cargo.toml index e72a96d4..e9f75fb0 100644 --- a/tetanes/Cargo.toml +++ b/tetanes/Cargo.toml @@ -20,22 +20,50 @@ name = "tetanes" test = false bench = false -[lints.clippy] -all = "warn" -missing_const_for_fn = "warn" -print_literal = "warn" +[[bin]] +name = "build_artifacts" +test = false +bench = false -[lints.rust] -future_incompatible = "warn" -nonstandard_style = "warn" -rust_2018_compatibility = "warn" -rust_2018_idioms = "warn" -rust_2021_compatibility = "warn" -unused = "warn" +[lints] +workspace = true [package.metadata.docs.rs] targets = ["wasm32-unknown-unknown"] +[package.metadata.deb] +extended-description = """ +`TetaNES` is a cross-platform emulator for the Nintendo Entertainment System +(NES) released in Japan in 1983 and North America in 1986, written in +Rust using wgpu. It runs on Linux, macOS, Windows, and in a web browser +with Web Assembly. + +It started as a personal curiosity that turned into a passion project. It is +still being actively developed with new features and improvements constantly +being added. It is a fairly accurate emulator that can play most NES titles. + +`TetaNES` is also meant to showcase using Rust's performance, memory safety, and +fearless concurrency features in a large project. Features used in this project +include complex enums, traits, generics, matching, iterators, channels, and +threads. + +`TetaNES` also compiles for the web! Try it out in your browser +(http://lukeworks.tech/tetanes-web)! 
+""" +section = "game" +assets = [ + [ + 'target/dist/tetanes', + '/usr/bin/', + '755', + ], + [ + "README.md", + "usr/share/doc/tetanes/README", + "644", + ], +] + [features] default = ["tetanes-core/cycle-accurate"] profiling = ["tetanes-core/profiling", "dep:puffin", "dep:puffin_egui"] @@ -111,10 +139,12 @@ web-sys = { workspace = true, features = [ "File", "FileList", "FileReader", + "HtmlAnchorElement", "HtmlCanvasElement", "HtmlElement", "HtmlInputElement", "HtmlDivElement", + "Navigator", "Window", ] } wasm-bindgen = "0.2" diff --git a/tetanes/assets/main.css b/tetanes/assets/main.css new file mode 100644 index 00000000..a35143c4 --- /dev/null +++ b/tetanes/assets/main.css @@ -0,0 +1,162 @@ +@font-face { + font-family: "Pixeloid Sans"; + src: + local("Pixeloid Sans"), + url("./pixeloid-sans.ttf") format("truetype"); +} +@font-face { + font-family: "Pixeloid Sans Bold"; + src: + local("Pixeloid Sans Bold"), + url("./pixeloid-sans-bold.ttf") format("truetype"); +} + +body { + --color: #e6b673; + --heading: #a9491f; + --background: #0f1419; + background-color: var(--background); + max-width: 80%; + margin: auto; + margin-bottom: 100px; + color: var(--color); + font-family: "Pixeloid Sans", "Courier New", Courier, monospace; + display: flex; + flex-direction: column; + align-items: center; +} + +h1 { + color: var(--heading); + font-family: "Pixeloid Sans Bold", "Courier New", Courier, monospace; + text-align: center; + margin-top: 80px; + margin-bottom: 0px; +} + +h2 { + font-size: 0.8rem; + margin-bottom: 40px; +} + +h3 { + color: var(--heading); + font-family: "Pixeloid Sans Bold", "Courier New", Courier, monospace; + text-align: center; + margin-top: 40px; +} + +p { + font-size: 0.9rem; + max-width: 70ch; + margin: 15px 0; +} + +table { + --color: #333; + border-collapse: separate; + border-color: var(--color); + border-spacing: 0; + border: 0.5px solid var(--color); + text-align: left; + width: 100%; +} + +th { + color: var(--heading); +} + +th, +td { + padding: 5px; + border: 0.5px solid var(--color); +} + +a { + color: #36a3d9; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + +canvas { + margin: 0 40px; + width: fit-content; + height: fit-content; + outline: none; +} + +#wrapper { + display: flex; + justify-content: center; + flex-wrap: wrap; + margin-bottom: 20px; +} + +#content { + max-width: 70ch; + margin: auto; +} + +.hidden { + display: none !important; +} +.absolute { + position: absolute; +} + +#loading-status { + display: flex; + justify-content: center; + align-items: center; +} + +.loader { + border: 4px solid #e6b673; + border-top: 4px solid #a9491f; + border-radius: 50%; + width: 16px; + height: 16px; + margin: 8px; + animation: spin 2s linear infinite; +} + +#error { + color: #ff3333; + text-align: center; +} + +.version-download { + position: relative; + width: max-content; + margin: auto; +} + +.version-download div { + width: 100%; +} + +.version-download a { + display: block; + padding: 0.8rem; + background: #14191f; + color: #a9491f; + font-family: "Pixeloid Sans Bold", "Courier New", Courier, monospace; +} + +.version-download a:hover { + background: #212733; + text-decoration: none; + cursor: pointer; +} + +@keyframes spin { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(360deg); + } +} diff --git a/tetanes/index.html b/tetanes/index.html index db0f566e..adbe970b 100644 --- a/tetanes/index.html +++ b/tetanes/index.html @@ -18,137 +18,9 @@ as="font" type="font/ttf" /> - + @@ -168,6 +40,7 @@

[The tetanes/index.html hunks here were garbled during extraction: the HTML markup was stripped, leaving only text nodes and hunk headers. The recoverable content shows the "TetaNES" page title, a "Loading..." indicator, the sentence "You can check out the code on github or download the desktop version:" followed by newly added download-link entries, and the existing "Controls" heading. Per the diffstat (tetanes/index.html | 166 +----) and the first hunk header (@@ -18,137 +18,9 @@), most of the removed lines are likely the inline styles that now live in tetanes/assets/main.css.]
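A note on how the release pieces fit together: `write_sha256` in build_artifacts.rs (next diff) stores the raw checksum tool output, `<hash>  <filename>` from `shasum -a 256` or a bare hash from PowerShell's `Get-FileHash`, which is why the Homebrew step in cd.yml extracts the first whitespace-separated field with `awk '{ print $1 }'`. Below is a minimal Rust sketch of that same extraction, mirroring what the removed update_homebrew_formula.rs did with `split_whitespace`; the artifact path is only an example of the `<bin>-<arch>-<target>.tar.gz-sha256.txt` naming convention.

```rust
use std::{fs, io};

/// Return just the hex digest from a `*-sha256.txt` artifact.
/// Handles both layouts: `<hash>  <filename>` (shasum) and a bare hash (Get-FileHash).
fn read_sha256(path: &str) -> io::Result<String> {
    let contents = fs::read_to_string(path)?;
    Ok(contents
        .split_whitespace()
        .next()
        .unwrap_or_default()
        .to_owned())
}

fn main() -> io::Result<()> {
    // Example artifact name following the convention used by `tar_gz`/`write_sha256`.
    let sha = read_sha256("dist/tetanes-x86_64-apple-darwin.tar.gz-sha256.txt")?;
    println!("x86_64 sha256: {sha}");
    Ok(())
}
```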

diff --git a/tetanes/src/bin/build_artifacts.rs b/tetanes/src/bin/build_artifacts.rs index a6a2af08..628f1ed6 100644 --- a/tetanes/src/bin/build_artifacts.rs +++ b/tetanes/src/bin/build_artifacts.rs @@ -1,38 +1,74 @@ +#![allow(unused)] + +use anyhow::Context; use cfg_if::cfg_if; +use clap::Parser; use std::{ - env, fs, io, + env, + ffi::OsStr, + fs, path::{Path, PathBuf}, - process::Command, + process::{Command, ExitStatus, Output}, }; +/// CLI options +#[derive(Parser, Debug)] +#[must_use] +pub struct Args { + /// Target platform to build for. e.g. `x86_64-unknown-linux-gnu`. + #[clap(long)] + target: String, + /// Build for a target platform different from the host using + /// `cross`. e.g. `aarch64-unknown-linux-gnu`. + #[clap(long)] + cross: bool, + /// Clean `dist` directory before building. + #[clap(long)] + clean: bool, +} + +/// Build context with required variables and platform targets. #[derive(Debug)] #[must_use] struct Build { version: &'static str, bin_name: &'static str, + bin_path: PathBuf, app_name: &'static str, - target_arch: &'static str, + arch: &'static str, + target_arch: String, + #[cfg(target_os = "linux")] + cross: bool, cargo_target_dir: PathBuf, dist_dir: PathBuf, } -fn main() -> io::Result<()> { - let build = Build::new()?; +fn main() -> anyhow::Result<()> { + let args = Args::parse(); + let build = Build::new(args)?; - if env::args().nth(1).as_deref() == Some("web") { - build.make("build-web")?; - build.compress_web_artifacts()?; - } + println!("building artifacts: {build:?}..."); - cfg_if! { - if #[cfg(target_os = "linux")] { - build.make("build")?; - build.create_linux_artifacts()?; - } else if #[cfg(target_os = "macos")] { - build.make("build")?; - build.create_macos_app()?; - } else if #[cfg(target_os = "windows")] { - build.create_windows_installer()?; + if build.target_arch == "wasm32-unknown-unknown" { + build.make(["build-web"])?; + build.compress_web_artifacts()?; + } else { + let build_args = ["build", "--target", &build.target_arch]; + cfg_if! { + if #[cfg(target_os = "linux")] { + let build_args = if build.cross { + vec!["build-cross"] + } else { + build_args.to_vec() + }; + build.make(build_args)?; + build.create_linux_artifacts()?; + } else if #[cfg(target_os = "macos")] { + build.make(build_args)?; + build.create_macos_app()?; + } else if #[cfg(target_os = "windows")] { + build.create_windows_installer()?; + } } } @@ -40,248 +76,264 @@ fn main() -> io::Result<()> { } impl Build { - fn new() -> io::Result { + /// Create a new build context by cleaning up any previous artifacts and ensuring the + /// dist directory is created. 
+ fn new(args: Args) -> anyhow::Result { let dist_dir = PathBuf::from("dist"); - let _ = fs::remove_dir_all(&dist_dir); // ignore if not found - fs::create_dir_all(&dist_dir)?; + if args.clean { + let _ = remove_dir_all(&dist_dir); // ignore if not found + } + create_dir_all(&dist_dir)?; + + let bin_name = env!("CARGO_PKG_NAME"); + let cargo_target_dir = + PathBuf::from(env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target".to_string())); + let target_arch = args.target; Ok(Build { version: env!("CARGO_PKG_VERSION"), - bin_name: env!("CARGO_PKG_NAME"), + bin_name, + bin_path: cargo_target_dir + .join(&target_arch) + .join("dist") + .join(bin_name), app_name: "TetaNES", - target_arch: if cfg!(target_arch = "x86_64") { + arch: if target_arch.starts_with("x86_64") { "x86_64" - } else if cfg!(target_arch = "aarch64") { + } else if target_arch.starts_with("aarch64") { "aarch64" - } else if cfg!(target_arch = "wasm32") { + } else if target_arch.starts_with("wasm32") { "wasm32" } else { - panic!("unsupported target arch"); + panic!("unsupported target_arch: {target_arch}") }, - cargo_target_dir: PathBuf::from( - env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target".to_string()), - ), + target_arch, + #[cfg(target_os = "linux")] + cross: args.cross, + cargo_target_dir, dist_dir: PathBuf::from("dist"), }) } - fn bin_path(&self) -> PathBuf { - self.cargo_target_dir.join("dist").join(self.bin_name) - } - /// Run `cargo make` to build binary. /// /// Note: Wix on Windows bakes in the build step - fn make(&self, cmd: &'static str) -> io::Result<()> { + fn make( + &self, + args: impl IntoIterator>, + ) -> anyhow::Result { + let mut cmd = Command::new("cargo"); + cmd.arg("make"); + for arg in args { + cmd.arg(arg); + } // TODO: disable lto and make pgo build - Command::new("cargo").args(["make", cmd]).spawn()?.wait()?; - - Ok(()) + cmd_spawn_wait(&mut cmd) } /// Create a dist directory for artifacts. - fn create_build_dir(&self, dir: impl AsRef) -> io::Result { + fn create_build_dir(&self, dir: impl AsRef) -> anyhow::Result { let build_dir = self.cargo_target_dir.join(dir); println!("creating build directory: {build_dir:?}"); - let _ = fs::remove_dir_all(&build_dir); // ignore if not found - fs::create_dir_all(&build_dir)?; + let _ = remove_dir_all(&build_dir); // ignore if not found + create_dir_all(&build_dir)?; Ok(build_dir) } /// Write out a SHA256 checksum for a file. - fn write_sha256(&self, file: impl AsRef, output: impl AsRef) -> io::Result<()> { + fn write_sha256(&self, file: impl AsRef, output: impl AsRef) -> anyhow::Result<()> { let file = file.as_ref(); let output = output.as_ref(); - println!("writing sha256 for {file:?}"); - let shasum = { cfg_if! { if #[cfg(target_os = "windows")] { - Command::new("powershell") + cmd_output(Command::new("powershell") .arg("-Command") - .arg(format!("Get-FileHash -Algorithm SHA256 {} | select-object -ExpandProperty Hash", file.display())) - .output()? + .arg(format!("Get-FileHash -Algorithm SHA256 {} | select-object -ExpandProperty Hash", file.display())))? } else { - Command::new("shasum") + cmd_output(Command::new("shasum") .current_dir(file.parent().expect("parent directory")) .args(["-a", "256"]) - .arg(file.file_name().expect("filename")) - .output()? + .arg(file.file_name().expect("filename")))? } } }; let sha256 = std::str::from_utf8(&shasum.stdout) - .expect("valid stdout") + .with_context(|| format!("invalid sha output for {file:?}"))? 
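// shasum emits `<hash>  <filename>`; the Windows Get-FileHash branch above emits just the hash.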
.trim() .to_owned(); println!("sha256: {sha256}"); - fs::write(output, shasum.stdout)?; - - Ok(()) + write(output, shasum.stdout) } + /// Create a Gzipped tarball. fn tar_gz( &self, tgz_name: impl AsRef, directory: impl AsRef, files: impl IntoIterator>, - ) -> io::Result<()> { - println!("creating tarball..."); - + ) -> anyhow::Result<()> { + let directory = directory.as_ref(); let tgz_name = tgz_name.as_ref(); + let tgz_path = self.dist_dir.join(tgz_name); + let mut cmd = Command::new("tar"); cmd.arg("-czvf") - .arg(self.dist_dir.join(tgz_name)) - .arg(format!("--directory={}", directory.as_ref().display())); + .arg(&tgz_path) + .arg(format!("--directory={}", directory.display())); for file in files { cmd.arg(file.as_ref()); } - cmd.spawn()?.wait()?; - let tgz_sha_name = format!("{tgz_name}-sha256.txt"); - self.write_sha256( - self.dist_dir.join(tgz_name), - self.dist_dir.join(tgz_sha_name), - )?; - Ok(()) + cmd_spawn_wait(&mut cmd)?; + self.write_sha256( + tgz_path, + self.dist_dir.join(format!("{tgz_name}-sha256.txt")), + ) } - /// Create linux artifacts. + /// Create linux artifacts (.tar.gz, .deb and .AppImage). #[cfg(target_os = "linux")] - fn create_linux_artifacts(&self) -> io::Result<()> { + fn create_linux_artifacts(&self) -> anyhow::Result<()> { println!("creating linux artifacts..."); let build_dir = self.create_build_dir("linux")?; - println!("creating tarball..."); + // Binary .tar.gz + copy("README.md", build_dir.join("README.md"))?; + copy("LICENSE-MIT", build_dir.join("LICENSE-MIT"))?; + copy("LICENSE-APACHE", build_dir.join("LICENSE-APACHE"))?; - fs::copy("README.md", build_dir.join("README.md"))?; - fs::copy("LICENSE-MIT", build_dir.join("LICENSE-MIT"))?; - fs::copy("LICENSE-APACHE", build_dir.join("LICENSE-APACHE"))?; - fs::copy(self.bin_path(), build_dir.join(self.bin_name))?; + let build_bin_path = build_dir.join(self.bin_name); + copy(&self.bin_path, &build_bin_path)?; self.tar_gz( - format!( - "{}-{}-{}-linux.tar.gz", - self.bin_name, self.version, self.target_arch - ), + format!("{}-{}-unknown-linux-gnu.tar.gz", self.bin_name, self.arch), &build_dir, ["."], )?; - println!("creating deb..."); - - // NOTE: -1 is the deb revision number - let deb_name = format!("{}-{}-1-amd64.deb", self.bin_name, self.version); - Command::new("cargo") - .args(["deb", "-p", "tetanes", "-o"]) - .arg(self.dist_dir.join(&deb_name)) - .spawn()? - .wait()?; - let deb_sha_name = format!("{deb_name}-sha256.txt"); - self.write_sha256( - self.dist_dir.join(&deb_name), - self.dist_dir.join(deb_sha_name), - )?; - - println!("creating AppImage..."); - - let app_dir = build_dir.join("AppDir"); - - Command::new(format!( - "vendored/linuxdeploy-{}.AppImage", - self.target_arch - )) - .arg("-e") - .arg(self.bin_path()) - .arg("-i") - .arg("assets/linux/icon.png") - .arg("-d") - .arg("assets/linux/TetaNES.desktop") - .arg("--appdir") - .arg(&app_dir) - .arg("--output") - .arg("appimage") - .spawn()? 
- .wait()?; - - let app_image_name = format!("{}-{}.AppImage", self.app_name, self.target_arch); - fs::rename(&app_image_name, self.dist_dir.join(&app_image_name))?; - let app_image_sha_name = format!("{app_image_name}-sha256.txt"); - self.write_sha256( - self.dist_dir.join(&app_image_name), - self.dist_dir.join(app_image_sha_name), - )?; + // TODO: Fix deb/AppImage for cross builds + if !self.cross { + // Debian .deb + // NOTE: 1- is the deb revision number + let deb_name = format!("{}-1-amd64.deb", self.bin_name); + let deb_path_dist = self.dist_dir.join(&deb_name); + cmd_spawn_wait( + Command::new("cargo") + .args([ + "deb", + "-v", + "-p", + "tetanes", + "--profile", + "dist", + "--target", + &self.target_arch, + "--no-build", // already built + "--no-strip", // already stripped + "-o", + ]) + .arg(&deb_path_dist), + )?; + + self.write_sha256( + &deb_path_dist, + self.dist_dir.join(format!("{deb_name}-sha256.txt")), + )?; + + // AppImage + let linuxdeploy_cmd = format!("vendored/linuxdeploy-{}.AppImage", self.arch); + let app_dir = build_dir.join("AppDir"); + let desktop_name = format!("assets/linux/{}.desktop", self.bin_name); + cmd_spawn_wait( + Command::new(&linuxdeploy_cmd) + .arg("-e") + .arg(&self.bin_path) + .args([ + "-i", + "assets/linux/icon.png", + "-d", + &desktop_name, + "--appdir", + ]) + .arg(&app_dir) + .args(["--output", "appimage"]), + )?; + + // NOTE: AppImage name is derived from tetanes.desktop + // Rename to lowercase + let app_image_name = format!("{}-{}.AppImage", self.bin_name, self.arch); + let app_image_path = PathBuf::from(format!("{}-{}.AppImage", self.app_name, self.arch)); + let app_image_path_dist = self.dist_dir.join(&app_image_name); + rename(&app_image_path, &app_image_path_dist)?; + self.write_sha256( + &app_image_path_dist, + self.dist_dir.join(format!("{app_image_name}-sha256.txt")), + )?; + } Ok(()) } - /// Create macOS app. + /// Create macOS artifacts (.app in a .tar.gz and separate .dmg). #[cfg(target_os = "macos")] - fn create_macos_app(&self) -> io::Result<()> { - use std::os::unix::fs::symlink; - + fn create_macos_app(&self) -> anyhow::Result<()> { println!("creating macos app..."); - let artifact_name = format!("{}-{}-{}", self.app_name, self.version, self.target_arch); + let build_dir = self.create_build_dir("macos")?; + + let artifact_name = format!("{}-{}", self.bin_name, self.arch); let volume = PathBuf::from("/Volumes").join(&artifact_name); - let dmg_name = format!("{artifact_name}-Uncompressed.dmg"); + let app_name = format!("{}.app", self.app_name); + let dmg_name = format!("{artifact_name}-uncompressed.dmg"); + let dmg_path = build_dir.join(&dmg_name); let dmg_name_compressed = format!("{artifact_name}.dmg"); + let dmg_path_compressed = build_dir.join(&dmg_name_compressed); + let dmg_path_dist = self.dist_dir.join(&dmg_name_compressed); - println!("creating dmg volume: {dmg_name_compressed}"); - - let build_dir = self.create_build_dir("macos")?; + if let Err(err) = cmd_status(Command::new("hdiutil").arg("detach").arg(&volume)) { + eprintln!("failed to detach volume: {err:?}"); + } + cmd_spawn_wait( + Command::new("hdiutil") + .args(["create", "-size", "50m", "-volname", &artifact_name]) + .arg(&dmg_path), + )?; + cmd_spawn_wait(Command::new("hdiutil").arg("attach").arg(&dmg_path))?; - let _ = Command::new("hdiutil").arg("detach").arg(&volume).status(); - Command::new("hdiutil") - .args(["create", "-size", "50m", "-volname"]) - .arg(&artifact_name) - .arg(build_dir.join(&dmg_name)) - .spawn()? 
- .wait()?; - Command::new("hdiutil") - .arg("attach") - .arg(build_dir.join(&dmg_name)) - .spawn()? - .wait()?; - - println!("creating directories: {volume:?}"); - - let app_dir = volume.join(format!("{}.app", self.app_name)); - fs::create_dir_all(app_dir.join("Contents/MacOS"))?; - fs::create_dir_all(app_dir.join("Contents/Resources"))?; - fs::create_dir_all(volume.join(".Picture"))?; + let app_dir = volume.join(&app_name); + create_dir_all(app_dir.join("Contents/MacOS"))?; + create_dir_all(app_dir.join("Contents/Resources"))?; + create_dir_all(volume.join(".Picture"))?; println!("updating Info.plist version: {}", self.version); - let mut info_plist = fs::read_to_string("assets/macos/Info.plist")?; + let mut info_plist = read_to_string("assets/macos/Info.plist")?; info_plist = info_plist.replace("%VERSION%", self.version); - fs::write(app_dir.join("Contents/Info.plist"), info_plist)?; - - println!("copying assets..."); + write(app_dir.join("Contents/Info.plist"), info_plist)?; // TODO: maybe include readme/license? - fs::copy( + copy( "assets/macos/Icon.icns", app_dir.join("Contents/Resources/Icon.icns"), )?; - fs::copy( + copy( "assets/macos/background.png", volume.join(".Picture/background.png"), )?; - fs::copy( - self.bin_path(), + copy( + &self.bin_path, app_dir.join("Contents/MacOS").join(self.bin_name), )?; - println!("creating /Applications symlink..."); symlink("/Applications", volume.join("Applications"))?; println!("configuring app bundle window..."); @@ -304,7 +356,7 @@ impl Build { set arrangement of the icon view options of container window to not arranged set position of item ".Picture" to {{800, 320}} set position of item ".fseventsd" to {{800, 320}} - set position of item "{app_name}.app" to {{150, 300}} + set position of item "{app_name}" to {{150, 300}} close set position of item "Applications" to {{425, 300}} open @@ -317,117 +369,190 @@ impl Build { delay 1 -- sync end tell "#, - app_name = self.app_name, volume = volume.display() ); - Command::new("osascript") - .arg("-e") - .arg(&configure_bundle_script) - .spawn()? - .wait()?; - - println!("signing code..."); - Command::new("codesign") - .args(["--force", "--sign", "-"]) - .arg(app_dir.join("Contents/MacOS").join(self.bin_name)) - .spawn()? - .wait()?; + cmd_spawn_wait( + Command::new("osascript") + .arg("-e") + .arg(&configure_bundle_script), + )?; + + let app_bin_path = app_dir.join("Contents/MacOS").join(self.bin_name); + cmd_spawn_wait( + Command::new("codesign") + .args(["--force", "--sign", "-"]) + .arg(&app_bin_path), + )?; // TODO: fix // ensure spctl --assess --type execute "${VOLUME}/${APP_NAME}.app" - Command::new("codesign") - .args(["--verify", "--strict", "--verbose=2"]) - .arg(app_dir.join("Contents/MacOS").join(self.bin_name)) - .spawn()? - .wait()?; + cmd_spawn_wait( + Command::new("codesign") + .args(["--verify", "--strict", "--verbose=2"]) + .arg(&app_bin_path), + )?; self.tar_gz( - format!( - "{}-{}-{}-apple.tar.gz", - self.bin_name, self.version, self.target_arch - ), + format!("{}-{}-apple-darwin.tar.gz", self.bin_name, self.arch), &volume, - [&format!("{}.app", self.app_name)], + [&app_name], )?; - println!("compressing dmg..."); - - Command::new("hdiutil") - .arg("detach") - .arg(&volume) - .spawn()? - .wait()?; - Command::new("hdiutil") - .args(["convert", "-format", "UDBZ", "-o"]) - .arg(build_dir.join(&dmg_name_compressed)) - .arg(build_dir.join(&dmg_name)) - .spawn()? 
- .wait()?; - - println!("writing artifacts..."); - - fs::copy( - build_dir.join(&dmg_name_compressed), - self.dist_dir.join(&dmg_name_compressed), + cmd_spawn_wait(Command::new("hdiutil").arg("detach").arg(&volume))?; + cmd_spawn_wait( + Command::new("hdiutil") + .args(["convert", "-format", "UDBZ", "-o"]) + .arg(&dmg_path_compressed) + .arg(&dmg_path), )?; - let dmg_sha_name = format!("{artifact_name}-sha256.txt"); + + rename(&dmg_path_compressed, &dmg_path_dist)?; self.write_sha256( - self.dist_dir.join(&dmg_name_compressed), - self.dist_dir.join(dmg_sha_name), + &dmg_path_dist, + self.dist_dir + .join(format!("{dmg_name_compressed}-sha256.txt")), + ) + } + + /// Create Windows artifacts (.msi). + #[cfg(target_os = "windows")] + fn create_windows_installer(&self) -> anyhow::Result<()> { + println!("creating windows installer..."); + + let installer_name = format!("{}-{}.msi", self.bin_name, self.arch); + let installer_path_dist = self.dist_dir.join(&installer_name); + + cmd_spawn_wait( + Command::new("cargo") + .args([ + "wix", + "-v", + "-p", + "tetanes", + "--profile", + "dist", + "--target", + &self.target_arch, + "--nocapture", + "-o", + ]) + .arg(&installer_path_dist), )?; - println!("cleaning up..."); + // TODO: maybe zip installer? + self.write_sha256( + &installer_path_dist, + self.dist_dir.join(format!("{installer_name}-sha256.txt")), + ) + } - fs::remove_file(build_dir.join(&dmg_name))?; + /// Compress web artifacts (.tar.gz). + fn compress_web_artifacts(&self) -> anyhow::Result<()> { + println!("compressing web artifacts..."); - Ok(()) + let build_dir = self.dist_dir.join("web"); + self.tar_gz( + format!("{}-{}.tar.gz", self.bin_name, self.target_arch), + &build_dir, + ["."], + )?; + + remove_dir_all(&build_dir) } +} - /// Create Windows installer. - #[cfg(target_os = "windows")] - fn create_windows_installer(&self) -> io::Result<()> { - println!("creating windows installer..."); +/// Helper function to `copy` a file and report contextual errors. +fn copy(src: impl AsRef, dst: impl AsRef) -> anyhow::Result { + let src = src.as_ref(); + let dst = dst.as_ref(); - let build_dir = self.create_build_dir("wix")?; + println!("copying: {src:?} to {dst:?}"); - let artifact_name = format!("{}-{}-{}", self.app_name, self.version, self.target_arch); - let installer_name = format!("{artifact_name}.msi"); + fs::copy(src, dst).with_context(|| format!("failed to copy {src:?} to {dst:?}")) +} - println!("building installer..."); +/// Helper function to `rename` a file and report contextual errors. +fn rename(src: impl AsRef, dst: impl AsRef) -> anyhow::Result<()> { + let src = src.as_ref(); + let dst = dst.as_ref(); - Command::new("cargo") - .args(["wix", "-p", "tetanes", "--nocapture"]) - .spawn()? - .wait()?; + println!("renaming: {src:?} to {dst:?}"); - println!("writing artifacts..."); + fs::rename(src, dst).with_context(|| format!("failed to rename {src:?} to {dst:?}")) +} - fs::copy( - build_dir.join(&installer_name), - self.dist_dir.join(&installer_name), - )?; - let sha_name = format!("{installer_name}-sha256.txt"); - self.write_sha256( - self.dist_dir.join(&installer_name), - self.dist_dir.join(sha_name), - )?; +/// Helper function to `create_dir_all` a directory and report contextual errors. +fn create_dir_all(dir: impl AsRef) -> anyhow::Result<()> { + let dir = dir.as_ref(); - Ok(()) - } + println!("creating dir: {dir:?}"); - /// Compress web artifacts. 
- fn compress_web_artifacts(&self) -> io::Result<()> { - println!("compressing web artifacts..."); + fs::create_dir_all(dir).with_context(|| format!("failed to create {dir:?}")) +} - self.tar_gz( - format!("{}-{}-web.tar.gz", self.bin_name, self.version), - self.dist_dir.join("web"), - ["."], - )?; +/// Helper function to `remove_dir_all` a directory and report contextual errors. +fn remove_dir_all(dir: impl AsRef) -> anyhow::Result<()> { + let dir = dir.as_ref(); - println!("cleaning up..."); + println!("removing dir: {dir:?}"); - fs::remove_dir_all(self.dist_dir.join("web"))?; + fs::remove_dir_all(dir).with_context(|| format!("failed to remove {dir:?}")) +} - Ok(()) - } +/// Helper function to `write` to a file and report contextual errors. +fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> anyhow::Result<()> { + let path = path.as_ref(); + + println!("writing to path: {path:?}"); + + let contents = contents.as_ref(); + fs::write(path, contents).with_context(|| format!("failed to write to {path:?}")) +} + +/// Helper function to `read_to_string` and report contextual errors. +#[cfg(target_os = "macos")] +fn read_to_string(path: impl AsRef) -> anyhow::Result { + let path = path.as_ref(); + + println!("reading to string: {path:?}"); + + fs::read_to_string(path).with_context(|| format!("failed to read {path:?}")) +} + +/// Helper function to `symlink` and report contextual errors. +#[cfg(unix)] +fn symlink(src: impl AsRef, dst: impl AsRef) -> anyhow::Result<()> { + use std::os::unix::fs::symlink; + + let src = src.as_ref(); + let dst = dst.as_ref(); + + println!("symlinking: {src:?} to {dst:?}"); + + symlink(src, dst).with_context(|| format!("failed to symlink {src:?} to {dst:?}")) +} + +/// Helper function to `spawn` [`Command`] and `wait` while reporting contextual errors. +fn cmd_spawn_wait(cmd: &mut Command) -> anyhow::Result { + println!("running: {cmd:?}"); + + cmd.spawn() + .with_context(|| format!("failed to spawn {cmd:?}"))? + .wait() + .with_context(|| format!("failed to run {cmd:?}")) +} + +/// Helper function to run [`Command`] with `output` while reporting contextual errors. +fn cmd_output(cmd: &mut Command) -> anyhow::Result { + println!("running: {cmd:?}"); + + cmd.output() + .with_context(|| format!("failed to run {cmd:?}")) +} + +/// Helper function to run [`Command`] with `status` while reporting contextual errors. +fn cmd_status(cmd: &mut Command) -> anyhow::Result { + println!("running: {cmd:?}"); + + cmd.status() + .with_context(|| format!("failed to run {cmd:?}")) } diff --git a/tetanes/src/bin/update_homebrew_formula.rs b/tetanes/src/bin/update_homebrew_formula.rs deleted file mode 100644 index 2f9dd77b..00000000 --- a/tetanes/src/bin/update_homebrew_formula.rs +++ /dev/null @@ -1,55 +0,0 @@ -use std::{env, fs, io, path::PathBuf, process::Command}; - -/// Update the homebrew formula. -fn main() -> io::Result<()> { - println!("updating homebrew formula..."); - - let cargo_target_dir = - PathBuf::from(env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target".to_string())); - let build_dir = cargo_target_dir.join("homebrew"); - let version = env::args() - .next() - .expect("must provide a version, e.g. 0.10.0"); - - Command::new("gh") - .args(["release", "download"]) - .arg(format!("v{version}")) - .args(["--pattern", "*-apple.tar.gz", "--dir"]) - .arg(&build_dir) - .spawn()? 
- .wait()?; - - let x86_64_sha = - fs::read_to_string(build_dir.join("tetanes-0.10.0-x86_64-apple.tar.gz-sha256.txt"))?; - let x86_64_sha = x86_64_sha - .split_whitespace() - .next() - .expect("missing sha256"); - let aarch64_sha = - fs::read_to_string(build_dir.join("tetanes-0.10.0-aarch64-apple.tar.gz-sha256.txt"))?; - let aarch64_sha = aarch64_sha - .split_whitespace() - .next() - .expect("missing sha256"); - - Command::new("git") - .current_dir("homebrew-formulae") - .arg("pull") - .spawn()? - .wait()?; - - let mut formula = fs::read_to_string("homebrew-formulae/tetanes.rb.tmpl")?; - formula = formula.replace("%VERSION%", &version); - formula = formula.replace("%x86_64_SHA%", x86_64_sha); - formula = formula.replace("%aarch64_SHA%", aarch64_sha); - fs::write("homebrew-formulae/Casks/tetanes.rb", formula)?; - - Command::new("git") - .args(["commit", "-m"]) - .arg(format!("Version bump: {version}")) - .spawn()? - .wait()?; - Command::new("git").arg("push").spawn()?.wait()?; - - Ok(()) -} diff --git a/tetanes/src/lib.rs b/tetanes/src/lib.rs index 0d0b2e00..84ead67e 100644 --- a/tetanes/src/lib.rs +++ b/tetanes/src/lib.rs @@ -1,7 +1,7 @@ #![doc = include_str!("../README.md")] #![doc( - html_favicon_url = "https://github.com/lukexor/tetanes/blob/main/assets/tetanes_icon.png?raw=true", - html_logo_url = "https://github.com/lukexor/tetanes/blob/main/assets/tetanes_icon.png?raw=true" + html_favicon_url = "https://github.com/lukexor/tetanes/blob/main/assets/linux/icon.png?raw=true", + html_logo_url = "https://github.com/lukexor/tetanes/blob/main/assets/linux/icon.png?raw=true" )] pub mod error; diff --git a/tetanes/src/nes/renderer/gui.rs b/tetanes/src/nes/renderer/gui.rs index 6b5f4920..e88a60b2 100644 --- a/tetanes/src/nes/renderer/gui.rs +++ b/tetanes/src/nes/renderer/gui.rs @@ -44,7 +44,7 @@ use tetanes_core::{ time::{Duration, Instant}, video::VideoFilter, }; -use tracing::info; +use tracing::{error, info, warn}; use uuid::Uuid; use winit::{ event::{ElementState, MouseButton}, @@ -289,7 +289,11 @@ impl Gui { S: Into, { let text = text.into(); - info!("{text}"); + match ty { + MessageType::Info => info!("{text}"), + MessageType::Warn => warn!("{text}"), + MessageType::Error => error!("{text}"), + } self.messages .push((ty, text, Instant::now() + Self::MSG_TIMEOUT)); } @@ -374,6 +378,13 @@ impl Gui { .insert(0, MONO_FONT.0.to_string()); ctx.set_fonts(fonts); + // Check for update on start + if self.version.requires_updates() { + if let Ok(update_available) = self.version.update_available() { + self.update_window_open = update_available; + } + } + self.initialized = true; } @@ -766,6 +777,7 @@ impl Gui { fn show_update_window(&mut self, ctx: &Context) { let mut update_window_open = self.update_window_open; let mut close_window = false; + let enable_auto_update = false; egui::Window::new("Update Available") .open(&mut update_window_open) .resizable(false) @@ -780,31 +792,34 @@ impl Gui { ui.separator(); ui.add_space(15.0); - ui.label("Would you like to install it and restart?"); - ui.add_space(15.0); + // TODO: Add auto-update for each platform + if enable_auto_update { + ui.label("Would you like to install it and restart?"); + ui.add_space(15.0); - ui.horizontal(|ui| { - let res = ui.button("Continue").on_hover_text(format!( - "Install the latest version (v{}) restart TetaNES.", - self.version.current() - )); - if res.clicked() { - if let Err(err) = self.version.install_update_and_restart() { - self.add_message( - MessageType::Error, - format!("Failed to install update: {err}"), - ); + 
ui.horizontal(|ui| { + let res = ui.button("Continue").on_hover_text(format!( + "Install the latest version (v{}) restart TetaNES.", + self.version.current() + )); + if res.clicked() { + if let Err(err) = self.version.install_update_and_restart() { + self.add_message( + MessageType::Error, + format!("Failed to install update: {err}"), + ); + close_window = true; + } + } + let res = ui.button("Cancel").on_hover_text(format!( + "Keep the current version of TetaNES (v{}).", + self.version.current() + )); + if res.clicked() { close_window = true; } - } - let res = ui.button("Cancel").on_hover_text(format!( - "Keep the current version of TetaNES (v{}).", - self.version.current() - )); - if res.clicked() { - close_window = true; - } - }); + }); + } }); if close_window { update_window_open = false; @@ -1376,15 +1391,17 @@ impl Gui { if self.version.requires_updates() && ui.button("🌐 Check for Updates...").clicked() { match self.version.update_available() { - Ok(update_available) => self.update_window_open = update_available, + Ok(update_available) => { + self.update_window_open = update_available; + if !update_available { + self.add_message( + MessageType::Info, + format!("TetaNES v{} is up to date!", self.version.current()), + ); + } + } Err(err) => self.add_message(MessageType::Error, err.to_string()), } - if !self.update_window_open { - self.add_message( - MessageType::Info, - format!("TetaNES v{} is up to date!", self.version.current()), - ); - } ui.close_menu(); } ui.toggle_value(&mut self.about_open, "ℹ About"); diff --git a/tetanes/src/sys/platform/wasm.rs b/tetanes/src/sys/platform/wasm.rs index f59391e1..e8f636a0 100644 --- a/tetanes/src/sys/platform/wasm.rs +++ b/tetanes/src/sys/platform/wasm.rs @@ -5,11 +5,14 @@ use crate::{ Running, }, platform::{BuilderExt, EventLoopExt, Feature, Initialize}, + thread, }; use anyhow::{bail, Context}; use std::path::PathBuf; -use wasm_bindgen::{closure::Closure, JsCast, JsValue}; -use web_sys::{js_sys::Uint8Array, window, FileReader, HtmlCanvasElement, HtmlInputElement}; +use wasm_bindgen::prelude::*; +use web_sys::{ + js_sys::Uint8Array, FileReader, HtmlAnchorElement, HtmlCanvasElement, HtmlInputElement, +}; use winit::{ event::Event, event_loop::{EventLoop, EventLoopProxy, EventLoopWindowTarget}, @@ -17,13 +20,17 @@ use winit::{ window::WindowBuilder, }; -pub const fn supports_impl(feature: Feature) -> bool { - match feature { - Feature::Storage => true, - Feature::Filesystem | Feature::Viewports | Feature::Suspend => false, - } -} +const BIN_NAME: &str = env!("CARGO_PKG_NAME"); +const VERSION: &str = env!("CARGO_PKG_VERSION"); +const OS_OPTIONS: [(Os, Arch, &str); 5] = [ + (Os::Unknown, Arch::X86_64, html_ids::SELECTED_VERSION), + (Os::Windows, Arch::X86_64, html_ids::WINDOWS_X86_LINK), + (Os::MacOs, Arch::Aarch64, html_ids::MACOS_AARCH64_LINK), + (Os::MacOs, Arch::X86_64, html_ids::MACOS_X86_LINK), + (Os::Linux, Arch::X86_64, html_ids::LINXU_X86_LINK), +]; +/// Method for platforms supporting opening a file dialog. pub fn open_file_dialog_impl( _title: impl Into, _name: impl Into, @@ -54,127 +61,397 @@ pub fn open_file_dialog_impl( Ok(None) } -impl Initialize for Running { - fn initialize(&mut self) -> anyhow::Result<()> { - let window = web_sys::window().context("valid js window")?; - let document = window.document().context("valid html document")?; +/// Checks if the current platform supports a given feature. 
+pub const fn supports_impl(feature: Feature) -> bool { + match feature { + Feature::Storage => true, + Feature::Filesystem | Feature::Viewports | Feature::Suspend => false, + } +} - let on_error = |tx: &EventLoopProxy, err: JsValue| { - tx.nes_event(UiEvent::Error( - err.as_string() - .unwrap_or_else(|| "failed to load rom".to_string()), - )); - }; +/// Helper method to log and send errors to the UI thread from javascript. +fn on_error(tx: &EventLoopProxy, err: JsValue) { + tracing::error!("{err:?}"); + tx.nes_event(UiEvent::Error( + err.as_string() + .unwrap_or_else(|| "failed to load rom".to_string()), + )); +} - for input_id in [html_ids::ROM_INPUT, html_ids::REPLAY_INPUT] { - let on_change = Closure::::new({ - let tx = self.tx.clone(); - move |evt: web_sys::Event| { - match FileReader::new() { - Ok(reader) => { - let Some(file) = evt - .current_target() - .and_then(|target| target.dyn_into::().ok()) - .and_then(|input| input.files()) - .and_then(|files| files.item(0)) - else { - tx.nes_event(UiEvent::FileDialogCancelled); - return; - }; - match reader.read_as_array_buffer(&file) { - Ok(_) => { - let on_load = Closure::::new({ - let reader = reader.clone(); - let tx = tx.clone(); - move || match reader.result() { - Ok(result) => { - let data = Uint8Array::new(&result); - let event = match input_id { - html_ids::ROM_INPUT => EmulationEvent::LoadRom( - (file.name(), RomData(data.to_vec())), - ), - html_ids::REPLAY_INPUT => { - EmulationEvent::LoadReplay(( - file.name(), - ReplayData(data.to_vec()), - )) - } - _ => unreachable!("unsupported input id"), - }; - tx.nes_event(event); - focus_canvas(); - } - Err(err) => on_error(&tx, err), - } - }); - reader.set_onload(Some(on_load.as_ref().unchecked_ref())); - on_load.forget(); - } - Err(err) => on_error(&tx, err), - } - } - Err(err) => on_error(&tx, err), - }; - } - }); +/// Sets up the window resize handler for responding to changes in the viewport size. +fn set_resize_handler(window: &web_sys::Window, tx: &EventLoopProxy) { + let on_resize = Closure::::new({ + let tx = tx.clone(); + move |_: web_sys::Event| { + if let Some(window) = web_sys::window() { + let width = window + .inner_width() + .ok() + .and_then(|w| w.as_f64()) + .map_or(0.0, |w| w as f32); + let height = window + .inner_height() + .ok() + .and_then(|h| h.as_f64()) + .map_or(0.0, |h| h as f32); + tx.nes_event(RendererEvent::BrowserResized((width, height))); + } + } + }); - let on_cancel = Closure::::new({ - let tx = self.tx.clone(); - move |_: web_sys::Event| { - focus_canvas(); + let on_resize_cb = on_resize.as_ref().unchecked_ref(); + if let Err(err) = window.add_event_listener_with_callback("resize", on_resize_cb) { + on_error(tx, err); + } + + on_resize.forget(); +} + +/// Sets up the onload handler for reading loaded files. 
+fn set_file_onload_handler( + tx: EventLoopProxy, + input_id: &'static str, + reader: web_sys::FileReader, + file_name: String, +) -> anyhow::Result<()> { + let on_load = Closure::::new({ + let reader = reader.clone(); + move || match reader.result() { + Ok(result) => { + let data = Uint8Array::new(&result).to_vec(); + let event = match input_id { + html_ids::ROM_INPUT => { + EmulationEvent::LoadRom((file_name.clone(), RomData(data))) + } + html_ids::REPLAY_INPUT => { + EmulationEvent::LoadReplay((file_name.clone(), ReplayData(data))) + } + _ => unreachable!("unsupported input id"), + }; + tx.nes_event(event); + focus_canvas(); + } + Err(err) => on_error(&tx, err), + } + }); + + reader.set_onload(Some(on_load.as_ref().unchecked_ref())); + + on_load.forget(); + + Ok(()) +} + +/// Sets up the onchange and oncancel handlers for file input elements. +fn set_file_onchange_handlers( + document: &web_sys::Document, + tx: &EventLoopProxy, + input_id: &'static str, +) -> anyhow::Result<()> { + let on_change = Closure::::new({ + let tx = tx.clone(); + move |evt: web_sys::Event| match FileReader::new() { + Ok(reader) => { + let Some(file) = evt + .current_target() + .and_then(|target| target.dyn_into::().ok()) + .and_then(|input| input.files()) + .and_then(|files| files.item(0)) + else { tx.nes_event(UiEvent::FileDialogCancelled); + return; + }; + if let Err(err) = reader + .read_as_array_buffer(&file) + .map(|_| set_file_onload_handler(tx.clone(), input_id, reader, file.name())) + { + on_error(&tx, err); } - }); + } + Err(err) => on_error(&tx, err), + } + }); - let input = document - .get_element_by_id(input_id) - .with_context(|| format!("valid {input_id} button"))?; - if let Err(err) = - input.add_event_listener_with_callback("change", on_change.as_ref().unchecked_ref()) - { - on_error(&self.tx, err); + let on_cancel = Closure::::new({ + let tx = tx.clone(); + move |_: web_sys::Event| { + focus_canvas(); + tx.nes_event(UiEvent::FileDialogCancelled); + } + }); + + let input = document + .get_element_by_id(input_id) + .with_context(|| format!("valid {input_id} button"))?; + let on_change_cb = on_change.as_ref().unchecked_ref(); + let on_cancel_cb = on_cancel.as_ref().unchecked_ref(); + if let Err(err) = input + .add_event_listener_with_callback("change", on_change_cb) + .and_then(|_| input.add_event_listener_with_callback("cancel", on_cancel_cb)) + { + on_error(tx, err) + } + + on_change.forget(); + on_cancel.forget(); + + Ok(()) +} + +/// Enumeration of supported operating systems. +#[derive(Debug, Copy, Clone)] +#[must_use] +enum Os { + Unknown, + Windows, + #[allow(clippy::enum_variant_names)] + MacOs, + Linux, + Mobile, +} + +impl std::fmt::Display for Os { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let os = match self { + Os::Windows => "Windows", + Os::MacOs => "macOS", + Os::Linux => "Linux", + _ => "Desktop", + }; + write!(f, "{os}") + } +} + +/// Enumeration of supported CPU architectures. +#[derive(Debug, Copy, Clone)] +#[must_use] +enum Arch { + X86_64, + Aarch64, +} + +impl std::fmt::Display for Arch { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let arch = match self { + Arch::X86_64 => "x86_64", + Arch::Aarch64 => "aarch64", + }; + write!(f, "{arch}") + } +} + +/// Converts the operating system and architecture to a human-readable string. 
+const fn platform_to_string(os: Os, arch: Arch) -> &'static str { + match (os, arch) { + (Os::Windows, Arch::X86_64) => "Windows", + (Os::MacOs, Arch::X86_64) => "Mac - Intel Chip", + (Os::MacOs, Arch::Aarch64) => "Mac - Apple Chip", + (Os::Linux, Arch::X86_64) => "Linux", + (Os::Mobile, _) => "Mobile", + _ => "Desktop", + } +} + +#[wasm_bindgen] +extern "C" { + /// Extends the `Navigator` object to support the `userAgentData` method. + #[wasm_bindgen(extends = web_sys::Navigator)] + type NavigatorExt; + + /// The `NavigatorUAData` is what's returned from `navigator.userAgentData` on browsers that + /// support it. + type NavigatorUAData; + + /// The `HighEntropyValues` object is returned from `navigator.userAgentData.getHighEntropyValues`. + #[derive(Debug)] + #[wasm_bindgen(js_name = Object)] + type HighEntropyValues; + + /// `navigator.userAgentData` for browsers that support it. + #[wasm_bindgen(method, getter, js_name = userAgentData)] + fn user_agent_data(this: &NavigatorExt) -> Option; + + /// `navigator.userAgentData.getHighEntropyValues()` for browsers that support it. + #[wasm_bindgen(method, js_name = getHighEntropyValues)] + async fn get_high_entropy_values(this: &NavigatorUAData, hints: Vec) -> JsValue; + + /// `HighEntropyValues.mobile` indicates whether the detected platform is a mobile device. + #[wasm_bindgen(method, getter, js_class = "HighEntropyValues")] + fn mobile(this: &HighEntropyValues) -> bool; + + /// `HighEntropyValues.platform` indicates the detected OS platform (e.g. `Windows`). + #[wasm_bindgen(method, getter, js_class = "HighEntropyValues")] + fn platform(this: &HighEntropyValues) -> String; + + /// `HighEntropyValues.platform` indicates the detected CPU architecture. (e.g. `x86`). + #[wasm_bindgen(method, getter, js_class = "HighEntropyValues")] + fn architecture(this: &HighEntropyValues) -> String; +} + +/// Detects the user's platform and architecture. +async fn detect_user_platform() -> anyhow::Result<(Os, Arch)> { + let navigator = web_sys::window() + .map(|win| win.navigator()) + .context("failed to get navigator")?; + + let user_agent = navigator.user_agent().unwrap_or_default(); + let mut os = if user_agent.contains("Mobile") { + Os::Mobile + } else if user_agent.contains("Windows") { + Os::Windows + } else if user_agent.contains("Mac") { + Os::MacOs + } else if user_agent.contains("Linux") { + Os::Linux + } else { + Os::Unknown + }; + let mut arch = Arch::X86_64; + + // FIXME: Currently unsupported on Firefox/Safari but it's the only way to derive + // macOS aarch64 + let navigator_ext = NavigatorExt { obj: navigator }; + let Some(ua_data) = navigator_ext.user_agent_data() else { + return Ok((os, arch)); + }; + let Ok(ua_values) = ua_data + .get_high_entropy_values(vec![ + "architecture".into(), + "platform".into(), + "bitness".into(), + ]) + .await + .dyn_into::() + else { + return Ok((os, arch)); + }; + if ua_values.mobile() { + os = Os::Mobile; + } else { + match ua_values.platform().as_str() { + "Windows" => os = Os::Windows, + "macOS" => { + os = Os::MacOs; + arch = if ua_values.architecture().starts_with("x86") { + Arch::X86_64 + } else { + Arch::Aarch64 + }; } - if let Err(err) = - input.add_event_listener_with_callback("cancel", on_cancel.as_ref().unchecked_ref()) + "Linux" => os = Os::Linux, + _ => (), + } + }; + + Ok((os, arch)) +} + +/// Constructs the download URL for the given operating system and architecture. 
+fn download_url_by_os(os: Os, arch: Arch) -> String { + let base_url = + format!("https://github.com/lukexor/tetanes/releases/download/tetanes-v{VERSION}"); + match os { + Os::MacOs => format!("{base_url}/{BIN_NAME}-{arch}.dmg"), + Os::Windows => format!("{base_url}/{BIN_NAME}-{arch}.msi"), + Os::Linux => format!("{base_url}/{BIN_NAME}-{arch}-unknown-linux-gnu.tar.gz"), + _ => format!("https://github.com/lukexor/tetanes/releases/tag/tetanes-v{VERSION}"), + } +} + +/// Sets the download links to the correct release artifacts. +fn set_download_versions(document: &web_sys::Document) { + if let Some(version) = document.get_element_by_id(html_ids::VERSION) { + version.set_inner_html(concat!("v", env!("CARGO_PKG_VERSION"))); + } + + let document = document.clone(); + thread::spawn(async move { + // Update download links to the correct release artifacts + for (os, arch, id) in OS_OPTIONS { + if let Some(download_link) = document + .get_element_by_id(id) + .and_then(|el| el.dyn_into::().ok()) { - on_error(&self.tx, err); + download_link.set_href(&download_url_by_os(os, arch)); + let platform = platform_to_string(os, arch); + download_link.set_inner_text(&format!("Download for {platform}")); } - on_change.forget(); - on_cancel.forget(); } - let on_resize = Closure::::new({ - let tx = self.tx.clone(); - move |_: web_sys::Event| { - if let Some(window) = web_sys::window() { - tx.nes_event(RendererEvent::BrowserResized(( - window - .inner_width() - .ok() - .and_then(|w| w.as_f64()) - .map_or(0.0, |w| w as f32), - window - .inner_height() - .ok() - .and_then(|h| h.as_f64()) - .map_or(0.0, |h| h as f32), - ))); - } - } - }); - if let Err(err) = - window.add_event_listener_with_callback("resize", on_resize.as_ref().unchecked_ref()) + // Set selected version to detected platform + if let Some(selected_version) = document + .get_element_by_id(html_ids::SELECTED_VERSION) + .and_then(|el| el.dyn_into::().ok()) { - on_error(&self.tx, err); + if let Ok((os, arch)) = detect_user_platform().await { + selected_version.set_href(&download_url_by_os(os, arch)); + let platform = platform_to_string(os, arch); + selected_version.set_inner_text(&format!("Download for {platform}")); + } } - on_resize.forget(); - if let Some(status) = document.get_element_by_id(html_ids::LOADING_STATUS) { - if let Err(err) = status.class_list().add_1("hidden") { + // Add mouseover/mouseout event listeners to version download links and make them visible + if let (Some(version_download), Some(version_options)) = ( + document.get_element_by_id(html_ids::VERSION_DOWNLOAD), + document.get_element_by_id(html_ids::VERSION_OPTIONS), + ) { + let on_mouseover = Closure::::new({ + let version_options = version_options.clone(); + move |_: web_sys::Event| { + if let Err(err) = version_options.class_list().remove_1("hidden") { + tracing::error!("{err:?}"); + } + } + }); + let on_mouseout = Closure::::new(move |_: web_sys::Event| { + if let Err(err) = version_options.class_list().add_1("hidden") { + tracing::error!("{err:?}"); + } + }); + let on_mouseover_cb = on_mouseover.as_ref().unchecked_ref(); + let on_mouseout_cb = on_mouseout.as_ref().unchecked_ref(); + if let Err(err) = version_download + .add_event_listener_with_callback("mouseover", on_mouseover_cb) + .and_then(|_| { + version_download.add_event_listener_with_callback("mouseout", on_mouseout_cb) + }) + .and_then(|_| version_download.class_list().remove_1("hidden")) + { tracing::error!("{err:?}"); - on_error(&self.tx, err); } + on_mouseover.forget(); + on_mouseout.forget(); } + }); +} + +/// 
Hides the loading status when the WASM module has finished loading. +fn finish_loading( + document: &web_sys::Document, + tx: &EventLoopProxy, +) -> anyhow::Result<()> { + if let Some(status) = document.get_element_by_id(html_ids::LOADING_STATUS) { + if let Err(err) = status.class_list().add_1("hidden") { + on_error(tx, err); + } + } + + Ok(()) +} + +impl Initialize for Running { + /// Initialize JS event handlers and DOM elements. + fn initialize(&mut self) -> anyhow::Result<()> { + let window = web_sys::window().context("valid window")?; + let document = window.document().context("valid html document")?; + + set_download_versions(&document); + set_resize_handler(&window, &self.tx); + for input_id in [html_ids::ROM_INPUT, html_ids::REPLAY_INPUT] { + set_file_onchange_handlers(&document, &self.tx, input_id)?; + } + + finish_loading(&document, &self.tx)?; Ok(()) } @@ -199,19 +476,31 @@ impl EventLoopExt for EventLoop { } mod html_ids { + //! HTML element IDs used to interact with the DOM. + pub(super) const CANVAS: &str = "frame"; pub(super) const LOADING_STATUS: &str = "loading-status"; pub(super) const ROM_INPUT: &str = "load-rom"; pub(super) const REPLAY_INPUT: &str = "load-replay"; + pub(super) const VERSION: &str = "version"; + pub(super) const VERSION_DOWNLOAD: &str = "version-download"; + pub(super) const VERSION_OPTIONS: &str = "version-options"; + pub(super) const SELECTED_VERSION: &str = "selected-version"; + pub(super) const WINDOWS_X86_LINK: &str = "x86_64-pc-windows-msvc"; + pub(super) const MACOS_X86_LINK: &str = "x86_64-apple-darwin"; + pub(super) const MACOS_AARCH64_LINK: &str = "aarch64-apple-darwin"; + pub(super) const LINXU_X86_LINK: &str = "x86_64-unknown-linux-gnu"; } +/// Gets the primary canvas element. pub fn get_canvas() -> Option { - window() + web_sys::window() .and_then(|win| win.document()) .and_then(|doc| doc.get_element_by_id(html_ids::CANVAS)) .and_then(|canvas| canvas.dyn_into::().ok()) } +/// Focuses the canvas element. pub fn focus_canvas() { if let Some(canvas) = get_canvas() { let _ = canvas.focus();