diff --git a/.github/pre-commit b/.github/pre-commit deleted file mode 100755 index d76935698..000000000 --- a/.github/pre-commit +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/sh - -# A pre-push hook for rust codebases that checks formatting, clippy, and tests - -set -eu - -if [[ "${IGNORE_RUSTHOOKS:=0}" -ne 0 ]]; then - echo "Ignoring rusthooks" - exit 0 -fi - -if ! cargo fmt -- --check -then - echo "There are some code style issues." - echo "Run cargo fmt first." - exit 1 -fi - -if ! cargo clippy --all-targets --all-features --workspace -- -D warnings -then - echo "There are some clippy issues." - exit 1 -fi - -if ! cargo test --all-features -then - echo "There are some test issues." - exit 1 -fi - -exit 0 diff --git a/.github/workflows/ci-py.yml b/.github/workflows/ci-py.yml new file mode 100644 index 000000000..2f693b763 --- /dev/null +++ b/.github/workflows/ci-py.yml @@ -0,0 +1,163 @@ +name: Continuous integration 🐍 + +on: + push: + branches: + - main + pull_request: + branches: + - main + merge_group: + types: [checks_requested] + workflow_dispatch: {} + +env: + SCCACHE_GHA_ENABLED: "true" + +jobs: + # Check if changes were made to the relevant files. + # Always returns true if running on the default branch, to ensure all changes are thoroughly checked. 
+ changes: + name: Check for changes in Python files + runs-on: ubuntu-latest + # Required permissions + permissions: + pull-requests: read + # Set job outputs to values from filter step + outputs: + python: ${{ github.ref_name == github.event.repository.default_branch || steps.filter.outputs.python }} + steps: + - uses: actions/checkout@v4 + - uses: dorny/paths-filter@v3 + id: filter + with: + filters: | + python: + - 'quantinuum-hugr-py/**' + - 'pyproject.toml' + - 'specification/schema/**' + + check: + needs: changes + if: ${{ needs.changes.outputs.python == 'true' }} + + name: check python + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: ['3.10'] + + steps: + - uses: actions/checkout@v4 + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.4 + - name: Install poetry + run: pipx install poetry + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: "poetry" + + - name: Install the project libraries + run: poetry install + + - name: Type check with mypy + run: poetry run mypy . 
+ + - name: Check formatting with ruff + run: poetry run ruff format --check + + - name: Lint with ruff + run: poetry run ruff check + + - name: Run tests + run: poetry run pytest + + # Ensure that the serialization schema is up to date + serialization-schema: + needs: [changes] + if: ${{ needs.changes.outputs.python == 'true' }} + name: Check serialization schema + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.4 + - name: Install poetry + run: pipx install poetry + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + cache: "poetry" + - name: Install the project libraries + run: poetry install + - name: Generate the updated schema + run: | + poetry run python scripts/generate_schema.py specification/schema/ + - name: Check if the schema is up to date + run: | + git diff --exit-code --name-only specification/schema/ + if [ $? -ne 0 ]; then + echo "The serialization schema is not up to date" + echo "Please run 'just update-schema' and commit the changes" + exit 1 + fi + + # This is a meta job to mark successful completion of the required checks, + # even if they are skipped due to no changes in the relevant files. + required-checks: + name: Required checks 🐍 + needs: [changes, check, serialization-schema] + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + - name: Fail if required checks failed + # This condition should simply be `if: failure() || cancelled()`, + # but there seems to be a bug in the github workflow runner. 
+ # + # See https://github.com/orgs/community/discussions/80788 + if: | + needs.changes.result == 'failure' || needs.changes.result == 'cancelled' || + needs.check.result == 'failure' || needs.check.result == 'cancelled' || + needs.serialization-schema.result == 'failure' || needs.serialization-schema.result == 'cancelled' + run: | + echo "Required checks failed" + echo "Please check the logs for more information" + exit 1 + - name: Pass if required checks passed + run: | + echo "All required checks passed" + + coverage: + needs: [changes, check] + # Run only if there are changes in the relevant files and the check job passed or was skipped + if: always() && !failure() && !cancelled() && needs.changes.outputs.python == 'true' && github.event_name != 'merge_group' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.4 + - name: Install poetry + run: pipx install poetry + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + cache: "poetry" + + - name: Install the project libraries + run: poetry install + + - name: Run python tests with coverage instrumentation + run: poetry run pytest --cov=./ --cov-report=xml + + - name: Upload python coverage to codecov.io + uses: codecov/codecov-action@v4 + with: + files: coverage.xml + name: python + flags: python + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/ci-rs.yml b/.github/workflows/ci-rs.yml new file mode 100644 index 000000000..66467bd5b --- /dev/null +++ b/.github/workflows/ci-rs.yml @@ -0,0 +1,183 @@ +name: Continuous integration 🦀 + +on: + push: + branches: + - main + pull_request: + branches: + - main + merge_group: + types: [checks_requested] + workflow_dispatch: {} + +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: 0 + RUSTFLAGS: "--cfg=ci_run" + MIRIFLAGS: '-Zmiri-permissive-provenance' # Required due to warnings in bitvec 1.0.1 + CI: true # insta snapshots behave 
differently on ci + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + +jobs: + # Check if changes were made to the relevant files. + # Always returns true if running on the default branch, to ensure all changes are thoroughly checked. + changes: + name: Check for changes in Rust files + runs-on: ubuntu-latest + # Required permissions + permissions: + pull-requests: read + # Set job outputs to values from filter step + outputs: + rust: ${{ github.ref_name == github.event.repository.default_branch || steps.filter.outputs.rust }} + steps: + - uses: actions/checkout@v4 + - uses: dorny/paths-filter@v3 + id: filter + with: + filters: | + rust: + - 'quantinuum-hugr/**' + - 'Cargo.toml' + - 'specification/schema/**' + + check: + needs: changes + if: ${{ needs.changes.outputs.rust == 'true' }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: mozilla-actions/sccache-action@v0.0.4 + - name: Install stable toolchain + uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt, clippy + - name: Check formatting + run: cargo fmt -- --check + - name: Run clippy + run: cargo clippy --all-targets --all-features --workspace -- -D warnings + - name: Build docs + run: cargo doc --no-deps --all-features --workspace + env: + RUSTDOCFLAGS: "-Dwarnings" + + benches: + needs: changes + if: ${{ needs.changes.outputs.rust == 'true' && github.event_name != 'merge_group' }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: mozilla-actions/sccache-action@v0.0.4 + - name: Install stable toolchain + uses: dtolnay/rust-toolchain@stable + - name: Build benchmarks with no features + run: cargo bench --verbose --no-run --workspace --no-default-features + - name: Build benchmarks with all features + run: cargo bench --verbose --no-run --workspace --all-features + + # Run tests on Rust stable + tests-stable: + needs: changes + if: ${{ needs.changes.outputs.rust == 'true' }} + runs-on: ubuntu-latest + name: tests (Rust stable) + steps: + - 
uses: actions/checkout@v4 + - uses: mozilla-actions/sccache-action@v0.0.4 + - id: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: 'stable' + - name: Configure default rust toolchain + run: rustup override set ${{steps.toolchain.outputs.name}} + - name: Build with no features + run: cargo test --verbose --workspace --no-default-features --no-run + - name: Tests with no features + run: cargo test --verbose --workspace --no-default-features + - name: Build with all features + run: cargo test --verbose --workspace --all-features --no-run + - name: Tests with all features + run: cargo test --verbose --workspace --all-features + + # Run tests on other toolchains + tests-other: + needs: changes + if: ${{ needs.changes.outputs.rust == 'true' && github.event_name != 'merge_group' }} + runs-on: ubuntu-latest + strategy: + fail-fast: true + matrix: + rust: ['1.75', beta, nightly] + name: tests (Rust ${{ matrix.rust }}) + steps: + - uses: actions/checkout@v4 + - uses: mozilla-actions/sccache-action@v0.0.4 + - id: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ matrix.rust }} + - name: Configure default rust toolchain + run: rustup override set ${{steps.toolchain.outputs.name}} + - name: Build with no features + run: cargo test --verbose --workspace --no-default-features --no-run + - name: Tests with no features + run: cargo test --verbose --workspace --no-default-features + - name: Build with all features + run: cargo test --verbose --workspace --all-features --no-run + - name: Tests with all features + run: cargo test --verbose --workspace --all-features + + # This is a meta job to mark successful completion of the required checks, + # even if they are skipped due to no changes in the relevant files. 
+ required-checks: + name: Required checks 🦀 + needs: [changes, check, tests-stable] + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + - name: Fail if required checks failed + # This condition should simply be `if: failure() || cancelled()`, + # but there seems to be a bug in the github workflow runner. + # + # See https://github.com/orgs/community/discussions/80788 + if: | + needs.changes.result == 'failure' || needs.changes.result == 'cancelled' || + needs.check.result == 'failure' || needs.check.result == 'cancelled' || + needs.tests-stable.result == 'failure' || needs.tests-stable.result == 'cancelled' + run: | + echo "Required checks failed" + echo "Please check the logs for more information" + exit 1 + - name: Pass if required checks passed + run: | + echo "All required checks passed" + + coverage: + needs: [changes, tests-stable, tests-other, check] + # Run only if there are changes in the relevant files and the check job passed or was skipped + if: always() && !failure() && !cancelled() && needs.changes.outputs.rust == 'true' && github.event_name != 'merge_group' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: mozilla-actions/sccache-action@v0.0.4 + - uses: dtolnay/rust-toolchain@nightly + with: + components: llvm-tools-preview + - name: Install cargo-llvm-cov + uses: taiki-e/install-action@cargo-llvm-cov + - name: Run tests with coverage instrumentation + run: | + cargo llvm-cov clean --workspace + cargo llvm-cov --no-report --workspace --no-default-features --doctests + cargo llvm-cov --no-report --workspace --all-features --doctests + - name: Generate coverage report + run: cargo llvm-cov --all-features report --codecov --output-path coverage.json + - name: Upload coverage to codecov.io + uses: codecov/codecov-action@v4 + with: + files: coverage.json + name: rust + flags: rust + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 
725dea474..000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,115 +0,0 @@ -name: Continuous integration - -on: - push: - branches: - - main - pull_request: - branches: - - main - merge_group: - types: [checks_requested] - workflow_dispatch: {} - -env: - CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: 0 - RUSTFLAGS: "--cfg=ci_run" - MIRIFLAGS: '-Zmiri-permissive-provenance' # Required due to warnings in bitvec 1.0.1 - CI: true # insta snapshots behave differently on ci - SCCACHE_GHA_ENABLED: "true" - RUSTC_WRAPPER: "sccache" - -jobs: - check: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: mozilla-actions/sccache-action@v0.0.4 - - name: Install stable toolchain - uses: dtolnay/rust-toolchain@stable - with: - components: rustfmt, clippy - - name: Check formatting - run: cargo fmt -- --check - - name: Run clippy - run: cargo clippy --all-targets --all-features --workspace -- -D warnings - - name: Build docs - run: cargo doc --no-deps --all-features --workspace - env: - RUSTDOCFLAGS: "-Dwarnings" - - benches: - if: github.event_name != 'merge_group' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: mozilla-actions/sccache-action@v0.0.4 - - name: Install stable toolchain - uses: dtolnay/rust-toolchain@stable - - name: Build benchmarks with no features - run: cargo bench --verbose --no-run --workspace --no-default-features - - name: Build benchmarks with all features - run: cargo bench --verbose --no-run --workspace --all-features - - tests: - runs-on: ubuntu-latest - strategy: - matrix: - rust: ['1.75', stable, beta, nightly] - # workaround to ignore non-stable tests when running the merge queue checks - # see: https://github.community/t/how-to-conditionally-include-exclude-items-in-matrix-eg-based-on-branch/16853/6 - isMerge: - - ${{ github.event_name == 'merge_group' }} - exclude: - - rust: '1.75' - isMerge: true - - rust: beta - isMerge: true - - rust: nightly - isMerge: true - name: tests (Rust ${{ 
matrix.rust }}) - steps: - - uses: actions/checkout@v4 - - uses: mozilla-actions/sccache-action@v0.0.4 - - id: toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: ${{ matrix.rust }} - - name: Configure default rust toolchain - run: rustup override set ${{steps.toolchain.outputs.name}} - - name: Build with no features - run: cargo test --verbose --workspace --no-default-features --no-run - - name: Tests with no features - run: cargo test --verbose --workspace --no-default-features - - name: Build with all features - run: cargo test --verbose --workspace --all-features --no-run - - name: Tests with all features - run: cargo test --verbose --workspace --all-features - - coverage: - if: github.event_name != 'merge_group' - needs: [tests, check] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: mozilla-actions/sccache-action@v0.0.4 - - uses: dtolnay/rust-toolchain@nightly - with: - components: llvm-tools-preview - - name: Install cargo-llvm-cov - uses: taiki-e/install-action@cargo-llvm-cov - - name: Run tests with coverage instrumentation - run: | - cargo llvm-cov clean --workspace - cargo llvm-cov --no-report --workspace --no-default-features --doctests - cargo llvm-cov --no-report --workspace --all-features --doctests - - name: Generate coverage report - run: cargo llvm-cov --all-features report --codecov --output-path coverage.json - - name: Upload coverage to codecov.io - uses: codecov/codecov-action@v4 - with: - files: coverage.json - name: ubuntu - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/drop-cache.yml b/.github/workflows/drop-cache.yml index e389893db..f550ba484 100644 --- a/.github/workflows/drop-cache.yml +++ b/.github/workflows/drop-cache.yml @@ -10,18 +10,18 @@ jobs: steps: - name: Check out code uses: actions/checkout@v4 - + - name: Cleanup run: | gh extension install actions/gh-actions-cache - + REPO=${{ github.repository }} BRANCH="refs/pull/${{ 
github.event.pull_request.number }}/merge" echo "Fetching list of cache key" cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 ) - ## Setting this to not fail the workflow while deleting cache keys. + ## Setting this to not fail the workflow while deleting cache keys. set +e echo "Deleting caches..." for cacheKey in $cacheKeysForPR diff --git a/.github/workflows/pr-title.yml b/.github/workflows/pr-title.yml index 0a7422684..33cb96744 100644 --- a/.github/workflows/pr-title.yml +++ b/.github/workflows/pr-title.yml @@ -3,7 +3,7 @@ name: Check Conventional Commits format on: pull_request_target: branches: - - main + - main types: - opened - edited @@ -69,4 +69,4 @@ jobs: # labels change, you might want to use the `labeled` and `unlabeled` # event triggers in your workflow. ignoreLabels: | - ignore-semantic-pull-request \ No newline at end of file + ignore-semantic-pull-request diff --git a/.github/workflows/unsoundness.yml b/.github/workflows/unsoundness.yml index 0780f0d97..fa1721d51 100644 --- a/.github/workflows/unsoundness.yml +++ b/.github/workflows/unsoundness.yml @@ -3,7 +3,7 @@ name: Unsoundness checks on: push: branches: - - main + - main workflow_dispatch: {} concurrency: diff --git a/.gitignore b/.gitignore index 1b0b35659..d1780cda0 100644 --- a/.gitignore +++ b/.gitignore @@ -20,8 +20,26 @@ proptest-regressions/ .devenv* devenv.local.nix -# managed by devenv -.pre-commit-config.yaml - # Coverage report -lcov.info \ No newline at end of file +lcov.info + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Unit test / coverage reports +htmlcov/ +.coverage + +# Jupyter Notebook +.ipynb_checkpoints + +# Environments +.env +.venv +env/ +venv/ + +# ruff +.ruff_cache diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..d00b538de --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,76 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: 
v4.5.0 # Use the ref you want to point at + hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: check-toml + - id: check-vcs-permalinks + - id: check-yaml + - id: detect-private-key + - id: end-of-file-fixer + exclude: | + (?x)^( + specification/schema/.*| + .*.snap| + .*.snap.new + )$ + - id: trailing-whitespace + exclude: | + (?x)^( + specification/schema/.*| + .*.snap| + .*.snap.new + )$ + - id: fix-byte-order-marker + - id: mixed-line-ending + # Python-specific + - id: check-ast + - id: check-docstring-first + - id: debug-statements + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.0 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.9.0 + hooks: + - id: mypy + additional_dependencies: [pydantic] + + - repo: local + hooks: + - id: cargo-fmt + name: cargo format + description: Format rust code with `cargo fmt`. + entry: cargo fmt --all -- --check + language: system + files: \.rs$ + pass_filenames: false + - id: cargo-check + name: cargo check + description: Check rust code with `cargo check`. + entry: cargo check --all --all-features --workspace + language: system + files: \.rs$ + pass_filenames: false + - id: cargo-test + name: cargo test + description: Run tests with `cargo test`. + entry: cargo test --all-features --workspace + language: system + files: \.rs$ + pass_filenames: false + - id: cargo-clippy + name: cargo clippy + description: Run clippy lints with `cargo clippy`. + entry: cargo clippy --all-features --workspace -- -D warnings + language: system + files: \.rs$ + pass_filenames: false diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 110a4cf6e..855ed3bc3 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -29,26 +29,27 @@ shell by setting up [direnv](https://devenv.sh/automatic-shell-activation/). 
To setup the environment manually you will need: - Rust: https://www.rust-lang.org/tools/install +- Just: https://just.systems/ +- Poetry: https://python-poetry.org/ -You can use the git hook in [`.github/pre-commit`](.github/pre-commit) to automatically run the test and check formatting before committing. -To install it, run: +Once you have these installed, you can install the required python dependencies and setup pre-commit hooks with: ```bash -ln -s .github/pre-commit .git/hooks/pre-commit -# Or, to check before pushing instead -ln -s .github/pre-commit .git/hooks/pre-push +just setup ``` ## 🏃 Running the tests -To compile and test the rust code, run: +To compile and test the code, run: ```bash -cargo build -cargo test +just test +# or, to test only the rust code or the python code +just test rust +just test python ``` -Run the benchmarks with: +Run the rust benchmarks with: ```bash cargo bench @@ -62,6 +63,8 @@ stable available. cargo +nightly miri test ``` +Run `just` to see all available commands. + ## 💅 Coding Style The rustfmt tool is used to enforce a consistent rust coding style. The CI will fail if the code is not formatted correctly. @@ -69,14 +72,22 @@ The rustfmt tool is used to enforce a consistent rust coding style. The CI will To format your code, run: ```bash -# Format rust code -cargo fmt +just format +``` + +We also use various linters to catch common mistakes and enforce best practices. To run these, use: + +```bash +just check ``` -We also check for clippy warnings, which are a set of linting rules for rust. To run clippy, run: +To quickly fix common issues, run: ```bash -cargo clippy --all-targets +just fix +# or, to fix only the rust code or the python code +just fix rust +just fix python ``` ## 📈 Code Coverage @@ -85,9 +96,15 @@ We run coverage checks on the CI. Once you submit a PR, you can review the line-by-line coverage report on [codecov](https://app.codecov.io/gh/CQCL/hugr/commits?branch=All%20branches). 
-To run the coverage checks locally, install `cargo-llvm-cov`, generate the report with: +To run the coverage checks locally, first install `cargo-llvm-cov`. +```bash +cargo install cargo-llvm-cov +``` + +Then run the tests: + ```bash -cargo llvm-cov --lcov > lcov.info +just coverage ``` and open it with your favourite coverage viewer. In VSCode, you can use @@ -118,4 +135,4 @@ We accept the following contribution types: - test: Adding missing tests, refactoring tests; no production code change. - ci: CI related changes. These changes are not published in the changelog. - chore: Updating build tasks, package manager configs, etc. These changes are not published in the changelog. -- revert: Reverting previous commits. \ No newline at end of file +- revert: Reverting previous commits. diff --git a/LICENCE b/LICENCE index f49a4e16e..261eeb9e9 100644 --- a/LICENCE +++ b/LICENCE @@ -198,4 +198,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file + limitations under the License. diff --git a/README.md b/README.md index 79baadaf6..749ab51db 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ See [DEVELOPMENT.md](https://github.com/CQCL/hugr/blob/main/DEVELOPMENT.md) for This project is licensed under Apache License, Version 2.0 ([LICENSE][] or http://www.apache.org/licenses/LICENSE-2.0). 
[API documentation here]: https://docs.rs/quantinuum-hugr/ - [build_status]: https://github.com/CQCL/hugr/workflows/Continuous%20integration/badge.svg?branch=main + [build_status]: https://github.com/CQCL/hugr/actions/workflows/ci-rs.yml/badge.svg?branch=main [msrv]: https://img.shields.io/badge/rust-1.75.0%2B-blue.svg [crates]: https://img.shields.io/crates/v/quantinuum-hugr [codecov]: https://img.shields.io/codecov/c/gh/CQCL/hugr?logo=codecov diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..15ae701a4 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,26 @@ +# Codecov coverage report configuration + +# Coverage report +# Do not fail if the coverage is not met +coverage: + status: + patch: + default: + informational: false + only_pulls: true + project: + default: + informational: true + +# Ignore tests and binaries +ignore: + - "quantinuum-hugr-py/tests" + - "scripts" + +# Coverage groups config +flag_management: + default_rules: # the rules that will be followed for any flag added, generally + # Use previous coverage if one is not available for the current commit. + # + # (E.g. if the PR doesn't modify a subproject, we don't submit a coverage report for it.) 
+ carryforward: true diff --git a/devenv.nix b/devenv.nix index 43ffbefa4..ecbc22a6e 100644 --- a/devenv.nix +++ b/devenv.nix @@ -4,7 +4,7 @@ let in { # https://devenv.sh/packages/ - # on macos frameworks have to be explicitly specified + # on macos frameworks have to be explicitly specified # otherwise a linker error ocurs on rust packages packages = [ pkgs.just @@ -30,6 +30,14 @@ in export LLVM_PROFDATA="${pkgs.llvmPackages_16.libllvm}/bin/llvm-profdata" ''; + languages.python = { + enable = true; + poetry = { + enable = true; + activate.enable = true; + }; + }; + # https://devenv.sh/languages/ # https://devenv.sh/reference/options/#languagesrustversion languages.rust = { @@ -38,12 +46,6 @@ in components = [ "rustc" "cargo" "clippy" "rustfmt" "rust-analyzer" ]; }; - # https://devenv.sh/pre-commit-hooks/ - pre-commit.hooks.clippy.enable = true; - pre-commit.tools.clippy = lib.mkForce config.languages.rust.toolchain.clippy; - pre-commit.hooks.rustfmt.enable = true; - pre-commit.tools.rustfmt = lib.mkForce config.languages.rust.toolchain.rustfmt; - # https://devenv.sh/processes/ # processes.ping.exec = "ping example.com"; diff --git a/devenv.yaml b/devenv.yaml index 5a9261d03..d6eb89958 100644 --- a/devenv.yaml +++ b/devenv.yaml @@ -5,4 +5,4 @@ inputs: url: github:nix-community/fenix inputs: nixpkgs: - follows: nixpkgs \ No newline at end of file + follows: nixpkgs diff --git a/justfile b/justfile index bc395e953..b3aa2c7c3 100644 --- a/justfile +++ b/justfile @@ -2,22 +2,70 @@ help: @just --list --justfile {{justfile()}} -# Run all the rust tests -test: - cargo test --all-features +# Prepare the environment for development, installing all the dependencies and +# setting up the pre-commit hooks. +setup: + poetry install + poetry run pre-commit install -t pre-commit -# Auto-fix all clippy warnings -fix: - cargo clippy --all-targets --all-features --workspace --fix --allow-staged - -# Run the pre-commit checks +# Run the pre-commit checks. 
check: - ./.github/pre-commit + poetry run pre-commit run --all-files + +# Run all the tests. +test language="[rust|python]": (_run_lang language \ + "cargo test --all-features" \ + "poetry run pytest" + ) + +# Run all the benchmarks. +bench language="[rust|python]": (_run_lang language \ + "cargo bench" \ + "true" + ) + +# Auto-fix all clippy warnings. +fix language="[rust|python]": (_run_lang language \ + "cargo clippy --all-targets --all-features --workspace --fix --allow-staged --allow-dirty" \ + "poetry run ruff check --fix" + ) + +# Format the code. +format language="[rust|python]": (_run_lang language \ + "cargo fmt" \ + "poetry run ruff format" + ) + +# Generate a test coverage report. +coverage language="[rust|python]": (_run_lang language \ + "cargo llvm-cov --lcov > lcov.info" \ + "poetry run pytest --cov=./ --cov-report=html" + ) + +# Load a shell with all the dependencies installed +shell: + poetry shell + +# Update the HUGR schema. +update-schema: + poetry run python scripts/generate_schema.py specification/schema/ -# Format the code -format: - cargo fmt -# Generate a test coverage report -coverage: - cargo llvm-cov --lcov > lcov.info +# Runs a rust and a python command, depending on the `language` variable. +# +# If `language` is set to `rust` or `python`, only run the command for that language. +# Otherwise, run both commands. +_run_lang language rust_cmd python_cmd: + #!/usr/bin/env bash + set -euo pipefail + if [ "{{ language }}" = "rust" ]; then + set -x + {{ rust_cmd }} + elif [ "{{ language }}" = "python" ]; then + set -x + {{ python_cmd }} + else + set -x + {{ rust_cmd }} + {{ python_cmd }} + fi diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..4c988dcef --- /dev/null +++ b/poetry.lock @@ -0,0 +1,613 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.4.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = 
"coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = 
"coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.8" 
+description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "identify" +version = "2.5.35" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, +] + +[package.extras] +license 
= ["ukkonen"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = 
"mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = 
"sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.6.2" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.2-py2.py3-none-any.whl", hash = "sha256:ba637c2d7a670c10daedc059f5c49b5bd0aadbccfcd7ec15592cf9665117532c"}, + {file = "pre_commit-3.6.2.tar.gz", hash = "sha256:c3ef34f463045c88658c5b99f38c1e297abdcc0ff13f98d3370055fbbfabc67e"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pydantic" +version = "2.6.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.3" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, 
+ {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = 
"pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = 
"pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = 
"pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = 
"pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", 
hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "quantinuum-hugr" +version = "0.0.0" +description = "Quantinuum's common representation for quantum programs" +optional = false +python-versions = ">=3.10" +files = [] +develop = true + +[package.dependencies] +pydantic = "^2.6.4" + +[package.source] +type = "directory" +url = "quantinuum-hugr-py" + +[[package]] +name = "ruff" +version = "0.3.3" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:973a0e388b7bc2e9148c7f9be8b8c6ae7471b9be37e1cc732f8f44a6f6d7720d"}, + {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfa60d23269d6e2031129b053fdb4e5a7b0637fc6c9c0586737b962b2f834493"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eca7ff7a47043cf6ce5c7f45f603b09121a7cc047447744b029d1b719278eb5"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7d3f6762217c1da954de24b4a1a70515630d29f71e268ec5000afe81377642d"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b24c19e8598916d9c6f5a5437671f55ee93c212a2c4c569605dc3842b6820386"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5a6cbf216b69c7090f0fe4669501a27326c34e119068c1494f35aaf4cc683778"}, + {file = 
"ruff-0.3.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352e95ead6964974b234e16ba8a66dad102ec7bf8ac064a23f95371d8b198aab"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6ab88c81c4040a817aa432484e838aaddf8bfd7ca70e4e615482757acb64f8"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79bca3a03a759cc773fca69e0bdeac8abd1c13c31b798d5bb3c9da4a03144a9f"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2700a804d5336bcffe063fd789ca2c7b02b552d2e323a336700abb8ae9e6a3f8"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd66469f1a18fdb9d32e22b79f486223052ddf057dc56dea0caaf1a47bdfaf4e"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45817af234605525cdf6317005923bf532514e1ea3d9270acf61ca2440691376"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0da458989ce0159555ef224d5b7c24d3d2e4bf4c300b85467b08c3261c6bc6a8"}, + {file = "ruff-0.3.3-py3-none-win32.whl", hash = "sha256:f2831ec6a580a97f1ea82ea1eda0401c3cdf512cf2045fa3c85e8ef109e87de0"}, + {file = "ruff-0.3.3-py3-none-win_amd64.whl", hash = "sha256:be90bcae57c24d9f9d023b12d627e958eb55f595428bafcb7fec0791ad25ddfc"}, + {file = "ruff-0.3.3-py3-none-win_arm64.whl", hash = "sha256:0171aab5fecdc54383993389710a3d1227f2da124d76a2784a7098e818f92d61"}, + {file = "ruff-0.3.3.tar.gz", hash = "sha256:38671be06f57a2f8aba957d9f701ea889aa5736be806f18c0cd03d6ff0cbca8d"}, +] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, 
+] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "virtualenv" +version = "20.25.1" +description = "Virtual 
Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "e04a4d46e86490057be6b1faf1bebf9d4a5297d221884cbbb93319f8280101e4" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..af4a5ed9b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,26 @@ +[tool.poetry] +description = "Namespace for the python packages in the HUGR repository. See the individual packages for more information." 
+name = "hugr-project" +version = "0.0.0" +authors = [] +readme = "README.md" +packages = [] + +package-mode = false + +[tool.poetry.group.main.dependencies] +python = "^3.10" + +[tool.poetry.group.dev.dependencies] +pre-commit = "^3.6.2" +pytest = "^8.1.1" +pytest-cov = "^4.1.0" +mypy = "^1.9.0" +ruff = "^0.3.3" + +[tool.poetry.group.quantinuum-hugr.dependencies] +quantinuum-hugr = { path = "quantinuum-hugr-py", develop = true } + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/quantinuum-hugr-py/CHANGELOG.md b/quantinuum-hugr-py/CHANGELOG.md new file mode 100644 index 000000000..825c32f0d --- /dev/null +++ b/quantinuum-hugr-py/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog diff --git a/quantinuum-hugr-py/README.md b/quantinuum-hugr-py/README.md new file mode 100644 index 000000000..a03779b7a --- /dev/null +++ b/quantinuum-hugr-py/README.md @@ -0,0 +1,53 @@ +quantinuum_hugr +=============== + +[![build_status][]](https://github.com/CQCL/hugr/actions) +[![codecov][]](https://codecov.io/gh/CQCL/hugr) + +The Hierarchical Unified Graph Representation (HUGR, pronounced _hugger_) is the +common representation of quantum circuits and operations in the Quantinuum +ecosystem. + +This library provides a pure-python implementation of the HUGR data model, and +a low-level API for constructing HUGR objects. + +This library is intended to be used as a dependency for other high-level tools. +See [`guppylang`][] and [`tket2`][] for examples of such tools. + +The HUGR specification is [here](https://github.com/CQCL/hugr/blob/main/specification/hugr.md). + + [`guppylang`]: https://pypi.org/project/guppylang/ + [`tket2`]: https://github.com/CQCL/tket2 + + +## Installation + +TODO + +The package name is `quantinuum_hugr`, but it hasn't been published yet. 
+The current experimental version can be installed from the source code: + +```bash +pip install "quantinuum_hugr@git+https://github.com/CQCL/hugr.git@main#subdirectory=quantinuum-hugr-py" +``` + +## Usage + +TODO + +## Recent Changes + +TODO + +## Development + +TODO + +## License + +This project is licensed under Apache License, Version 2.0 ([LICENSE][] or http://www.apache.org/licenses/LICENSE-2.0). + + [build_status]: https://github.com/CQCL/hugr/actions/workflows/ci-py.yml/badge.svg?branch=main + [codecov]: https://img.shields.io/codecov/c/gh/CQCL/hugr?logo=codecov + [LICENSE]: https://github.com/CQCL/hugr/blob/main/LICENCE + [CHANGELOG]: https://github.com/CQCL/hugr/blob/main/quantinuum-hugr-py/CHANGELOG.md diff --git a/quantinuum-hugr-py/pyproject.toml b/quantinuum-hugr-py/pyproject.toml new file mode 100644 index 000000000..0db399359 --- /dev/null +++ b/quantinuum-hugr-py/pyproject.toml @@ -0,0 +1,20 @@ +[tool.poetry] +name = "quantinuum_hugr" +version = "0.0.0" +description = "Quantinuum's common representation for quantum programs" +classifiers = [] # TODO +keywords = [] # TODO +authors = [] # TODO +maintainers = [] # TODO +license = "Apache-2.0" +readme = "README.md" +homepage = "https://github.com/CQCL/hugr" +repository = "https://github.com/CQCL/hugr" + +[tool.poetry.dependencies] +python = ">=3.10" +pydantic = "^2.6.4" + +[tool.pytest.ini_options] +# Lark throws deprecation warnings for `src_parse` and `src_constants`. +filterwarnings = "ignore::DeprecationWarning:lark.*" diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/__init__.py b/quantinuum-hugr-py/src/quantinuum_hugr/__init__.py new file mode 100644 index 000000000..606c3dc58 --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/__init__.py @@ -0,0 +1,8 @@ +"""`quantinuum-hugr` is a Python package for the Quantinuum HUGR common +representation. +""" + + +def it_works() -> str: + """Return a string to confirm that the package is installed and working.""" + return "It works!" 
diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/__init__.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/__init__.py new file mode 100644 index 000000000..ad1db81e8 --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/__init__.py @@ -0,0 +1,3 @@ +from .serial_hugr import SerialHugr + +__all__ = ["SerialHugr"] diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/ops.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/ops.py new file mode 100644 index 000000000..9490a5c57 --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/ops.py @@ -0,0 +1,557 @@ +import inspect +import sys +from abc import ABC +from typing import Any, Literal, cast + +from pydantic import BaseModel, Field, RootModel + +from . import tys +from .tys import ( + ExtensionId, + ExtensionSet, + FunctionType, + PolyFuncType, + Type, + TypeRow, +) + +NodeID = int + + +class BaseOp(ABC, BaseModel): + """Base class for ops that store their node's input/output types""" + + # Parent node index of node the op belongs to, used only at serialization time + parent: NodeID + input_extensions: ExtensionSet = Field(default_factory=ExtensionSet) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + """Hook to insert type information from the input and output ports into the + op""" + + def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None: + """Hook to insert type information from a child dataflow graph""" + + def display_name(self) -> str: + """Name of the op for visualisation""" + return self.__class__.__name__ + + +# ---------------------------------------------------------- +# --------------- Module level operations ------------------ +# ---------------------------------------------------------- + + +class Module(BaseOp): + """The root of a module, parent of all other `ModuleOp`s.""" + + op: Literal["Module"] = "Module" + + +class FuncDefn(BaseOp): + """A function definition. 
Children nodes are the body of the definition.""" + + op: Literal["FuncDefn"] = "FuncDefn" + + name: str + signature: PolyFuncType = Field(default_factory=PolyFuncType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(in_types) == 0 + assert len(out_types) == 1 + out = out_types[0] + assert isinstance(out, PolyFuncType) + self.signature = out # TODO: Extensions + + +class FuncDecl(BaseOp): + """External function declaration, linked at runtime.""" + + op: Literal["FuncDecl"] = "FuncDecl" + name: str + signature: PolyFuncType = Field(default_factory=PolyFuncType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(in_types) == 0 + assert len(out_types) == 1 + out = out_types[0] + assert isinstance(out, PolyFuncType) + self.signature = out + + +class ConstBase(BaseOp): + """A constant operation definition.""" + + op: Literal["Const"] = "Const" + + +CustomConst = Any # TODO + + +class ExtensionConst(ConstBase): + """An extension constant value, that can check it is of a given [CustomType].""" + + c: Literal["Extension"] = Field("Extension", title="ConstTag") + e: CustomConst = Field(title="CustomConst") + + class Config: + json_schema_extra = { + "required": ["parent", "op", "c", "e"], + } + + +class FunctionConst(ConstBase): + """A higher-order function value.""" + + c: Literal["Function"] = Field("Function", title="ConstTag") + hugr: Any # TODO + + class Config: + json_schema_extra = { + "required": ["parent", "op", "c", "hugr"], + } + + +class Tuple(ConstBase): + """A constant tuple value.""" + + c: Literal["Tuple"] = Field("Tuple", title="ConstTag") + vs: list["Const"] + + class Config: + json_schema_extra = { + "required": ["parent", "op", "c", "vs"], + } + + +class Sum(ConstBase): + """A Sum variant + + For any Sum type where this value meets the type of the variant indicated by the tag + """ + + c: Literal["Sum"] = Field("Sum", title="ConstTag") + tag: int + typ: 
Type + vs: list["Const"] + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "A Sum variant For any Sum type where this value meets the type " + "of the variant indicated by the tag." + ), + "required": ["parent", "op", "c", "tag", "typ", "vs"], + } + + +class Const(RootModel): + """A constant operation.""" + + root: ExtensionConst | FunctionConst | Tuple | Sum = Field(discriminator="c") + + +# ----------------------------------------------- +# --------------- BasicBlock types ------------------ +# ----------------------------------------------- + + +class DataflowBlock(BaseOp): + """A CFG basic block node. The signature is that of the internal Dataflow + graph.""" + + op: Literal["DataflowBlock"] = "DataflowBlock" + inputs: TypeRow = Field(default_factory=list) + other_outputs: TypeRow = Field(default_factory=list) + sum_rows: list[TypeRow] = Field(default_factory=list) + extension_delta: ExtensionSet = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + num_cases = len(out_types) + self.sum_rows = [[] for _ in range(num_cases)] + + def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None: + self.inputs = inputs + pred = outputs[0] + assert isinstance(pred, tys.UnitSum | tys.GeneralSum) + if isinstance(pred, tys.UnitSum): + self.sum_rows = [[] for _ in range(cast(tys.UnitSum, pred).size)] + else: + self.sum_rows = [] + for variant in pred.rows: + self.sum_rows.append(variant) + self.other_outputs = outputs[1:] + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "required": [ + "parent", + "op", + "inputs", + "other_outputs", + "sum_rows", + "extension_delta", + ], + "description": "A CFG basic block node. 
The signature is that of the internal Dataflow graph.", + } + + +class ExitBlock(BaseOp): + """The single exit node of the CFG, has no children, stores the types of + the CFG node output.""" + + op: Literal["ExitBlock"] = "ExitBlock" + cfg_outputs: TypeRow + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": "The single exit node of the CFG, has no children, stores the types of the CFG node output." + } + + +# --------------------------------------------- +# --------------- DataflowOp ------------------ +# --------------------------------------------- + + +class DataflowOp(BaseOp): + pass + + +class Input(DataflowOp): + """An input node. The outputs of this node are the inputs to the parent node.""" + + op: Literal["Input"] = "Input" + types: TypeRow = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(in_types) == 0 + self.types = list(out_types) + + +class Output(DataflowOp): + """An output node. The inputs are the outputs of the function.""" + + op: Literal["Output"] = "Output" + types: TypeRow = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(out_types) == 0 + self.types = list(in_types) + + +class Call(DataflowOp): + """ + Call a function directly. + + The first port is connected to the def/declare of the function being called + directly, with a `ConstE` edge. The signature of the remaining ports matches + the function being called. 
+ """ + + op: Literal["Call"] = "Call" + signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + # The constE edge comes after the value inputs + fun_ty = in_types[-1] + assert isinstance(fun_ty, PolyFuncType) + poly_func = cast(PolyFuncType, fun_ty) + assert len(poly_func.params) == 0 + self.signature = poly_func.body + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "Operation to call a function directly. The first port is " + "connected to the def/declare of the function being called directly, " + "with a `Static` edge. The signature of the remaining " + "ports matches the function being called." + ) + } + + +class CallIndirect(DataflowOp): + """Call a function indirectly. + + Like call, but the first input is a standard dataflow graph type.""" + + op: Literal["CallIndirect"] = "CallIndirect" + signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + fun_ty = in_types[0] + assert isinstance(fun_ty, PolyFuncType) + poly_func = cast(PolyFuncType, fun_ty) + assert len(poly_func.params) == 0 + assert len(poly_func.body.input) == len(in_types) - 1 + assert len(poly_func.body.output) == len(out_types) + self.signature = poly_func.body + + +class LoadConstant(DataflowOp): + """An operation that loads a static constant in to the local dataflow graph.""" + + op: Literal["LoadConstant"] = "LoadConstant" + datatype: Type + + +class LeafOpBase(DataflowOp): + """Simple operation that has only value inputs+outputs and (potentially) StateOrder + edges.""" + + op: Literal["LeafOp"] = "LeafOp" + + +class DFG(DataflowOp): + """A simply nested dataflow graph.""" + + op: Literal["DFG"] = "DFG" + signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_child_dfg_signature(self, inputs: TypeRow, 
outputs: TypeRow) -> None: + self.signature = FunctionType( + input=list(inputs), output=list(outputs), extension_reqs=ExtensionSet([]) + ) + + +# ------------------------------------------------ +# --------------- ControlFlowOp ------------------ +# ------------------------------------------------ + + +class Conditional(DataflowOp): + """Conditional operation, defined by child `Case` nodes for each branch.""" + + op: Literal["Conditional"] = "Conditional" + other_inputs: TypeRow = Field(default_factory=list) # Remaining input types + outputs: TypeRow = Field(default_factory=list) # Output types + sum_rows: list[TypeRow] = Field(description="The possible rows of the Sum input") + # Extensions used to produce the outputs + extension_delta: ExtensionSet = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + # First port is a predicate, i.e. a sum of tuple types. We need to unpack + # those into a list of type rows + pred = in_types[0] + if isinstance(pred, tys.UnitSum): + self.sum_rows = [[] for _ in range(cast(tys.UnitSum, pred).size)] + else: + assert isinstance(pred, tys.GeneralSum) + self.sum_rows = [] + for ty in pred.rows: + self.sum_rows.append(ty) + self.other_inputs = list(in_types[1:]) + self.outputs = list(out_types) + + +class Case(BaseOp): + """Case ops - nodes valid inside Conditional nodes.""" + + op: Literal["Case"] = "Case" + # The signature of the contained dataflow graph. 
+ signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_child_dfg_signature(self, inputs: TypeRow, outputs: TypeRow) -> None: + self.signature = tys.FunctionType( + input=list(inputs), output=list(outputs), extension_reqs=ExtensionSet([]) + ) + + +class TailLoop(DataflowOp): + """Tail-controlled loop.""" + + op: Literal["TailLoop"] = "TailLoop" + just_inputs: TypeRow = Field(default_factory=list) # Types that are only input + just_outputs: TypeRow = Field(default_factory=list) # Types that are only output + # Types that are appended to both input and output: + rest: TypeRow = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert in_types == out_types + # self.just_inputs = list(in_types) + # self.just_outputs = list(out_types) + self.rest = list(in_types) + + +class CFG(DataflowOp): + """A dataflow node which is defined by a child CFG.""" + + op: Literal["CFG"] = "CFG" + signature: FunctionType = Field(default_factory=FunctionType.empty) + + def insert_port_types(self, inputs: TypeRow, outputs: TypeRow) -> None: + self.signature = FunctionType( + input=list(inputs), output=list(outputs), extension_reqs=ExtensionSet([]) + ) + + +ControlFlowOp = Conditional | TailLoop | CFG + + +# ----------------------------------------- +# --------------- LeafOp ------------------ +# ----------------------------------------- + + +class CustomOp(LeafOpBase): + """A user-defined operation that can be downcasted by the extensions that define + it.""" + + lop: Literal["CustomOp"] = "CustomOp" + extension: ExtensionId + op_name: str + signature: tys.FunctionType = Field(default_factory=tys.FunctionType.empty) + description: str = "" + args: list[tys.TypeArg] = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + self.signature = tys.FunctionType(input=list(in_types), output=list(out_types)) + + def display_name(self) -> str: + return 
self.op_name + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "A user-defined operation that can be downcasted by the extensions that " + "define it." + ) + } + + +class Noop(LeafOpBase): + """A no-op operation.""" + + lop: Literal["Noop"] = "Noop" + ty: Type + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + assert len(in_types) == 1 + assert len(out_types) == 1 + assert in_types[0] == out_types[0] + self.ty = in_types[0] + + +class MakeTuple(LeafOpBase): + """An operation that packs all its inputs into a tuple.""" + + lop: Literal["MakeTuple"] = "MakeTuple" + tys: TypeRow = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + # If we have a single order edge as input, this is a unit + if in_types == [None]: + in_types = [] + self.tys = list(in_types) + + +class UnpackTuple(LeafOpBase): + """An operation that unpacks its input tuple into its component values.""" + + lop: Literal["UnpackTuple"] = "UnpackTuple" + tys: TypeRow = Field(default_factory=list) + + def insert_port_types(self, in_types: TypeRow, out_types: TypeRow) -> None: + self.tys = list(out_types) + + +class Tag(LeafOpBase): + """An operation that creates a tagged sum value from one of its variants.""" + + lop: Literal["Tag"] = "Tag" + tag: int # The variant to create. + variants: TypeRow # The variants of the sum type. + + +class TypeApply(LeafOpBase): + """Fixes some TypeParams of a polymorphic type by providing TypeArgs.""" + + lop: Literal["TypeApply"] = "TypeApply" + ta: "TypeApplication" + + +class TypeApplication(BaseModel): + """Records details of an application of a PolyFuncType to some TypeArgs and the + result (a less-, but still potentially-, polymorphic type). 
+ """ + + input: PolyFuncType + args: list[tys.TypeTypeArg] + output: PolyFuncType + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "Records details of an application of a PolyFuncType to some TypeArgs " + "and the result (a less-, but still potentially-, polymorphic type)." + ) + } + + +class LeafOp(RootModel): + """A constant operation.""" + + root: CustomOp | Noop | MakeTuple | UnpackTuple | Tag | TypeApply = Field( + discriminator="lop" + ) + + +class OpType(RootModel): + """A constant operation.""" + + root: ( + Module + | Case + | FuncDefn + | FuncDecl + | Const + | DataflowBlock + | ExitBlock + | Conditional + | TailLoop + | CFG + | Input + | Output + | Call + | CallIndirect + | LoadConstant + | LeafOp + | DFG + ) = Field(discriminator="op") + + +# -------------------------------------- +# --------------- OpDef ---------------- +# -------------------------------------- + + +class OpDef(BaseOp, populate_by_name=True): + """Serializable definition for dynamically loaded operations.""" + + name: str # Unique identifier of the operation. + description: str # Human readable description of the operation. + inputs: list[tuple[str | None, Type]] + outputs: list[tuple[str | None, Type]] + misc: dict[str, Any] # Miscellaneous data associated with the operation. + def_: str | None = Field( + ..., alias="def" + ) # (YAML?)-encoded definition of the operation. + extension_reqs: ExtensionSet # Resources required to execute this operation. + + +# Now that all classes are defined, we need to update the ForwardRefs in all type +# annotations. We use some inspect magic to find all classes defined in this file. 
+classes = inspect.getmembers( + sys.modules[__name__], + lambda member: inspect.isclass(member) and member.__module__ == __name__, +) +for _, c in classes: + if issubclass(c, BaseModel): + c.model_rebuild() diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/serial_hugr.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/serial_hugr.py new file mode 100644 index 000000000..355756bce --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/serial_hugr.py @@ -0,0 +1,36 @@ +from typing import Any, Literal + +from pydantic import BaseModel + +from .ops import NodeID, OpType + +Port = tuple[NodeID, int | None] # (node, offset) +Edge = tuple[Port, Port] + + +class SerialHugr(BaseModel): + """A serializable representation of a Hugr.""" + + version: Literal["v1"] = "v1" + nodes: list[OpType] + edges: list[Edge] + + def to_json(self) -> str: + """Return a JSON representation of the Hugr.""" + return self.model_dump_json() + + @classmethod + def load_json(cls, json: dict[Any, Any]) -> "SerialHugr": + """Decode a JSON-encoded Hugr.""" + return cls(**json) + + @classmethod + def get_version(cls) -> str: + """Return the version of the schema.""" + return cls(nodes=[], edges=[]).version + + class Config: + title = "Hugr" + json_schema_extra = { + "required": ["version", "nodes", "edges"], + } diff --git a/quantinuum-hugr-py/src/quantinuum_hugr/serialization/tys.py b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/tys.py new file mode 100644 index 000000000..cea1a40cc --- /dev/null +++ b/quantinuum-hugr-py/src/quantinuum_hugr/serialization/tys.py @@ -0,0 +1,320 @@ +import inspect +import sys +from enum import Enum +from typing import Annotated, Any, Literal, Optional, Union + +from pydantic import ( + BaseModel, + Field, + RootModel, + ValidationError, + ValidationInfo, + ValidatorFunctionWrapHandler, + WrapValidator, +) +from pydantic_core import PydanticCustomError + + +def _json_custom_error_validator( + value: Any, handler: 
ValidatorFunctionWrapHandler, _info: ValidationInfo +) -> Any: + """Simplify the error message to avoid a gross error stemming + from exhaustive checking of all union options. + + As suggested at + https://docs.pydantic.dev/latest/concepts/types/#named-recursive-types + + + Used to define named recursive alias types. + """ + try: + return handler(value) + except ValidationError as err: + raise PydanticCustomError( + "invalid_json", + "Input is not valid json", + ) from err + + +ExtensionId = str + + +class ExtensionSet(RootModel): + """A set of extensions ids.""" + + root: Optional[list[ExtensionId]] = Field(default=None) + + +# -------------------------------------------- +# --------------- TypeParam ------------------ +# -------------------------------------------- + + +class TypeTypeParam(BaseModel): + tp: Literal["Type"] = "Type" + b: "TypeBound" + + +class BoundedNatParam(BaseModel): + tp: Literal["BoundedNat"] = "BoundedNat" + bound: int | None + + +class OpaqueParam(BaseModel): + tp: Literal["Opaque"] = "Opaque" + ty: "Opaque" + + +class ListParam(BaseModel): + tp: Literal["List"] = "List" + param: "TypeParam" + + +class TupleParam(BaseModel): + tp: Literal["Tuple"] = "Tuple" + params: list["TypeParam"] + + +class TypeParam(RootModel): + """A type parameter.""" + + root: Annotated[ + TypeTypeParam | BoundedNatParam | OpaqueParam | ListParam | TupleParam, + WrapValidator(_json_custom_error_validator), + ] = Field(discriminator="tp") + + +# ------------------------------------------ +# --------------- TypeArg ------------------ +# ------------------------------------------ + + +class CustomTypeArg(BaseModel): + typ: None # TODO + value: str + + +class TypeTypeArg(BaseModel): + tya: Literal["Type"] = "Type" + ty: "Type" + + +class BoundedNatArg(BaseModel): + tya: Literal["BoundedNat"] = "BoundedNat" + n: int + + +class OpaqueArg(BaseModel): + tya: Literal["Opaque"] = "Opaque" + arg: CustomTypeArg + + +class SequenceArg(BaseModel): + tya: Literal["Sequence"] = 
"Sequence" + args: list["TypeArg"] + + +class ExtensionsArg(BaseModel): + tya: Literal["Extensions"] = "Extensions" + es: ExtensionSet + + +class TypeArg(RootModel): + """A type argument.""" + + root: Annotated[ + TypeTypeArg | BoundedNatArg | OpaqueArg | SequenceArg | ExtensionsArg, + WrapValidator(_json_custom_error_validator), + ] = Field(discriminator="tya") + + +# -------------------------------------------- +# --------------- Container ------------------ +# -------------------------------------------- + + +class MultiContainer(BaseModel): + ty: "Type" + + +class Array(MultiContainer): + """Known size array whose elements are of the same type.""" + + t: Literal["Array"] = "Array" + len: int + + +class UnitSum(BaseModel): + """Simple predicate where all variants are empty tuples.""" + + t: Literal["Sum"] = "Sum" + + s: Literal["Unit"] = "Unit" + size: int + + +class GeneralSum(BaseModel): + """General sum type that explicitly stores the types of the variants.""" + + t: Literal["Sum"] = "Sum" + + s: Literal["General"] = "General" + rows: list["TypeRow"] + + +class SumType(RootModel): + root: Union[UnitSum, GeneralSum] = Field(discriminator="s") + + +# ---------------------------------------------- +# --------------- ClassicType ------------------ +# ---------------------------------------------- + + +class Variable(BaseModel): + """A type variable identified by a de Bruijn index.""" + + t: Literal["V"] = "V" + i: int + b: "TypeBound" + + +class USize(BaseModel): + """Unsigned integer size type.""" + + t: Literal["I"] = "I" + + +class FunctionType(BaseModel): + """A graph encoded as a value. It contains a concrete signature and a set of + required resources.""" + + input: "TypeRow" # Value inputs of the function. + output: "TypeRow" # Value outputs of the function. 
+ # The extension requirements which are added by the operation + extension_reqs: "ExtensionSet" = Field(default_factory=list) + + @classmethod + def empty(cls) -> "FunctionType": + return FunctionType(input=[], output=[], extension_reqs=ExtensionSet([])) + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "A graph encoded as a value. It contains a concrete signature and " + "a set of required resources." + ) + } + + +class PolyFuncType(BaseModel): + """A graph encoded as a value. It contains a concrete signature and a set of + required resources.""" + + t: Literal["G"] = "G" + + # The declared type parameters, i.e., these must be instantiated with the same + # number of TypeArgs before the function can be called. Note that within the body, + # variable (DeBruijn) index 0 is element 0 of this array, i.e. the variables are + # bound from right to left. + params: list[TypeParam] + + # Template for the function. May contain variables up to length of `params` + body: FunctionType + + @classmethod + def empty(cls) -> "PolyFuncType": + return PolyFuncType(params=[], body=FunctionType.empty()) + + class Config: + # Needed to avoid random '\n's in the pydantic description + json_schema_extra = { + "description": ( + "A graph encoded as a value. It contains a concrete signature and " + "a set of required resources." + ) + } + + +class TypeBound(Enum): + Eq = "E" + Copyable = "C" + Any = "A" + + @staticmethod + def join(*bs: "TypeBound") -> "TypeBound": + """Computes the least upper bound for a sequence of bounds.""" + res = TypeBound.Eq + for b in bs: + if b == TypeBound.Any: + return TypeBound.Any + if res == TypeBound.Eq: + res = b + return res + + +class Opaque(BaseModel): + """An opaque operation that can be downcasted by the extensions that define it.""" + + t: Literal["Opaque"] = "Opaque" + extension: ExtensionId + id: str # Unique identifier of the opaque type. 
+ args: list[TypeArg] + bound: TypeBound + + +# ---------------------------------------------- +# --------------- LinearType ------------------- +# ---------------------------------------------- + + +class Qubit(BaseModel): + """A qubit.""" + + t: Literal["Q"] = "Q" + + +class Type(RootModel): + """A HUGR type.""" + + root: Annotated[ + Qubit | Variable | USize | PolyFuncType | Array | SumType | Opaque, + WrapValidator(_json_custom_error_validator), + ] = Field(discriminator="t") + + +# ------------------------------------------- +# --------------- TypeRow ------------------- +# ------------------------------------------- + +TypeRow = list[Type] + + +# ------------------------------------------- +# --------------- Signature ----------------- +# ------------------------------------------- + + +class Signature(BaseModel): + """Describes the edges required to/from a node. + + This includes both the concept of "signature" in the spec, and also the target + (value) of a call (constant). + """ + + signature: "PolyFuncType" # The underlying signature + + # The extensions which are associated with all the inputs and carried through + input_extensions: ExtensionSet + + +# Now that all classes are defined, we need to update the ForwardRefs in all type +# annotations. We use some inspect magic to find all classes defined in this file. 
+classes = inspect.getmembers( + sys.modules[__name__], + lambda member: inspect.isclass(member) and member.__module__ == __name__, +) +for _, c in classes: + if issubclass(c, BaseModel): + c.model_rebuild() diff --git a/quantinuum-hugr-py/tests/__init__.py b/quantinuum-hugr-py/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/quantinuum-hugr-py/tests/serialization/__init__.py b/quantinuum-hugr-py/tests/serialization/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/quantinuum-hugr-py/tests/serialization/test_basic.py b/quantinuum-hugr-py/tests/serialization/test_basic.py new file mode 100644 index 000000000..e29fd40c0 --- /dev/null +++ b/quantinuum-hugr-py/tests/serialization/test_basic.py @@ -0,0 +1,3 @@ +def test_it_works(): + """TODO: Replace this with a real test.""" + assert 2 + 2 != "🐟" diff --git a/quantinuum-hugr/benches/bench_main.rs b/quantinuum-hugr/benches/bench_main.rs index db6e5dc4e..bc0619c48 100644 --- a/quantinuum-hugr/benches/bench_main.rs +++ b/quantinuum-hugr/benches/bench_main.rs @@ -1,9 +1,11 @@ //! Benchmarks -#[allow(dead_code)] +#![allow(dead_code)] + mod benchmarks; use criterion::criterion_main; criterion_main! 
{ benchmarks::hugr::benches, + benchmarks::types::benches, } diff --git a/quantinuum-hugr/benches/benchmarks/hugr.rs b/quantinuum-hugr/benches/benchmarks/hugr.rs index 231a1efa1..7da7b4c12 100644 --- a/quantinuum-hugr/benches/benchmarks/hugr.rs +++ b/quantinuum-hugr/benches/benchmarks/hugr.rs @@ -1,11 +1,57 @@ #![allow(clippy::unit_arg)] // Required for black_box uses -use criterion::{criterion_group, AxisScale, Criterion, PlotConfiguration}; +use criterion::{black_box, criterion_group, AxisScale, Criterion, PlotConfiguration}; +use hugr::builder::{BuildError, CFGBuilder, DFGBuilder, Dataflow, DataflowHugr, HugrBuilder}; +use hugr::extension::prelude::{BOOL_T, USIZE_T}; +use hugr::extension::ExtensionSet; +use hugr::types::FunctionType; +use hugr::{type_row, Hugr}; -fn bench_it_works(c: &mut Criterion) { - let mut group = c.benchmark_group("it_works"); +pub fn simple_dfg_hugr() -> Hugr { + let dfg_builder = + DFGBuilder::new(FunctionType::new(type_row![BOOL_T], type_row![BOOL_T])).unwrap(); + let [i1] = dfg_builder.input_wires_arr(); + dfg_builder.finish_prelude_hugr_with_outputs([i1]).unwrap() +} + +pub fn simple_cfg_builder + AsRef>( + cfg_builder: &mut CFGBuilder, +) -> Result<(), BuildError> { + let sum2_variants = vec![type_row![USIZE_T], type_row![USIZE_T]]; + let mut entry_b = + cfg_builder.entry_builder(sum2_variants.clone(), type_row![], ExtensionSet::new())?; + let entry = { + let [inw] = entry_b.input_wires_arr(); + + let sum = entry_b.make_sum(1, sum2_variants, [inw])?; + entry_b.finish_with_outputs(sum, [])? + }; + let mut middle_b = cfg_builder + .simple_block_builder(FunctionType::new(type_row![USIZE_T], type_row![USIZE_T]), 1)?; + let middle = { + let c = middle_b.add_load_const(hugr::ops::Const::unary_unit_sum()); + let [inw] = middle_b.input_wires_arr(); + middle_b.finish_with_outputs(c, [inw])? 
+ }; + let exit = cfg_builder.exit_block(); + cfg_builder.branch(&entry, 0, &middle)?; + cfg_builder.branch(&middle, 0, &exit)?; + cfg_builder.branch(&entry, 1, &exit)?; + Ok(()) +} + +pub fn simple_cfg_hugr() -> Hugr { + let mut cfg_builder = + CFGBuilder::new(FunctionType::new(type_row![USIZE_T], type_row![USIZE_T])).unwrap(); + simple_cfg_builder(&mut cfg_builder).unwrap(); + cfg_builder.finish_prelude_hugr().unwrap() +} + +fn bench_builder(c: &mut Criterion) { + let mut group = c.benchmark_group("builder"); group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic)); - group.bench_function("it_works", |b| b.iter(|| 42)); + group.bench_function("simple_dfg", |b| b.iter(|| black_box(simple_dfg_hugr()))); + group.bench_function("simple_cfg", |b| b.iter(|| black_box(simple_cfg_hugr()))); group.finish(); } @@ -13,5 +59,5 @@ criterion_group! { name = benches; config = Criterion::default(); targets = - bench_it_works, + bench_builder, } diff --git a/quantinuum-hugr/benches/benchmarks/mod.rs b/quantinuum-hugr/benches/benchmarks/mod.rs index 4b93580c2..a153caa15 100644 --- a/quantinuum-hugr/benches/benchmarks/mod.rs +++ b/quantinuum-hugr/benches/benchmarks/mod.rs @@ -1 +1,2 @@ pub mod hugr; +pub mod types; diff --git a/quantinuum-hugr/benches/benchmarks/types.rs b/quantinuum-hugr/benches/benchmarks/types.rs new file mode 100644 index 000000000..3b599983c --- /dev/null +++ b/quantinuum-hugr/benches/benchmarks/types.rs @@ -0,0 +1,36 @@ +// Required for black_box uses +#![allow(clippy::unit_arg)] +use hugr::extension::prelude::{QB_T, USIZE_T}; +use hugr::ops::AliasDecl; +use hugr::types::{FunctionType, Type, TypeBound}; + +use criterion::{black_box, criterion_group, AxisScale, Criterion, PlotConfiguration}; + +/// Construct a complex type. 
+fn make_complex_type() -> Type { + let qb = QB_T; + let int = USIZE_T; + let q_register = Type::new_tuple(vec![qb; 8]); + let b_register = Type::new_tuple(vec![int; 8]); + let q_alias = Type::new_alias(AliasDecl::new("QReg", TypeBound::Any)); + let sum = Type::new_sum([vec![q_register].into(), vec![q_alias].into()]); + Type::new_function(FunctionType::new(vec![sum], vec![b_register])) +} + +fn bench_construction(c: &mut Criterion) { + let mut group = c.benchmark_group("types"); + group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic)); + + group.bench_function("construction", |b| { + b.iter(|| black_box(make_complex_type())) + }); + + group.finish(); +} + +criterion_group! { + name = benches; + config = Criterion::default(); + targets = + bench_construction, +} diff --git a/quantinuum-hugr/src/algorithm/const_fold.rs b/quantinuum-hugr/src/algorithm/const_fold.rs index 138769ae7..6fcec022d 100644 --- a/quantinuum-hugr/src/algorithm/const_fold.rs +++ b/quantinuum-hugr/src/algorithm/const_fold.rs @@ -4,6 +4,7 @@ use std::collections::{BTreeSet, HashMap}; use itertools::Itertools; +use crate::types::SumType; use crate::{ builder::{DFGBuilder, Dataflow, DataflowHugr}, extension::{ConstFoldResult, ExtensionRegistry}, @@ -14,8 +15,7 @@ use crate::{ }, ops::{Const, LeafOp}, type_row, - types::{FunctionType, Type, TypeEnum}, - values::Value, + types::FunctionType, Hugr, HugrView, IncomingPort, Node, SimpleReplacement, }; @@ -48,25 +48,20 @@ pub fn fold_leaf_op(op: &LeafOp, consts: &[(IncomingPort, Const)]) -> ConstFoldR match op { LeafOp::Noop { .. } => out_row([consts.first()?.1.clone()]), LeafOp::MakeTuple { .. } => { - out_row([Const::new_tuple(sorted_consts(consts).into_iter().cloned())]) + out_row([Const::tuple(sorted_consts(consts).into_iter().cloned())]) } LeafOp::UnpackTuple { .. 
} => { let c = &consts.first()?.1; - - if let Value::Tuple { vs } = c.value() { - if let TypeEnum::Tuple(tys) = c.const_type().as_type_enum() { - return out_row(tys.iter().zip(vs.iter()).map(|(t, v)| { - Const::new(v.clone(), t.clone()) - .expect("types should already have been checked") - })); - } - } - panic!("This op always takes a Tuple input."); + let Const::Tuple { vs } = c else { + panic!("This op always takes a Tuple input."); + }; + out_row(vs.iter().cloned()) } - LeafOp::Tag { tag, variants } => out_row([Const::new( - Value::sum(*tag, consts.iter().map(|(_, konst)| konst.value().clone())), - Type::new_sum(variants.clone()), + LeafOp::Tag { tag, variants } => out_row([Const::sum( + *tag, + consts.iter().map(|(_, konst)| konst.clone()), + SumType::new(variants.clone()), ) .unwrap()]), LeafOp::CustomOp(_) => { @@ -81,7 +76,7 @@ pub fn fold_leaf_op(op: &LeafOp, consts: &[(IncomingPort, Const)]) -> ConstFoldR /// Generate a graph that loads and outputs `consts` in order, validating /// against `reg`. 
fn const_graph(consts: Vec, reg: &ExtensionRegistry) -> Hugr { - let const_types = consts.iter().map(Const::const_type).cloned().collect_vec(); + let const_types = consts.iter().map(Const::const_type).collect_vec(); let mut b = DFGBuilder::new(FunctionType::new(type_row![], const_types)).unwrap(); let outputs = consts @@ -226,15 +221,12 @@ mod test { use crate::std_extensions::arithmetic::float_types::{ConstF64, FLOAT64_TYPE}; use crate::std_extensions::arithmetic::int_types::{ConstIntU, INT_TYPES}; use crate::std_extensions::logic::{self, NaryLogic}; + use rstest::rstest; /// int to constant fn i2c(b: u64) -> Const { - Const::new( - ConstIntU::new(5, b).unwrap().into(), - INT_TYPES[5].to_owned(), - ) - .unwrap() + Const::extension(ConstIntU::new(5, b).unwrap()) } /// float to constant @@ -262,10 +254,13 @@ mod test { int(x.0 - x.1) == 2 */ let sum_type = sum_with_error(INT_TYPES[5].to_owned()); - let mut build = - DFGBuilder::new(FunctionType::new(type_row![], vec![sum_type.clone()])).unwrap(); + let mut build = DFGBuilder::new(FunctionType::new( + type_row![], + vec![sum_type.clone().into()], + )) + .unwrap(); - let tup = build.add_load_const(Const::new_tuple([f2c(5.6), f2c(3.2)])); + let tup = build.add_load_const(Const::tuple([f2c(5.6), f2c(3.2)])); let unpack = build .add_dataflow_op( @@ -298,11 +293,7 @@ mod test { constant_fold_pass(&mut h, ®); - let expected = Value::Sum { - tag: 0, - values: vec![Box::new(i2c(2).value().clone())], - }; - let expected = Const::new(expected, sum_type).unwrap(); + let expected = Const::sum(0, [i2c(2).clone()], sum_type).unwrap(); assert_fully_folded(&h, &expected); } @@ -344,7 +335,7 @@ mod test { collections::EXTENSION.to_owned(), ]) .unwrap(); - let list: Const = ListValue::new(BOOL_T, vec![Value::unit_sum(1)]).into(); + let list: Const = ListValue::new(BOOL_T, [Const::unit_sum(0, 1).unwrap()]).into(); let mut build = DFGBuilder::new(FunctionType::new( type_row![], vec![list.const_type().clone()], diff --git 
a/quantinuum-hugr/src/algorithm/nest_cfgs.rs b/quantinuum-hugr/src/algorithm/nest_cfgs.rs index 57a89455a..174af2b3d 100644 --- a/quantinuum-hugr/src/algorithm/nest_cfgs.rs +++ b/quantinuum-hugr/src/algorithm/nest_cfgs.rs @@ -605,7 +605,7 @@ pub(crate) mod test { // \-> right -/ \-<--<-/ let mut cfg_builder = CFGBuilder::new(FunctionType::new(type_row![NAT], type_row![NAT]))?; - let pred_const = cfg_builder.add_constant(Const::unit_sum(0, 2)); + let pred_const = cfg_builder.add_constant(Const::unit_sum(0, 2).expect("0 < 2")); let const_unit = cfg_builder.add_constant(Const::unary_unit_sum()); let entry = n_identity( @@ -887,7 +887,7 @@ pub(crate) mod test { separate: bool, ) -> Result<(Hugr, BasicBlockID, BasicBlockID), BuildError> { let mut cfg_builder = CFGBuilder::new(FunctionType::new(type_row![NAT], type_row![NAT]))?; - let pred_const = cfg_builder.add_constant(Const::unit_sum(0, 2)); + let pred_const = cfg_builder.add_constant(Const::unit_sum(0, 2).expect("0 < 2")); let const_unit = cfg_builder.add_constant(Const::unary_unit_sum()); let entry = n_identity( @@ -929,7 +929,7 @@ pub(crate) mod test { cfg_builder: &mut CFGBuilder, separate_headers: bool, ) -> Result<(BasicBlockID, BasicBlockID), BuildError> { - let pred_const = cfg_builder.add_constant(Const::unit_sum(0, 2)); + let pred_const = cfg_builder.add_constant(Const::unit_sum(0, 2).expect("0 < 2")); let const_unit = cfg_builder.add_constant(Const::unary_unit_sum()); let entry = n_identity( diff --git a/quantinuum-hugr/src/builder/cfg.rs b/quantinuum-hugr/src/builder/cfg.rs index 70b60fee7..cd2923b12 100644 --- a/quantinuum-hugr/src/builder/cfg.rs +++ b/quantinuum-hugr/src/builder/cfg.rs @@ -52,7 +52,6 @@ use crate::{ /// types::{FunctionType, Type, SumType}, /// ops, /// type_row, -/// values::Value, /// }; /// /// const NAT: Type = prelude::USIZE_T; @@ -73,10 +72,11 @@ use crate::{ /// let [inw] = entry_b.input_wires_arr(); /// let entry = { /// // Pack the const "42" into the appropriate sum type. 
-/// let left_42 = -/// ops::Const::new_sum(0, -/// [prelude::ConstUsize::new(42).into()], -/// SumType::new(sum_variants.clone()))?; +/// let left_42 = ops::Const::sum( +/// 0, +/// [prelude::ConstUsize::new(42).into()], +/// SumType::new(sum_variants.clone()) +/// )?; /// let sum = entry_b.add_load_const(left_42); /// /// entry_b.finish_with_outputs(sum, [inw])? diff --git a/quantinuum-hugr/src/extension/prelude.rs b/quantinuum-hugr/src/extension/prelude.rs index d6bf068d3..cdccfdecd 100644 --- a/quantinuum-hugr/src/extension/prelude.rs +++ b/quantinuum-hugr/src/extension/prelude.rs @@ -3,15 +3,16 @@ use lazy_static::lazy_static; use smol_str::SmolStr; +use crate::types::SumType; use crate::{ extension::{ExtensionId, TypeDefBound}, + ops::constant::CustomConst, ops::LeafOp, type_row, types::{ type_param::{TypeArg, TypeParam}, CustomType, FunctionType, PolyFuncType, Type, TypeBound, }, - values::CustomConst, Extension, }; @@ -157,8 +158,8 @@ pub const ERROR_TYPE: Type = Type::new_extension(ERROR_CUSTOM_TYPE); pub const ERROR_TYPE_NAME: SmolStr = SmolStr::new_inline("error"); /// Return a Sum type with the first variant as the given type and the second an Error. 
-pub fn sum_with_error(ty: Type) -> Type { - Type::new_sum([vec![ty].into(), vec![ERROR_TYPE].into()]) +pub fn sum_with_error(ty: Type) -> SumType { + SumType::new([vec![ty], vec![ERROR_TYPE]]) } #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] @@ -184,7 +185,7 @@ impl CustomConst for ConstUsize { } fn equal_consts(&self, other: &dyn CustomConst) -> bool { - crate::values::downcast_equal_consts(self, other) + crate::ops::constant::downcast_equal_consts(self, other) } fn extension_reqs(&self) -> ExtensionSet { @@ -222,7 +223,7 @@ impl CustomConst for ConstError { } fn equal_consts(&self, other: &dyn CustomConst) -> bool { - crate::values::downcast_equal_consts(self, other) + crate::ops::constant::downcast_equal_consts(self, other) } fn extension_reqs(&self) -> ExtensionSet { diff --git a/quantinuum-hugr/src/hugr/rewrite.rs b/quantinuum-hugr/src/hugr/rewrite.rs index 151fcaca0..dd26b1ac2 100644 --- a/quantinuum-hugr/src/hugr/rewrite.rs +++ b/quantinuum-hugr/src/hugr/rewrite.rs @@ -18,10 +18,6 @@ pub trait Rewrite { type Error: std::error::Error; /// The type returned on successful application of the rewrite. type ApplyResult; - /// The node iterator returned by [`Rewrite::invalidation_set`] - type InvalidationSet<'a>: Iterator + 'a - where - Self: 'a; /// If `true`, [self.apply]'s of this rewrite guarantee that they do not mutate the Hugr when they return an Err. /// If `false`, there is no guarantee; the Hugr should be assumed invalid when Err is returned. @@ -47,7 +43,7 @@ pub trait Rewrite { /// /// Two `impl Rewrite`s can be composed if their invalidation sets are /// disjoint. - fn invalidation_set(&self) -> Self::InvalidationSet<'_>; + fn invalidation_set(&self) -> impl Iterator; } /// Wraps any rewrite into a transaction (i.e. 
that has no effect upon failure) @@ -60,9 +56,6 @@ pub struct Transactional { impl Rewrite for Transactional { type Error = R::Error; type ApplyResult = R::ApplyResult; - type InvalidationSet<'a> = R::InvalidationSet<'a> - where - Self: 'a; const UNCHANGED_ON_FAILURE: bool = true; fn verify(&self, h: &impl HugrView) -> Result<(), Self::Error> { @@ -93,7 +86,7 @@ impl Rewrite for Transactional { } #[inline] - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { self.underlying.invalidation_set() } } diff --git a/quantinuum-hugr/src/hugr/rewrite/consts.rs b/quantinuum-hugr/src/hugr/rewrite/consts.rs index a42b7d2c9..3fbd0aabd 100644 --- a/quantinuum-hugr/src/hugr/rewrite/consts.rs +++ b/quantinuum-hugr/src/hugr/rewrite/consts.rs @@ -29,8 +29,6 @@ impl Rewrite for RemoveLoadConstant { // The Const node the LoadConstant was connected to. type ApplyResult = Node; - type InvalidationSet<'a> = iter::Once; - const UNCHANGED_ON_FAILURE: bool = true; fn verify(&self, h: &impl HugrView) -> Result<(), Self::Error> { @@ -64,7 +62,7 @@ impl Rewrite for RemoveLoadConstant { Ok(source) } - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { iter::once(self.0) } } @@ -79,8 +77,6 @@ impl Rewrite for RemoveConst { // The parent of the Const node. 
type ApplyResult = Node; - type InvalidationSet<'a> = iter::Once; - const UNCHANGED_ON_FAILURE: bool = true; fn verify(&self, h: &impl HugrView) -> Result<(), Self::Error> { @@ -108,7 +104,7 @@ impl Rewrite for RemoveConst { Ok(parent) } - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { iter::once(self.0) } } diff --git a/quantinuum-hugr/src/hugr/rewrite/inline_dfg.rs b/quantinuum-hugr/src/hugr/rewrite/inline_dfg.rs index 9d3775a7c..e5a6c4062 100644 --- a/quantinuum-hugr/src/hugr/rewrite/inline_dfg.rs +++ b/quantinuum-hugr/src/hugr/rewrite/inline_dfg.rs @@ -25,8 +25,6 @@ impl Rewrite for InlineDFG { type ApplyResult = [Node; 3]; type Error = InlineDFGError; - type InvalidationSet<'a> = <[Node; 1] as IntoIterator>::IntoIter; - const UNCHANGED_ON_FAILURE: bool = true; fn verify(&self, h: &impl crate::HugrView) -> Result<(), Self::Error> { @@ -122,7 +120,7 @@ impl Rewrite for InlineDFG { Ok([n, input, output]) } - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { [self.0.node()].into_iter() } } @@ -147,7 +145,6 @@ mod test { use crate::std_extensions::arithmetic::int_types::{self, ConstIntU}; use crate::types::FunctionType; use crate::utils::test_quantum_extension; - use crate::values::Value; use crate::{type_row, Direction, HugrView, Node, Port}; use crate::{Hugr, Wire}; @@ -186,12 +183,7 @@ mod test { d: &mut DFGBuilder, ) -> Result> { let int_ty = &int_types::INT_TYPES[6]; - let cst = Const::new( - Value::Extension { - c: (Box::new(ConstIntU::new(6, 15)?),), - }, - int_ty.clone(), - )?; + let cst = Const::extension(ConstIntU::new(6, 15)?); let c1 = d.add_load_const(cst); let [lifted] = d .add_dataflow_op( diff --git a/quantinuum-hugr/src/hugr/rewrite/insert_identity.rs b/quantinuum-hugr/src/hugr/rewrite/insert_identity.rs index 540ff05cc..ee61db971 100644 --- a/quantinuum-hugr/src/hugr/rewrite/insert_identity.rs +++ 
b/quantinuum-hugr/src/hugr/rewrite/insert_identity.rs @@ -48,9 +48,6 @@ impl Rewrite for IdentityInsertion { type Error = IdentityInsertionError; /// The inserted node. type ApplyResult = Node; - type InvalidationSet<'a> = iter::Once - where - Self: 'a; const UNCHANGED_ON_FAILURE: bool = true; fn verify(&self, _h: &impl HugrView) -> Result<(), IdentityInsertionError> { /* @@ -90,7 +87,7 @@ impl Rewrite for IdentityInsertion { } #[inline] - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { iter::once(self.post_node) } } diff --git a/quantinuum-hugr/src/hugr/rewrite/outline_cfg.rs b/quantinuum-hugr/src/hugr/rewrite/outline_cfg.rs index f6120e4a5..43f3b12f9 100644 --- a/quantinuum-hugr/src/hugr/rewrite/outline_cfg.rs +++ b/quantinuum-hugr/src/hugr/rewrite/outline_cfg.rs @@ -1,6 +1,5 @@ //! Rewrite for inserting a CFG-node into the hierarchy containing a subsection of an existing CFG -use std::collections::{hash_set, HashSet}; -use std::iter; +use std::collections::HashSet; use itertools::Itertools; use thiserror::Error; @@ -101,9 +100,6 @@ impl Rewrite for OutlineCfg { /// /// [CFG]: OpType::CFG type ApplyResult = (Node, Node); - type InvalidationSet<'a> = iter::Copied> - where - Self: 'a; const UNCHANGED_ON_FAILURE: bool = true; fn verify(&self, h: &impl HugrView) -> Result<(), OutlineCfgError> { @@ -216,8 +212,7 @@ impl Rewrite for OutlineCfg { Ok((new_block, cfg_node)) } - #[inline] - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { self.blocks.iter().copied() } } diff --git a/quantinuum-hugr/src/hugr/rewrite/replace.rs b/quantinuum-hugr/src/hugr/rewrite/replace.rs index aa4dca469..dc54f6652 100644 --- a/quantinuum-hugr/src/hugr/rewrite/replace.rs +++ b/quantinuum-hugr/src/hugr/rewrite/replace.rs @@ -1,8 +1,6 @@ //! Implementation of the `Replace` operation. 
use std::collections::{HashMap, HashSet, VecDeque}; -use std::iter::Copied; -use std::slice::Iter; use itertools::Itertools; use thiserror::Error; @@ -215,10 +213,6 @@ impl Rewrite for Replacement { type ApplyResult = (); - type InvalidationSet<'a> = Copied> - where - Self: 'a; - const UNCHANGED_ON_FAILURE: bool = false; fn verify(&self, h: &impl crate::HugrView) -> Result<(), Self::Error> { @@ -331,7 +325,7 @@ impl Rewrite for Replacement { Ok(()) } - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { self.removal.iter().copied() } } diff --git a/quantinuum-hugr/src/hugr/rewrite/simple_replace.rs b/quantinuum-hugr/src/hugr/rewrite/simple_replace.rs index f7d836d58..ebee0d1bf 100644 --- a/quantinuum-hugr/src/hugr/rewrite/simple_replace.rs +++ b/quantinuum-hugr/src/hugr/rewrite/simple_replace.rs @@ -1,8 +1,6 @@ //! Implementation of the `SimpleReplace` operation. -use std::collections::{hash_map, HashMap}; -use std::iter::{self, Copied}; -use std::slice; +use std::collections::HashMap; use crate::hugr::views::SiblingSubgraph; use crate::hugr::{HugrMut, HugrView, NodeMetadataMap, Rewrite}; @@ -55,19 +53,9 @@ impl SimpleReplacement { } } -type SubgraphNodesIter<'a> = Copied>; -type NuOutNodesIter<'a> = iter::Map< - hash_map::Keys<'a, (Node, IncomingPort), IncomingPort>, - fn(&'a (Node, IncomingPort)) -> Node, ->; - impl Rewrite for SimpleReplacement { type Error = SimpleReplacementError; type ApplyResult = (); - type InvalidationSet<'a> = iter::Chain, NuOutNodesIter<'a>> - where - Self: 'a; - const UNCHANGED_ON_FAILURE: bool = true; fn verify(&self, _h: &impl HugrView) -> Result<(), SimpleReplacementError> { @@ -184,10 +172,9 @@ impl Rewrite for SimpleReplacement { } #[inline] - fn invalidation_set(&self) -> Self::InvalidationSet<'_> { + fn invalidation_set(&self) -> impl Iterator { let subcirc = self.subgraph.nodes().iter().copied(); - let get_node: fn(&(Node, IncomingPort)) -> Node = |key| key.0; - let 
out_neighs = self.nu_out.keys().map(get_node); + let out_neighs = self.nu_out.keys().map(|key| key.0); subcirc.chain(out_neighs) } } diff --git a/quantinuum-hugr/src/hugr/validate/test.rs b/quantinuum-hugr/src/hugr/validate/test.rs index 753766c9a..f5db71456 100644 --- a/quantinuum-hugr/src/hugr/validate/test.rs +++ b/quantinuum-hugr/src/hugr/validate/test.rs @@ -15,7 +15,6 @@ use crate::std_extensions::logic::test::{and_op, or_op}; use crate::std_extensions::logic::{self, NotOp}; use crate::types::type_param::{TypeArg, TypeArgError, TypeParam}; use crate::types::{CustomType, FunctionType, PolyFuncType, Type, TypeBound, TypeRow}; -use crate::values::Value; use crate::{type_row, Direction, IncomingPort, Node}; const NAT: Type = crate::extension::prelude::USIZE_T; @@ -522,12 +521,11 @@ fn no_polymorphic_consts() -> Result<(), Box> { .with_extension_delta(collections::EXTENSION_NAME), ), )?; - let empty_list = Value::Extension { - c: (Box::new(collections::ListValue::new_empty( - Type::new_var_use(0, TypeBound::Copyable), - )),), - }; - let cst = def.add_load_const(Const::new(empty_list, list_of_var)?); + let empty_list = Const::extension(collections::ListValue::new_empty(Type::new_var_use( + 0, + TypeBound::Copyable, + ))); + let cst = def.add_load_const(empty_list); let res = def.finish_hugr_with_outputs([cst], ®); assert_matches!( res.unwrap_err(), @@ -562,7 +560,8 @@ mod extension_tests { /// /// Returns the node indices of each of the operations. 
fn add_block_children(b: &mut Hugr, parent: Node, sum_size: usize) -> (Node, Node, Node, Node) { - let const_op = ops::Const::unit_sum(0, sum_size as u8); + let const_op = + ops::Const::unit_sum(0, sum_size as u8).expect("`sum_size` must be greater than 0"); let tag_type = Type::new_unit_sum(sum_size as u8); let input = b.add_node_with_parent(parent, ops::Input::new(type_row![BOOL_T])); diff --git a/quantinuum-hugr/src/hugr/views.rs b/quantinuum-hugr/src/hugr/views.rs index 0a0cf7d1b..649ae522e 100644 --- a/quantinuum-hugr/src/hugr/views.rs +++ b/quantinuum-hugr/src/hugr/views.rs @@ -28,8 +28,8 @@ use super::{Hugr, HugrError, NodeMetadata, NodeMetadataMap, NodeType, DEFAULT_NO use crate::ops::handle::NodeHandle; use crate::ops::{OpParent, OpTag, OpTrait, OpType}; -use crate::types::Type; use crate::types::{EdgeKind, FunctionType}; +use crate::types::{PolyFuncType, Type}; use crate::{Direction, IncomingPort, Node, OutgoingPort, Port}; use itertools::Either; @@ -327,11 +327,37 @@ pub trait HugrView: sealed::HugrInternals { } } - /// For HUGRs with a [`DataflowParent`][crate::ops::DataflowParent] root operation, report the - /// signature of the inner dataflow sibling graph. Otherwise return None. - fn get_function_type(&self) -> Option { - let op = self.get_nodetype(self.root()); - op.op.inner_function_type() + /// Returns the function type defined by this dataflow HUGR. + /// + /// If the root of the Hugr is a + /// [`DataflowParent`][crate::ops::DataflowParent] operation, report the + /// signature corresponding to the input and output node of its sibling + /// graph. Otherwise, returns `None`. + /// + /// In contrast to [`get_function_type`][HugrView::get_function_type], this + /// method always return a concrete [`FunctionType`]. + fn get_df_function_type(&self) -> Option { + let op = self.get_optype(self.root()); + op.inner_function_type() + } + + /// Returns the function type defined by this HUGR. 
+ /// + /// For HUGRs with a [`DataflowParent`][crate::ops::DataflowParent] root + /// operation, report the signature of the inner dataflow sibling graph. + /// + /// For HUGRS with a [`FuncDecl`][crate::ops::FuncDecl] or + /// [`FuncDefn`][crate::ops::FuncDefn] root operation, report the signature + /// of the function. + /// + /// Otherwise, returns `None`. + fn get_function_type(&self) -> Option { + let op = self.get_optype(self.root()); + match op { + OpType::FuncDecl(decl) => Some(decl.signature.clone()), + OpType::FuncDefn(defn) => Some(defn.signature.clone()), + _ => op.inner_function_type().map(PolyFuncType::from), + } } /// Return a wrapper over the view that can be used in petgraph algorithms. diff --git a/quantinuum-hugr/src/hugr/views/descendants.rs b/quantinuum-hugr/src/hugr/views/descendants.rs index 3a60f4755..453503509 100644 --- a/quantinuum-hugr/src/hugr/views/descendants.rs +++ b/quantinuum-hugr/src/hugr/views/descendants.rs @@ -261,12 +261,12 @@ pub(super) mod test { assert_eq!( region.get_function_type(), - Some(FunctionType::new_endo(type_row![NAT, QB])) + Some(FunctionType::new_endo(type_row![NAT, QB]).into()) ); let inner_region: DescendantsGraph = DescendantsGraph::try_new(&hugr, inner)?; assert_eq!( inner_region.get_function_type(), - Some(FunctionType::new(type_row![NAT], type_row![NAT])) + Some(FunctionType::new(type_row![NAT], type_row![NAT]).into()) ); Ok(()) diff --git a/quantinuum-hugr/src/hugr/views/sibling.rs b/quantinuum-hugr/src/hugr/views/sibling.rs index e07897ac1..7d69cc4b0 100644 --- a/quantinuum-hugr/src/hugr/views/sibling.rs +++ b/quantinuum-hugr/src/hugr/views/sibling.rs @@ -439,7 +439,7 @@ mod test { fn flat_mut(mut simple_dfg_hugr: Hugr) { simple_dfg_hugr.update_validate(&PRELUDE_REGISTRY).unwrap(); let root = simple_dfg_hugr.root(); - let signature = simple_dfg_hugr.get_function_type().unwrap(); + let signature = simple_dfg_hugr.get_df_function_type().unwrap().clone(); let sib_mut = SiblingMut::::try_new(&mut 
simple_dfg_hugr, root); assert_eq!( diff --git a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_cfg.snap b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_cfg.snap index fa89312ff..397c1d92f 100644 --- a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_cfg.snap +++ b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_cfg.snap @@ -1,5 +1,46 @@ --- -source: src/hugr/views/tests.rs +source: quantinuum-hugr/src/hugr/views/tests.rs expression: h.dot_string() --- -"digraph {\n0 [shape=plain label=<
(0) CFG
>]\n1 [shape=plain label=<
0
(1) ExitBlock
>]\n2 [shape=plain label=<
0
(2) DataflowBlock
01
>]\n2:out0 -> 6:in0 [style=\"dashed\"]\n2:out1 -> 1:in0 [style=\"dashed\"]\n3 [shape=plain label=<
(3) Input
0: usize
>]\n3:out0 -> 5:in0 [style=\"\"]\n4 [shape=plain label=<
0: Sum(General([TypeRow { types: [Type(Extension(CustomType { extension: IdentList(\"prelude\"), id: \"usize\", args: [], bound: Eq }), Eq)] }, TypeRow { types: [Type(Extension(CustomType { extension: IdentList(\"prelude\"), id: \"usize\", args: [], bound: Eq }), Eq)] }]))
(4) Output
>]\n5 [shape=plain label=<
0: usize
(5) Tag
0: Sum(General([TypeRow { types: [Type(Extension(CustomType { extension: IdentList(\"prelude\"), id: \"usize\", args: [], bound: Eq }), Eq)] }, TypeRow { types: [Type(Extension(CustomType { extension: IdentList(\"prelude\"), id: \"usize\", args: [], bound: Eq }), Eq)] }]))
>]\n5:out0 -> 4:in0 [style=\"\"]\n6 [shape=plain label=<
0
(6) DataflowBlock
0
>]\n6:out0 -> 1:in0 [style=\"dashed\"]\n7 [shape=plain label=<
(7) Input
0: usize
>]\n7:out0 -> 8:in1 [style=\"\"]\n8 [shape=plain label=<
0: Sum(UnitSum(1))1: usize
(8) Output
>]\n9 [shape=plain label=<
(9) const:sum:{tag:0, vals:[]}
0: Sum(UnitSum(1))
>]\n9:out0 -> 10:in0 [style=\"\"]\n10 [shape=plain label=<
0: Sum(UnitSum(1))
(10) LoadConstant
0: Sum(UnitSum(1))
>]\n10:out0 -> 8:in0 [style=\"\"]\nhier0 [shape=plain label=\"0\"]\nhier0 -> hier2 [style = \"dashed\"] \nhier0 -> hier1 [style = \"dashed\"] \nhier0 -> hier6 [style = \"dashed\"] \nhier1 [shape=plain label=\"1\"]\nhier2 [shape=plain label=\"2\"]\nhier2 -> hier3 [style = \"dashed\"] \nhier2 -> hier4 [style = \"dashed\"] \nhier2 -> hier5 [style = \"dashed\"] \nhier3 [shape=plain label=\"3\"]\nhier4 [shape=plain label=\"4\"]\nhier5 [shape=plain label=\"5\"]\nhier6 [shape=plain label=\"6\"]\nhier6 -> hier7 [style = \"dashed\"] \nhier6 -> hier8 [style = \"dashed\"] \nhier6 -> hier9 [style = \"dashed\"] \nhier6 -> hier10 [style = \"dashed\"] \nhier7 [shape=plain label=\"7\"]\nhier8 [shape=plain label=\"8\"]\nhier9 [shape=plain label=\"9\"]\nhier10 [shape=plain label=\"10\"]\n}\n" +digraph { +0 [shape=plain label=<
(0) CFG
>] +1 [shape=plain label=<
0
(1) ExitBlock
>] +2 [shape=plain label=<
0
(2) DataflowBlock
01
>] +2:out0 -> 6:in0 [style="dashed"] +2:out1 -> 1:in0 [style="dashed"] +3 [shape=plain label=<
(3) Input
0: usize
>] +3:out0 -> 5:in0 [style=""] +4 [shape=plain label=<
0: [usize]+[usize]
(4) Output
>] +5 [shape=plain label=<
0: usize
(5) Tag
0: [usize]+[usize]
>] +5:out0 -> 4:in0 [style=""] +6 [shape=plain label=<
0
(6) DataflowBlock
0
>] +6:out0 -> 1:in0 [style="dashed"] +7 [shape=plain label=<
(7) Input
0: usize
>] +7:out0 -> 8:in1 [style=""] +8 [shape=plain label=<
0: []1: usize
(8) Output
>] +9 [shape=plain label=<
(9) const:sum:{tag:0, vals:[]}
0: []
>] +9:out0 -> 10:in0 [style=""] +10 [shape=plain label=<
0: []
(10) LoadConstant
0: []
>] +10:out0 -> 8:in0 [style=""] +hier0 [shape=plain label="0"] +hier0 -> hier2 [style = "dashed"] +hier0 -> hier1 [style = "dashed"] +hier0 -> hier6 [style = "dashed"] +hier1 [shape=plain label="1"] +hier2 [shape=plain label="2"] +hier2 -> hier3 [style = "dashed"] +hier2 -> hier4 [style = "dashed"] +hier2 -> hier5 [style = "dashed"] +hier3 [shape=plain label="3"] +hier4 [shape=plain label="4"] +hier5 [shape=plain label="5"] +hier6 [shape=plain label="6"] +hier6 -> hier7 [style = "dashed"] +hier6 -> hier8 [style = "dashed"] +hier6 -> hier9 [style = "dashed"] +hier6 -> hier10 [style = "dashed"] +hier7 [shape=plain label="7"] +hier8 [shape=plain label="8"] +hier9 [shape=plain label="9"] +hier10 [shape=plain label="10"] +} diff --git a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_dfg.snap b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_dfg.snap index 5e7d86242..1eb650776 100644 --- a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_dfg.snap +++ b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_dfg.snap @@ -1,5 +1,27 @@ --- -source: src/hugr/views/tests.rs +source: quantinuum-hugr/src/hugr/views/tests.rs expression: h.dot_string() --- -"digraph {\n0 [shape=plain label=<
(0) DFG
>]\n1 [shape=plain label=<
(1) Input
0: qubit1: qubit
>]\n1:out0 -> 3:in0 [style=\"\"]\n1:out1 -> 3:in1 [style=\"\"]\n2 [shape=plain label=<
0: qubit1: qubit
(2) Output
>]\n3 [shape=plain label=<
0: qubit1: qubit
(3) test.quantum.CX
0: qubit1: qubit
>]\n3:out0 -> 4:in1 [style=\"\"]\n3:out1 -> 4:in0 [style=\"\"]\n3:out2 -> 4:in2 [style=\"dotted\"]\n4 [shape=plain label=<
0: qubit1: qubit
(4) test.quantum.CX
0: qubit1: qubit
>]\n4:out0 -> 2:in0 [style=\"\"]\n4:out1 -> 2:in1 [style=\"\"]\nhier0 [shape=plain label=\"0\"]\nhier0 -> hier1 [style = \"dashed\"] \nhier0 -> hier2 [style = \"dashed\"] \nhier0 -> hier3 [style = \"dashed\"] \nhier0 -> hier4 [style = \"dashed\"] \nhier1 [shape=plain label=\"1\"]\nhier2 [shape=plain label=\"2\"]\nhier3 [shape=plain label=\"3\"]\nhier4 [shape=plain label=\"4\"]\n}\n" +digraph { +0 [shape=plain label=<
(0) DFG
>] +1 [shape=plain label=<
(1) Input
0: qubit1: qubit
>] +1:out0 -> 3:in0 [style=""] +1:out1 -> 3:in1 [style=""] +2 [shape=plain label=<
0: qubit1: qubit
(2) Output
>] +3 [shape=plain label=<
0: qubit1: qubit
(3) test.quantum.CX
0: qubit1: qubit
>] +3:out0 -> 4:in1 [style=""] +3:out1 -> 4:in0 [style=""] +3:out2 -> 4:in2 [style="dotted"] +4 [shape=plain label=<
0: qubit1: qubit
(4) test.quantum.CX
0: qubit1: qubit
>] +4:out0 -> 2:in0 [style=""] +4:out1 -> 2:in1 [style=""] +hier0 [shape=plain label="0"] +hier0 -> hier1 [style = "dashed"] +hier0 -> hier2 [style = "dashed"] +hier0 -> hier3 [style = "dashed"] +hier0 -> hier4 [style = "dashed"] +hier1 [shape=plain label="1"] +hier2 [shape=plain label="2"] +hier3 [shape=plain label="3"] +hier4 [shape=plain label="4"] +} diff --git a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_empty_dfg.snap b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_empty_dfg.snap index 7180af21f..278a39718 100644 --- a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_empty_dfg.snap +++ b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__dot_empty_dfg.snap @@ -1,5 +1,15 @@ --- -source: src/hugr/views/tests.rs +source: quantinuum-hugr/src/hugr/views/tests.rs expression: h.dot_string() --- -"digraph {\n0 [shape=plain label=<
(0) DFG
>]\n1 [shape=plain label=<
(1) Input
0: Sum(UnitSum(2))
>]\n1:out0 -> 2:in0 [style=\"\"]\n2 [shape=plain label=<
0: Sum(UnitSum(2))
(2) Output
>]\nhier0 [shape=plain label=\"0\"]\nhier0 -> hier1 [style = \"dashed\"] \nhier0 -> hier2 [style = \"dashed\"] \nhier1 [shape=plain label=\"1\"]\nhier2 [shape=plain label=\"2\"]\n}\n" +digraph { +0 [shape=plain label=<
(0) DFG
>] +1 [shape=plain label=<
(1) Input
0: []+[]
>] +1:out0 -> 2:in0 [style=""] +2 [shape=plain label=<
0: []+[]
(2) Output
>] +hier0 [shape=plain label="0"] +hier0 -> hier1 [style = "dashed"] +hier0 -> hier2 [style = "dashed"] +hier1 [shape=plain label="1"] +hier2 [shape=plain label="2"] +} diff --git a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_cfg.snap b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_cfg.snap index b796a8c7e..8a89d24ba 100644 --- a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_cfg.snap +++ b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_cfg.snap @@ -1,5 +1,5 @@ --- -source: src/hugr/views/tests.rs +source: quantinuum-hugr/src/hugr/views/tests.rs expression: h.mermaid_string() --- graph LR @@ -11,7 +11,7 @@ graph LR 3--"0:0
usize"-->5 4["(4) Output"] 5["(5) Tag"] - 5--"0:0
Sum(General([TypeRow { types: [Type(Extension(CustomType { extension: IdentList(#quot;prelude#quot;), id: #quot;usize#quot;, args: [], bound: Eq }), Eq)] }, TypeRow { types: [Type(Extension(CustomType { extension: IdentList(#quot;prelude#quot;), id: #quot;usize#quot;, args: [], bound: Eq }), Eq)] }]))"-->4 + 5--"0:0
[usize]+[usize]"-->4 end 2-."0:0".->6 2-."1:0".->1 @@ -22,9 +22,9 @@ graph LR 7--"0:1
usize"-->8 8["(8) Output"] 9["(9) const:sum:{tag:0, vals:[]}"] - 9--"0:0
Sum(UnitSum(1))"-->10 + 9--"0:0
[]"-->10 10["(10) LoadConstant"] - 10--"0:0
Sum(UnitSum(1))"-->8 + 10--"0:0
[]"-->8 end 6-."0:0".->1 end diff --git a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_empty_dfg.snap b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_empty_dfg.snap index d807ab294..df8a755b7 100644 --- a/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_empty_dfg.snap +++ b/quantinuum-hugr/src/hugr/views/snapshots/hugr__hugr__views__tests__mmd_empty_dfg.snap @@ -1,11 +1,11 @@ --- -source: src/hugr/views/tests.rs +source: quantinuum-hugr/src/hugr/views/tests.rs expression: h.mermaid_string() --- graph LR subgraph 0 ["(0) DFG"] direction LR 1["(1) Input"] - 1--"0:0
Sum(UnitSum(2))"-->2 + 1--"0:0
[]+[]"-->2 2["(2) Output"] end diff --git a/quantinuum-hugr/src/hugr/views/tests.rs b/quantinuum-hugr/src/hugr/views/tests.rs index 200d0e89c..678459578 100644 --- a/quantinuum-hugr/src/hugr/views/tests.rs +++ b/quantinuum-hugr/src/hugr/views/tests.rs @@ -71,7 +71,7 @@ fn node_connections( #[case::empty_dfg("dot_empty_dfg", crate::builder::test::simple_dfg_hugr())] #[cfg_attr(miri, ignore)] // Opening files is not supported in (isolated) miri fn dot_string(#[case] test_name: &str, #[case] h: Hugr) { - insta::assert_yaml_snapshot!(test_name, h.dot_string()); + insta::assert_snapshot!(test_name, h.dot_string()); } /// Render some hugrs into mermaid format. diff --git a/quantinuum-hugr/src/lib.rs b/quantinuum-hugr/src/lib.rs index 85b7502a7..e83ea9fbd 100644 --- a/quantinuum-hugr/src/lib.rs +++ b/quantinuum-hugr/src/lib.rs @@ -149,7 +149,6 @@ pub mod ops; pub mod std_extensions; pub mod types; mod utils; -pub mod values; pub use crate::core::{ CircuitUnit, Direction, IncomingPort, Node, NodeIndex, OutgoingPort, Port, PortIndex, Wire, diff --git a/quantinuum-hugr/src/ops.rs b/quantinuum-hugr/src/ops.rs index 07a7d3252..7145b35e1 100644 --- a/quantinuum-hugr/src/ops.rs +++ b/quantinuum-hugr/src/ops.rs @@ -380,6 +380,8 @@ pub trait OpTrait { pub trait OpParent { /// The inner function type of the operation, if it has a child dataflow /// sibling graph. + /// + /// Non-container ops like `FuncDecl` return `None` even though they represent a function. fn inner_function_type(&self) -> Option { None } diff --git a/quantinuum-hugr/src/ops/constant.rs b/quantinuum-hugr/src/ops/constant.rs index 7c9d94488..d18cff4dd 100644 --- a/quantinuum-hugr/src/ops/constant.rs +++ b/quantinuum-hugr/src/ops/constant.rs @@ -1,69 +1,197 @@ //! Constant value definitions. 
-use crate::{ - extension::ExtensionSet, - types::{ConstTypeError, EdgeKind, SumType, Type}, - values::{CustomConst, Value}, -}; +mod custom; +use super::{OpName, OpTrait, StaticTag}; +use super::{OpTag, OpType}; +use crate::extension::ExtensionSet; +use crate::types::{CustomType, EdgeKind, SumType, SumTypeError, Type}; +use crate::{Hugr, HugrView}; + +use itertools::Itertools; use smol_str::SmolStr; +use thiserror::Error; -use super::OpTag; -use super::{OpName, OpTrait, StaticTag}; +pub use custom::{downcast_equal_consts, CustomConst, CustomSerialized}; -/// A constant value definition. +/// An operation returning a constant value. +/// +/// Represents core types and extension types. #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] -pub struct Const { - value: Value, - typ: Type, +#[serde(tag = "c")] +pub enum Const { + /// An extension constant value, that can check it is of a given [CustomType]. + Extension { + /// The custom constant value. + e: ExtensionConst, + }, + /// A higher-order function value. + // TODO use a root parametrised hugr, e.g. Hugr. + Function { + /// A Hugr defining the function. + hugr: Box, + }, + /// A tuple + Tuple { + /// Constant values in the tuple. + vs: Vec, + }, + /// A Sum variant, with a tag indicating the index of the variant and its + /// value. + Sum { + /// The tag index of the variant. + tag: usize, + /// The value of the variant. + /// + /// Sum variants are always a row of values, hence the Vec. + #[serde(rename = "vs")] + values: Vec, + /// The full type of the Sum, including the other variants. + #[serde(rename = "typ")] + sum_type: SumType, + }, } -impl Const { - /// Creates a new Const, type-checking the value. - pub fn new(value: Value, typ: Type) -> Result { - typ.check_type(&value)?; - Ok(Self { value, typ }) +/// Boxed [`CustomConst`] trait object. +/// +/// Use [`Const::extension`] to create a new variant of this type. 
+/// +/// This is required to avoid in +/// [`Const::Extension`], while implementing a transparent encoding into a +/// `CustomConst`. +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +#[serde(transparent)] +pub struct ExtensionConst(pub(super) Box); + +impl PartialEq for ExtensionConst { + fn eq(&self, other: &Self) -> bool { + self.0.equal_consts(other.0.as_ref()) } +} - /// Returns a reference to the value of this [`Const`]. - pub fn value(&self) -> &Value { - &self.value - } +/// Struct for custom type check fails. +#[derive(Clone, Debug, PartialEq, Eq, Error)] +pub enum CustomCheckFailure { + /// The value had a specific type that was not what was expected + #[error("Expected type: {expected} but value was of type: {found}")] + TypeMismatch { + /// The expected custom type. + expected: CustomType, + /// The custom type found when checking. + found: Type, + }, + /// Any other message + #[error("{0}")] + Message(String), +} +/// Errors that arise from typechecking constants +#[derive(Clone, Debug, PartialEq, Error)] +pub enum ConstTypeError { + /// Invalid sum type definition. + #[error("{0}")] + SumType(#[from] SumTypeError), + /// Function constant missing a function type. + #[error( + "A function constant cannot be defined using a Hugr with root of type {}.", + .hugr_root_type.name() + )] + FunctionTypeMissing { + /// The root node type of the Hugr defining the function constant. + hugr_root_type: OpType, + }, + /// A mismatch between the type expected and the value. + #[error("Value {1:?} does not match expected type {0}")] + ConstCheckFail(Type, Const), + /// Error when checking a custom value. + #[error("Error when checking custom type: {0:?}")] + CustomCheckFail(#[from] CustomCheckFailure), +} + +impl Const { /// Returns a reference to the type of this [`Const`]. 
- pub fn const_type(&self) -> &Type { - &self.typ + pub fn const_type(&self) -> Type { + match self { + Self::Extension { e } => e.0.get_type(), + Self::Tuple { vs } => Type::new_tuple(vs.iter().map(Self::const_type).collect_vec()), + Self::Sum { sum_type, .. } => sum_type.clone().into(), + Self::Function { hugr } => { + let func_type = hugr.get_function_type().unwrap_or_else(|| { + panic!( + "{}", + ConstTypeError::FunctionTypeMissing { + hugr_root_type: hugr.get_optype(hugr.root()).clone() + } + ) + }); + Type::new_function(func_type) + } + } } /// Creates a new Const Sum. The value is determined by `items` and is /// type-checked `typ` - pub fn new_sum( + pub fn sum( tag: usize, items: impl IntoIterator, typ: SumType, ) -> Result { - Self::new( - Value::sum(tag, items.into_iter().map(|x| x.value().to_owned())), - typ.into(), - ) + let values: Vec = items.into_iter().collect(); + typ.check_type(tag, &values)?; + Ok(Self::Sum { + tag, + values, + sum_type: typ, + }) } - /// Constant Sum over units, used as branching values. - pub fn unit_sum(tag: usize, size: u8) -> Self { - Self { - value: Value::unit_sum(tag), - typ: Type::new_unit_sum(size), + /// Returns a tuple constant of constant values. + pub fn tuple(items: impl IntoIterator) -> Self { + Self::Tuple { + vs: items.into_iter().collect(), + } + } + + /// Returns a constant function defined by a Hugr. + /// + /// # Errors + /// + /// Returns an error if the Hugr root node does not define a function. + pub fn function(hugr: impl Into) -> Result { + let hugr = hugr.into(); + if hugr.get_function_type().is_none() { + Err(ConstTypeError::FunctionTypeMissing { + hugr_root_type: hugr.get_optype(hugr.root()).clone(), + })?; } + Ok(Self::Function { + hugr: Box::new(hugr), + }) + } + + /// Constant unit type (empty Tuple). + pub const fn unit() -> Self { + Self::Tuple { vs: vec![] } + } + + /// Constant Sum over units, used as branching values. 
+ pub fn unit_sum(tag: usize, size: u8) -> Result { + Self::sum(tag, [], SumType::Unit { size }) } /// Constant Sum over units, with only one variant. pub fn unary_unit_sum() -> Self { - Self::unit_sum(0, 1) + Self::unit_sum(0, 1).expect("0 < 1") } - /// Constant "true" value, i.e. the second variant of Sum((), ()). + /// Returns a constant "true" value, i.e. the second variant of Sum((), ()). pub fn true_val() -> Self { - Self::unit_sum(1, 2) + Self::unit_sum(1, 2).expect("1 < 2") + } + + /// Returns a constant "false" value, i.e. the first variant of Sum((), ()). + pub fn false_val() -> Self { + Self::unit_sum(0, 2).expect("0 < 2") } /// Generate a constant equivalent of a boolean, @@ -76,29 +204,42 @@ impl Const { } } - /// Constant "false" value, i.e. the first variant of Sum((), ()). - pub fn false_val() -> Self { - Self::unit_sum(0, 2) - } - - /// Tuple of values - pub fn new_tuple(items: impl IntoIterator) -> Self { - let (values, types): (Vec, Vec) = items - .into_iter() - .map(|Const { value, typ }| (value, typ)) - .unzip(); - Self::new(Value::tuple(values), Type::new_tuple(types)).unwrap() + /// Returns a tuple constant of constant values. + pub fn extension(custom_const: impl CustomConst) -> Self { + Self::Extension { + e: ExtensionConst(Box::new(custom_const)), + } } /// For a Const holding a CustomConst, extract the CustomConst by downcasting. 
pub fn get_custom_value(&self) -> Option<&T> { - self.value().get_custom_value() + if let Self::Extension { e } = self { + e.0.downcast_ref() + } else { + None + } } } impl OpName for Const { fn name(&self) -> SmolStr { - self.value.name().into() + match self { + Self::Extension { e } => format!("const:custom:{}", e.0.name()), + Self::Function { hugr: h } => { + let Some(t) = h.get_function_type() else { + panic!("HUGR root node isn't a valid function parent."); + }; + format!("const:function:[{}]", t) + } + Self::Tuple { vs: vals } => { + let names: Vec<_> = vals.iter().map(Self::name).collect(); + format!("const:seq:{{{}}}", names.join(", ")) + } + Self::Sum { tag, values, .. } => { + format!("const:sum:{{tag:{tag}, vals:{values:?}}}") + } + } + .into() } } impl StaticTag for Const { @@ -106,11 +247,18 @@ impl StaticTag for Const { } impl OpTrait for Const { fn description(&self) -> &str { - self.value.description() + "Constant value" } fn extension_delta(&self) -> ExtensionSet { - self.value.extension_reqs() + match self { + Self::Extension { e } => e.0.extension_reqs().clone(), + Self::Function { .. } => ExtensionSet::new(), // no extensions required to load Hugr (only to run) + Self::Tuple { vs } => ExtensionSet::union_over(vs.iter().map(Const::extension_delta)), + Self::Sum { values, .. 
} => { + ExtensionSet::union_over(values.iter().map(|x| x.extension_delta())) + } + } } fn tag(&self) -> OpTag { @@ -118,7 +266,7 @@ impl OpTrait for Const { } fn static_output(&self) -> Option { - Some(EdgeKind::Static(self.typ.clone())) + Some(EdgeKind::Static(self.const_type())) } } @@ -129,41 +277,66 @@ where T: CustomConst, { fn from(value: T) -> Self { - let typ = value.get_type(); - Const { - value: Value::custom(value), - typ, - } + Self::extension(value) } } #[cfg(test)] mod test { use super::Const; + use crate::builder::test::simple_dfg_hugr; use crate::{ builder::{BuildError, DFGBuilder, Dataflow, DataflowHugr}, extension::{ prelude::{ConstUsize, USIZE_CUSTOM_T, USIZE_T}, ExtensionId, ExtensionRegistry, PRELUDE, }, + ops::constant::CustomSerialized, std_extensions::arithmetic::float_types::{self, ConstF64, FLOAT64_TYPE}, type_row, types::type_param::TypeArg, - types::{CustomCheckFailure, CustomType, FunctionType, Type, TypeBound, TypeRow}, - values::{ - test::{serialized_float, CustomTestValue}, - CustomSerialized, Value, - }, + types::{CustomType, FunctionType, Type, TypeBound, TypeRow}, }; use cool_asserts::assert_matches; + use rstest::{fixture, rstest}; use serde_yaml::Value as YamlValue; use super::*; + #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] + /// A custom constant value used in testing + pub(crate) struct CustomTestValue(pub CustomType); + + #[typetag::serde] + impl CustomConst for CustomTestValue { + fn name(&self) -> SmolStr { + format!("CustomTestValue({:?})", self.0).into() + } + + fn extension_reqs(&self) -> ExtensionSet { + ExtensionSet::singleton(self.0.extension()) + } + + fn get_type(&self) -> Type { + self.0.clone().into() + } + } + + /// A [`CustomSerialized`] encoding a [`FLOAT64_TYPE`] float constant used in testing. 
+ pub(crate) fn serialized_float(f: f64) -> Const { + CustomSerialized::new( + FLOAT64_TYPE, + serde_yaml::Value::Number(f.into()), + float_types::EXTENSION_ID, + ) + .into() + } + fn test_registry() -> ExtensionRegistry { ExtensionRegistry::try_new([PRELUDE.to_owned(), float_types::EXTENSION.to_owned()]).unwrap() } + /// Constructs a DFG hugr defining a sum constant, and returning the loaded value. #[test] fn test_sum() -> Result<(), BuildError> { use crate::builder::Container; @@ -174,7 +347,7 @@ mod test { type_row![], TypeRow::from(vec![pred_ty.clone().into()]), ))?; - let c = b.add_constant(Const::new_sum( + let c = b.add_constant(Const::sum( 0, [ Into::::into(CustomTestValue(USIZE_CUSTOM_T)), @@ -189,7 +362,7 @@ mod test { type_row![], TypeRow::from(vec![pred_ty.clone().into()]), ))?; - let c = b.add_constant(Const::new_sum(1, [], pred_ty.clone())?); + let c = b.add_constant(Const::sum(1, [], pred_ty.clone())?); let w = b.load_const(&c); b.finish_hugr_with_outputs([w], &test_registry()).unwrap(); @@ -200,73 +373,113 @@ mod test { fn test_bad_sum() { let pred_ty = SumType::new([type_row![USIZE_T, FLOAT64_TYPE], type_row![]]); - let res = Const::new_sum(0, [Const::new_tuple(std::iter::empty())], pred_ty); - assert_matches!(res, Err(ConstTypeError::SumWrongLength)); - } + let good_sum = const_usize(); + println!("{}", serde_json::to_string_pretty(&good_sum).unwrap()); - #[test] - fn test_constant_values() { - let int_value: Value = ConstUsize::new(257).into(); - USIZE_T.check_type(&int_value).unwrap(); - FLOAT64_TYPE - .check_type(serialized_float(17.4).value()) - .unwrap(); + let good_sum = + Const::sum(0, [const_usize(), serialized_float(5.1)], pred_ty.clone()).unwrap(); + println!("{}", serde_json::to_string_pretty(&good_sum).unwrap()); + + let res = Const::sum(0, [], pred_ty.clone()); assert_matches!( - FLOAT64_TYPE.check_type(&int_value), - Err(ConstTypeError::CustomCheckFail( - CustomCheckFailure::TypeMismatch { .. 
} - )) + res, + Err(ConstTypeError::SumType(SumTypeError::WrongVariantLength { + tag: 0, + expected: 2, + found: 0 + })) ); - let tuple_ty = Type::new_tuple(vec![USIZE_T, FLOAT64_TYPE]); - let tuple_val = Value::tuple([int_value.clone(), serialized_float(5.1).value().to_owned()]); - tuple_ty.check_type(&tuple_val).unwrap(); - let tuple_val2 = Value::tuple(vec![ - serialized_float(6.1).value().to_owned(), - int_value.clone(), - ]); + + let res = Const::sum(4, [], pred_ty.clone()); assert_matches!( - tuple_ty.check_type(&tuple_val2), - Err(ConstTypeError::ValueCheckFail(ty, tv2)) => ty == tuple_ty && tv2 == tuple_val2 + res, + Err(ConstTypeError::SumType(SumTypeError::InvalidTag { + tag: 4, + num_variants: 2 + })) ); - let tuple_val3 = Value::tuple([ - int_value.clone(), - serialized_float(3.3).value().clone(), - serialized_float(2.0).value().clone(), - ]); - assert_eq!( - tuple_ty.check_type(&tuple_val3), - Err(ConstTypeError::TupleWrongLength) + + let res = Const::sum(0, [const_usize(), const_usize()], pred_ty); + assert_matches!( + res, + Err(ConstTypeError::SumType(SumTypeError::InvalidValueType { + tag: 0, + index: 1, + expected, + found, + })) if expected == FLOAT64_TYPE && found == const_usize() ); + } + + #[rstest] + fn function_value(simple_dfg_hugr: Hugr) { + let v = Const::function(simple_dfg_hugr).unwrap(); + + let correct_type = Type::new_function(FunctionType::new_endo(type_row![ + crate::extension::prelude::BOOL_T + ])); + + assert_eq!(v.const_type(), correct_type); + assert!(v.name().starts_with("const:function:")) + } + + #[fixture] + fn const_usize() -> Const { + ConstUsize::new(257).into() + } + + #[fixture] + fn const_tuple() -> Const { + Const::tuple([ConstUsize::new(257).into(), serialized_float(5.1)]) + } - let op = Const::new(int_value, USIZE_T).unwrap(); + #[rstest] + #[case(Const::unit(), Type::UNIT, "const:seq:{}")] + #[case(const_usize(), USIZE_T, "const:custom:ConstUsize(")] + #[case(serialized_float(17.4), FLOAT64_TYPE, 
"const:custom:yaml:Number(17.4)")] + #[case(const_tuple(), Type::new_tuple(type_row![USIZE_T, FLOAT64_TYPE]), "const:seq:{")] + fn const_type( + #[case] const_value: Const, + #[case] expected_type: Type, + #[case] name_prefix: &str, + ) { + assert_eq!(const_value.const_type(), expected_type); + let name = const_value.name(); + assert!( + name.starts_with(name_prefix), + "{name} does not start with {name_prefix}" + ); + } - assert_eq!(op.get_custom_value(), Some(&ConstUsize::new(257))); - let try_float: Option<&ConstF64> = op.get_custom_value(); - assert!(try_float.is_none()); - let try_usize: Option<&ConstUsize> = tuple_val.get_custom_value(); - assert!(try_usize.is_none()); + #[rstest] + fn const_custom_value(const_usize: Const, const_tuple: Const) { + assert_eq!( + const_usize.get_custom_value::(), + Some(&ConstUsize::new(257)) + ); + assert_eq!(const_usize.get_custom_value::(), None); + assert_eq!(const_tuple.get_custom_value::(), None); + assert_eq!(const_tuple.get_custom_value::(), None); } #[test] fn test_yaml_const() { - let ex_id: ExtensionId = "myrsrc".try_into().unwrap(); + let ex_id: ExtensionId = "my_extension".try_into().unwrap(); let typ_int = CustomType::new( - "mytype", + "my_type", vec![TypeArg::BoundedNat { n: 8 }], ex_id.clone(), TypeBound::Eq, ); - let val: Value = + let yaml_const: Const = CustomSerialized::new(typ_int.clone(), YamlValue::Number(6.into()), ex_id.clone()) .into(); let classic_t = Type::new_extension(typ_int.clone()); assert_matches!(classic_t.least_upper_bound(), TypeBound::Eq); - classic_t.check_type(&val).unwrap(); - - let typ_qb: Type = CustomType::new("mytype", vec![], ex_id, TypeBound::Eq).into(); - assert_matches!(typ_qb.check_type(&val), - Err(ConstTypeError::CustomCheckFail(CustomCheckFailure::TypeMismatch{expected, found})) => expected == typ_int && found == typ_qb); + assert_eq!(yaml_const.const_type(), classic_t); - assert_eq!(val, val); + let typ_qb = CustomType::new("my_type", vec![], ex_id, TypeBound::Eq); + let t 
= Type::new_extension(typ_qb.clone()); + assert_ne!(yaml_const.const_type(), t); } } diff --git a/quantinuum-hugr/src/ops/constant/custom.rs b/quantinuum-hugr/src/ops/constant/custom.rs new file mode 100644 index 000000000..77d1b6a63 --- /dev/null +++ b/quantinuum-hugr/src/ops/constant/custom.rs @@ -0,0 +1,119 @@ +//! Representation of custom constant values. +//! +//! These can be used as [`Const`] operations in HUGRs. +//! +//! [`Const`]: crate::ops::Const + +use std::any::Any; + +use downcast_rs::{impl_downcast, Downcast}; +use smol_str::SmolStr; + +use crate::extension::ExtensionSet; +use crate::macros::impl_box_clone; + +use crate::types::{CustomCheckFailure, Type}; + +/// Constant value for opaque [`CustomType`]s. +/// +/// When implementing this trait, include the `#[typetag::serde]` attribute to +/// enable serialization. +/// +/// [`CustomType`]: crate::types::CustomType +#[typetag::serde(tag = "c")] +pub trait CustomConst: + Send + Sync + std::fmt::Debug + CustomConstBoxClone + Any + Downcast +{ + /// An identifier for the constant. + fn name(&self) -> SmolStr; + + /// The extension(s) defining the custom constant + /// (a set to allow, say, a [List] of [USize]) + /// + /// [List]: crate::std_extensions::collections::LIST_TYPENAME + /// [USize]: crate::extension::prelude::USIZE_T + fn extension_reqs(&self) -> ExtensionSet; + + /// Check the value is a valid instance of the provided type. + fn validate(&self) -> Result<(), CustomCheckFailure> { + Ok(()) + } + + /// Compare two constants for equality, using downcasting and comparing the definitions. + /// + /// If the type implements `PartialEq`, use [`downcast_equal_consts`] to compare the values. 
+ fn equal_consts(&self, _other: &dyn CustomConst) -> bool { + // false unless overloaded + false + } + + /// report the type + fn get_type(&self) -> Type; +} + +/// Const equality for types that have PartialEq +pub fn downcast_equal_consts( + constant: &T, + other: &dyn CustomConst, +) -> bool { + if let Some(other) = other.as_any().downcast_ref::() { + constant == other + } else { + false + } +} + +impl_downcast!(CustomConst); +impl_box_clone!(CustomConst, CustomConstBoxClone); + +#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] +/// A constant value stored as a serialized blob that can report its own type. +pub struct CustomSerialized { + typ: Type, + value: serde_yaml::Value, + extensions: ExtensionSet, +} + +impl CustomSerialized { + /// Creates a new [`CustomSerialized`]. + pub fn new( + typ: impl Into, + value: serde_yaml::Value, + exts: impl Into, + ) -> Self { + Self { + typ: typ.into(), + value, + extensions: exts.into(), + } + } + + /// Returns the inner value. + pub fn value(&self) -> &serde_yaml::Value { + &self.value + } +} + +#[typetag::serde] +impl CustomConst for CustomSerialized { + fn name(&self) -> SmolStr { + format!("yaml:{:?}", self.value).into() + } + + fn equal_consts(&self, other: &dyn CustomConst) -> bool { + Some(self) == other.downcast_ref() + } + + fn extension_reqs(&self) -> ExtensionSet { + self.extensions.clone() + } + fn get_type(&self) -> Type { + self.typ.clone() + } +} + +impl PartialEq for dyn CustomConst { + fn eq(&self, other: &Self) -> bool { + (*self).equal_consts(other) + } +} diff --git a/quantinuum-hugr/src/std_extensions/arithmetic/conversions.rs b/quantinuum-hugr/src/std_extensions/arithmetic/conversions.rs index 46e187fd5..39fa73c5e 100644 --- a/quantinuum-hugr/src/std_extensions/arithmetic/conversions.rs +++ b/quantinuum-hugr/src/std_extensions/arithmetic/conversions.rs @@ -23,7 +23,7 @@ mod const_fold; /// The extension identifier. 
pub const EXTENSION_ID: ExtensionId = ExtensionId::new_unchecked("arithmetic.conversions"); -/// Extensiop for conversions between floats and integers. +/// Extension for conversions between floats and integers. #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, EnumIter, IntoStaticStr, EnumString)] #[allow(missing_docs, non_camel_case_types)] pub enum ConvertOpDef { @@ -43,7 +43,10 @@ impl MakeOpDef for ConvertOpDef { match self { trunc_s | trunc_u => PolyFuncType::new( vec![LOG_WIDTH_TYPE_PARAM], - FunctionType::new(type_row![FLOAT64_TYPE], vec![sum_with_error(int_tv(0))]), + FunctionType::new( + type_row![FLOAT64_TYPE], + vec![sum_with_error(int_tv(0)).into()], + ), ), convert_s | convert_u => PolyFuncType::new( diff --git a/quantinuum-hugr/src/std_extensions/arithmetic/conversions/const_fold.rs b/quantinuum-hugr/src/std_extensions/arithmetic/conversions/const_fold.rs index 14c79852c..a66c6f2cf 100644 --- a/quantinuum-hugr/src/std_extensions/arithmetic/conversions/const_fold.rs +++ b/quantinuum-hugr/src/std_extensions/arithmetic/conversions/const_fold.rs @@ -1,15 +1,16 @@ +use crate::ops::Const; use crate::{ extension::{ prelude::{sum_with_error, ConstError}, ConstFold, ConstFoldResult, OpDef, }, ops, + ops::constant::CustomConst, std_extensions::arithmetic::{ float_types::ConstF64, int_types::{get_log_width, ConstIntS, ConstIntU, INT_TYPES}, }, types::ConstTypeError, - values::{CustomConst, Value}, IncomingPort, }; @@ -35,8 +36,8 @@ fn get_input(consts: &[(IncomingPort, ops::Const)]) -> Option<&T fn fold_trunc( type_args: &[crate::types::TypeArg], - consts: &[(IncomingPort, ops::Const)], - convert: impl Fn(f64, u8) -> Result, + consts: &[(IncomingPort, Const)], + convert: impl Fn(f64, u8) -> Result, ) -> ConstFoldResult { let f: &ConstF64 = get_input(consts)?; let f = f.value(); @@ -51,24 +52,15 @@ fn fold_trunc( signal: 0, message: "Can't truncate non-finite float".to_string(), }; - let sum_val = Value::Sum { - tag: 1, - values: 
[Box::new(err_val.into())].to_vec(), - }; - - ops::Const::new(sum_val, sum_type.clone()).unwrap() + Const::sum(1, [err_val.into()], sum_type.clone()) + .unwrap_or_else(|e| panic!("Invalid computed sum, {}", e)) }; let out_const: ops::Const = if !f.is_finite() { err_value() } else { let cv = convert(f, log_width); if let Ok(cv) = cv { - let sum_val = Value::Sum { - tag: 0, - values: [Box::new(cv)].to_vec(), - }; - - ops::Const::new(sum_val, sum_type).unwrap() + Const::sum(0, [cv], sum_type).unwrap_or_else(|e| panic!("Invalid computed sum, {}", e)) } else { err_value() } diff --git a/quantinuum-hugr/src/std_extensions/arithmetic/float_types.rs b/quantinuum-hugr/src/std_extensions/arithmetic/float_types.rs index 3bc15dde9..6527a2145 100644 --- a/quantinuum-hugr/src/std_extensions/arithmetic/float_types.rs +++ b/quantinuum-hugr/src/std_extensions/arithmetic/float_types.rs @@ -4,8 +4,8 @@ use smol_str::SmolStr; use crate::{ extension::{ExtensionId, ExtensionSet}, + ops::constant::CustomConst, types::{CustomType, Type, TypeBound}, - values::CustomConst, Extension, }; use lazy_static::lazy_static; @@ -13,7 +13,7 @@ use lazy_static::lazy_static; /// The extension identifier. pub const EXTENSION_ID: ExtensionId = ExtensionId::new_unchecked("arithmetic.float.types"); -/// Identfier for the 64-bit IEEE 754-2019 floating-point type. +/// Identifier for the 64-bit IEEE 754-2019 floating-point type. 
const FLOAT_TYPE_ID: SmolStr = SmolStr::new_inline("float64"); /// 64-bit IEEE 754-2019 floating-point type (as [CustomType]) @@ -61,7 +61,7 @@ impl CustomConst for ConstF64 { } fn equal_consts(&self, other: &dyn CustomConst) -> bool { - crate::values::downcast_equal_consts(self, other) + crate::ops::constant::downcast_equal_consts(self, other) } fn extension_reqs(&self) -> ExtensionSet { diff --git a/quantinuum-hugr/src/std_extensions/arithmetic/int_ops.rs b/quantinuum-hugr/src/std_extensions/arithmetic/int_ops.rs index e1cea6c49..97c61ed2a 100644 --- a/quantinuum-hugr/src/std_extensions/arithmetic/int_ops.rs +++ b/quantinuum-hugr/src/std_extensions/arithmetic/int_ops.rs @@ -107,7 +107,7 @@ impl MakeOpDef for IntOpDef { ) .into(), inarrow_s | inarrow_u => CustomValidator::new_with_validator( - int_polytype(2, vec![int_tv(0)], vec![sum_with_error(int_tv(1))]), + int_polytype(2, vec![int_tv(0)], vec![sum_with_error(int_tv(1)).into()]), IOValidator { f_ge_s: true }, ) .into(), @@ -126,7 +126,7 @@ impl MakeOpDef for IntOpDef { int_polytype( 2, intpair.clone(), - vec![sum_with_error(Type::new_tuple(intpair))], + vec![sum_with_error(Type::new_tuple(intpair)).into()], ) } .into(), @@ -139,13 +139,13 @@ impl MakeOpDef for IntOpDef { idiv_checked_u | idiv_checked_s => int_polytype( 2, vec![int_tv(0), int_tv(1)], - vec![sum_with_error(int_tv(0))], + vec![sum_with_error(int_tv(0)).into()], ) .into(), imod_checked_u | imod_checked_s => int_polytype( 2, vec![int_tv(0), int_tv(1).clone()], - vec![sum_with_error(int_tv(1))], + vec![sum_with_error(int_tv(1)).into()], ) .into(), imod_u | imod_s => { @@ -372,7 +372,10 @@ mod test { .to_extension_op() .unwrap() .signature(), - FunctionType::new(vec![int_type(ta(3))], vec![sum_with_error(int_type(ta(3)))],) + FunctionType::new( + vec![int_type(ta(3))], + vec![sum_with_error(int_type(ta(3))).into()], + ) ); assert!( IntOpDef::iwiden_u @@ -388,7 +391,10 @@ mod test { .to_extension_op() .unwrap() .signature(), - 
FunctionType::new(vec![int_type(ta(2))], vec![sum_with_error(int_type(ta(1)))],) + FunctionType::new( + vec![int_type(ta(2))], + vec![sum_with_error(int_type(ta(1))).into()], + ) ); assert!(IntOpDef::inarrow_u diff --git a/quantinuum-hugr/src/std_extensions/arithmetic/int_types.rs b/quantinuum-hugr/src/std_extensions/arithmetic/int_types.rs index d7501f5a3..7c7724cc8 100644 --- a/quantinuum-hugr/src/std_extensions/arithmetic/int_types.rs +++ b/quantinuum-hugr/src/std_extensions/arithmetic/int_types.rs @@ -6,11 +6,11 @@ use smol_str::SmolStr; use crate::{ extension::{ExtensionId, ExtensionSet}, + ops::constant::CustomConst, types::{ type_param::{TypeArg, TypeArgError, TypeParam}, ConstTypeError, CustomType, Type, TypeBound, }, - values::CustomConst, Extension, }; use lazy_static::lazy_static; @@ -150,7 +150,7 @@ impl CustomConst for ConstIntU { format!("u{}({})", self.log_width, self.value).into() } fn equal_consts(&self, other: &dyn CustomConst) -> bool { - crate::values::downcast_equal_consts(self, other) + crate::ops::constant::downcast_equal_consts(self, other) } fn extension_reqs(&self) -> ExtensionSet { @@ -168,7 +168,7 @@ impl CustomConst for ConstIntS { format!("i{}({})", self.log_width, self.value).into() } fn equal_consts(&self, other: &dyn CustomConst) -> bool { - crate::values::downcast_equal_consts(self, other) + crate::ops::constant::downcast_equal_consts(self, other) } fn extension_reqs(&self) -> ExtensionSet { diff --git a/quantinuum-hugr/src/std_extensions/collections.rs b/quantinuum-hugr/src/std_extensions/collections.rs index 2c078a463..6c829cbfb 100644 --- a/quantinuum-hugr/src/std_extensions/collections.rs +++ b/quantinuum-hugr/src/std_extensions/collections.rs @@ -5,6 +5,7 @@ use lazy_static::lazy_static; use serde::{Deserialize, Serialize}; use smol_str::SmolStr; +use crate::ops::{Const, OpTrait}; use crate::{ algorithm::const_fold::sorted_consts, extension::{ @@ -12,12 +13,12 @@ use crate::{ ConstFold, ExtensionId, ExtensionRegistry, 
ExtensionSet, SignatureError, TypeDef, TypeDefBound, }, + ops::constant::CustomConst, ops::{self, custom::ExtensionOp, OpName}, types::{ type_param::{TypeArg, TypeParam}, CustomCheckFailure, CustomType, FunctionType, PolyFuncType, Type, TypeBound, }, - values::{CustomConst, Value}, Extension, }; @@ -32,12 +33,12 @@ pub const EXTENSION_NAME: ExtensionId = ExtensionId::new_unchecked("Collections" #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] /// Dynamically sized list of values, all of the same type. -pub struct ListValue(Vec, Type); +pub struct ListValue(Vec, Type); impl ListValue { /// Create a new [CustomConst] for a list of values of type `typ`. /// That all values ore of type `typ` is not checked here. - pub fn new(typ: Type, contents: impl IntoIterator) -> Self { + pub fn new(typ: Type, contents: impl IntoIterator) -> Self { Self(contents.into_iter().collect_vec(), typ) } @@ -76,23 +77,26 @@ impl CustomConst for ListValue { .map_err(|_| error())?; // constant can only hold classic type. 
- let [TypeArg::Type { ty: t }] = typ.args() else { + let [TypeArg::Type { ty }] = typ.args() else { return Err(error()); }; // check all values are instances of the element type - for val in &self.0 { - t.check_type(val).map_err(|_| error())?; + for v in &self.0 { + if v.const_type() != *ty { + return Err(error()); + } } + Ok(()) } fn equal_consts(&self, other: &dyn CustomConst) -> bool { - crate::values::downcast_equal_consts(self, other) + crate::ops::constant::downcast_equal_consts(self, other) } fn extension_reqs(&self) -> ExtensionSet { - ExtensionSet::union_over(self.0.iter().map(Value::extension_reqs)) + ExtensionSet::union_over(self.0.iter().map(Const::extension_delta)) .union(EXTENSION_NAME.into()) } } @@ -102,17 +106,13 @@ struct PopFold; impl ConstFold for PopFold { fn fold( &self, - type_args: &[TypeArg], + _type_args: &[TypeArg], consts: &[(crate::IncomingPort, ops::Const)], ) -> crate::extension::ConstFoldResult { - let [TypeArg::Type { ty }] = type_args else { - return None; - }; let [list]: [&ops::Const; 1] = sorted_consts(consts).try_into().ok()?; let list: &ListValue = list.get_custom_value().expect("Should be list value."); let mut list = list.clone(); let elem = list.0.pop()?; // empty list fails to evaluate "pop" - let elem = ops::Const::new(elem, ty.clone()).unwrap(); Some(vec![(0.into(), list.into()), (1.into(), elem)]) } @@ -129,7 +129,7 @@ impl ConstFold for PushFold { let [list, elem]: [&ops::Const; 2] = sorted_consts(consts).try_into().ok()?; let list: &ListValue = list.get_custom_value().expect("Should be list value."); let mut list = list.clone(); - list.0.push(elem.value().clone()); + list.0.push(elem.clone()); Some(vec![(0.into(), list.into())]) } diff --git a/quantinuum-hugr/src/std_extensions/logic.rs b/quantinuum-hugr/src/std_extensions/logic.rs index bf0d4c86b..fe1f3177c 100644 --- a/quantinuum-hugr/src/std_extensions/logic.rs +++ b/quantinuum-hugr/src/std_extensions/logic.rs @@ -151,10 +151,10 @@ fn extension() -> Extension { 
NotOp.add_to_extension(&mut extension).unwrap(); extension - .add_value(FALSE_NAME, ops::Const::unit_sum(0, 2)) + .add_value(FALSE_NAME, ops::Const::false_val()) .unwrap(); extension - .add_value(TRUE_NAME, ops::Const::unit_sum(1, 2)) + .add_value(TRUE_NAME, ops::Const::true_val()) .unwrap(); extension } @@ -253,7 +253,7 @@ pub(crate) mod test { for v in [false_val, true_val] { let simpl = v.typed_value().const_type(); - assert_eq!(simpl, &BOOL_T); + assert_eq!(simpl, BOOL_T); } } diff --git a/quantinuum-hugr/src/types.rs b/quantinuum-hugr/src/types.rs index ef3e48aeb..08c91159e 100644 --- a/quantinuum-hugr/src/types.rs +++ b/quantinuum-hugr/src/types.rs @@ -8,7 +8,9 @@ mod signature; pub mod type_param; pub mod type_row; -pub use check::{ConstTypeError, CustomCheckFailure}; +pub use crate::ops::constant::{ConstTypeError, CustomCheckFailure}; +use crate::utils::display_list_with_separator; +pub use check::SumTypeError; pub use custom::CustomType; pub use poly_func::PolyFuncType; pub use signature::FunctionType; @@ -17,7 +19,7 @@ pub use type_param::TypeArg; pub use type_row::TypeRow; use itertools::FoldWhile::{Continue, Done}; -use itertools::Itertools; +use itertools::{repeat_n, Itertools}; use serde::{Deserialize, Serialize}; use crate::extension::{ExtensionRegistry, SignatureError}; @@ -99,20 +101,35 @@ pub(crate) fn least_upper_bound(mut tags: impl Iterator) -> Ty .into_inner() } -#[derive(Clone, PartialEq, Debug, Eq, derive_more::Display, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Debug, Eq, Serialize, Deserialize)] #[serde(tag = "s")] /// Representation of a Sum type. /// Either store the types of the variants, or in the special (but common) case /// of a UnitSum (sum over empty tuples), store only the size of the Sum. pub enum SumType { + /// Special case of a Sum over unit types. #[allow(missing_docs)] - #[display(fmt = "UnitSum({})", "size")] Unit { size: u8 }, + /// General case of a Sum type. 
#[allow(missing_docs)] - #[display(fmt = "General({:?})", "rows")] General { rows: Vec }, } +impl std::fmt::Display for SumType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if self.num_variants() == 0 { + return write!(f, "⊥"); + } + + match self { + SumType::Unit { size } => { + display_list_with_separator(repeat_n("[]", *size as usize), f, "+") + } + SumType::General { rows } => display_list_with_separator(rows.iter(), f, "+"), + } + } +} + impl SumType { /// Initialize a new sum type. pub fn new(variants: impl IntoIterator) -> Self @@ -137,6 +154,14 @@ impl SumType { _ => None, } } + + /// Returns the number of variants in the sum type. + pub fn num_variants(&self) -> usize { + match self { + SumType::Unit { size } => *size as usize, + SumType::General { rows } => rows.len(), + } + } } impl From for Type { @@ -166,10 +191,7 @@ pub enum TypeEnum { #[display(fmt = "Variable({})", _0)] Variable(usize, TypeBound), #[allow(missing_docs)] - #[display(fmt = "Tuple({})", "_0")] - Tuple(TypeRow), - #[allow(missing_docs)] - #[display(fmt = "Sum({})", "_0")] + #[display(fmt = "{}", "_0")] Sum(SumType), } impl TypeEnum { @@ -186,7 +208,6 @@ impl TypeEnum { .flat_map(TypeRow::iter) .map(Type::least_upper_bound), ), - TypeEnum::Tuple(ts) => least_upper_bound(ts.iter().map(Type::least_upper_bound)), } } } @@ -233,7 +254,7 @@ impl Type { /// An empty `TypeRow`. Provided here for convenience pub const EMPTY_TYPEROW: TypeRow = type_row![]; /// Unit type (empty tuple). - pub const UNIT: Self = Self(TypeEnum::Tuple(Self::EMPTY_TYPEROW), TypeBound::Eq); + pub const UNIT: Self = Self(TypeEnum::Sum(SumType::Unit { size: 1 }), TypeBound::Eq); const EMPTY_TYPEROW_REF: &'static TypeRow = &Self::EMPTY_TYPEROW; @@ -245,7 +266,11 @@ impl Type { /// Initialize a new tuple type by providing the elements. 
#[inline(always)] pub fn new_tuple(types: impl Into) -> Self { - Self::new(TypeEnum::Tuple(types.into())) + let row = types.into(); + match row.len() { + 0 => Self::UNIT, + _ => Self::new_sum([row]), + } } /// Initialize a new sum type by providing the possible variant types. @@ -317,7 +342,6 @@ impl Type { // There is no need to check the components against the bound, // that is guaranteed by construction (even for deserialization) match &self.0 { - TypeEnum::Tuple(row) => validate_each(extension_registry, var_decls, row.iter()), TypeEnum::Sum(SumType::General { rows }) => validate_each( extension_registry, var_decls, @@ -337,7 +361,6 @@ impl Type { TypeEnum::Variable(idx, bound) => t.apply_typevar(*idx, *bound), TypeEnum::Extension(cty) => Type::new_extension(cty.substitute(t)), TypeEnum::Function(bf) => Type::new_function(bf.substitute(t)), - TypeEnum::Tuple(elems) => Type::new_tuple(subst_row(elems, t)), TypeEnum::Sum(SumType::General { rows }) => { Type::new_sum(rows.iter().map(|x| subst_row(x, t))) } @@ -421,8 +444,8 @@ pub(crate) mod test { Type::new_alias(AliasDecl::new("my_alias", TypeBound::Eq)), ]); assert_eq!( - t.to_string(), - "Tuple([usize, Function(forall . [[]][]), my_custom, Alias(my_alias)])".to_string() + &t.to_string(), + "[usize, Function(forall . [[]][]), my_custom, Alias(my_alias)]" ); } diff --git a/quantinuum-hugr/src/types/check.rs b/quantinuum-hugr/src/types/check.rs index 2a97f61d7..d2da2a372 100644 --- a/quantinuum-hugr/src/types/check.rs +++ b/quantinuum-hugr/src/types/check.rs @@ -2,134 +2,82 @@ use thiserror::Error; -use crate::{ - ops::{FuncDecl, FuncDefn, OpType}, - values::Value, - Hugr, HugrView, -}; - -use super::{CustomType, PolyFuncType, Type, TypeEnum}; - -/// Struct for custom type check fails. 
-#[derive(Clone, Debug, PartialEq, Eq, Error)] -pub enum CustomCheckFailure { - /// The value had a specific type that was not what was expected - #[error("Expected type: {expected} but value was of type: {found}")] - TypeMismatch { - /// The expected custom type. - expected: CustomType, - /// The custom type found when checking. - found: Type, - }, - /// Any other message - #[error("{0}")] - Message(String), -} +use super::Type; +use crate::ops::Const; /// Errors that arise from typechecking constants #[derive(Clone, Debug, PartialEq, Error)] -pub enum ConstTypeError { - /// Found a Var type constructor when we're checking a const val - #[error("Type of a const value can't be Var")] - ConstCantBeVar, - /// Type we were checking against was an Alias. - /// This should have been resolved to an actual type. - #[error("Type of a const value can't be an Alias {0}")] - NoAliases(String), - /// The length of the tuple value doesn't match the length of the tuple type - #[error("Tuple of wrong length")] - TupleWrongLength, +pub enum SumTypeError { + /// The type of the variant doesn't match the type of the value. + #[error("Expected type {expected} for element {index} of variant #{tag}, but found {}", .found.const_type())] + InvalidValueType { + /// Tag of the variant. + tag: usize, + /// The element in the tuple that was wrong. + index: usize, + /// The expected type. + expected: Type, + /// The value that was found. + found: Const, + }, /// The length of the sum value doesn't match the length of the variant of - /// the sum type - #[error("Sum variant of wrong length")] - SumWrongLength, - /// Tag for a sum value exceeded the number of variants - #[error("Tag of Sum value is invalid")] - InvalidSumTag, - /// A mismatch between the type expected and the value. - #[error("Value {1:?} does not match expected type {0}")] - ValueCheckFail(Type, Value), - /// Error when checking a custom value. 
- #[error("Error when checking custom type: {0:?}")] - CustomCheckFail(#[from] CustomCheckFailure), -} - -fn type_sig_equal(v: &Hugr, t: &PolyFuncType) -> bool { - // exact signature equality, in future this may need to be - // relaxed to be compatibility checks between the signatures. - let root_op = v.get_optype(v.root()); - if let OpType::FuncDecl(FuncDecl { signature, .. }) - | OpType::FuncDefn(FuncDefn { signature, .. }) = root_op - { - signature == t - } else { - v.get_function_type() - .is_some_and(|ft| &PolyFuncType::from(ft) == t) - } + /// the sum type. + #[error("Sum variant #{tag} should have length {expected}, but has length {found}")] + WrongVariantLength { + /// The variant index. + tag: usize, + /// The expected length of the sum variant. + expected: usize, + /// The length of the sum variant found. + found: usize, + }, + /// Tag for a sum value exceeded the number of variants. + #[error("Invalid tag {tag} for sum type with {num_variants} variants")] + InvalidTag { + /// The tag of the sum value. + tag: usize, + /// The number of variants in the sum type. + num_variants: usize, + }, } impl super::SumType { - /// Check that a [`Value`] is a valid instance of this [`SumType`]. + /// Check if a sum variant is a valid instance of this [`SumType`]. + /// + /// Since [`Const::Sum`] variants always contain a tuple of values, + /// `val` must be a slice of [`Const`]s. /// /// [`SumType`]: crate::types::SumType /// /// # Errors /// /// This function will return an error if there is a type check error. - pub fn check_type(&self, tag: usize, val: &[Box]) -> Result<(), ConstTypeError> { - if self + pub fn check_type(&self, tag: usize, val: &[Const]) -> Result<(), SumTypeError> { + let variant = self .get_variant(tag) - .ok_or(ConstTypeError::InvalidSumTag)? - .len() - != val.len() - { - Err(ConstTypeError::SumWrongLength)? 
- } + .ok_or_else(|| SumTypeError::InvalidTag { + tag, + num_variants: self.num_variants(), + })?; - for (t, v) in itertools::zip_eq( - self.get_variant(tag) - .ok_or(ConstTypeError::InvalidSumTag)? - .iter(), - val.iter(), - ) { - t.check_type(v)?; + if variant.len() != val.len() { + Err(SumTypeError::WrongVariantLength { + tag, + expected: variant.len(), + found: val.len(), + })?; } - Ok(()) - } -} -impl Type { - /// Check that a [`Value`] is a valid instance of this [`Type`]. - /// - /// # Errors - /// - /// This function will return an error if there is a type check error. - pub fn check_type(&self, val: &Value) -> Result<(), ConstTypeError> { - match (&self.0, val) { - (TypeEnum::Extension(expected), Value::Extension { c: (e_val,) }) => { - let found = e_val.get_type(); - if found == expected.clone().into() { - Ok(e_val.validate()?) - } else { - Err(CustomCheckFailure::TypeMismatch { - expected: expected.clone(), - found, - } - .into()) - } + for (index, (t, v)) in itertools::zip_eq(variant.iter(), val.iter()).enumerate() { + if v.const_type() != *t { + Err(SumTypeError::InvalidValueType { + tag, + index, + expected: t.clone(), + found: v.clone(), + })?; } - (TypeEnum::Function(t), Value::Function { hugr: v }) if type_sig_equal(v, t) => Ok(()), - (TypeEnum::Tuple(t), Value::Tuple { vs: t_v }) => { - if t.len() != t_v.len() { - return Err(ConstTypeError::TupleWrongLength); - } - t_v.iter() - .zip(t.iter()) - .try_for_each(|(elem, ty)| ty.check_type(elem)) - .map_err(|_| ConstTypeError::ValueCheckFail(self.clone(), val.clone())) - } - (TypeEnum::Sum(sum), Value::Sum { tag, values }) => sum.check_type(*tag, values), - _ => Err(ConstTypeError::ValueCheckFail(self.clone(), val.clone())), } + Ok(()) } } diff --git a/quantinuum-hugr/src/types/poly_func.rs b/quantinuum-hugr/src/types/poly_func.rs index 98d2ac1ed..6916924b1 100644 --- a/quantinuum-hugr/src/types/poly_func.rs +++ b/quantinuum-hugr/src/types/poly_func.rs @@ -12,7 +12,7 @@ use super::{FunctionType, 
Substitution}; /// A polymorphic function type, e.g. of a [Graph], or perhaps an [OpDef]. /// (Nodes/operations in the Hugr are not polymorphic.) /// -/// [Graph]: crate::values::Value::Function +/// [Graph]: crate::ops::constant::Const::Function /// [OpDef]: crate::extension::OpDef #[derive( Clone, PartialEq, Debug, Default, Eq, derive_more::Display, serde::Serialize, serde::Deserialize, diff --git a/quantinuum-hugr/src/types/serialize.rs b/quantinuum-hugr/src/types/serialize.rs index 77859fc40..f2299ffb7 100644 --- a/quantinuum-hugr/src/types/serialize.rs +++ b/quantinuum-hugr/src/types/serialize.rs @@ -1,4 +1,4 @@ -use super::{PolyFuncType, SumType, Type, TypeArg, TypeBound, TypeEnum, TypeRow}; +use super::{PolyFuncType, SumType, Type, TypeArg, TypeBound, TypeEnum}; use super::custom::CustomType; @@ -11,7 +11,6 @@ pub(super) enum SerSimpleType { Q, I, G(Box), - Tuple { inner: TypeRow }, Sum(SumType), Array { inner: Box, len: u64 }, Opaque(CustomType), @@ -35,7 +34,6 @@ impl From for SerSimpleType { TypeEnum::Function(sig) => SerSimpleType::G(sig), TypeEnum::Variable(i, b) => SerSimpleType::V { i, b }, TypeEnum::Sum(sum) => SerSimpleType::Sum(sum), - TypeEnum::Tuple(inner) => SerSimpleType::Tuple { inner }, } } } @@ -46,7 +44,6 @@ impl From for Type { SerSimpleType::Q => QB_T, SerSimpleType::I => USIZE_T, SerSimpleType::G(sig) => Type::new_function(*sig), - SerSimpleType::Tuple { inner } => Type::new_tuple(inner), SerSimpleType::Sum(sum) => sum.into(), SerSimpleType::Array { inner, len } => { array_type(TypeArg::BoundedNat { n: len }, (*inner).into()) diff --git a/quantinuum-hugr/src/types/type_row.rs b/quantinuum-hugr/src/types/type_row.rs index a4805b8ed..1f0d379cb 100644 --- a/quantinuum-hugr/src/types/type_row.rs +++ b/quantinuum-hugr/src/types/type_row.rs @@ -55,6 +55,11 @@ impl TypeRow { self.iter().chain(rest).cloned().collect_vec().into() } + /// Returns a reference to the types in the row. 
+ pub fn as_slice(&self) -> &[Type] { + &self.types + } + delegate! { to self.types { /// Iterator over the types in the row. @@ -104,7 +109,7 @@ impl Deref for TypeRow { type Target = [Type]; fn deref(&self) -> &Self::Target { - &self.types + self.as_slice() } } diff --git a/quantinuum-hugr/src/utils.rs b/quantinuum-hugr/src/utils.rs index bf7225e3e..b8230de33 100644 --- a/quantinuum-hugr/src/utils.rs +++ b/quantinuum-hugr/src/utils.rs @@ -5,14 +5,28 @@ use itertools::Itertools; /// Write a comma separated list of of some types. /// Like debug_list, but using the Display instance rather than Debug, /// and not adding surrounding square brackets. -pub fn display_list(ts: &[T], f: &mut fmt::Formatter) -> fmt::Result +pub fn display_list(ts: impl IntoIterator, f: &mut fmt::Formatter) -> fmt::Result +where + T: Display, +{ + display_list_with_separator(ts, f, ", ") +} + +/// Write a separated list of of some types, using a custom separator. +/// Like debug_list, but using the Display instance rather than Debug, +/// and not adding surrounding square brackets. +pub fn display_list_with_separator( + ts: impl IntoIterator, + f: &mut fmt::Formatter, + sep: &str, +) -> fmt::Result where T: Display, { let mut first = true; - for t in ts.iter() { + for t in ts.into_iter() { if !first { - f.write_str(", ")?; + f.write_str(sep)?; } t.fmt(f)?; if first { diff --git a/quantinuum-hugr/src/values.rs b/quantinuum-hugr/src/values.rs deleted file mode 100644 index 8a8f2e55f..000000000 --- a/quantinuum-hugr/src/values.rs +++ /dev/null @@ -1,295 +0,0 @@ -//! Representation of values (shared between [Const] and in future [TypeArg]) -//! -//! [Const]: crate::ops::Const -//! 
[TypeArg]: crate::types::type_param::TypeArg - -use std::any::Any; - -use downcast_rs::{impl_downcast, Downcast}; -use itertools::Itertools; -use smol_str::SmolStr; - -use crate::extension::ExtensionSet; -use crate::macros::impl_box_clone; - -use crate::{Hugr, HugrView}; - -use crate::types::{CustomCheckFailure, Type}; - -/// A value that can be stored as a static constant. Representing core types and -/// extension types. -#[derive(Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize)] -#[serde(tag = "v")] -pub enum Value { - /// An extension constant value. - // Note: the extra level of tupling is to avoid https://github.com/rust-lang/rust/issues/78808 - Extension { - #[allow(missing_docs)] - c: (Box,), - }, - /// A higher-order function value. - // TODO use a root parametrised hugr, e.g. Hugr. - Function { - #[allow(missing_docs)] - hugr: Box, - }, - /// A tuple - Tuple { - #[allow(missing_docs)] - vs: Vec, - }, - /// A Sum variant -- for any Sum type where this value meets - /// the type of the variant indicated by the tag - Sum { - /// The tag index of the variant - tag: usize, - /// The value of the variant - values: Vec>, - }, -} - -impl Value { - /// Returns the name of this [`Value`]. - pub fn name(&self) -> String { - match self { - Value::Extension { c: e } => format!("const:custom:{}", e.0.name()), - Value::Function { hugr: h } => { - let Some(t) = h.get_function_type() else { - panic!("HUGR root node isn't a valid function parent."); - }; - format!("const:function:[{}]", t) - } - Value::Tuple { vs: vals } => { - let names: Vec<_> = vals.iter().map(Value::name).collect(); - format!("const:seq:{{{}}}", names.join(", ")) - } - Value::Sum { tag, values } => { - format!("const:sum:{{tag:{tag}, vals:{values:?}}}") - } - } - } - - /// Description of the value. - pub fn description(&self) -> &str { - "Constant value" - } - - /// Constant unit type (empty Tuple). 
- pub const fn unit() -> Self { - Self::Tuple { vs: vec![] } - } - - /// Constant Sum of a unit value, used to control branches. - pub fn unit_sum(tag: usize) -> Self { - Self::sum(tag, []) - } - - /// Constant Sum with just one variant of unit type - pub fn unary_unit_sum() -> Self { - Self::unit_sum(0) - } - - /// Tuple of values. - pub fn tuple(items: impl IntoIterator) -> Self { - Self::Tuple { - vs: items.into_iter().collect(), - } - } - - /// Sum value (could be of any compatible type - i.e. a Sum type where the - /// `tag`th row is equal in length and compatible elementwise with `values`) - pub fn sum(tag: usize, values: impl IntoIterator) -> Self { - Self::Sum { - tag, - values: values.into_iter().map(Box::new).collect_vec(), - } - } - - /// New custom value (of type that implements [`CustomConst`]). - pub fn custom(c: C) -> Self { - Self::Extension { c: (Box::new(c),) } - } - - /// For a Const holding a CustomConst, extract the CustomConst by downcasting. - pub fn get_custom_value(&self) -> Option<&T> { - if let Value::Extension { c: (custom,) } = self { - custom.downcast_ref() - } else { - None - } - } - - /// The Extensions that must be supported to handle the value at runtime - pub fn extension_reqs(&self) -> ExtensionSet { - match self { - Value::Extension { c } => c.0.extension_reqs().clone(), - Value::Function { .. } => ExtensionSet::new(), // no extensions reqd to load Hugr (only to run) - Value::Tuple { vs } => ExtensionSet::union_over(vs.iter().map(Value::extension_reqs)), - Value::Sum { values, .. } => { - ExtensionSet::union_over(values.iter().map(|x| x.extension_reqs())) - } - } - } -} - -impl From for Value { - fn from(v: T) -> Self { - Self::custom(v) - } -} - -/// Constant value for opaque `[CustomType]`s. -/// -/// When implementing this trait, include the `#[typetag::serde]` attribute to -/// enable serialization. 
-/// -/// [CustomType]: crate::types::CustomType -#[typetag::serde(tag = "c")] -pub trait CustomConst: - Send + Sync + std::fmt::Debug + CustomConstBoxClone + Any + Downcast -{ - /// An identifier for the constant. - fn name(&self) -> SmolStr; - - /// The extension(s) defining the custom value - /// (a set to allow, say, a [List] of [USize]) - /// - /// [List]: crate::std_extensions::collections::LIST_TYPENAME - /// [USize]: crate::extension::prelude::USIZE_T - fn extension_reqs(&self) -> ExtensionSet; - - /// Check the value is a valid instance of the provided type. - fn validate(&self) -> Result<(), CustomCheckFailure> { - Ok(()) - } - - /// Compare two constants for equality, using downcasting and comparing the definitions. - // Can't derive PartialEq for trait objects - fn equal_consts(&self, _other: &dyn CustomConst) -> bool { - // false unless overloaded - false - } - - /// report the type - fn get_type(&self) -> Type; -} - -/// Const equality for types that have PartialEq -pub fn downcast_equal_consts( - value: &T, - other: &dyn CustomConst, -) -> bool { - if let Some(other) = other.as_any().downcast_ref::() { - value == other - } else { - false - } -} - -impl_downcast!(CustomConst); -impl_box_clone!(CustomConst, CustomConstBoxClone); - -#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] -/// A value stored as a serialized blob that can report its own type. -pub struct CustomSerialized { - typ: Type, - value: serde_yaml::Value, - extensions: ExtensionSet, -} - -impl CustomSerialized { - /// Creates a new [`CustomSerialized`]. - pub fn new( - typ: impl Into, - value: serde_yaml::Value, - exts: impl Into, - ) -> Self { - Self { - typ: typ.into(), - value, - extensions: exts.into(), - } - } - - /// Returns the inner value. 
- pub fn value(&self) -> &serde_yaml::Value { - &self.value - } -} - -#[typetag::serde] -impl CustomConst for CustomSerialized { - fn name(&self) -> SmolStr { - format!("yaml:{:?}", self.value).into() - } - - fn equal_consts(&self, other: &dyn CustomConst) -> bool { - Some(self) == other.downcast_ref() - } - - fn extension_reqs(&self) -> ExtensionSet { - self.extensions.clone() - } - fn get_type(&self) -> Type { - self.typ.clone() - } -} - -impl PartialEq for dyn CustomConst { - fn eq(&self, other: &Self) -> bool { - (*self).equal_consts(other) - } -} - -#[cfg(test)] -pub(crate) mod test { - use rstest::rstest; - - use super::*; - use crate::builder::test::simple_dfg_hugr; - use crate::ops::Const; - use crate::std_extensions::arithmetic::float_types::{self, FLOAT64_TYPE}; - use crate::type_row; - use crate::types::{CustomType, FunctionType, Type}; - - #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] - - /// A custom constant value used in testing - pub(crate) struct CustomTestValue(pub CustomType); - #[typetag::serde] - impl CustomConst for CustomTestValue { - fn name(&self) -> SmolStr { - format!("CustomTestValue({:?})", self.0).into() - } - - fn extension_reqs(&self) -> ExtensionSet { - ExtensionSet::singleton(self.0.extension()) - } - - fn get_type(&self) -> Type { - self.0.clone().into() - } - } - - pub(crate) fn serialized_float(f: f64) -> Const { - CustomSerialized { - typ: FLOAT64_TYPE, - value: serde_yaml::Value::Number(f.into()), - extensions: float_types::EXTENSION_ID.into(), - } - .into() - } - - #[rstest] - fn function_value(simple_dfg_hugr: Hugr) { - let v = Value::Function { - hugr: Box::new(simple_dfg_hugr), - }; - - let correct_type = Type::new_function(FunctionType::new_endo(type_row![ - crate::extension::prelude::BOOL_T - ])); - - assert!(correct_type.check_type(&v).is_ok()); - } -} diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/scripts/generate_schema.py 
b/scripts/generate_schema.py new file mode 100644 index 000000000..6f891716d --- /dev/null +++ b/scripts/generate_schema.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +"""Dumps the json schema for `quantinuum_hugr.serialization.SerialHugr` to a file. + +The schema is written to a file named `hugr_schema_v#.json` in the specified output directory. +If no output directory is specified, the schema is written to the current working directory. + +usage: python generate_schema.py [] +""" + +import json +import sys +from pathlib import Path + +from pydantic import TypeAdapter + +from quantinuum_hugr.serialization import SerialHugr + +if __name__ == "__main__": + if len(sys.argv) == 1: + out_dir = Path.cwd() + elif len(sys.argv) == 2: + out_dir = Path(sys.argv[1]) + else: + print(__doc__) + sys.exit(1) + + version = SerialHugr.get_version() + filename = f"hugr_schema_{version}.json" + path = out_dir / filename + + print(f"Writing schema to {path}") + + with path.open("w") as f: + json.dump(TypeAdapter(SerialHugr).json_schema(), f, indent=4) diff --git a/specification/hugr.md b/specification/hugr.md index dc57299af..46ed8fe2b 100644 --- a/specification/hugr.md +++ b/specification/hugr.md @@ -794,8 +794,7 @@ The Hugr defines a number of type constructors, that can be instantiated into ty Extensions ::= (Extension)* -- a set, not a list -Type ::= Tuple(#) -- fixed-arity, heterogeneous components - | Sum([#]) -- disjoint union of rows of other types, tagged by unsigned int +Type ::= Sum([#]) -- disjoint union of rows of other types, tagged by unsigned int | Opaque(Name, [TypeArg]) -- a (instantiation of a) custom type defined by an extension | Function(TypeParams, #, #, Extensions) -- polymorphic with type parameters, -- function arguments + results, and delta (see below) @@ -804,11 +803,13 @@ Type ::= Tuple(#) -- fixed-arity, heterogeneous components (We write `[Foo]` to indicate a list of Foo's.) +Tuples are represented as Sum types with a single variant. 
The type `(int<1>,int<2>)` is represented as `Sum([#(int<1>,int<2>)])`. + The majority of types will be Opaque ones defined by extensions including the [standard library](#standard-library). However a number of types can be constructed using only the core type constructors: for example the empty tuple type, aka `unit`, with exactly one instance (so 0 bits of data); the empty sum, with no instances; the empty Function type (taking no arguments and producing no results - `void -> void`); and compositions thereof. Types representing functions are generally `CopyableType`, but not `EqType`. (It is undecidable whether two functions produce the same result for all possible inputs, or similarly whether one computation graph can be rewritten into another by semantic-preserving rewrites). -Tuples and Sums are `CopyableType` (respectively, `EqType`) if all their components are; they are also fixed-size if their components are. +Sums are `CopyableType` (respectively, `EqType`) if all their components are; they are also fixed-size if their components are. 
### Polymorphism diff --git a/specification/schema/hugr_schema_v0.json b/specification/schema/hugr_schema_v0.json index db5ad9db3..6092f5636 100644 --- a/specification/schema/hugr_schema_v0.json +++ b/specification/schema/hugr_schema_v0.json @@ -1622,4 +1622,4 @@ ], "title": "Hugr", "type": "object" -} \ No newline at end of file +} diff --git a/specification/schema/hugr_schema_v1.json b/specification/schema/hugr_schema_v1.json index 60f611f27..1ca87f089 100644 --- a/specification/schema/hugr_schema_v1.json +++ b/specification/schema/hugr_schema_v1.json @@ -74,7 +74,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "CFG", @@ -99,7 +99,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Call", @@ -117,14 +117,14 @@ "type": "object" }, "CallIndirect": { - "description": "Operation to call a function indirectly. Like call, but the first input is a standard dataflow graph type.", + "description": "Call a function indirectly.\n\nLike call, but the first input is a standard dataflow graph type.", "properties": { "parent": { "title": "Parent", "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "CallIndirect", @@ -149,7 +149,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Case", @@ -174,7 +174,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Conditional", @@ -195,49 +195,54 @@ "title": "Outputs", "type": "array" }, - "extension_delta": { + "sum_rows": { + "description": "The possible rows of the Sum input", "items": { - "type": "string" + "items": { + "$ref": "#/$defs/Type" + }, + "type": "array" }, - "title": "Extension Delta", + "title": "Sum 
Rows", "type": "array" + }, + "extension_delta": { + "$ref": "#/$defs/ExtensionSet" } }, "required": [ - "parent" + "parent", + "sum_rows" ], "title": "Conditional", "type": "object" }, "Const": { - "description": "A constant value definition.", - "properties": { - "parent": { - "title": "Parent", - "type": "integer" + "description": "A constant operation.", + "discriminator": { + "mapping": { + "Extension": "#/$defs/ExtensionConst", + "Function": "#/$defs/FunctionConst", + "Sum": "#/$defs/Sum", + "Tuple": "#/$defs/Tuple" }, - "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "propertyName": "c" + }, + "oneOf": [ + { + "$ref": "#/$defs/ExtensionConst" }, - "op": { - "const": "Const", - "default": "Const", - "title": "Op" + { + "$ref": "#/$defs/FunctionConst" }, - "value": { - "$ref": "#/$defs/Value" + { + "$ref": "#/$defs/Tuple" }, - "typ": { - "$ref": "#/$defs/Type" + { + "$ref": "#/$defs/Sum" } - }, - "required": [ - "parent", - "value", - "typ" ], - "title": "Const", - "type": "object" + "title": "Const" }, "CustomOp": { "description": "A user-defined operation that can be downcasted by the extensions that define it.", @@ -247,7 +252,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -317,7 +322,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "DFG", @@ -342,7 +347,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "DataflowBlock", @@ -374,15 +379,16 @@ "type": "array" }, "extension_delta": { - "items": { - "type": "string" - }, - "title": "Extension Delta", - "type": "array" + "$ref": "#/$defs/ExtensionSet" } }, "required": [ - "parent" + "parent", + "op", + "inputs", + "other_outputs", + "sum_rows", + "extension_delta" ], "title": "DataflowBlock", "type": "object" @@ -395,7 +401,7 
@@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "ExitBlock", @@ -417,30 +423,55 @@ "title": "ExitBlock", "type": "object" }, - "ExtensionVal": { + "ExtensionConst": { "description": "An extension constant value, that can check it is of a given [CustomType].", "properties": { - "v": { + "parent": { + "title": "Parent", + "type": "integer" + }, + "input_extensions": { + "$ref": "#/$defs/ExtensionSet" + }, + "op": { + "const": "Const", + "default": "Const", + "title": "Op" + }, + "c": { "const": "Extension", "default": "Extension", - "title": "V" + "title": "ConstTag" }, - "c": { - "maxItems": 1, - "minItems": 1, - "prefixItems": [ - {} - ], - "title": "C", - "type": "array" + "e": { + "title": "CustomConst" } }, "required": [ - "c" + "parent", + "op", + "c", + "e" ], - "title": "ExtensionVal", + "title": "ExtensionConst", "type": "object" }, + "ExtensionSet": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "default": null, + "description": "A set of extensions ids.", + "title": "ExtensionSet" + }, "ExtensionsArg": { "properties": { "tya": { @@ -449,11 +480,7 @@ "title": "Tya" }, "es": { - "items": { - "type": "string" - }, - "title": "Es", - "type": "array" + "$ref": "#/$defs/ExtensionSet" } }, "required": [ @@ -470,7 +497,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "FuncDecl", @@ -500,7 +527,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "FuncDefn", @@ -522,6 +549,39 @@ "title": "FuncDefn", "type": "object" }, + "FunctionConst": { + "description": "A higher-order function value.", + "properties": { + "parent": { + "title": "Parent", + "type": "integer" + }, + "input_extensions": { + "$ref": "#/$defs/ExtensionSet" + }, + "op": { + "const": 
"Const", + "default": "Const", + "title": "Op" + }, + "c": { + "const": "Function", + "default": "Function", + "title": "ConstTag" + }, + "hugr": { + "title": "Hugr" + } + }, + "required": [ + "parent", + "op", + "c", + "hugr" + ], + "title": "FunctionConst", + "type": "object" + }, "FunctionType": { "description": "A graph encoded as a value. It contains a concrete signature and a set of required resources.", "properties": { @@ -540,11 +600,7 @@ "type": "array" }, "extension_reqs": { - "items": { - "type": "string" - }, - "title": "Extension Reqs", - "type": "array" + "$ref": "#/$defs/ExtensionSet" } }, "required": [ @@ -554,24 +610,6 @@ "title": "FunctionType", "type": "object" }, - "FunctionVal": { - "description": "A higher-order function value.", - "properties": { - "v": { - "const": "Function", - "default": "Function", - "title": "V" - }, - "hugr": { - "title": "Hugr" - } - }, - "required": [ - "hugr" - ], - "title": "FunctionVal", - "type": "object" - }, "GeneralSum": { "description": "General sum type that explicitly stores the types of the variants.", "properties": { @@ -588,9 +626,9 @@ "rows": { "items": { "items": { - "$ref": "#/$defs/Type", - "type": "array" - } + "$ref": "#/$defs/Type" + }, + "type": "array" }, "title": "Rows", "type": "array" @@ -610,7 +648,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Input", @@ -631,33 +669,8 @@ "title": "Input", "type": "object" }, - "InputExtensions": { - "anyOf": [ - { - "items": { - "type": "string" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "default": null - }, - "USize": { - "description": "Unsigned integer size type.", - "properties": { - "t": { - "const": "I", - "default": "I", - "title": "T" - } - }, - "title": "USize", - "type": "object" - }, "LeafOp": { + "description": "A constant operation.", "discriminator": { "mapping": { "CustomOp": "#/$defs/CustomOp", @@ -688,7 +701,8 @@ { "$ref": 
"#/$defs/TypeApply" } - ] + ], + "title": "LeafOp" }, "ListParam": { "properties": { @@ -715,7 +729,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LoadConstant", @@ -741,7 +755,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -775,7 +789,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Module", @@ -797,7 +811,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -821,6 +835,7 @@ "type": "object" }, "OpType": { + "description": "A constant operation.", "discriminator": { "mapping": { "CFG": "#/$defs/CFG", @@ -831,7 +846,6 @@ "Const": "#/$defs/Const", "DFG": "#/$defs/DFG", "DataflowBlock": "#/$defs/DataflowBlock", - "DummyOp": "#/$defs/DummyOp", "ExitBlock": "#/$defs/ExitBlock", "FuncDecl": "#/$defs/FuncDecl", "FuncDefn": "#/$defs/FuncDefn", @@ -860,9 +874,6 @@ { "$ref": "#/$defs/Const" }, - { - "$ref": "#/$defs/DummyOp" - }, { "$ref": "#/$defs/DataflowBlock" }, @@ -899,7 +910,8 @@ { "$ref": "#/$defs/DFG" } - ] + ], + "title": "OpType" }, "Opaque": { "description": "An opaque operation that can be downcasted by the extensions that define it.", @@ -979,7 +991,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "Output", @@ -1062,26 +1074,67 @@ "Sum": { "description": "A Sum variant For any Sum type where this value meets the type of the variant indicated by the tag.", "properties": { - "v": { + "parent": { + "title": "Parent", + "type": "integer" + }, + "input_extensions": { + "$ref": "#/$defs/ExtensionSet" + }, + "op": { + "const": "Const", + "default": "Const", + "title": "Op" + }, + "c": { "const": "Sum", 
"default": "Sum", - "title": "V" + "title": "ConstTag" }, "tag": { "title": "Tag", "type": "integer" }, - "value": { - "$ref": "#/$defs/Value" + "typ": { + "$ref": "#/$defs/Type" + }, + "vs": { + "items": { + "$ref": "#/$defs/Const" + }, + "title": "Vs", + "type": "array" } }, "required": [ + "parent", + "op", + "c", "tag", - "value" + "typ", + "vs" ], "title": "Sum", "type": "object" }, + "SumType": { + "discriminator": { + "mapping": { + "General": "#/$defs/GeneralSum", + "Unit": "#/$defs/UnitSum" + }, + "propertyName": "s" + }, + "oneOf": [ + { + "$ref": "#/$defs/UnitSum" + }, + { + "$ref": "#/$defs/GeneralSum" + } + ], + "title": "SumType" + }, "Tag": { "description": "An operation that creates a tagged sum value from one of its variants.", "properties": { @@ -1090,7 +1143,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -1130,7 +1183,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "TailLoop", @@ -1166,22 +1219,37 @@ "type": "object" }, "Tuple": { - "description": "A tuple.", + "description": "A constant tuple value.", "properties": { - "v": { + "parent": { + "title": "Parent", + "type": "integer" + }, + "input_extensions": { + "$ref": "#/$defs/ExtensionSet" + }, + "op": { + "const": "Const", + "default": "Const", + "title": "Op" + }, + "c": { "const": "Tuple", "default": "Tuple", - "title": "V" + "title": "ConstTag" }, "vs": { "items": { - "$ref": "#/$defs/Value" + "$ref": "#/$defs/Const" }, "title": "Vs", "type": "array" } }, "required": [ + "parent", + "op", + "c", "vs" ], "title": "Tuple", @@ -1208,46 +1276,8 @@ "title": "TupleParam", "type": "object" }, - "TupleType": { - "description": "Product type, known-size tuple over elements of type row.", - "properties": { - "t": { - "const": "Tuple", - "default": "Tuple", - "title": "T" - }, - "inner": { - "items": { - "$ref": 
"#/$defs/Type" - }, - "title": "Inner", - "type": "array" - } - }, - "required": [ - "inner" - ], - "title": "TupleType", - "type": "object" - }, - "SumType": { - "discriminator": { - "mapping": { - "General": "#/$defs/GeneralSum", - "Unit": "#/$defs/UnitSum" - }, - "propertyName": "s" - }, - "oneOf": [ - { - "$ref": "#/$defs/UnitSum" - }, - { - "$ref": "#/$defs/GeneralSum" - } - ] - }, "Type": { + "description": "A HUGR type.", "discriminator": { "mapping": { "Array": "#/$defs/Array", @@ -1256,7 +1286,6 @@ "Opaque": "#/$defs/Opaque", "Q": "#/$defs/Qubit", "Sum": "#/$defs/SumType", - "Tuple": "#/$defs/TupleType", "V": "#/$defs/Variable" }, "propertyName": "t" @@ -1277,16 +1306,14 @@ { "$ref": "#/$defs/Array" }, - { - "$ref": "#/$defs/TupleType" - }, { "$ref": "#/$defs/SumType" }, { "$ref": "#/$defs/Opaque" } - ] + ], + "title": "Type" }, "TypeApplication": { "description": "Records details of an application of a PolyFuncType to some TypeArgs and the result (a less-, but still potentially-, polymorphic type).", @@ -1321,7 +1348,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -1345,6 +1372,7 @@ "type": "object" }, "TypeArg": { + "description": "A type argument.", "discriminator": { "mapping": { "BoundedNat": "#/$defs/BoundedNatArg", @@ -1371,7 +1399,8 @@ { "$ref": "#/$defs/ExtensionsArg" } - ] + ], + "title": "TypeArg" }, "TypeBound": { "enum": [ @@ -1383,6 +1412,7 @@ "type": "string" }, "TypeParam": { + "description": "A type parameter.", "discriminator": { "mapping": { "BoundedNat": "#/$defs/BoundedNatParam", @@ -1409,7 +1439,8 @@ { "$ref": "#/$defs/TupleParam" } - ] + ], + "title": "TypeParam" }, "TypeTypeArg": { "properties": { @@ -1445,6 +1476,18 @@ "title": "TypeTypeParam", "type": "object" }, + "USize": { + "description": "Unsigned integer size type.", + "properties": { + "t": { + "const": "I", + "default": "I", + "title": "T" + } + }, + "title": "USize", + 
"type": "object" + }, "UnitSum": { "description": "Simple predicate where all variants are empty tuples.", "properties": { @@ -1477,7 +1520,7 @@ "type": "integer" }, "input_extensions": { - "$ref": "#/$defs/InputExtensions" + "$ref": "#/$defs/ExtensionSet" }, "op": { "const": "LeafOp", @@ -1503,22 +1546,6 @@ "title": "UnpackTuple", "type": "object" }, - "Value": { - "anyOf": [ - { - "$ref": "#/$defs/ExtensionVal" - }, - { - "$ref": "#/$defs/FunctionVal" - }, - { - "$ref": "#/$defs/Tuple" - }, - { - "$ref": "#/$defs/Sum" - } - ] - }, "Variable": { "description": "A type variable identified by a de Bruijn index.", "properties": { @@ -1543,6 +1570,7 @@ "type": "object" } }, + "description": "A serializable representation of a Hugr.", "properties": { "version": { "const": "v1", @@ -1615,4 +1643,4 @@ ], "title": "Hugr", "type": "object" -} +} \ No newline at end of file diff --git a/specification/schema/serialization.md b/specification/schema/serialization.md old mode 100755 new mode 100644 index 29e57e532..f14fd40bd --- a/specification/schema/serialization.md +++ b/specification/schema/serialization.md @@ -1,106 +1,16 @@ +# HUGR serialization schema -# Serialization Options +This folder contains the schema for the serialization of the HUGR objects +compliant with the [JSON Schema](https://json-schema.org/draft/2020-12/release-notes) +specification. -Given most of our tooling is in Rust it is useful to narrow our search -to options that have good [serde](https://serde.rs/) compatibility. For -most datastructures, serde allows us to to get serialization and -deserialization to many formats just by annotating the datastructures, -with no bespoke code. This is a maintainability godsend. It is also very -fast. +The model is generated from the pydantic model in the `quantinuum_hugr` python +package, and is used to validate the serialization format of the Rust +implementation. 
-Unfortunately, this excludes the Tierkreis serialization technology -Protobuf, as its [serde support is -poor](https://docs.rs/serde-protobuf/latest/serde_protobuf/) -(serialization not supported at all currently). In general, as Protobuf -does its own code generation in the languages it supports, you have to -work with the datastructures it constructs (and as in the case of -Tierkreis, write a lot of boilerplate code to then convert those in to -the data you want to work with), wheras serde just handles your own -datastructures for you. - -With that in mind, [this -article](https://blog.logrocket.com/rust-serialization-whats-ready-for-production-today/) -has a good summary of performance benchmarks for various options. An -interesting find here was -[FlatBuffers](https://google.github.io/flatbuffers/), Google's protobuf -alternative with zero-copy. Unfortunately, as the article describes it -is quite annoying to work with in Rust and shares the protobuf -schema-related problems mentioned above. - -The highest performing target is -[bincode](https://github.com/bincode-org/bincode), but it does not seem -to be widely used and has poor python support. Another notable mention -is [CBOR](https://cbor.io/); it is however not very well performing on the benchmarks. - -If we take a good balance between performance and language compatibility -MessagePack (or [msgpack](https://msgpack.org/) ) appears to be a very -solid option. It has good serde support (as well as very wide language -support in general, including a fast python package implemented in C), -is one of the top performers on benchmarks (see also [this -thesis](https://hdl.handle.net/10657/13140)), -and has small data size. Another nice benefit is that, like CBOR, it is -very similar to JSON when decoded, which, given that serde can easily -let us go between JSON and msgpack, gives us human-friendly text -visibility. The similarity to JSON also allows very easy conversion from -Python dictionaries. 
- -# Conclusion - -- Use serde to serialize and deserialize the HUGR rust struct. - -- For serialised format we tentatively propose msgpack, but note that - serde allows a very low cost change to this at a later date. - -- In future if a human interpretable text format is required build a - standalone module - this could well be [a set of MLIR - dialects](https://github.com/PennyLaneAI/catalyst/tree/main/mlir) . - -## Note - -One important downside of this approach, particularly in comparison with -code-generating options like Protobuf, is that non-Rust languages (in -our case, most notably Python, and in future likely also C++) will -require code for handling the binary format and representing the data -model natively. However, for Python at least, this can be achieved -relatively simply with [Pydantic](https://docs.pydantic.dev/). This also -brings with it Python-side schema generation and validation. As an -example, the below fully implements serialization/deserialization of the -spec described in the [main document](hugr.md). - -```python -from typing import Any -import ormsgpack -from pydantic import BaseModel - -class MPBaseModel(BaseModel): - def packb(self) -> bytes: - return ormsgpack.packb( - self, option=ormsgpack.OPT_SERIALIZE_PYDANTIC | ormsgpack.OPT_NON_STR_KEYS - ) - - @classmethod - def unpackb(cls, b: bytes) -> "MPBaseModel": - return cls(**ormsgpack.unpackb(b, option=ormsgpack.OPT_NON_STR_KEYS)) - - -NodeID = int -Port = tuple[NodeID, int] # (node, offset) -NodeWeight = Any - -class Hugr(MPBaseModel): - # (parent, #incoming, #outgoing, NodeWeight) - nodes: list[tuple[NodeID, int, int, NodeWeight]] - edges: list[tuple[Port, Port]] - root: NodeID - -# TODO: specify scheme for NodeWeight - -with open("../hugr/foo.bin", "rb") as f: - # print(Hugr.schema_json()) - pg = Hugr.unpackb(f.read()) - print(pg) - outb = pg.packb() - f.seek(0) - assert outb == f.read() +A script `generate_schema.py` is provided to regenerate the schema. 
To update +the schema, run the following command: +```bash +just update-schema ```