From 1ee5abba141e781e05a17a896f3018141982736c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tam=C3=A1s=20T=C3=B3th?= Date: Mon, 24 Jun 2024 17:52:29 +0200 Subject: [PATCH 1/9] Add workflow to run `actionlint` (#4468) [actionlint](https://github.com/rhysd/actionlint) is a static checker for GitHub Actions workflow files. This PR adds a workflow that runs `actionlint` to check `.yml` files in `.github/workflows`. It also cleans up some issues flagged by `shellcheck`, a linter for shell scripts `actionlint` calls out to. --------- Co-authored-by: Bruce Collie --- .github/actionlint.yaml | 5 ++ .github/workflows/develop.yml | 48 ++++++++--------- .github/workflows/master-pr.yml | 8 +-- .github/workflows/release.yml | 79 +++++++++++++++------------- .github/workflows/run-actionlint.yml | 16 ++++++ .github/workflows/test-pr.yml | 45 ++++++++-------- .github/workflows/update-deps.yml | 6 +-- 7 files changed, 118 insertions(+), 89 deletions(-) create mode 100644 .github/actionlint.yaml create mode 100644 .github/workflows/run-actionlint.yml diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml new file mode 100644 index 00000000000..cdd80bc93a8 --- /dev/null +++ b/.github/actionlint.yaml @@ -0,0 +1,5 @@ +self-hosted-runner: + labels: + - MacM1 + - normal + - performance diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml index 088b6676933..c3623290323 100644 --- a/.github/workflows/develop.yml +++ b/.github/workflows/develop.yml @@ -27,7 +27,7 @@ jobs: git checkout -B master origin/master old_develop="$(git merge-base origin/develop origin/master)" new_develop="$(git rev-parse origin/develop)" - if git diff --exit-code ${old_develop} ${new_develop} -- package/version; then + if git diff --exit-code "${old_develop}" "${new_develop}" -- package/version; then git merge --no-edit origin/develop ./package/version.sh bump else @@ -55,41 +55,41 @@ jobs: run: | set -euxo pipefail workspace=$(pwd) - docker run --name 
k-posting-profiling-tests-${GITHUB_SHA} \ - --rm -it --detach \ - -e BENCHER_API_TOKEN=$BENCHER_API_TOKEN \ - -e BENCHER_PROJECT=$BENCHER_PROJECT \ - -e BENCHER_ADAPTER=$BENCHER_ADAPTER \ - -e GITHUB_HEAD_REF=$GITHUB_HEAD_REF \ - -e GITHUB_BASE_REF=$GITHUB_BASE_REF \ - -e GITHUB_TOKEN=$GITHUB_TOKEN \ - -e GITHUB_ACTIONS=true \ - -e GITHUB_EVENT_NAME=$GITHUB_EVENT_NAME \ - -e GITHUB_EVENT_PATH=$GITHUB_EVENT_PATH \ - -e GITHUB_REPOSITORY=$GITHUB_REPOSITORY \ - -e GITHUB_REF=$GITHUB_REF \ - -e GITHUB_SHA=${GITHUB_SHA} \ - --workdir /opt/workspace \ - -v "${workspace}:/opt/workspace" \ - -v "${GITHUB_EVENT_PATH}:${GITHUB_EVENT_PATH}" \ - ${BASE_OS}:${BASE_DISTRO} + docker run --name "k-posting-profiling-tests-${GITHUB_SHA}" \ + --rm -it --detach \ + -e BENCHER_API_TOKEN="$BENCHER_API_TOKEN" \ + -e BENCHER_PROJECT="$BENCHER_PROJECT" \ + -e BENCHER_ADAPTER="$BENCHER_ADAPTER" \ + -e GITHUB_HEAD_REF="$GITHUB_HEAD_REF" \ + -e GITHUB_BASE_REF="$GITHUB_BASE_REF" \ + -e GITHUB_TOKEN="$GITHUB_TOKEN" \ + -e GITHUB_ACTIONS=true \ + -e GITHUB_EVENT_NAME="$GITHUB_EVENT_NAME" \ + -e GITHUB_EVENT_PATH="$GITHUB_EVENT_PATH" \ + -e GITHUB_REPOSITORY="$GITHUB_REPOSITORY" \ + -e GITHUB_REF="$GITHUB_REF" \ + -e GITHUB_SHA="$GITHUB_SHA" \ + --workdir /opt/workspace \ + -v "${workspace}:/opt/workspace" \ + -v "${GITHUB_EVENT_PATH}:${GITHUB_EVENT_PATH}" \ + "${BASE_OS}:${BASE_DISTRO}" - name: 'Setting up dependencies' run: | set -euxo pipefail - docker exec -t k-posting-profiling-tests-${GITHUB_SHA} /bin/bash -c './k-distribution/tests/profiling/setup_profiling.sh SKIP_K_BUILD' + docker exec -t "k-posting-profiling-tests-${GITHUB_SHA}" /bin/bash -c './k-distribution/tests/profiling/setup_profiling.sh SKIP_K_BUILD' - name: 'Getting Performance Tests Results' run: | set -euxo pipefail - docker exec -t k-posting-profiling-tests-${GITHUB_SHA} /bin/bash -c './k-distribution/tests/profiling/post_results_to_develop.py ${GITHUB_SHA}' + docker exec -t "k-posting-profiling-tests-${GITHUB_SHA}" 
/bin/bash -c './k-distribution/tests/profiling/post_results_to_develop.py "${GITHUB_SHA}"' - name: 'Posting Performance Tests Results' run: | set -euxo pipefail - docker exec -t k-posting-profiling-tests-${GITHUB_SHA} /bin/bash -c 'bencher run \ + docker exec -t "k-posting-profiling-tests-${GITHUB_SHA}" /bin/bash -c 'bencher run \ --if-branch "develop" --else-if-branch "master" \ --file .profiling-results.json --err --ci-only-on-alert \ --github-actions "${GITHUB_TOKEN}" "echo Exporting report"' - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-posting-profiling-tests-${GITHUB_SHA} - docker container rm --force k-posting-profiling-tests-${GITHUB_SHA} || true + docker stop --time=0 "k-posting-profiling-tests-${GITHUB_SHA}" + docker container rm --force "k-posting-profiling-tests-${GITHUB_SHA}" || true diff --git a/.github/workflows/master-pr.yml b/.github/workflows/master-pr.yml index e96dfb1d31a..2dda01b9e34 100644 --- a/.github/workflows/master-pr.yml +++ b/.github/workflows/master-pr.yml @@ -25,8 +25,8 @@ jobs: run: | set -x pull_number=$(jq --raw-output .pull_request.number "${GITHUB_EVENT_PATH}") - curl -X PATCH \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer ${GITHUB_TOKEN}" \ - https://api.github.com/repos/runtimeverification/k/pulls/${pull_number} \ + curl -X PATCH \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${GITHUB_TOKEN}" \ + "https://api.github.com/repos/runtimeverification/k/pulls/${pull_number}" \ -d '{"base":"develop"}' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f45bfe88ef3..2c1fd886c47 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-24.04 steps: - name: 'Get release_id' - run: echo "release_id=$(jq --raw-output '.release.id' $GITHUB_EVENT_PATH)" >> ${GITHUB_OUTPUT} + run: echo "release_id=$(jq --raw-output '.release.id' "$GITHUB_EVENT_PATH")" >> "${GITHUB_OUTPUT}" 
id: release outputs: release_id: ${{ steps.release.outputs.release_id }} @@ -35,13 +35,14 @@ jobs: set -x version=$(cat package/version) tarball=kframework-${version}-src.tar.gz + # shellcheck disable=SC2038 find . -name .git | xargs rm -r CURDIR=$(pwd) cd .. - tar czvf ${tarball} $(basename ${CURDIR}) - mv ${tarball} ${CURDIR}/ - cd ${CURDIR} - gh release upload --repo runtimeverification/k --clobber v${version} ${tarball} + tar czvf "${tarball}" "$(basename "${CURDIR}")" + mv "${tarball}" "${CURDIR}/" + cd "${CURDIR}" + gh release upload --repo runtimeverification/k --clobber "v${version}" "${tarball}" cachix-release: name: 'k-framework-binary cachix release' @@ -114,8 +115,8 @@ jobs: run: | set -x version=$(cat package/version) - cp kframework_amd64_ubuntu_jammy.deb kframework_${version}_amd64_ubuntu_jammy.deb - gh release upload --repo runtimeverification/k --clobber v${version} kframework_${version}_amd64_ubuntu_jammy.deb + cp kframework_amd64_ubuntu_jammy.deb "kframework_${version}_amd64_ubuntu_jammy.deb" + gh release upload --repo runtimeverification/k --clobber "v${version}" "kframework_${version}_amd64_ubuntu_jammy.deb" - name: 'Build, Test, and Push Dockerhub Image' shell: bash {0} env: @@ -125,19 +126,19 @@ jobs: set -euxo pipefail version=$(cat package/version) version_tag=ubuntu-jammy-${version} - docker login --username rvdockerhub --password ${DOCKERHUB_PASSWORD} - docker image build . 
--file package/docker/Dockerfile.ubuntu-jammy --tag ${DOCKERHUB_REPO}:${version_tag} - docker run --name k-package-docker-test-jammy-${GITHUB_SHA} --rm -it --detach ${DOCKERHUB_REPO}:${version_tag} - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && echo "module TEST imports BOOL endmodule" > test.k' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && kompile test.k --backend llvm' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && kompile test.k --backend haskell' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && pyk kompile test.k --backend llvm' - docker exec -t k-package-docker-test-jammy-${GITHUB_SHA} bash -c 'cd ~ && pyk kompile test.k --backend haskell' - docker image push ${DOCKERHUB_REPO}:${version_tag} + docker login --username rvdockerhub --password "${DOCKERHUB_PASSWORD}" + docker image build . --file package/docker/Dockerfile.ubuntu-jammy --tag "${DOCKERHUB_REPO}:${version_tag}" + docker run --name "k-package-docker-test-jammy-${GITHUB_SHA}" --rm -it --detach "${DOCKERHUB_REPO}:${version_tag}" + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && echo "module TEST imports BOOL endmodule" > test.k' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && kompile test.k --backend llvm' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && kompile test.k --backend haskell' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && pyk kompile test.k --backend llvm' + docker exec -t "k-package-docker-test-jammy-${GITHUB_SHA}" bash -c 'cd ~ && pyk kompile test.k --backend haskell' + docker image push "${DOCKERHUB_REPO}:${version_tag}" - name: 'Clean up Docker Container' if: always() run: | - docker stop --time=0 k-package-docker-test-jammy-${GITHUB_SHA} + docker stop --time=0 "k-package-docker-test-jammy-${GITHUB_SHA}" - name: On Failure, Upload the kore-exec.tar.gz file to 
the Summary Page if: failure() uses: actions/upload-artifact@v4 @@ -193,7 +194,9 @@ jobs: # https://github.com/actions/runner-images/issues/6459 # https://github.com/actions/runner-images/issues/6507 # https://github.com/actions/runner-images/issues/2322 - brew list -1 | grep python | while read formula; do brew unlink $formula; brew link --overwrite $formula; done + + # shellcheck disable=SC2162 + brew list -1 | grep python | while read formula; do brew unlink "$formula"; brew link --overwrite "$formula"; done - name: Build brew bottle id: build @@ -209,15 +212,16 @@ jobs: ROOT_URL='https://github.com/runtimeverification/k/releases/download' wget "$ROOT_URL/v${VERSION}/kframework-${VERSION}-src.tar.gz" cd homebrew-k - ../kframework/package/macos/brew-update-to-local ${PACKAGE} ${VERSION} - git commit Formula/$PACKAGE.rb -m "Update ${PACKAGE} to ${VERSION}: part 1" - ../kframework/package/macos/brew-build-and-update-to-local-bottle ${PACKAGE} ${VERSION} ${ROOT_URL} + ../kframework/package/macos/brew-update-to-local "${PACKAGE}" "${VERSION}" + git commit "Formula/$PACKAGE.rb" -m "Update ${PACKAGE} to ${VERSION}: part 1" + ../kframework/package/macos/brew-build-and-update-to-local-bottle "${PACKAGE}" "${VERSION}" "${ROOT_URL}" git reset HEAD^ - LOCAL_BOTTLE_NAME=$(basename $(find . -name "kframework--${VERSION}.arm64_sonoma.bottle*.tar.gz")) - BOTTLE_NAME=$(echo ${LOCAL_BOTTLE_NAME#./} | sed 's!kframework--!kframework-!') - ../kframework/package/macos/brew-update-to-final ${PACKAGE} ${VERSION} ${ROOT_URL} - echo "path=${LOCAL_BOTTLE_NAME}" >> ${GITHUB_OUTPUT} - echo "path_remote=${BOTTLE_NAME}" >> ${GITHUB_OUTPUT} + LOCAL_BOTTLE_NAME=$(basename "$(find . 
-name "kframework--${VERSION}.arm64_sonoma.bottle*.tar.gz")") + # shellcheck disable=2001 + BOTTLE_NAME=$(echo "${LOCAL_BOTTLE_NAME#./}" | sed 's!kframework--!kframework-!') + ../kframework/package/macos/brew-update-to-final "${PACKAGE}" "${VERSION}" "${ROOT_URL}" + echo "path=${LOCAL_BOTTLE_NAME}" >> "${GITHUB_OUTPUT}" + echo "path_remote=${BOTTLE_NAME}" >> "${GITHUB_OUTPUT}" - name: Upload bottle uses: actions/upload-artifact@v4 @@ -279,7 +283,9 @@ jobs: # https://github.com/actions/runner-images/issues/6459 # https://github.com/actions/runner-images/issues/6507 # https://github.com/actions/runner-images/issues/2322 - brew list -1 | grep python | while read formula; do brew unlink $formula; brew link --overwrite $formula; done + + # shellcheck disable=SC2162 + brew list -1 | grep python | while read formula; do brew unlink "$formula"; brew link --overwrite "$formula"; done - name: 'Test brew bottle' id: test @@ -324,8 +330,8 @@ jobs: run: | set -x version=$(cat k-homebrew-checkout/package/version) - mv homebrew-k-old/${BOTTLE_NAME} homebrew-k-old/${REMOTE_BOTTLE_NAME} - gh release upload --repo runtimeverification/k --clobber v${version} homebrew-k-old/${REMOTE_BOTTLE_NAME} + mv "homebrew-k-old/${BOTTLE_NAME}" "homebrew-k-old/${REMOTE_BOTTLE_NAME}" + gh release upload --repo runtimeverification/k --clobber "v${version}" "homebrew-k-old/${REMOTE_BOTTLE_NAME}" - run: | git config --global user.name rv-jenkins @@ -385,16 +391,16 @@ jobs: MAVEN_PASSWORD: ${{ secrets.CLOUDREPO_PASSWORD }} run: | cat ~/.m2/settings.xml - docker exec -t k-release-ci-${GITHUB_SHA} bash -c 'mkdir -p /home/github-runner/.m2' - docker cp ~/.m2/settings.xml k-release-ci-${GITHUB_SHA}:/tmp/settings.xml - docker exec -t k-release-ci-${GITHUB_SHA} bash -c 'mv /tmp/settings.xml /home/github-runner/.m2/settings.xml' - docker exec -e MAVEN_USERNAME -e MAVEN_PASSWORD -t k-release-ci-${GITHUB_SHA} bash -c "mvn --batch-mode deploy" + docker exec -t "k-release-ci-${GITHUB_SHA}" bash -c 'mkdir -p 
/home/github-runner/.m2' + docker cp ~/.m2/settings.xml "k-release-ci-${GITHUB_SHA}:/tmp/settings.xml" + docker exec -t "k-release-ci-${GITHUB_SHA} bash" -c 'mv /tmp/settings.xml /home/github-runner/.m2/settings.xml' + docker exec -e MAVEN_USERNAME -e MAVEN_PASSWORD -t "k-release-ci-${GITHUB_SHA}" bash -c "mvn --batch-mode deploy" - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-release-ci-${GITHUB_SHA} - docker container rm --force k-release-ci-${GITHUB_SHA} || true + docker stop --time=0 "k-release-ci-${GITHUB_SHA}" + docker container rm --force "k-release-ci-${GITHUB_SHA}" || true - name: Publish release uses: actions/github-script@v7 @@ -455,7 +461,7 @@ jobs: -H "Authorization: Bearer ${GITHUB_TOKEN}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/runtimeverification/devops/dispatches \ - -d '{"event_type":"on-demand-test","client_payload":{"repo":"runtimeverification/k","version":"'${VERSION}'"}}' + -d '{"event_type":"on-demand-test","client_payload":{"repo":"runtimeverification/k","version":"'"${VERSION}"'"}}' pyk-build-docs: name: 'Build Pyk Documentation' @@ -532,6 +538,7 @@ jobs: npm run build-sitemap cd - mv web/public_content ./ + # shellcheck disable=SC2046 rm -rf $(find . -maxdepth 1 -not -name public_content -a -not -name .git -a -not -path . -a -not -path .. 
-a -not -name CNAME) mv public_content/* ./ rm -rf public_content diff --git a/.github/workflows/run-actionlint.yml b/.github/workflows/run-actionlint.yml new file mode 100644 index 00000000000..2d7cd5f6a09 --- /dev/null +++ b/.github/workflows/run-actionlint.yml @@ -0,0 +1,16 @@ +name: Lint GitHub Actions workflows +on: pull_request + +jobs: + actionlint: + name: Run actionlint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Download actionlint + id: get_actionlint + run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) + shell: bash + - name: Check workflow files + run: ${{ steps.get_actionlint.outputs.executable }} -color + shell: bash diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml index 4d43a311ed7..77a19e55e99 100644 --- a/.github/workflows/test-pr.yml +++ b/.github/workflows/test-pr.yml @@ -81,12 +81,12 @@ jobs: distro: jammy llvm: 15 - name: 'Build and Test K' - run: docker exec -t k-ci-${GITHUB_SHA} /bin/bash -c 'mvn verify -Dspotless.check.skip=true --batch-mode -U' + run: docker exec -t "k-ci-${GITHUB_SHA}" /bin/bash -c 'mvn verify -Dspotless.check.skip=true --batch-mode -U' - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-ci-${GITHUB_SHA} - docker container rm --force k-ci-${GITHUB_SHA} || true + docker stop --time=0 "k-ci-${GITHUB_SHA}" + docker container rm --force "k-ci-${GITHUB_SHA}" || true test-package-ubuntu-jammy: @@ -222,10 +222,11 @@ jobs: GC_DONT_GC: '1' run: | nix --version - export JQ=$(nix-build '' -A jq --no-link)/bin/jq + JQ=$(nix-build '' -A jq --no-link)/bin/jq + export JQ k=$(nix build . 
--print-build-logs --json | $JQ -r '.[].outputs | to_entries[].value') - drv=$(nix-store --query --deriver ${k}) - nix-store --query --requisites ${drv} | cachix push k-framework + drv=$(nix-store --query --deriver "$k") + nix-store --query --requisites "$drv" | cachix push k-framework - name: 'Smoke test K' run: GC_DONT_GC=1 nix build --print-build-logs .#smoke-test # These tests take a really long time to run on other platforms, so we @@ -366,33 +367,33 @@ jobs: run: | set -euxo pipefail workspace=$(pwd) - docker run --name k-profiling-tests-${GITHUB_SHA} \ + docker run --name "k-profiling-tests-${GITHUB_SHA}" \ --rm -it --detach --user root \ - -e BENCHER_API_TOKEN=$BENCHER_API_TOKEN \ - -e BENCHER_PROJECT=$BENCHER_PROJECT \ - -e BENCHER_ADAPTER=$BENCHER_ADAPTER \ - -e GITHUB_HEAD_REF=$GITHUB_HEAD_REF \ - -e GITHUB_BASE_REF=$GITHUB_BASE_REF \ - -e GITHUB_TOKEN=$GITHUB_TOKEN \ + -e BENCHER_API_TOKEN="$BENCHER_API_TOKEN" \ + -e BENCHER_PROJECT="$BENCHER_PROJECT" \ + -e BENCHER_ADAPTER="$BENCHER_ADAPTER" \ + -e GITHUB_HEAD_REF="$GITHUB_HEAD_REF" \ + -e GITHUB_BASE_REF="$GITHUB_BASE_REF" \ + -e GITHUB_TOKEN="$GITHUB_TOKEN" \ -e GITHUB_ACTIONS=true \ - -e GITHUB_EVENT_NAME=$GITHUB_EVENT_NAME \ - -e GITHUB_EVENT_PATH=$GITHUB_EVENT_PATH \ - -e GITHUB_REPOSITORY=$GITHUB_REPOSITORY \ - -e GITHUB_REF=$GITHUB_REF \ + -e GITHUB_EVENT_NAME="$GITHUB_EVENT_NAME" \ + -e GITHUB_EVENT_PATH="$GITHUB_EVENT_PATH" \ + -e GITHUB_REPOSITORY="$GITHUB_REPOSITORY" \ + -e GITHUB_REF="$GITHUB_REF" \ --workdir /opt/workspace \ -v "${workspace}:/opt/workspace" \ -v "${GITHUB_EVENT_PATH}:${GITHUB_EVENT_PATH}" \ - runtimeverificationinc/k-profiling-tests-${GITHUB_SHA} + "runtimeverificationinc/k-profiling-tests-${GITHUB_SHA}" - name: 'Install K from Package' run: | set -euxo pipefail - docker exec -t k-profiling-tests-${GITHUB_SHA} /bin/bash -c './k-distribution/tests/profiling/setup_profiling.sh' + docker exec -t "k-profiling-tests-${GITHUB_SHA}" /bin/bash -c 
'./k-distribution/tests/profiling/setup_profiling.sh' - name: 'Profiling Performance Tests' run: | set -euxo pipefail - docker exec -t k-profiling-tests-${GITHUB_SHA} /bin/bash -c 'cd k-distribution/tests/profiling && make' + docker exec -t "k-profiling-tests-${GITHUB_SHA}" /bin/bash -c 'cd k-distribution/tests/profiling && make' - name: 'Tear down Docker' if: always() run: | - docker stop --time=0 k-profiling-tests-${GITHUB_SHA} - docker container rm --force k-profiling-tests-${GITHUB_SHA} || true + docker stop --time=0 "k-profiling-tests-${GITHUB_SHA}" + docker container rm --force "k-profiling-tests-${GITHUB_SHA}" || true diff --git a/.github/workflows/update-deps.yml b/.github/workflows/update-deps.yml index 6edafbfacc8..9a66903a64b 100644 --- a/.github/workflows/update-deps.yml +++ b/.github/workflows/update-deps.yml @@ -48,16 +48,16 @@ jobs: hs_backend_version=$(cat deps/haskell-backend_release) cd haskell-backend/src/main/native/haskell-backend - git checkout ${hs_backend_version} + git checkout "${hs_backend_version}" cd - - sed -i 's! haskell-backend.url = "github:runtimeverification/haskell-backend/.*";! haskell-backend.url = "github:runtimeverification/haskell-backend/'${hs_backend_version}'";!' flake.nix + sed -i 's! haskell-backend.url = "github:runtimeverification/haskell-backend/.*";! haskell-backend.url = "github:runtimeverification/haskell-backend/'"${hs_backend_version}"'";!' flake.nix if git add flake.nix haskell-backend/src/main/native/haskell-backend && git commit -m "flake.nix, haskell-backend/src/main/native/haskell-backend: update to version ${hs_backend_version}"; then changed=true fi llvm_backend_version="v$(cat deps/llvm-backend_release)" cd llvm-backend/src/main/native/llvm-backend - git checkout ${llvm_backend_version} + git checkout "${llvm_backend_version}" cd - sed -i 's! url = "github:runtimeverification/llvm-backend/.*";! url = "github:runtimeverification/llvm-backend/'"${llvm_backend_version}"'";!' 
flake.nix if git add flake.nix llvm-backend/src/main/native/llvm-backend && git commit -m "flake.nix, llvm-backend/src/main/native/llvm-backend: update to version ${llvm_backend_version}"; then From 44d0dbc2866b25bb80aa074eb7ad81cf95be96e5 Mon Sep 17 00:00:00 2001 From: rv-jenkins Date: Mon, 24 Jun 2024 11:25:01 -0600 Subject: [PATCH 2/9] Update dependency: deps/llvm-backend_release (#4465) Co-authored-by: devops Co-authored-by: Bruce Collie --- deps/llvm-backend_release | 2 +- flake.lock | 8 ++++---- flake.nix | 2 +- llvm-backend/src/main/native/llvm-backend | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/deps/llvm-backend_release b/deps/llvm-backend_release index 0665a48efd5..c892edde544 100644 --- a/deps/llvm-backend_release +++ b/deps/llvm-backend_release @@ -1 +1 @@ -0.1.50 +0.1.51 diff --git a/flake.lock b/flake.lock index 4059105ce23..582046fbfd3 100644 --- a/flake.lock +++ b/flake.lock @@ -112,16 +112,16 @@ ] }, "locked": { - "lastModified": 1718737220, - "narHash": "sha256-0yMDvOr3TQPqpKBKOXAqfrz6FO+rOJ0qG3uNotxXhao=", + "lastModified": 1718910118, + "narHash": "sha256-E2utS5SGK3B7IjHRBlyKSB6TR/gLli14m+7d8AAmp0I=", "owner": "runtimeverification", "repo": "llvm-backend", - "rev": "a7e321383372ba625173d4064836ab4f8cebef85", + "rev": "6671fe4b8de164ac2d141dcce869a06b628ed560", "type": "github" }, "original": { "owner": "runtimeverification", - "ref": "v0.1.50", + "ref": "v0.1.51", "repo": "llvm-backend", "type": "github" } diff --git a/flake.nix b/flake.nix index 7f6b7996349..f346567f8dd 100644 --- a/flake.nix +++ b/flake.nix @@ -5,7 +5,7 @@ nixpkgs.follows = "llvm-backend/nixpkgs"; flake-utils.url = "github:numtide/flake-utils"; llvm-backend = { - url = "github:runtimeverification/llvm-backend/v0.1.50"; + url = "github:runtimeverification/llvm-backend/v0.1.51"; inputs.utils.follows = "flake-utils"; }; rv-utils.url = "github:runtimeverification/rv-nix-tools"; diff --git a/llvm-backend/src/main/native/llvm-backend 
b/llvm-backend/src/main/native/llvm-backend index a7e32138337..6671fe4b8de 160000 --- a/llvm-backend/src/main/native/llvm-backend +++ b/llvm-backend/src/main/native/llvm-backend @@ -1 +1 @@ -Subproject commit a7e321383372ba625173d4064836ab4f8cebef85 +Subproject commit 6671fe4b8de164ac2d141dcce869a06b628ed560 From c14fbb4d399930d5084b4cfc77084c80455fe1a1 Mon Sep 17 00:00:00 2001 From: gtrepta <50716988+gtrepta@users.noreply.github.com> Date: Mon, 24 Jun 2024 14:02:05 -0500 Subject: [PATCH 3/9] Begin implementing fuzzing (#4454) This introduces the most basic functionality for doing fuzz testing of a property in K. It adds a function `fuzz` to the `ktool.kfuzz` pyk module which takes the location of an llvm-kompiled definition, a template configuration in kore containing input variables to be randomized, a strategy for substituting those variables (so far, only with integers using `kintegers`), and either a function to check the resulting kore or a flag to check the exit code of the interpreter. It will invoke hypothesis to randomize the inputs and run the interpreter. 
--- pyk/poetry.lock | 46 ++++++- pyk/pyproject.toml | 1 + pyk/src/pyk/ktool/kfuzz.py | 111 ++++++++++++++++ pyk/src/pyk/ktool/krun.py | 31 +++-- pyk/src/tests/integration/ktool/test_fuzz.py | 125 +++++++++++++++++++ 5 files changed, 305 insertions(+), 9 deletions(-) create mode 100644 pyk/src/pyk/ktool/kfuzz.py create mode 100644 pyk/src/tests/integration/ktool/test_fuzz.py diff --git a/pyk/poetry.lock b/pyk/poetry.lock index d181bc4df4a..9de7d3ec3b6 100644 --- a/pyk/poetry.lock +++ b/pyk/poetry.lock @@ -373,6 +373,39 @@ files = [ [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} +[[package]] +name = "hypothesis" +version = "6.103.1" +description = "A library for property-based testing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "hypothesis-6.103.1-py3-none-any.whl", hash = "sha256:d3c959fab6233e78867499e2117ae9db8dc40eeed936d71a2cfc7b6094972e74"}, + {file = "hypothesis-6.103.1.tar.gz", hash = "sha256:d299d5c21d6408eab3be670c94c974f3acf0b511c61fe81804b09091e393ee1f"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +sortedcontainers = ">=2.1.0,<3.0.0" + +[package.extras] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.54)", "django (>=3.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.1)"] +cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] +codemods = ["libcst (>=0.3.16)"] +crosshair = ["crosshair-tool (>=0.0.54)", "hypothesis-crosshair (>=0.0.4)"] +dateutil = ["python-dateutil (>=1.4)"] +django = ["django (>=3.2)"] +dpcontracts = ["dpcontracts (>=0.4)"] +ghostwriter = ["black (>=19.10b0)"] +lark = ["lark (>=0.10.1)"] +numpy 
= ["numpy (>=1.17.3)"] +pandas = ["pandas (>=1.1)"] +pytest = ["pytest (>=4.6)"] +pytz = ["pytz (>=2014.1)"] +redis = ["redis (>=3.0.0)"] +zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2024.1)"] + [[package]] name = "importlib-metadata" version = "7.1.0" @@ -892,6 +925,17 @@ files = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + [[package]] name = "textual" version = "0.27.0" @@ -1010,4 +1054,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "1e8db8aceb9488a870758157f280a7cbff78677f44d6bedfa585f6bebdd5ffc5" +content-hash = "7888201fe916d5896a73a6bedf4367e437bd8807dad0ef19175db10165d07404" diff --git a/pyk/pyproject.toml b/pyk/pyproject.toml index 5497ac8620f..3be1e5f6207 100644 --- a/pyk/pyproject.toml +++ b/pyk/pyproject.toml @@ -21,6 +21,7 @@ cmd2 = "^2.4.2" coloredlogs = "^15.0.1" filelock = "^3.9.0" graphviz = "^0.20.1" +hypothesis = "^6.103.1" psutil = "5.9.5" pybind11 = "^2.10.3" pytest = "*" diff --git a/pyk/src/pyk/ktool/kfuzz.py b/pyk/src/pyk/ktool/kfuzz.py new file mode 100644 index 00000000000..b2d02971564 --- /dev/null +++ b/pyk/src/pyk/ktool/kfuzz.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from hypothesis import Phase, Verbosity, given, settings +from hypothesis.strategies import builds, fixed_dictionaries, integers + +from ..kast.inner import KSort +from 
..konvert import _kast_to_kore +from ..kore.parser import KoreParser +from ..kore.syntax import Assoc, EVar +from ..prelude.k import inj +from ..prelude.kint import intToken +from .krun import llvm_interpret_raw + +if TYPE_CHECKING: + from collections.abc import Callable, Mapping + from pathlib import Path + from typing import Any + + from hypothesis.strategies import SearchStrategy + + from ..kast.inner import KInner + from ..kore.syntax import Pattern + + +def kintegers( + *, + min_value: int | None = None, + max_value: int | None = None, + with_inj: KSort | None = None, +) -> SearchStrategy[Pattern]: + """Return a search strategy for K integers. + + Args: + min_value: Minimum value for the generated integers + max_value: Maximum value for the generated integers + with_inj: Return the integer as an injection into this sort + + Returns: + A strategy which generates integer domain values. + """ + + def int_dv(value: int) -> Pattern: + res: KInner = intToken(value) + if with_inj is not None: + res = inj(KSort('Int'), with_inj, res) + return _kast_to_kore(res) + + return builds(int_dv, integers(min_value=min_value, max_value=max_value)) + + +def fuzz( + definition_dir: str | Path, + template: Pattern, + subst_strategy: dict[EVar, SearchStrategy[Pattern]], + check_func: Callable[[Pattern], Any] | None = None, + check_exit_code: bool = False, + max_examples: int = 50, +) -> None: + """Fuzz a property test with concrete execution over a K term. + + Args: + definition_dir: The location of the K definition to run the interpreter for. + template: The term which will be sent to the interpreter after randomizing inputs. It should contain at least one variable which will be substituted for a value. + subst_strategy: Should have each variable in the template term mapped to a strategy for generating values for it. + check_func: Will be called on the kore output from the interpreter. + Should throw an AssertionError if it determines that the output indicates a test failure. 
+ A RuntimeError will be thrown if this is passed as an argument and check_exit_code is True. + check_exit_code: Check the exit code of the interpreter for a test failure instead of using check_func. + An exit code of 0 indicates a passing test. + A RuntimeError will be thrown if this is True and check_func is also passed as an argument. + max_examples: The number of test cases to run. + + Raises: + RuntimeError: If check_func exists and check_exit_code is set, or check_func doesn't exist and check_exit_code is cleared. + """ + if bool(check_func) == check_exit_code: + raise RuntimeError('Must pass one of check_func or check_exit_code, and not both!') + + def test(subst_case: Mapping[EVar, Pattern]) -> None: + def sub(p: Pattern) -> Pattern: + if isinstance(p, Assoc): + symbol = p.symbol() + args = (arg.top_down(sub) for arg in p.app.args) + return p.of(symbol, patterns=(p.app.let(args=args),)) + if p in subst_case: + assert isinstance(p, EVar) + return subst_case[p] + return p + + test_pattern = template.top_down(sub) + res = llvm_interpret_raw(definition_dir, test_pattern.text) + + if check_exit_code: + assert res.returncode == 0 + else: + assert check_func + res_pattern = KoreParser(res.stdout).pattern() + check_func(res_pattern) + + strat: SearchStrategy = fixed_dictionaries(subst_strategy) + + given(strat)( + settings( + deadline=50000, + max_examples=max_examples, + verbosity=Verbosity.verbose, + phases=(Phase.generate, Phase.target, Phase.shrink), + )(test) + )() diff --git a/pyk/src/pyk/ktool/krun.py b/pyk/src/pyk/ktool/krun.py index 43df462837a..f18717feead 100644 --- a/pyk/src/pyk/ktool/krun.py +++ b/pyk/src/pyk/ktool/krun.py @@ -315,18 +315,33 @@ def llvm_interpret(definition_dir: str | Path, pattern: Pattern, *, depth: int | Raises: RuntimeError: If the interpreter fails. 
""" - definition_dir = Path(definition_dir) - check_dir_path(definition_dir) + try: + res = llvm_interpret_raw(definition_dir, pattern.text, depth) + except CalledProcessError as err: + raise RuntimeError(f'Interpreter failed with status {err.returncode}: {err.stderr}') from err + + return KoreParser(res.stdout).pattern() + + +def llvm_interpret_raw(definition_dir: str | Path, kore: str, depth: int | None = None) -> CompletedProcess: + """Execute the `interpreter` binary generated by the LLVM Backend, with no processing of input/output. + + Args: + definition_dir: Path to the kompiled definition directory. + pattern: KORE string to start rewriting from. + depth: Maximal number of rewrite steps to take. + + Returns: + The CompletedProcess of the interpreter. + Raises: + CalledProcessError: If the interpreter fails. + """ + definition_dir = Path(definition_dir) interpreter_file = definition_dir / 'interpreter' check_file_path(interpreter_file) depth = depth if depth is not None else -1 args = [str(interpreter_file), '/dev/stdin', str(depth), '/dev/stdout'] - try: - res = run_process(args, input=pattern.text, pipe_stderr=True) - except CalledProcessError as err: - raise RuntimeError(f'Interpreter failed with status {err.returncode}: {err.stderr}') from err - - return KoreParser(res.stdout).pattern() + return run_process(args, input=kore, pipe_stderr=True) diff --git a/pyk/src/tests/integration/ktool/test_fuzz.py b/pyk/src/tests/integration/ktool/test_fuzz.py new file mode 100644 index 00000000000..76d34f9d6af --- /dev/null +++ b/pyk/src/tests/integration/ktool/test_fuzz.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +import pytest + +from pyk.kast.inner import KSort +from pyk.kore.parser import KoreParser +from pyk.kore.prelude import inj, top_cell_initializer +from pyk.kore.syntax import DV, App, Assoc, EVar, SortApp, String +from pyk.ktool.kfuzz import fuzz, kintegers +from pyk.ktool.kprint import _kast 
+from pyk.testing import KompiledTest + +from ..utils import K_FILES, TEST_DATA_DIR + +if TYPE_CHECKING: + from pathlib import Path + from typing import Final + + from pyk.kore.syntax import Pattern + +_LOGGER: Final = logging.getLogger(__name__) + +FUZZ_FILES: Path = TEST_DATA_DIR / 'fuzzing' + +VAR_X = EVar(name='VarX', sort=SortApp('SortInt')) +VAR_Y = EVar(name='VarY', sort=SortApp('SortInt')) + + +class TestImpFuzz(KompiledTest): + KOMPILE_MAIN_FILE = K_FILES / 'imp.k' + KOMPILE_BACKEND = 'llvm' + SUBSTS = {VAR_X: kintegers(with_inj=KSort('AExp')), VAR_Y: kintegers(with_inj=KSort('AExp'))} + + @staticmethod + def check(p: Pattern) -> None: + def check_inner(p: Pattern) -> Pattern: + match p: + case Assoc(): + symbol = p.symbol() + args = (arg.top_down(check_inner) for arg in p.app.args) + return p.of(symbol, patterns=(p.app.let(args=args),)) + case App("Lbl'UndsPipe'-'-GT-Unds'", args=(key, val)): + match key, val: + case ( + App('inj', args=(DV(value=String('res')),)), + App('inj', args=(DV(value=String(resval)),)), + ): + assert resval == '0' + + return p + + p.top_down(check_inner) + + @staticmethod + def setup_program(definition_dir: Path, text: str) -> Pattern: + kore_text = _kast(definition_dir=definition_dir, input='program', output='kore', expression=text).stdout + + program_pattern = KoreParser(kore_text).pattern() + + def replace_var_ids(p: Pattern) -> Pattern: + match p: + case App('inj', _, (DV(_, String('varx')),)): + return VAR_X + case App('inj', _, (DV(_, String('vary')),)): + return VAR_Y + return p + + program_pattern = program_pattern.top_down(replace_var_ids) + init_pattern = top_cell_initializer( + { + '$PGM': inj(SortApp('SortPgm'), SortApp('SortKItem'), program_pattern), + } + ) + + return init_pattern + + def test_fuzz( + self, + definition_dir: Path, + ) -> None: + # Given + program_text = """ + // Checks the commutativity of addition + int x, y, a, b, res; + x = varx; + y = vary; + a = x + y; + b = y + x; + if ((a <= b) && (b <= a)) { 
// a == b + res = 0; + } else { + res = 1; + } + """ + + init_pattern = self.setup_program(definition_dir, program_text) + + # Then + fuzz(definition_dir, init_pattern, self.SUBSTS, self.check) + + def test_fuzz_fail( + self, + definition_dir: Path, + ) -> None: + # Given + program_text = """ + // Checks that x <= y + int x, y, res; + x = varx; + y = vary; + if (x <= y) { + res = 0; + } else { + res = 1; + } + """ + + init_pattern = self.setup_program(definition_dir, program_text) + + # Then + with pytest.raises(AssertionError): + fuzz(definition_dir, init_pattern, self.SUBSTS, self.check) From 67eff5d505fb755f0212bc47629b2a1c8ef00b77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tam=C3=A1s=20T=C3=B3th?= Date: Mon, 24 Jun 2024 22:02:41 +0200 Subject: [PATCH 4/9] Refactor `from_spec_modules` in `APRProof` (#4447) - Move `ProveRpc`, `ClaimIndex` into separate modules - Add parameter `ordered` to `ClaimIndex.labels` to enable topological sorting of the result - Implement `APRProof.from_spec_modules` using `ClaimIndex` --- pyk/src/pyk/__main__.py | 3 +- pyk/src/pyk/ktool/claim_index.py | 188 ++++++++++++++++ pyk/src/pyk/ktool/kprove.py | 238 +------------------- pyk/src/pyk/ktool/prove_rpc.py | 100 ++++++++ pyk/src/pyk/proof/reachability.py | 87 ++----- pyk/src/tests/integration/ktool/test_imp.py | 2 +- 6 files changed, 319 insertions(+), 299 deletions(-) create mode 100644 pyk/src/pyk/ktool/claim_index.py create mode 100644 pyk/src/pyk/ktool/prove_rpc.py diff --git a/pyk/src/pyk/__main__.py b/pyk/src/pyk/__main__.py index a7b8d6143ea..8664ccbeee6 100644 --- a/pyk/src/pyk/__main__.py +++ b/pyk/src/pyk/__main__.py @@ -36,8 +36,9 @@ from .kore.syntax import Pattern, kore_term from .ktool.kompile import Kompile, KompileBackend from .ktool.kprint import KPrint -from .ktool.kprove import KProve, ProveRpc +from .ktool.kprove import KProve from .ktool.krun import KRun +from .ktool.prove_rpc import ProveRpc from .prelude.k import GENERATED_TOP_CELL from .prelude.ml import is_top, 
mlAnd, mlOr from .proof.reachability import APRFailureInfo, APRProof diff --git a/pyk/src/pyk/ktool/claim_index.py b/pyk/src/pyk/ktool/claim_index.py new file mode 100644 index 00000000000..a33294f46b2 --- /dev/null +++ b/pyk/src/pyk/ktool/claim_index.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +from collections.abc import Mapping +from dataclasses import dataclass +from functools import partial +from graphlib import TopologicalSorter +from typing import TYPE_CHECKING + +from ..kast import Atts +from ..kast.outer import KClaim +from ..utils import FrozenDict, unique + +if TYPE_CHECKING: + from collections.abc import Container, Iterable, Iterator + + from ..kast.outer import KFlatModule, KFlatModuleList + + +@dataclass(frozen=True) +class ClaimIndex(Mapping[str, KClaim]): + claims: FrozenDict[str, KClaim] + main_module_name: str | None + + def __init__( + self, + claims: Mapping[str, KClaim], + main_module_name: str | None = None, + ): + self._validate(claims) + object.__setattr__(self, 'claims', FrozenDict(claims)) + object.__setattr__(self, 'main_module_name', main_module_name) + + @staticmethod + def from_module_list(module_list: KFlatModuleList) -> ClaimIndex: + module_list = ClaimIndex._resolve_depends(module_list) + return ClaimIndex( + claims={claim.label: claim for module in module_list.modules for claim in module.claims}, + main_module_name=module_list.main_module, + ) + + @staticmethod + def _validate(claims: Mapping[str, KClaim]) -> None: + for label, claim in claims.items(): + if claim.label != label: + raise ValueError(f'Claim label mismatch, expected: {label}, found: {claim.label}') + + for depend in claim.dependencies: + if depend not in claims: + raise ValueError(f'Invalid dependency label: {depend}') + + @staticmethod + def _resolve_depends(module_list: KFlatModuleList) -> KFlatModuleList: + """Resolve each depends value relative to the module the claim belongs to. + + Example: + ``` + module THIS-MODULE + claim ... 
[depends(foo,OTHER-MODULE.bar)] + endmodule + ``` + + becomes + + ``` + module THIS-MODULE + claim ... [depends(THIS-MODULE.foo,OTHER-MODULE.bar)] + endmodule + ``` + """ + labels = {claim.label for module in module_list.modules for claim in module.claims} + + def resolve_claim_depends(module_name: str, claim: KClaim) -> KClaim: + depends = claim.dependencies + if not depends: + return claim + + resolve = partial(ClaimIndex._resolve_claim_label, labels, module_name) + resolved = [resolve(label) for label in depends] + return claim.let(att=claim.att.update([Atts.DEPENDS(','.join(resolved))])) + + modules: list[KFlatModule] = [] + for module in module_list.modules: + resolve_depends = partial(resolve_claim_depends, module.name) + module = module.map_sentences(resolve_depends, of_type=KClaim) + modules.append(module) + + return module_list.let(modules=modules) + + @staticmethod + def _resolve_claim_label(labels: Container[str], module_name: str | None, label: str) -> str: + """Resolve `label` to a valid label in `labels`, or raise. + + If a `label` is not found and `module_name` is set, the label is tried after qualifying. 
+ """ + if label in labels: + return label + + if module_name is not None: + qualified = f'{module_name}.{label}' + if qualified in labels: + return qualified + + raise ValueError(f'Claim label not found: {label}') + + def __iter__(self) -> Iterator[str]: + return iter(self.claims) + + def __len__(self) -> int: + return len(self.claims) + + def __getitem__(self, label: str) -> KClaim: + try: + label = self.resolve(label) + except ValueError: + raise KeyError(f'Claim not found: {label}') from None + return self.claims[label] + + def resolve(self, label: str) -> str: + return self._resolve_claim_label(self.claims, self.main_module_name, label) + + def resolve_all(self, labels: Iterable[str]) -> list[str]: + return [self.resolve(label) for label in unique(labels)] + + def labels( + self, + *, + include: Iterable[str] | None = None, + exclude: Iterable[str] | None = None, + with_depends: bool = True, + ordered: bool = False, + ) -> list[str]: + """Return a list of labels from the index. + + Args: + include: Labels to include in the result. If `None`, all labels are included. + exclude: Labels to exclude from the result. If `None`, no labels are excluded. + Takes precedence over `include`. + with_depends: If `True`, the result is transitively closed w.r.t. the dependency relation. + Labels in `exclude` are pruned, and their dependencies are not considered on the given path. + ordered: If `True`, the result is topologically sorted w.r.t. the dependency relation. + + Returns: + A list of labels from the index. + + Raises: + ValueError: If an item in `include` or `exclude` cannot be resolved to a valid label. 
+ """ + include = self.resolve_all(include) if include is not None else self.claims + exclude = self.resolve_all(exclude) if exclude is not None else [] + + labels: list[str] + + if with_depends: + labels = self._close_dependencies(labels=include, prune=exclude) + else: + labels = [label for label in include if label not in set(exclude)] + + if ordered: + return self._sort_topologically(labels) + + return labels + + def _close_dependencies(self, labels: Iterable[str], prune: Iterable[str]) -> list[str]: + res: list[str] = [] + + pending = list(labels) + done = set(prune) + + while pending: + label = pending.pop(0) # BFS + + if label in done: + continue + + res.append(label) + pending += self.claims[label].dependencies + done.add(label) + + return res + + def _sort_topologically(self, labels: list[str]) -> list[str]: + label_set = set(labels) + graph = { + label: [dep for dep in claim.dependencies if dep in label_set] + for label, claim in self.claims.items() + if label in labels + } + return list(TopologicalSorter(graph).static_order()) diff --git a/pyk/src/pyk/ktool/kprove.py b/pyk/src/pyk/ktool/kprove.py index 195bee76536..0c2c945f3e2 100644 --- a/pyk/src/pyk/ktool/kprove.py +++ b/pyk/src/pyk/ktool/kprove.py @@ -4,12 +4,8 @@ import logging import os import re -from collections.abc import Mapping from contextlib import contextmanager -from dataclasses import dataclass from enum import Enum -from functools import cached_property, partial -from graphlib import TopologicalSorter from itertools import chain from pathlib import Path from subprocess import CalledProcessError @@ -17,27 +13,25 @@ from ..cli.utils import check_dir_path, check_file_path from ..cterm import CTerm -from ..kast import Atts, kast_term +from ..kast import kast_term from ..kast.inner import KInner -from ..kast.manip import extract_lhs, flatten_label -from ..kast.outer import KApply, KClaim, KDefinition, KFlatModule, KFlatModuleList, KImport, KRequire +from ..kast.manip import flatten_label +from 
..kast.outer import KDefinition, KFlatModule, KFlatModuleList, KImport, KRequire from ..kore.rpc import KoreExecLogFormat from ..prelude.ml import is_top -from ..proof import APRProof, APRProver, EqualityProof, ImpliesProver -from ..utils import FrozenDict, gen_file_timestamp, run_process, unique +from ..utils import gen_file_timestamp, run_process from . import TypeInferenceMode +from .claim_index import ClaimIndex from .kprint import KPrint if TYPE_CHECKING: - from collections.abc import Callable, Container, Iterable, Iterator + from collections.abc import Callable, Iterable, Iterator, Mapping from subprocess import CompletedProcess - from typing import ContextManager, Final + from typing import Final - from ..cli.pyk import ProveOptions - from ..kast.outer import KRule, KRuleLike + from ..kast.outer import KClaim, KRule, KRuleLike from ..kast.pretty import SymbolTable from ..kcfg import KCFGExplore - from ..proof import Proof, Prover from ..utils import BugReport _LOGGER: Final = logging.getLogger(__name__) @@ -401,219 +395,3 @@ def _get_rule_line(_line: str) -> tuple[str, bool, int] | None: axioms.pop(-1) return axioms - - -class ProveRpc: - _kprove: KProve - _explore_context: Callable[[], ContextManager[KCFGExplore]] - - def __init__( - self, - kprove: KProve, - explore_context: Callable[[], ContextManager[KCFGExplore]], - ): - self._kprove = kprove - self._explore_context = explore_context - - def prove_rpc(self, options: ProveOptions) -> list[Proof]: - all_claims = self._kprove.get_claims( - options.spec_file, - spec_module_name=options.spec_module, - claim_labels=options.claim_labels, - exclude_claim_labels=options.exclude_claim_labels, - type_inference_mode=options.type_inference_mode, - ) - - if all_claims is None: - raise ValueError(f'No claims found in file: {options.spec_file}') - - return [ - self._prove_claim_rpc( - claim, - max_depth=options.max_depth, - save_directory=options.save_directory, - max_iterations=options.max_iterations, - ) - for claim 
in all_claims - ] - - def _prove_claim_rpc( - self, - claim: KClaim, - max_depth: int | None = None, - save_directory: Path | None = None, - max_iterations: int | None = None, - ) -> Proof: - definition = self._kprove.definition - - proof: Proof - prover: Prover - lhs_top = extract_lhs(claim.body) - is_functional_claim = type(lhs_top) is KApply and definition.symbols[lhs_top.label.name] in definition.functions - - if is_functional_claim: - proof = EqualityProof.from_claim(claim, definition, proof_dir=save_directory) - if save_directory is not None and EqualityProof.proof_data_exists(proof.id, save_directory): - _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') - proof = EqualityProof.read_proof_data(save_directory, proof.id) - - else: - proof = APRProof.from_claim(definition, claim, {}, proof_dir=save_directory) - if save_directory is not None and APRProof.proof_data_exists(proof.id, save_directory): - _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') - proof = APRProof.read_proof_data(save_directory, proof.id) - - if not proof.passed and (max_iterations is None or max_iterations > 0): - with self._explore_context() as kcfg_explore: - if is_functional_claim: - assert type(proof) is EqualityProof - prover = ImpliesProver(proof, kcfg_explore) - else: - assert type(proof) is APRProof - prover = APRProver(kcfg_explore, execute_depth=max_depth) - prover.advance_proof(proof, max_iterations=max_iterations) - - if proof.passed: - _LOGGER.info(f'Proof passed: {proof.id}') - elif proof.failed: - _LOGGER.info(f'Proof failed: {proof.id}') - else: - _LOGGER.info(f'Proof pending: {proof.id}') - return proof - - -@dataclass(frozen=True) -class ClaimIndex(Mapping[str, KClaim]): - claims: FrozenDict[str, KClaim] - main_module_name: str | None - - def __init__( - self, - claims: Mapping[str, KClaim], - main_module_name: str | None = None, - ): - self._validate(claims) - object.__setattr__(self, 'claims', FrozenDict(claims)) - object.__setattr__(self, 
'main_module_name', main_module_name) - - @staticmethod - def from_module_list(module_list: KFlatModuleList) -> ClaimIndex: - module_list = ClaimIndex._resolve_depends(module_list) - return ClaimIndex( - claims={claim.label: claim for module in module_list.modules for claim in module.claims}, - main_module_name=module_list.main_module, - ) - - @staticmethod - def _validate(claims: Mapping[str, KClaim]) -> None: - for label, claim in claims.items(): - if claim.label != label: - raise ValueError(f'Claim label mismatch, expected: {label}, found: {claim.label}') - - for depend in claim.dependencies: - if depend not in claims: - raise ValueError(f'Invalid dependency label: {depend}') - - @staticmethod - def _resolve_depends(module_list: KFlatModuleList) -> KFlatModuleList: - """Resolve each depends value relative to the module the claim belongs to. - - Example: - ``` - module THIS-MODULE - claim ... [depends(foo,OTHER-MODULE.bar)] - endmodule - ``` - - becomes - - ``` - module THIS-MODULE - claim ... [depends(THIS-MODULE.foo,OTHER-MODULE.bar)] - endmodule - ``` - """ - labels = {claim.label for module in module_list.modules for claim in module.claims} - - def resolve_claim_depends(module_name: str, claim: KClaim) -> KClaim: - depends = claim.dependencies - if not depends: - return claim - - resolve = partial(ClaimIndex._resolve_claim_label, labels, module_name) - resolved = [resolve(label) for label in depends] - return claim.let(att=claim.att.update([Atts.DEPENDS(','.join(resolved))])) - - modules: list[KFlatModule] = [] - for module in module_list.modules: - resolve_depends = partial(resolve_claim_depends, module.name) - module = module.map_sentences(resolve_depends, of_type=KClaim) - modules.append(module) - - return module_list.let(modules=modules) - - @staticmethod - def _resolve_claim_label(labels: Container[str], module_name: str | None, label: str) -> str: - """Resolve `label` to a valid label in `labels`, or raise. 
- - If a `label` is not found and `module_name` is set, the label is tried after qualifying. - """ - if label in labels: - return label - - if module_name is not None: - qualified = f'{module_name}.{label}' - if qualified in labels: - return qualified - - raise ValueError(f'Claim label not found: {label}') - - def __iter__(self) -> Iterator[str]: - return iter(self.claims) - - def __len__(self) -> int: - return len(self.claims) - - def __getitem__(self, label: str) -> KClaim: - try: - label = self.resolve(label) - except ValueError: - raise KeyError(f'Claim not found: {label}') from None - return self.claims[label] - - @cached_property - def topological(self) -> tuple[str, ...]: - graph = {label: claim.dependencies for label, claim in self.claims.items()} - return tuple(TopologicalSorter(graph).static_order()) - - def resolve(self, label: str) -> str: - return self._resolve_claim_label(self.claims, self.main_module_name, label) - - def resolve_all(self, labels: Iterable[str]) -> list[str]: - return [self.resolve(label) for label in unique(labels)] - - def labels( - self, - *, - include: Iterable[str] | None = None, - exclude: Iterable[str] | None = None, - with_depends: bool = True, - ) -> list[str]: - res: list[str] = [] - - pending = self.resolve_all(include) if include is not None else list(self.claims) - done = set(self.resolve_all(exclude)) if exclude is not None else set() - - while pending: - label = pending.pop(0) # BFS - - if label in done: - continue - - res.append(label) - done.add(label) - - if with_depends: - pending += self.claims[label].dependencies - - return res diff --git a/pyk/src/pyk/ktool/prove_rpc.py b/pyk/src/pyk/ktool/prove_rpc.py new file mode 100644 index 00000000000..250381d3f54 --- /dev/null +++ b/pyk/src/pyk/ktool/prove_rpc.py @@ -0,0 +1,100 @@ +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +from ..kast.manip import extract_lhs +from ..kast.outer import KApply +from ..proof import APRProof, 
APRProver, EqualityProof, ImpliesProver + +if TYPE_CHECKING: + from collections.abc import Callable + from pathlib import Path + from typing import ContextManager, Final + + from ..cli.pyk import ProveOptions + from ..kast.outer import KClaim + from ..kcfg import KCFGExplore + from ..proof import Proof, Prover + from .kprove import KProve + +_LOGGER: Final = logging.getLogger(__name__) + + +class ProveRpc: + _kprove: KProve + _explore_context: Callable[[], ContextManager[KCFGExplore]] + + def __init__( + self, + kprove: KProve, + explore_context: Callable[[], ContextManager[KCFGExplore]], + ): + self._kprove = kprove + self._explore_context = explore_context + + def prove_rpc(self, options: ProveOptions) -> list[Proof]: + all_claims = self._kprove.get_claims( + options.spec_file, + spec_module_name=options.spec_module, + claim_labels=options.claim_labels, + exclude_claim_labels=options.exclude_claim_labels, + type_inference_mode=options.type_inference_mode, + ) + + if all_claims is None: + raise ValueError(f'No claims found in file: {options.spec_file}') + + return [ + self._prove_claim_rpc( + claim, + max_depth=options.max_depth, + save_directory=options.save_directory, + max_iterations=options.max_iterations, + ) + for claim in all_claims + ] + + def _prove_claim_rpc( + self, + claim: KClaim, + max_depth: int | None = None, + save_directory: Path | None = None, + max_iterations: int | None = None, + ) -> Proof: + definition = self._kprove.definition + + proof: Proof + prover: Prover + lhs_top = extract_lhs(claim.body) + is_functional_claim = type(lhs_top) is KApply and definition.symbols[lhs_top.label.name] in definition.functions + + if is_functional_claim: + proof = EqualityProof.from_claim(claim, definition, proof_dir=save_directory) + if save_directory is not None and EqualityProof.proof_data_exists(proof.id, save_directory): + _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') + proof = EqualityProof.read_proof_data(save_directory, proof.id) + 
+ else: + proof = APRProof.from_claim(definition, claim, {}, proof_dir=save_directory) + if save_directory is not None and APRProof.proof_data_exists(proof.id, save_directory): + _LOGGER.info(f'Reloading from disk {proof.id}: {save_directory}') + proof = APRProof.read_proof_data(save_directory, proof.id) + + if not proof.passed and (max_iterations is None or max_iterations > 0): + with self._explore_context() as kcfg_explore: + if is_functional_claim: + assert type(proof) is EqualityProof + prover = ImpliesProver(proof, kcfg_explore) + else: + assert type(proof) is APRProof + prover = APRProver(kcfg_explore, execute_depth=max_depth) + prover.advance_proof(proof, max_iterations=max_iterations) + + if proof.passed: + _LOGGER.info(f'Proof passed: {proof.id}') + elif proof.failed: + _LOGGER.info(f'Proof failed: {proof.id}') + else: + _LOGGER.info(f'Proof pending: {proof.id}') + return proof diff --git a/pyk/src/pyk/proof/reachability.py b/pyk/src/pyk/proof/reachability.py index a079190441e..e0ece7406f9 100644 --- a/pyk/src/pyk/proof/reachability.py +++ b/pyk/src/pyk/proof/reachability.py @@ -1,6 +1,5 @@ from __future__ import annotations -import graphlib import json import logging import re @@ -10,13 +9,13 @@ from pyk.kore.rpc import LogEntry from ..cterm.cterm import remove_useless_constraints -from ..kast.att import AttEntry, Atts from ..kast.inner import KInner, Subst from ..kast.manip import flatten_label, free_vars, ml_pred_to_bool from ..kast.outer import KFlatModule, KImport, KRule from ..kcfg import KCFG, KCFGStore from ..kcfg.exploration import KCFGExploration from ..konvert import kflatmodule_to_kore +from ..ktool.claim_index import ClaimIndex from ..prelude.ml import mlAnd, mlTop from ..utils import FrozenDict, ensure_dir_path, hash_str, shorten_hashes, single from .implies import ProofSummary, Prover, RefutationProof @@ -423,74 +422,28 @@ def from_spec_modules( logs: dict[int, tuple[LogEntry, ...]], proof_dir: Path | None = None, spec_labels: Iterable[str] 
| None = None, - **kwargs: Any, ) -> list[APRProof]: - claims_by_label = {claim.label: claim for module in spec_modules.modules for claim in module.claims} - if spec_labels is None: - spec_labels = list(claims_by_label.keys()) - _spec_labels = [] - for spec_label in spec_labels: - if spec_label in claims_by_label: - _spec_labels.append(spec_label) - elif f'{spec_modules.main_module}.{spec_label}' in claims_by_label: - _spec_labels.append(f'{spec_modules.main_module}.{spec_label}') + claim_index = ClaimIndex.from_module_list(spec_modules) + spec_labels = claim_index.labels(include=spec_labels, with_depends=True, ordered=True) + + res: list[APRProof] = [] + + for label in spec_labels: + if proof_dir is not None and Proof.proof_data_exists(label, proof_dir): + apr_proof = APRProof.read_proof_data(proof_dir, label) else: - raise ValueError( - f'Could not find specification label: {spec_label} or {spec_modules.main_module}.{spec_label}' + _LOGGER.info(f'Building APRProof for claim: {label}') + claim = claim_index[label] + apr_proof = APRProof.from_claim( + defn, + claim, + logs=logs, + proof_dir=proof_dir, ) - spec_labels = _spec_labels - - claims_graph: dict[str, list[str]] = {} - unfound_dependencies = [] - for module in spec_modules.modules: - for claim in module.claims: - claims_graph[claim.label] = [] - for dependency in claim.dependencies: - if dependency in claims_by_label: - claims_graph[claim.label].append(dependency) - elif f'{module.name}.{dependency}' in claims_by_label: - claims_graph[claim.label].append(f'{module.name}.{dependency}') - else: - unfound_dependencies.append((claim.label, module.name, dependency)) - if unfound_dependencies: - unfound_dependency_list = [ - f'Could not find dependency for claim {label}: {dependency} or {module_name}.{dependency}' - for label, module_name, dependency in unfound_dependencies - ] - unfound_dependency_message = '\n - ' + '\n - '.join(unfound_dependency_list) - raise ValueError(f'Could not find 
dependencies:{unfound_dependency_message}') - - claims_subgraph: dict[str, list[str]] = {} - remaining_claims = spec_labels - while len(remaining_claims) > 0: - claim_label = remaining_claims.pop() - claims_subgraph[claim_label] = claims_graph[claim_label] - remaining_claims.extend(claims_graph[claim_label]) - - topological_sorter = graphlib.TopologicalSorter(claims_subgraph) - topological_sorter.prepare() - apr_proofs: list[APRProof] = [] - while topological_sorter.is_active(): - for claim_label in topological_sorter.get_ready(): - if proof_dir is not None and Proof.proof_data_exists(claim_label, proof_dir): - apr_proof = APRProof.read_proof_data(proof_dir, claim_label) - else: - _LOGGER.info(f'Building APRProof for claim: {claim_label}') - claim = claims_by_label[claim_label] - if len(claims_graph[claim_label]) > 0: - claim_att = claim.att.update([AttEntry(Atts.DEPENDS, ','.join(claims_graph[claim_label]))]) - claim = claim.let_att(claim_att) - apr_proof = APRProof.from_claim( - defn, - claim, - logs=logs, - proof_dir=proof_dir, - ) - apr_proof.write_proof_data() - apr_proofs.append(apr_proof) - topological_sorter.done(claim_label) - - return apr_proofs + apr_proof.write_proof_data() + res.append(apr_proof) + + return res def path_constraints(self, final_node_id: NodeIdLike) -> KInner: path = self.shortest_path_to(final_node_id) diff --git a/pyk/src/tests/integration/ktool/test_imp.py b/pyk/src/tests/integration/ktool/test_imp.py index 347c12621c2..66b45faa559 100644 --- a/pyk/src/tests/integration/ktool/test_imp.py +++ b/pyk/src/tests/integration/ktool/test_imp.py @@ -10,7 +10,7 @@ from pyk.cli.pyk import ProveOptions from pyk.kast.inner import KApply, KSequence, KVariable from pyk.kcfg.semantics import KCFGSemantics -from pyk.ktool.kprove import ProveRpc +from pyk.ktool.prove_rpc import ProveRpc from pyk.proof import ProofStatus from pyk.testing import KCFGExploreTest, KProveTest from pyk.utils import single From 1de58c1a658c9ea18d90d20d86ad37db40a215d5 Mon Sep 
17 00:00:00 2001 From: Dwight Guth Date: Tue, 25 Jun 2024 02:42:06 -0500 Subject: [PATCH 5/9] add two important missing flags to the llvm backend compiler (#4472) This PR fixes a small fraction of a significant piece of technical debt in pyk's implementation of `pyk.ktool.kompile`, adding support for two command line flags that are important that were currently unable to be passed to the compiler due to its design. This is by no means an exhaustive list. There are still other important flags that are unsupported. This is simply what is likely to block me in the immediate future. Co-authored-by: Bruce Collie Co-authored-by: rv-jenkins --- pyk/src/pyk/ktool/kompile.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pyk/src/pyk/ktool/kompile.py b/pyk/src/pyk/ktool/kompile.py index 17c9b6d4bf8..fa70fa496e0 100644 --- a/pyk/src/pyk/ktool/kompile.py +++ b/pyk/src/pyk/ktool/kompile.py @@ -20,6 +20,7 @@ if TYPE_CHECKING: from collections.abc import Iterable, Mapping + from fractions import Fraction from typing import Any, Final, Literal from ..utils import BugReport @@ -421,6 +422,8 @@ class LLVMKompile(Kompile): enable_llvm_debug: bool llvm_proof_hint_instrumentation: bool llvm_mutable_bytes: bool + iterated_threshold: Fraction | None + heuristic: str | None def __init__( self, @@ -435,6 +438,8 @@ def __init__( enable_llvm_debug: bool = False, llvm_proof_hint_instrumentation: bool = False, llvm_mutable_bytes: bool = False, + iterated_threshold: Fraction | None = None, + heuristic: str | None = None, ): llvm_kompile_type = LLVMKompileType(llvm_kompile_type) if llvm_kompile_type is not None else None llvm_kompile_output = Path(llvm_kompile_output) if llvm_kompile_output is not None else None @@ -455,6 +460,8 @@ def __init__( object.__setattr__(self, 'enable_llvm_debug', enable_llvm_debug) object.__setattr__(self, 'llvm_proof_hint_instrumentation', llvm_proof_hint_instrumentation) object.__setattr__(self, 'llvm_mutable_bytes', llvm_mutable_bytes) + 
object.__setattr__(self, 'iterated_threshold', iterated_threshold) + object.__setattr__(self, 'heuristic', heuristic) @property def backend(self) -> Literal[KompileBackend.LLVM]: @@ -491,6 +498,12 @@ def args(self) -> list[str]: if self.llvm_mutable_bytes: args += ['--llvm-mutable-bytes'] + if self.iterated_threshold: + args += ['--iterated-threshold', str(self.iterated_threshold)] + + if self.heuristic: + args += ['--heuristic', self.heuristic] + return args From 531652d017eb47ef00e0320e5718001118ff073f Mon Sep 17 00:00:00 2001 From: Bruce Collie Date: Tue, 25 Jun 2024 11:17:03 +0100 Subject: [PATCH 6/9] Use prebuilt haskell backend (#4473) This PR uses the existing infrastructure we have for the LLVM backend to download a pre-packaged release of the Haskell backend when running the K integration tests. It's a bit obscured currently because of degraded CI performance generally, but this should take average-case CI run times down to 20-25 minutes. --- .github/actions/with-k-docker/Dockerfile | 4 +++- .github/actions/with-k-docker/action.yml | 10 +++++++++- package/debian/kframework-frontend/rules.jammy | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/actions/with-k-docker/Dockerfile b/.github/actions/with-k-docker/Dockerfile index 97df8ea4fb5..09f800e3669 100644 --- a/.github/actions/with-k-docker/Dockerfile +++ b/.github/actions/with-k-docker/Dockerfile @@ -5,9 +5,11 @@ FROM ubuntu:${BASE_DISTRO} ARG K_DEB_PATH ARG INSTALL_BACKEND_DEBS ARG LLVM_BACKEND_DEB_PATH +ARG HASKELL_BACKEND_DEB_PATH COPY ${K_DEB_PATH} /kframework.deb COPY ${LLVM_BACKEND_DEB_PATH} /llvm-backend.deb +COPY ${HASKELL_BACKEND_DEB_PATH} /haskell-backend.deb RUN apt-get -y update \ && apt-get -y install \ @@ -19,7 +21,7 @@ RUN apt-get -y update \ /kframework.deb RUN if [ "${INSTALL_BACKEND_DEBS}" = "true" ]; then \ - apt-get -y install /llvm-backend.deb; \ + apt-get -y install /llvm-backend.deb /haskell-backend.deb; \ fi RUN apt-get -y clean diff --git 
a/.github/actions/with-k-docker/action.yml b/.github/actions/with-k-docker/action.yml index 62c75bed3e1..f5b67cabdc2 100644 --- a/.github/actions/with-k-docker/action.yml +++ b/.github/actions/with-k-docker/action.yml @@ -42,6 +42,7 @@ runs: DOCKERFILE=${{ github.action_path }}/Dockerfile K_DEB_PATH=${{ inputs.k-deb-path }} LLVM_BACKEND_DEB_PATH=llvm-backend.deb + HASKELL_BACKEND_DEB_PATH=haskell-backend.deb gh release download \ --repo runtimeverification/llvm-backend \ @@ -49,13 +50,20 @@ runs: --output "${LLVM_BACKEND_DEB_PATH}" \ v$(cat deps/llvm-backend_release) + gh release download \ + --repo runtimeverification/haskell-backend \ + --pattern "*ubuntu_${BASE_DISTRO}.deb" \ + --output "${HASKELL_BACKEND_DEB_PATH}" \ + $(cat deps/haskell-backend_release) + docker build . \ --file ${DOCKERFILE} \ --tag ${TAG} \ --build-arg BASE_DISTRO=${BASE_DISTRO} \ --build-arg K_DEB_PATH=${K_DEB_PATH} \ --build-arg INSTALL_BACKEND_DEBS=${INSTALL_BACKEND_DEBS} \ - --build-arg LLVM_BACKEND_DEB_PATH=${LLVM_BACKEND_DEB_PATH} + --build-arg LLVM_BACKEND_DEB_PATH=${LLVM_BACKEND_DEB_PATH} \ + --build-arg HASKELL_BACKEND_DEB_PATH=${HASKELL_BACKEND_DEB_PATH} - name: 'Run Docker container' shell: bash {0} diff --git a/package/debian/kframework-frontend/rules.jammy b/package/debian/kframework-frontend/rules.jammy index 5f62a9bc885..def7df0d9e3 100755 --- a/package/debian/kframework-frontend/rules.jammy +++ b/package/debian/kframework-frontend/rules.jammy @@ -23,7 +23,7 @@ export PREFIX dh $@ override_dh_auto_build: - mvn --batch-mode package -DskipTests -Dllvm.backend.skip + mvn --batch-mode package -DskipTests -Dllvm.backend.skip -Dhaskell.backend.skip override_dh_auto_install: package/package --frontend From 94a069d50f711874987b0331eb466df672683e49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrei=20V=C4=83caru?= <16517508+anvacaru@users.noreply.github.com> Date: Tue, 25 Jun 2024 16:19:48 +0300 Subject: [PATCH 7/9] Add missing operators to prelude.kint (#4477) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adding new functions to the `KInt` module for Integer operations described in [domains.md](https://github.com/runtimeverification/k/blob/master/pyk/src/tests/profiling/test-data/domains.md?plain=1#L1173) for: - integer arithmetic - integer min and max - integer absolute value Adding docstrings to KInt module. Adding labels for `minInt`, `maxInt`, and `absInt`. --------- Co-authored-by: Tamás Tóth --- pyk/src/pyk/prelude/kint.py | 299 +++++++++++++++++++ pyk/src/tests/profiling/test-data/domains.md | 6 +- 2 files changed, 302 insertions(+), 3 deletions(-) diff --git a/pyk/src/pyk/prelude/kint.py b/pyk/src/pyk/prelude/kint.py index 28a3eff5ff6..c4ed0dd7808 100644 --- a/pyk/src/pyk/prelude/kint.py +++ b/pyk/src/pyk/prelude/kint.py @@ -13,24 +13,323 @@ def intToken(i: int) -> KToken: # noqa: N802 + r"""Instantiate the KAST term ``#token(i, "Int")``. + + Args: + i: The integer literal. + + Returns: + The KAST term ``#token(i, "Int")``. + """ return KToken(str(i), INT) def ltInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_<Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_<Int_`(i1, i2)``. + """ return KApply('_<Int_', i1, i2) def leInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_<=Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_<=Int_`(i1, i2)``. + """ return KApply('_<=Int_', i1, i2) def gtInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_>Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_>Int_`(i1, i2)``. + """ return KApply('_>Int_', i1, i2) def geInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_>=Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_>=Int_`(i1, i2)``.
+ """ return KApply('_>=Int_', i1, i2) def eqInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_==Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_==Int_`(i1, i2)``. + """ return KApply('_==Int_', i1, i2) + + +def neqInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_=/=Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_=/=Int_`(i1, i2)``. + """ + return KApply('_=/=Int_', i1, i2) + + +def notInt(i: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```~Int_`(i)``. + + Args: + i: The integer operand. + + Returns: + The KAST term ```Int_`(i)``. + """ + return KApply('~Int_', i) + + +def expInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_^Int_`(i1, i2)``. + + Args: + i1: The base. + i2: The exponent. + + Returns: + The KAST term ```_^Int_`(i1, i2)``. + """ + return KApply('_^Int_', i1, i2) + + +def expModInt(i1: KInner, i2: KInner, i3: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_^%Int__`(i1, i2, i3)``. + + Args: + i1: The dividend. + i2: The divisior. + i3: The modulus. + + Returns: + The KAST term ```_^%Int__`(i1, i2, i3)``. + """ + return KApply('_^%Int__', i1, i2, i3) + + +def mulInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_*Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_*Int_`(i1, i2)``. + """ + return KApply('_*Int_', i1, i2) + + +def divInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_/Int_`(i1, i2)``. + + Args: + i1: The dividend. + i2: The divisor. + + Returns: + The KAST term ```_/Int_`(i1, i2)``. + """ + return KApply('_/Int_', i1, i2) + + +def modInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_%Int_`(i1, i2)``. 
+ + Args: + i1: The dividend. + i2: The divisor. + + Returns: + The KAST term ```_%Int_`(i1, i2)``. + """ + return KApply('_%Int_', i1, i2) + + +def euclidDivInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_divInt_`(i1, i2)``. + + Args: + i1: The dividend. + i2: The divisor. + + Returns: + The KAST term ```_divInt_`(i1, i2)``. + """ + return KApply('_divInt_', i1, i2) + + +def euclidModInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_modInt_`(i1, i2)``. + + Args: + i1: The dividend. + i2: The divisor. + + Returns: + The KAST term ```_modInt_`(i1, i2)``. + """ + return KApply('_modInt_', i1, i2) + + +def addInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_+Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_+Int_`(i1, i2)``. + """ + return KApply('_+Int_', i1, i2) + + +def subInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_-Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_-Int_`(i1, i2)``. + """ + return KApply('_-Int_', i1, i2) + + +def rshiftInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_>>Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_>>Int_`(i1, i2)``. + """ + return KApply('_>>Int_', i1, i2) + + +def lshiftInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_< KApply: # noqa: N802 + r"""Instantiate the KAST term ```_&Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_&Int_`(i1, i2)``. + """ + return KApply('_&Int_', i1, i2) + + +def xorInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_xorInt_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. 
+ + Returns: + The KAST term ```_xorInt_`(i1, i2)``. + """ + return KApply('_xorInt_', i1, i2) + + +def orInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```_|Int_`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```_|Int_`(i1, i2)``. + """ + return KApply('_|Int_', i1, i2) + + +def minInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```minInt`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```minInt`(i1, i2)``. + """ + return KApply('minInt', i1, i2) + + +def maxInt(i1: KInner, i2: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```maxInt`(i1, i2)``. + + Args: + i1: The left operand. + i2: The right operand. + + Returns: + The KAST term ```maxInt`(i1, i2)``. + """ + return KApply('maxInt', i1, i2) + + +def absInt(i: KInner) -> KApply: # noqa: N802 + r"""Instantiate the KAST term ```absInt`(i)``. + + Args: + i: The integer operand. + + Returns: + The KAST term ```absInt`(i)``. + """ + return KApply('absInt', i) diff --git a/pyk/src/tests/profiling/test-data/domains.md b/pyk/src/tests/profiling/test-data/domains.md index 1f42f186f28..be0803144fb 100644 --- a/pyk/src/tests/profiling/test-data/domains.md +++ b/pyk/src/tests/profiling/test-data/domains.md @@ -1257,8 +1257,8 @@ You can: You can compute the minimum and maximum `minInt` and `maxInt` of two integers. 
```k - syntax Int ::= "minInt" "(" Int "," Int ")" [function, total, smt-hook((ite (< #1 #2) #1 #2)), hook(INT.min)] - | "maxInt" "(" Int "," Int ")" [function, total, smt-hook((ite (< #1 #2) #2 #1)), hook(INT.max)] + syntax Int ::= "minInt" "(" Int "," Int ")" [function, total, symbol(minInt), smt-hook((ite (< #1 #2) #1 #2)), hook(INT.min)] + | "maxInt" "(" Int "," Int ")" [function, total, symbol(maxInt), smt-hook((ite (< #1 #2) #2 #1)), hook(INT.max)] ``` ### Absolute value @@ -1266,7 +1266,7 @@ You can compute the minimum and maximum `minInt` and `maxInt` of two integers. You can compute the absolute value `absInt` of an integer. ```k - syntax Int ::= absInt ( Int ) [function, total, smt-hook((ite (< #1 0) (- 0 #1) #1)), hook(INT.abs)] + syntax Int ::= absInt ( Int ) [function, total, symbol(absInt), smt-hook((ite (< #1 0) (- 0 #1) #1)), hook(INT.abs)] ``` ### Log base 2 From 3bc2373e7000b46e61385195287b393fde85467f Mon Sep 17 00:00:00 2001 From: Juan C <38925412+JuanCoRo@users.noreply.github.com> Date: Tue, 25 Jun 2024 18:02:24 +0200 Subject: [PATCH 8/9] Expose `-Wno` via `ignore-warnings` kompile option (#4471) Address #4470 --- pyk/src/pyk/__main__.py | 1 + pyk/src/pyk/cli/args.py | 5 +++++ pyk/src/pyk/ktool/kompile.py | 10 ++++++++++ 3 files changed, 16 insertions(+) diff --git a/pyk/src/pyk/__main__.py b/pyk/src/pyk/__main__.py index 8664ccbeee6..4ad22390628 100644 --- a/pyk/src/pyk/__main__.py +++ b/pyk/src/pyk/__main__.py @@ -346,6 +346,7 @@ def exec_kompile(options: KompileCommandOptions) -> None: type_inference_mode=options.type_inference_mode, warnings=options.warnings, warnings_to_errors=options.warnings_to_errors, + ignore_warnings=options.ignore_warnings, no_exc_wrap=options.no_exc_wrap, ) except RuntimeError as err: diff --git a/pyk/src/pyk/cli/args.py b/pyk/src/pyk/cli/args.py index 356a17a5a57..3dbea7e72a6 100644 --- a/pyk/src/pyk/cli/args.py +++ b/pyk/src/pyk/cli/args.py @@ -206,6 +206,7 @@ class KompileOptions(Options): bison_lists: bool 
no_exc_wrap: bool outer_parsed_json: bool + ignore_warnings: list[str] @staticmethod def default() -> dict[str, Any]: @@ -230,6 +231,7 @@ def default() -> dict[str, Any]: 'bison_lists': False, 'no_exc_wrap': False, 'outer_parsed_json': False, + 'ignore_warnings': [], } @staticmethod @@ -475,6 +477,9 @@ def kompile_args(self) -> ArgumentParser: action='store_true', help='Do not wrap the output on the CLI.', ) + args.add_argument( + '--ignore-warnings', '-Wno', dest='ignore_warnings', action='append', help='Ignore provided warnings' + ) return args @cached_property diff --git a/pyk/src/pyk/ktool/kompile.py b/pyk/src/pyk/ktool/kompile.py index fa70fa496e0..6d2f7a87f9e 100644 --- a/pyk/src/pyk/ktool/kompile.py +++ b/pyk/src/pyk/ktool/kompile.py @@ -58,6 +58,7 @@ def kompile( type_inference_mode: str | TypeInferenceMode | None = None, warnings: str | Warnings | None = None, warnings_to_errors: bool = False, + ignore_warnings: Iterable[str] = (), no_exc_wrap: bool = False, # --- debug: bool = False, @@ -78,6 +79,7 @@ def kompile( type_inference_mode=type_inference_mode, warnings=warnings, warnings_to_errors=warnings_to_errors, + ignore_warnings=ignore_warnings, no_exc_wrap=no_exc_wrap, debug=debug, verbose=verbose, @@ -96,6 +98,7 @@ def kompile( type_inference_mode=type_inference_mode, warnings=warnings, warnings_to_errors=warnings_to_errors, + ignore_warnings=ignore_warnings, no_exc_wrap=no_exc_wrap, debug=debug, verbose=verbose, @@ -111,6 +114,7 @@ def _booster_kompile( type_inference_mode: str | TypeInferenceMode | None, warnings: str | Warnings | None, warnings_to_errors: bool, + ignore_warnings: Iterable[str], no_exc_wrap: bool, # --- debug: bool, @@ -146,6 +150,7 @@ def kompile_llvm() -> None: type_inference_mode=type_inference_mode, warnings=warnings, warnings_to_errors=warnings_to_errors, + ignore_warnings=ignore_warnings, no_exc_wrap=no_exc_wrap, debug=debug, verbose=verbose, @@ -161,6 +166,7 @@ def kompile_haskell() -> None: 
type_inference_mode=type_inference_mode, warnings=warnings, warnings_to_errors=warnings_to_errors, + ignore_warnings=ignore_warnings, no_exc_wrap=no_exc_wrap, debug=debug, verbose=verbose, @@ -273,6 +279,7 @@ def __call__( type_inference_mode: str | TypeInferenceMode | None = None, warnings: str | Warnings | None = None, warnings_to_errors: bool = False, + ignore_warnings: Iterable[str] = (), no_exc_wrap: bool = False, debug: bool = False, verbose: bool = False, @@ -321,6 +328,9 @@ def __call__( if outer_parsed_json: args += ['--outer-parsed-json'] + if ignore_warnings: + args += ['-Wno', ','.join(ignore_warnings)] + try: proc_res = run_process(args, logger=_LOGGER, cwd=cwd, check=check) except CalledProcessError as err: From c0a91a8fe0df320c190619a877a59b9b0a0a850d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tam=C3=A1s=20T=C3=B3th?= Date: Wed, 26 Jun 2024 14:49:41 +0200 Subject: [PATCH 9/9] Add link to pyk docs to ToC page (#4476) The link will appear on the left sidebar on https://kframework.org/. Co-authored-by: Bruce Collie --- web/toc.md | 1 + 1 file changed, 1 insertion(+) diff --git a/web/toc.md b/web/toc.md index 2c458b00ac4..ee72a822d9b 100644 --- a/web/toc.md +++ b/web/toc.md @@ -17,6 +17,7 @@ output: - [Homepage](/web/pages/index.md) - [Install K](https://github.com/runtimeverification/k/releases/latest) +- [Pyk Documentation](/pyk) - [K Tutorial](/k-distribution/k-tutorial/README.md) - [Section 1: Basic K Concepts](/k-distribution/k-tutorial/1_basic/README.md) - [Lesson 1.1: Setting up a K Environment](/k-distribution/k-tutorial/1_basic/01_installing/README.md)