diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml
deleted file mode 100644
index ae79649f8a..0000000000
--- a/.github/workflows/changelog.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-name: 'changelog'
-on:
-  pull_request:
-    types:
-      - opened
-      - synchronize
-      - reopened
-      - ready_for_review
-      - labeled
-      - unlabeled
-
-permissions:
-  contents: write
-
-jobs:
-  changelog:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          # Depending on your needs, you can use a token that will re-trigger workflows
-          # See https://github.com/stefanzweifel/git-auto-commit-action#commits-made-by-this-action-do-not-trigger-new-workflow-runs
-          token: ${{ secrets.BOT_CPR_PAT }}
-
-      # All commits must be signed, import key and sign commit of updated change log.
-      - name: Import GPG key
-        id: import_gpg
-        uses: crazy-max/ghaction-import-gpg@v6
-        with:
-          # Use a key associated with the provenanceio-bot github account.
-          gpg_private_key: ${{ secrets.BOT_GPG_PRIVATE_KEY }}
-          passphrase: ${{ secrets.BOT_GPG_PRIVATE_KEY_PW }}
-          git_user_signingkey: true
-          git_commit_gpgsign: true
-
-      - uses: dangoslen/dependabot-changelog-helper@v3
-        with:
-          version: ${{ needs.setup.outputs.version }}
-          activationLabels: 'dependencies'
-          changelogPath: './CHANGELOG.md'
-
-      # This step is required for committing the changes to your branch.
-      # See https://github.com/stefanzweifel/git-auto-commit-action#commits-made-by-this-action-do-not-trigger-new-workflow-runs
-      - uses: stefanzweifel/git-auto-commit-action@v5
-        with:
-          commit_user_name: "Provenance-io Bot"
-          commit_user_email: "129784868+provenanceio-bot@users.noreply.github.com"
-          commit_options: "-S"
-          commit_message: "Updated Changelog"
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
deleted file mode 100644
index 5501e5853d..0000000000
--- a/.github/workflows/codeql-analysis.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-name: "CodeQL"
-
-on:
-  pull_request:
-    paths:
-      - "**.go"
-      - "go.mod"
-      - "go.sum"
-      - ".github/workflows/codeql-analysis.yml"
-  push:
-    branches:
-      - main
-    tags:
-      - "v[0-9]+.[0-9]+.[0-9]+" # Push events to matching v*, i.e. v1.0, v20.15.10
-      - "v[0-9]+.[0-9]+.[0-9]+-rc*" # Push events to matching v*, i.e. v1.0-rc1, v20.15.10-rc5
-  schedule:
-    # Wednesdays at 21:23 (randomly chosen).
-    - cron: '23 21 * * 3'
-
-# Set concurrency for this workflow to cancel in-progress jobs if retriggered.
-# The github.ref is only available when triggered by a PR so fall back to github.run_id for other cases.
-# The github.run_id is unique for each run, giving each such invocation it's own unique concurrency group.
-# Basically, if you push to a PR branch, jobs that are still running for that PR will be cancelled.
-# But jobs started because of a merge to main or a release tag push are not cancelled.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  analyze:
-    name: Analyze
-    runs-on: ubuntu-latest
-    permissions:
-      actions: read
-      contents: read
-      security-events: write
-
-    strategy:
-      fail-fast: false
-      matrix:
-        language: [ 'go' ]
-        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
-        # Learn more:
-        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      # Initializes the CodeQL tools for scanning.
-      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
-        with:
-          languages: ${{ matrix.language }}
-          # If you wish to specify custom queries, you can do so here or in a config file.
-          # By default, queries listed here will override any specified in a config file.
-          # Prefix the list here with "+" to use these queries and those in the config file.
-          # queries: ./path/to/local/query, your-org/your-repo/queries@main
-
-      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
-      # If this step fails, then you should remove it and run the build manually (see below)
-      - name: Autobuild
-        uses: github/codeql-action/autobuild@v3
-
-      # ℹ️ Command-line programs to run using the OS shell.
-      # 📚 https://git.io/JvXDl
-
-      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
-      #    and modify them (or add more) to build your code if your project
-      #    uses a compiled language
-
-      #- run: |
-      #   make bootstrap
-      #   make release
-
-      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
deleted file mode 100644
index d0fd7e5483..0000000000
--- a/.github/workflows/lint.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: Lint
-# This workflow is run on every pull request and push to main
-# The `golangci` will pass without running if no *.{go, mod, sum} files have been changed.
-on:
-  pull_request:
-  push:
-    branches:
-      - main
-
-# concurrency is not defined in here because this job usually doesn't
-# run long enough to need cancelling, and it's okay for it to run multiple
-# times for the same PR.
-
-jobs:
-  golangci:
-    name: golangci-lint
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: technote-space/get-diff-action@v6.1.2
-        with:
-          PATTERNS: |
-            **/**.go
-            go.mod
-            go.sum
-            .github/workflows/lint.yml
-            scripts/no-now-lint.sh
-      - uses: actions/setup-go@v5
-        if: env.GIT_DIFF
-        with:
-          go-version: '1.21'
-      - uses: golangci/golangci-lint-action@v4
-        if: env.GIT_DIFF
-        with:
-          # If you change this version, be sure to also change it in contrib/devtools/Makefile.
-          version: v1.54
-          args: --timeout 10m --out-${NO_FUTURE}format colored-line-number
-          github-token: ${{ secrets.github_token }}
-      - name: No Now Usage
-        if: env.GIT_DIFF
-        run: scripts/no-now-lint.sh
diff --git a/.github/workflows/mamoru-build-test.yml b/.github/workflows/mamoru-build-test.yml
new file mode 100644
index 0000000000..96a5ab38cd
--- /dev/null
+++ b/.github/workflows/mamoru-build-test.yml
@@ -0,0 +1,47 @@
+name: Build Test
+
+on:
+  push:
+    branches:
+      - master
+      - 'mamoru*'
+      - develop
+
+
+  pull_request:
+    branches:
+      - master
+      - 'mamoru*'
+      - develop
+
+
+jobs:
+  build-test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: technote-space/get-diff-action@v6.1.2
+        with:
+          PATTERNS: |
+            **/**.go
+            go.mod
+            go.sum
+            .github/workflows/lint.yml
+            scripts/no-now-lint.sh
+      - uses: actions/setup-go@v5
+        with:
+          go-version: '1.20'
+      - uses: golangci/golangci-lint-action@v4
+        if: env.GIT_DIFF
+        with:
+          # If you change this version, be sure to also change it in contrib/devtools/Makefile.
+          version: v1.54
+          args: --timeout 10m --out-${NO_FUTURE}format colored-line-number
+          github-token: ${{ secrets.github_token }}
+      - name: No Now Usage
+        if: env.GIT_DIFF
+        run: scripts/no-now-lint.sh
+
+      - name: Build with Makefile
+        run: |
+          make build
diff --git a/.github/workflows/docker.yml b/.github/workflows/mamoru-docker.yml
similarity index 58%
rename from .github/workflows/docker.yml
rename to .github/workflows/mamoru-docker.yml
index 2b2aa6f255..de9542e2a3 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/mamoru-docker.yml
@@ -1,43 +1,60 @@
-name: Docker Build and Push
-# Build & Push builds the simapp docker image on every push to master
-# and pushes the image to https://hub.docker.com/u/provenanceio
+name: "Build docker image"
 on:
   push:
     branches:
-      - main
-    tags:
-      - "v[0-9]+.[0-9]+.[0-9]+" # Push events to matching v*, i.e. v1.0, v20.15.10
-      - "v[0-9]+.[0-9]+.[0-9]+-rc*" # Push events to matching v*, i.e. v1.0-rc1, v20.15.10-rc5
-
-# Set concurrency for this workflow to cancel in-progress jobs if retriggered.
-# The github.ref is only available when triggered by a PR so fall back to github.run_id for other cases.
-# The github.run_id is unique for each run, giving each such invocation it's own unique concurrency group.
-# Basically, if you push to a PR branch, jobs that are still running for that PR will be cancelled.
-# But jobs started because of a merge to main or a release tag push are not cancelled.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
-  cancel-in-progress: true
+      - master
+      - 'mamoru*'
+      - develop
+
+env:
+  REPOSITORY: mamorufoundation/provenance-sniffer
+
 jobs:
-  docker:
+  build:
     runs-on: ubuntu-latest
+#    strategy:
+#      matrix:
+#        platform: [linux/amd64] # linux/amd64,linux/amd64/v2,linux/amd64/v3,linux/386
     steps:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
       - name: Setup go
         uses: actions/setup-go@v5
         with:
           go-version: '1.20'
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Available platforms
+        run: echo ${{ steps.buildx.outputs.platforms }}
+
+      - name: Get current date
+        id: date
+        run: echo "::set-output name=date::$(date -u +'%Y-%m-%d')"
+
+      - name: Set architecture variable
+        id: set_arch
+        run: echo "::set-output name=arch::$(uname -m)"
       - name: Go mod vendor
         run: |
-          go mod vendor
+          go mod vendor
       - name: Prepare
         id: prep
         run: |
-          DOCKER_IMAGE=provenanceio/provenance
           VERSION=noop
           if [[ $GITHUB_REF == refs/tags/* ]]; then
             VERSION=${GITHUB_REF#refs/tags/}
@@ -59,27 +76,21 @@
           echo "Setting output: created=$created"
           echo "created=$created" >> "$GITHUB_OUTPUT"
 
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Available platforms
-        run: echo ${{ steps.buildx.outputs.platforms }}
-
-      - name: Login to DockerHub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Publish to Docker Hub
+      - name: Build and push
         uses: docker/build-push-action@v5
         with:
           context: .
           target: run
+          file: docker/blockchain/Dockerfile
           build-args: |
             VERSION=${{ steps.prep.outputs.version }}
-          platforms: linux/amd64,linux/arm64
-          file: docker/blockchain/Dockerfile
-          push: ${{ github.event_name != 'pull_request' }}
-          tags: ${{ steps.prep.outputs.tags }}
+            GIT_REVISION=${{ github.sha }}
+            BUILD_DATE=${{ steps.date.outputs.date }}
+            PROFILE=release
+            COMMIT=${{ github.sha }}
+          push: true
+          platforms: linux/amd64
+          tags: |
+            ${{ env.REPOSITORY }}:latest
+            ${{ env.REPOSITORY }}:${{ github.sha }}
+
diff --git a/.github/workflows/mamoru-unit-test.yml b/.github/workflows/mamoru-unit-test.yml
new file mode 100644
index 0000000000..620bfe159d
--- /dev/null
+++ b/.github/workflows/mamoru-unit-test.yml
@@ -0,0 +1,57 @@
+name: Unit Test
+
+on:
+  push:
+    branches:
+      - master
+      - 'mamoru*'
+      - develop
+
+  pull_request:
+    branches:
+      - master
+      - 'mamoru*'
+      - develop
+
+jobs:
+  unit-test:
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: Set up Go
+        uses: actions/setup-go@v4
+        with:
+          go-version: '1.20'
+
+      - run: go version
+
+      - uses: actions/cache@v3
+        with:
+          # In order:
+          # * Module download cache
+          # * Build cache (Linux)
+          path: |
+            ~/go/pkg/mod
+            ~/.cache/go-build
+          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
+          restore-keys: |
+            ${{ runner.os }}-go-
+
+#      - name: Setup SSH for Private Repository Access
+#        uses: webfactory/ssh-agent@v0.9.0
+#        with:
+#          ssh-private-key: ${{ secrets.MAMORU_ETHERMINT_SSH_PRIVATE_KEY }}
+
+      - name: Unit Test
+        run: |
+          make test
+
+
+
+
+
+
diff --git a/.github/workflows/proto-registry.yml b/.github/workflows/proto-registry.yml
deleted file mode 100644
index ea4af7998e..0000000000
--- a/.github/workflows/proto-registry.yml
+++ /dev/null
@@ -1,79 +0,0 @@
-name: Third Party Proto Registry
-# Protobuf runs buf (https://buf.build/) push updated proto files to https://buf.build/provenance-io
-# This workflow pushes the third party protos to buf.build whenever any of them are updated in `main`.
-on:
-  push:
-    branches:
-      - main
-    paths:
-      - "third_party/**.proto"
-      - ".github/workflows/proto-registry.yml"
-  workflow_dispatch:
-
-# Concurrency is not defined in here because this job usually doesn't run
-# long enough to need cancelling, and it's okay for it to run multiple times.
-
-# Provenance publishes Protobuf content (including third_party dependencies) to buf.build.
-# Because of third_party dependencies declared in buf.yml, a buf.lock will be generated
-# containing the dependency manifest. Among other things, the buf.lock contains the
-# latest buf.build commit hash of the third_party library in the https://buf.build/provenance-io.
-# When new files are added to the third_party library we need:
-#   1. publish the third_party library
-#   2. update buf.lock to point to the new commit hash
-#   3. sign and create a PR to commit buf.lock changes back to main
-# This helps avoid a buf push failure due to a new third party proto reference.
-jobs:
-  push_third_party:
-    if: github.ref == 'refs/heads/main'
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Buf setup action
-        uses: bufbuild/buf-setup-action@v1.30.0
-      - name: Buf push 'third_party/proto'
-        uses: bufbuild/buf-push-action@v1
-        with:
-          input: 'third_party/proto'
-          buf_token: ${{ secrets.BUF_TOKEN }}
-      - name: Import GPG key
-        id: import_gpg
-        uses: crazy-max/ghaction-import-gpg@v6
-        with:
-          # Use a key associated with the provenanceio-bot github account.
-          gpg_private_key: ${{ secrets.BOT_GPG_PRIVATE_KEY }}
-          passphrase: ${{ secrets.BOT_GPG_PRIVATE_KEY_PW }}
-          git_user_signingkey: true
-          git_commit_gpgsign: true
-      - name: Update buf.lock
-        run: |
-          cd proto
-          buf mod update
-          cd ..
-          git add .
-          git commit -S -m "Update buf.lock to latest commit hash"
-      - name: Create Pull Request
-        id: cpr
-        uses: peter-evans/create-pull-request@v6.0.2
-        with:
-          base: main
-          branch: provenanceio-bot/patch-buf-lock
-          delete-branch: true
-          # GitHub Personal Access Token (from the same account where the GPG key is stored)
-          # When this expires, you'll need to log into the provenanceio-bot github account,
-          # regenerate a new one, and update the secret to have the new value.
-          token: ${{ secrets.BOT_CPR_PAT }}
-          committer: ${{ steps.import_gpg.outputs.name }} <${{ steps.import_gpg.outputs.email }}>
-          author: ${{ steps.import_gpg.outputs.name }} <${{ steps.import_gpg.outputs.email }}>
-          signoff: true
-          title: 'Update buf.lock'
-          body: |
-            Updates `third_party/proto/buf.lock` with latest commit hash.
-
-            Auto-generated by [create-pull-request][1]
-
-            [1]: https://github.com/peter-evans/create-pull-request
-      - name: Check outputs
-        if: ${{ steps.cpr.outputs.pull-request-number }}
-        run: |
-          echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}"
-          echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
diff --git a/.github/workflows/proto.yml b/.github/workflows/proto.yml
deleted file mode 100644
index 394e2396c5..0000000000
--- a/.github/workflows/proto.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-name: Protobuf Checks
-# This workflow runs some Protobuf related checks.
-on:
-  pull_request:
-    paths:
-      - "**/**.proto"
-      - "**/go.mod"
-      - ".github/workflows/proto.yml"
-      - "scripts/proto-update-check.sh"
-      - "scripts/proto-update-deps.sh"
-
-# Set concurrency for this workflow to cancel in-progress jobs if retriggered.
-# The github.ref is only available when triggered by a PR so fall back to github.run_id for other cases.
-# The github.run_id is unique for each run, giving each such invocation it's own unique concurrency group.
-# Basically, if you push to a PR branch, jobs that are still running for that PR will be cancelled.
-# But jobs started because of a merge to main or a release tag push are not cancelled.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  buf_lint:
-    runs-on: ubuntu-latest
-    name: Protobuf Checks
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Setup go
-        uses: actions/setup-go@v5
-        with:
-          go-version: '1.20'
-      - name: Setup Job
-        id: setup
-        run: |
-          branch='main'
-          if [ -n "$GITHUB_BASE_REF" ]; then
-            branch="$GITHUB_BASE_REF"
-          fi
-          echo "Setting output: base-branch=$branch"
-          echo "base-branch=$branch" >> "$GITHUB_OUTPUT"
-      - uses: bufbuild/buf-setup-action@v1.30.0
-      - uses: bufbuild/buf-lint-action@v1.1.0
-        if: always()
-        with:
-          input: 'proto'
-      - name: provenance breaking
-        id: breaking-prov
-        if: always()
-        uses: bufbuild/buf-breaking-action@v1.1.3
-        with:
-          input: 'proto'
-          against: 'https://github.com/provenance-io/provenance.git#branch=${{ steps.setup.outputs.base-branch }},subdir=proto'
-      - name: provenance breaking results
-        if: always() && steps.breaking-prov.outcome == 'failure'
-        run: |
-          echo "results:"
-          echo "${{ toJSON(steps.breaking-prov.outputs) }}"
-      - name: third-party breaking
-        id: breaking-third
-        if: always()
-        uses: bufbuild/buf-breaking-action@v1.1.3
-        with:
-          input: 'third_party/proto'
-          against: 'https://github.com/provenance-io/provenance.git#branch=${{ steps.setup.outputs.base-branch }},subdir=third_party/proto'
-      - name: third-party breaking results
-        if: always() && steps.breaking-third.outcome == 'failure'
-        run: |
-          echo "results:"
-          echo "${{ toJSON(steps.breaking-third.outputs) }}"
-      - name: proto-update-check
-        if: always()
-        run: |
-          BASE_BRANCH="..origin/${{ steps.setup.outputs.base-branch }}"
-          export BASE_BRANCH
-          make proto-update-check
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index 0df77b74f2..0000000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,326 +0,0 @@
-name: Provenance Build and Release
-
-on:
-  pull_request:
-    paths:
-      - "**.go"
-      - "go.mod"
-      - "go.sum"
-      - "**.mk"
-      - "Makefile"
-      - "gon.json"
-      - "scripts/**"
-      - ".github/workflows/release.yml"
-  push:
-    tags:
-      - "v[0-9]+.[0-9]+.[0-9]+" # Push events to matching v*, i.e. v1.0, v20.15.10
-      - "v[0-9]+.[0-9]+.[0-9]+-rc*" # Push events to matching v*, i.e. v1.0-rc1, v20.15.10-rc5
-
-# Set concurrency for this workflow to cancel in-progress jobs if retriggered.
-# The github.ref is only available when triggered by a PR so fall back to github.run_id for other cases.
-# The github.run_id is unique for each run, giving each such invocation it's own unique concurrency group.
-# Basically, if you push to a PR branch, jobs that are still running for that PR will be cancelled.
-# But jobs started because of a merge to main or a release tag push are not cancelled.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  build_init:
-    runs-on: ubuntu-latest
-    name: Build Init
-    steps:
-      - name: Define Variables
-        id: vars
-        # ${GITHUB_REF##*/} removes everything before the last slash. E.g. 'refs/tags/v1.8.0' becomes 'v1.8.0'
-        # ${GITHUB_SHA:0:7} gets the first 7 characters. E.g. `3e9928920f5a64c8fc4884ee085efe1983071c90` becomes `3e99289`
-        run: |
-          version="${GITHUB_SHA:0:7}"
-          is_release='false'
-          if [[ "$GITHUB_REF" =~ ^refs/tags/ ]]; then
-            version=${GITHUB_REF##*/}
-            is_release='true'
-          fi
-          prerelease=false
-          if [[ "$version" =~ -rc ]]; then
-            prerelease=true
-          fi
-          echo "Setting output: version=$version"
-          echo "version=$version" >> "$GITHUB_OUTPUT"
-          echo "Setting output: is_release=$is_release"
-          echo "is_release=$is_release" >> "$GITHUB_OUTPUT"
-          echo "Setting output: prerelease=$prerelease"
-          echo "prerelease=$prerelease" >> "$GITHUB_OUTPUT"
-    outputs:
-      version: ${{ steps.vars.outputs.version }}
-      is_release: ${{ steps.vars.outputs.is_release }}
-      prerelease: ${{ steps.vars.outputs.prerelease }}
-      go_version: '1.20'
-
-  build_osx:
-    runs-on: macos-latest
-    needs:
-      - build_init
-    name: Build OSX
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Setup go
-        uses: actions/setup-go@v5
-        with:
-          go-version: ${{ needs.build_init.outputs.go_version }}
-      - name: Build osx binary
-        run: |
-          export VERSION=${{ needs.build_init.outputs.version }}
-          export WITH_CLEVELDB=false
-          export WITH_ROCKSDB=false
-          make build-release-zip
-      - name: Provenanced version
-        run: build/provenanced version --long
-      - uses: actions/upload-artifact@v4
-        with:
-          name: osx-zip
-          path: build/*.zip
-
-  build_linux:
-    runs-on: ubuntu-20.04
-    needs:
-      - build_init
-    name: Build Linux
-    env:
-      LD_LIBRARY_PATH: /usr/local/lib:/usr/local/lib/x86_64-linux-gnu
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Setup go
-        uses: actions/setup-go@v5
-        with:
-          go-version: ${{ needs.build_init.outputs.go_version }}
-      - name: Install deps
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y libgflags-dev libsnappy-dev zlib1g-dev libbz2-dev liblz4-dev libzstd-dev
-      - name: Build and install cleveldb
-        run: make cleveldb
-      - name: Build linux binary
-        run: |
-          export VERSION=${{ needs.build_init.outputs.version }}
-          export WITH_CLEVELDB=true
-          make build-release-zip
-      - name: Provenanced version
-        run: build/provenanced version --long
-      - uses: actions/upload-artifact@v4
-        with:
-          name: linux-zip
-          path: build/provenance*.zip
-
-  build_dbmigrate:
-    runs-on: ubuntu-20.04
-    needs:
-      - build_init
-    name: Build dbmigrate
-    env:
-      LD_LIBRARY_PATH: /usr/local/lib:/usr/local/lib/x86_64-linux-gnu
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Setup go
-        uses: actions/setup-go@v5
-        with:
-          go-version: ${{ needs.build_init.outputs.go_version }}
-      - name: Install deps
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y libgflags-dev libsnappy-dev zlib1g-dev libbz2-dev liblz4-dev libzstd-dev
-      - name: Build and install cleveldb
-        run: make cleveldb
-      - name: Build dbmigrate binary
-        run: |
-          export VERSION=${{ needs.build_init.outputs.version }}
-          export WITH_CLEVELDB=true
-          export WITH_ROCKSDB=false
-          export WITH_BADGERDB=false
-          make build-dbmigrate-zip
-      - name: dbmigrate --help
-        run: build/dbmigrate --help
-      - uses: actions/upload-artifact@v4
-        with:
-          name: dbmigrate-zip
-          path: build/dbmigrate*.zip
-
-  buf_push:
-    needs:
-      - build_init
-    if: needs.build_init.outputs.is_release == 'true'
-    runs-on: ubuntu-latest
-    name: Protobuf Push
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Buf setup action
-        uses: bufbuild/buf-setup-action@v1.30.0
-      - name: Buf push 'proto/'
-        uses: bufbuild/buf-push-action@v1
-        with:
-          input: 'proto'
-          buf_token: ${{ secrets.BUF_TOKEN }}
-
-  create_release:
-    needs:
-      - build_init
-      - build_linux
-      - build_dbmigrate
-    if: needs.build_init.outputs.is_release == 'true'
-    runs-on: ubuntu-latest
-    name: Create Release
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Create release ${{ needs.build_init.outputs.version }}
-        uses: actions/create-release@v1
-        id: create_release
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          tag_name: ${{ github.ref }}
-          release_name: Release ${{ github.ref }}
-          draft: false
-          prerelease: ${{ needs.build_init.outputs.prerelease }}
-          body_path: RELEASE_CHANGELOG.md
-    outputs:
-      release_url: ${{ steps.create_release.outputs.upload_url }}
-
-  update_release:
-    needs:
-      - build_init
-      - create_release
-    if: needs.build_init.outputs.is_release == 'true'
-    runs-on: ubuntu-latest
-    name: Attach Release Artifacts
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Setup go
-        uses: actions/setup-go@v5
-        with:
-          go-version: ${{ needs.build_init.outputs.go_version }}
-      - name: Download linux zip artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: linux-zip
-          path: build/
-      - name: Download dbmigrate zip artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: dbmigrate-zip
-          path: build/
-      - name: Create release items
-        id: create-items
-        run: |
-          make VERSION=${{ needs.build_init.outputs.version }} build-release-checksum build-release-plan build-release-proto
-      - name: Upload linux zip artifact
-        if: always() && steps.create-items.outcome == 'success'
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.release_url }}
-          asset_path: ./build/provenance-linux-amd64-${{ needs.build_init.outputs.version }}.zip
-          asset_name: provenance-linux-amd64-${{ needs.build_init.outputs.version }}.zip
-          asset_content_type: application/octet-stream
-      - name: Upload dbmigrate zip artifact
-        if: always() && steps.create-items.outcome == 'success'
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.release_url }}
-          asset_path: ./build/dbmigrate-linux-amd64-${{ needs.build_init.outputs.version }}.zip
-          asset_name: dbmigrate-linux-amd64-${{ needs.build_init.outputs.version }}.zip
-          asset_content_type: application/octet-stream
-      - name: Upload release checksum
-        if: always() && steps.create-items.outcome == 'success'
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.release_url }}
-          asset_path: ./build/sha256sum.txt
-          asset_name: sha256sum.txt
-          asset_content_type: application/octet-stream
-      - name: Upload release plan
-        if: always() && steps.create-items.outcome == 'success'
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.release_url }}
-          asset_path: ./build/plan-${{ needs.build_init.outputs.version }}.json
-          asset_name: plan-${{ needs.build_init.outputs.version }}.json
-          asset_content_type: application/octet-stream
-      - name: Upload release protos
-        if: always() && steps.create-items.outcome == 'success'
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.release_url }}
-          asset_path: ./build/protos-${{ needs.build_init.outputs.version }}.zip
-          asset_name: protos-${{ needs.build_init.outputs.version }}.zip
-          asset_content_type: application/octet-stream
-
-  java_kotlin_release:
-    needs:
-      - build_init
-    if: needs.build_init.outputs.is_release == 'true'
-    runs-on: ubuntu-latest
-    name: Java/Kotlin Proto Publishing
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Java Setup
-        uses: actions/setup-java@v4
-        with:
-          distribution: 'zulu'
-          java-version: 11
-          server-id: github
-
-      - name: GPG Setup
-        env:
-          GPG_KEY: ${{ secrets.OSSRH_GPG_SECRET_KEY }}
-        run: |
-          export GPG_TTY=$(tty)
-          echo -n "$GPG_KEY" | base64 --decode | gpg --batch --import
-          gpg --list-secret-keys --keyid-format LONG
-          echo -n "$GPG_KEY" | base64 --decode > $GITHUB_WORKSPACE/release.gpg
-
-      - name: Build and Publish
-        env:
-          OSSRH_USERNAME: ${{ secrets.OSSRH_USERNAME }}
-          OSSRH_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
-          GPG_PASSWORD: ${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }}
-        run: |
-          cd protoBindings
-          ./gradlew publish closeAndReleaseSonatypeStagingRepository \
-            -PartifactVersion=$(echo "${{ needs.build_init.outputs.version }}" | sed -e 's/^v//') \
-            -Psigning.keyId=B7D30ABE \
-            -Psigning.password="${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }}" \
-            -Psigning.secretKeyRingFile=$GITHUB_WORKSPACE/release.gpg \
-            --info
-
-  npm_release:
-    needs:
-      - build_init
-    if: needs.build_init.outputs.is_release == 'true'
-    runs-on: ubuntu-latest
-    name: NPM Proto Publishing
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Publish
-        uses: provenance-io/npm-publish-action@v1.1
-        with:
-          api-version: ${{ needs.build_init.outputs.version }}
-          npm-token: ${{ secrets.NPM_TOKEN }}
-          tag: alpha
diff --git a/.github/workflows/rosetta.yml b/.github/workflows/rosetta.yml
deleted file mode 100644
index c78858c1d1..0000000000
--- a/.github/workflows/rosetta.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-name: Rosetta
-# Rosetta runs rosetta-cli to verify that provenance still meets the Rosetta api
-on:
-  pull_request:
-  push:
-    branches:
-      - main
-
-# Set concurrency for this workflow to cancel in-progress jobs if retriggered.
-# The github.ref is only available when triggered by a PR so fall back to github.run_id for other cases.
-# The github.run_id is unique for each run, giving each such invocation it's own unique concurrency group.
-# Basically, if you push to a PR branch, jobs that are still running for that PR will be cancelled.
-# But jobs started because of a merge to main or a release tag push are not cancelled.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  rosetta:
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    steps:
-      - uses: actions/checkout@v4
-      - uses: technote-space/get-diff-action@v6.1.2
-        with:
-          PATTERNS: |
-            **/**.go
-            go.mod
-            go.sum
-            client/rosetta/**
-            .github/workflows/rosetta.yml
-      - uses: actions/setup-go@v5
-        if: ${{ env.GIT_DIFF }}
-        with:
-          go-version: '1.20'
-      - name: Go mod vendor
-        if: ${{ env.GIT_DIFF }}
-        run: |
-          go mod vendor
-      - name: rosetta
-        if: ${{ env.GIT_DIFF }}
-        run: make test-rosetta
diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml
deleted file mode 100644
index dd74f99326..0000000000
--- a/.github/workflows/sims.yml
+++ /dev/null
@@ -1,302 +0,0 @@
-name: Sims
-# Sims workflow runs multiple types of simulations (nondeterminism, import-export, after-import, multi-seed-short)
-
-on:
-  pull_request:
-  push:
-    branches:
-      - main
-
-env:
-  LD_LIBRARY_PATH: /usr/local/lib:/usr/local/lib/x86_64-linux-gnu
-
-# Set concurrency for this workflow to cancel in-progress jobs if retriggered.
-# The github.ref is only available when triggered by a PR so fall back to github.run_id for other cases.
-# The github.run_id is unique for each run, giving each such invocation it's own unique concurrency group.
-# Basically, if you push to a PR branch, jobs that are still running for that PR will be cancelled.
-# But jobs started because of a merge to main or a release tag push are not cancelled.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  setup:
-    if: "!contains(github.event.head_commit.message, 'skip-sims')"
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: technote-space/get-diff-action@v6.1.2
-        with:
-          PATTERNS: |
-            **/**.go
-            go.mod
-            go.sum
-            .github/workflows/sims.yml
-            sims.mk
-          SET_ENV_NAME_INSERTIONS: 1
-          SET_ENV_NAME_LINES: 1
-      - name: Define Variables
-        id: def-vars
-        run: |
-          file_prefix="sim-test-${GITHUB_SHA:0:7}-${GITHUB_RUN_ATTEMPT}"
-          echo "Setting output: file-prefix=$file_prefix"
-          echo "file-prefix=$file_prefix" >> "$GITHUB_OUTPUT"
-          db_cache_key_hash="${{ hashFiles('scripts/cleveldb_build_and_install.sh') }}"
-          echo "Setting output: db-cache-key-hash=$db_cache_key_hash"
-          echo "db-cache-key-hash=$db_cache_key_hash" >> "$GITHUB_OUTPUT"
-          go_cache_key_hash="${{ hashFiles('go.sum') }}"
-          echo "Setting output: go-cache-key-hash=$go_cache_key_hash"
-          echo "go-cache-key-hash=$go_cache_key_hash" >> "$GITHUB_OUTPUT"
-    outputs:
-      go-version: '1.20'
-      should-run: ${{ env.GIT_DIFF }}
-      file-prefix: ${{ steps.def-vars.outputs.file-prefix }}
-      db-cache-key-suffix: sims-db3-${{ steps.def-vars.outputs.db-cache-key-hash }}
-      # In Order:
-      # * The leveldb repo tarball
-      # * The directory extracted from the leveldb tarball
-      db-cache-path: |
-        leveldb*.tar.gz
-        leveldb-*
-      go-cache-key-suffix: sims-go3-${{ steps.def-vars.outputs.go-cache-key-hash }}
-      # In Order:
-      # * Go binary directory
-      # * Go module directory
-      # * Go build cache (Linux)
-      go-cache-path: |
-        ~/go/bin
-        ~/go/pkg/mod
-        ~/.cache/go-build
-
-  build-linux:
-    needs: setup
-    if: needs.setup.outputs.should-run
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Output setup
-        run: |
-          echo "         go-version: [${{ needs.setup.outputs.go-version }}]"
-          echo "         should-run: [${{ needs.setup.outputs.should-run }}]"
-          echo "        file-prefix: [${{ needs.setup.outputs.file-prefix }}]"
-          echo "db-cache-key-suffix: [${{ needs.setup.outputs.db-cache-key-suffix }}]"
-          echo "      db-cache-path: [${{ needs.setup.outputs.db-cache-path }}]"
-          echo "go-cache-key-suffix: [${{ needs.setup.outputs.go-cache-key-suffix }}]"
-          echo "      go-cache-path: [${{ needs.setup.outputs.go-cache-path }}]"
-      - uses: actions/cache@v4
-        name: Load db cache
-        id: db-cache-setup
-        with:
-          key: ${{ runner.os }}-${{ needs.setup.outputs.db-cache-key-suffix }}
-          path: ${{ needs.setup.outputs.db-cache-path }}
-      - uses: actions/cache@v4
-        name: Load go cache
-        id: go-cache-setup
-        with:
-          key: ${{ runner.os }}-${{ needs.setup.outputs.go-cache-key-suffix }}
-          path: ${{ needs.setup.outputs.go-cache-path }}
-      - name: Setup build environment
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y libgflags-dev libsnappy-dev zlib1g-dev libbz2-dev liblz4-dev libzstd-dev wget curl build-essential cmake gcc sqlite3
-      - name: Build cleveldb
-        if: steps.db-cache-setup.outputs.cache-hit != 'true'
-        run: |
-          export CLEVELDB_DO_BUILD='true'
-          export CLEVELDB_DO_INSTALL='false'
-          export CLEVELDB_DO_CLEANUP='false'
-          make cleveldb
-      - name: Install cleveldb
-        run: |
-          export CLEVELDB_DO_BUILD='false'
-          export CLEVELDB_DO_INSTALL='true'
-          export CLEVELDB_SUDO='true'
-          export CLEVELDB_DO_CLEANUP='false'
-          make cleveldb
-          echo 'WITH_CLEVELDB=true' >> "$GITHUB_ENV"
-      - uses: actions/setup-go@v5
-        with:
-          go-version: ${{ needs.setup.outputs.go-version }}
-      - name: Display go version
-        run: go version
-      - name: Install runsim
-        if: steps.go-cache-setup.outputs.cache-hit != 'true'
-        run: export GO111MODULE="on" && go get github.com/cosmos/tools/cmd/runsim@v1.0.0
-      - name: Update provwasm contract
-        run: make download-smart-contracts
-      - name: Build provenanced
-        run: make build
-      - name: Provenanced version
-        run: build/provenanced version --long
-
-  runsim:
-    # These tests are the ones that use the runsim program (see sims.mk).
-    needs: [setup, build-linux]
-    if: needs.setup.outputs.should-run
-    strategy:
-      fail-fast: false
-      matrix:
-        test: ["import-export", "multi-seed-short", "after-import"]
-        db-backend: ["goleveldb"]
-        os: ["ubuntu-latest"]
-    runs-on: ${{ matrix.os }}
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/cache@v4
-        name: Load go cache
-        with:
-          key: ${{ runner.os }}-${{ needs.setup.outputs.go-cache-key-suffix }}
-          path: ${{ needs.setup.outputs.go-cache-path }}
-      - name: Define test-logs
-        id: test-logs
-        run: |
-          test_logs="${{ needs.setup.outputs.file-prefix }}-${{ matrix.test }}-${{ matrix.db-backend }}-${{ matrix.os }}"
-          echo "Setting output: test-logs=$test_logs"
-          echo "test-logs=$test_logs" >> "$GITHUB_OUTPUT"
-      - uses: actions/setup-go@v5
-        with:
-          go-version: ${{ needs.setup.outputs.go-version }}
-      - name: Display go version
-        run: go version
-      - name: Update provwasm contract
-        run: make download-smart-contracts
-      - name: Test
-        run: |
-          set -x
-          ec=0
-          export DB_BACKEND=${{ matrix.db-backend }}
-          make test-sim-${{ matrix.test }} || ec="$?"
-          echo "Test exited with code $ec"
-          if [[ "$ec" -ne '0' ]] && ls -d /tmp/sim-logs-*; then
-            mkdir "${{ steps.test-logs.outputs.test-logs }}"
-            for logdir in $( ls -d /tmp/sim-logs-* ); do
-              cp -rv $logdir/* "${{ steps.test-logs.outputs.test-logs }}/" || :
-            done
-            bad_seed="$( grep -Eo 'Seed [[:digit:]]+: FAILED' "${{ steps.test-logs.outputs.test-logs }}"/runsim_log | sed -E 's/[^[:digit:]]+//g' )" || :
-            if [[ -n "$bad_seed" ]]; then
-              echo "::group::last 500 lines of seed $bad_seed stdout"
-              tail -n 500 "${{ steps.test-logs.outputs.test-logs }}"/*-seed-$bad_seed-*.stdout || :
-              echo "::endgroup::"
-            fi
-          fi
-          exit "$ec"
-      - uses: actions/upload-artifact@v4
-        if: failure()
-        with:
-          name: ${{ steps.test-logs.outputs.test-logs }}
-          path: ${{ steps.test-logs.outputs.test-logs }}
-
-  go-test-multi-db:
-    # These are tests that use go test to run (see sims.mk), and that we want to test using different database backends.
-    needs: [setup, build-linux]
-    if: needs.setup.outputs.should-run
-    strategy:
-      fail-fast: false
-      matrix:
-        # The test-sim-simple test is pretty quick and should be able to identify glaring problems.
-        # The test-sim-benchmark is handy to have for each db type.
- test: ["simple", "benchmark"] - db-backend: ["goleveldb", "cleveldb"] - os: ["ubuntu-latest"] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - - uses: actions/cache@v4 - name: Load db cache - with: - key: ${{ runner.os }}-${{ needs.setup.outputs.db-cache-key-suffix }} - path: ${{ needs.setup.outputs.db-cache-path }} - - uses: actions/cache@v4 - name: Load go cache - with: - key: ${{ runner.os }}-${{ needs.setup.outputs.go-cache-key-suffix }} - path: ${{ needs.setup.outputs.go-cache-path }} - - name: Define test-logs - id: test-logs - run: | - test_logs="${{ needs.setup.outputs.file-prefix }}-${{ matrix.test }}-${{ matrix.db-backend }}-${{ matrix.os }}" - echo "Setting output: test-logs=$test_logs" - echo "test-logs=$test_logs" >> "$GITHUB_OUTPUT" - - name: Install cleveldb - if: matrix.db-backend == 'cleveldb' - run: | - export CLEVELDB_DO_BUILD='false' - export CLEVELDB_DO_INSTALL='true' - export CLEVELDB_SUDO='true' - export CLEVELDB_DO_CLEANUP='false' - make cleveldb - echo 'WITH_CLEVELDB=true' >> "$GITHUB_ENV" - - uses: actions/setup-go@v5 - with: - go-version: ${{ needs.setup.outputs.go-version }} - - name: Display go version - run: go version - - name: Update provwasm contract - run: make download-smart-contracts - - name: Test - run: | - set -x - export DB_BACKEND=${{ matrix.db-backend }} - make test-sim-${{ matrix.test }} | tee "${{ steps.test-logs.outputs.test-logs }}.txt" - exit "${PIPESTATUS[0]}" - - uses: actions/upload-artifact@v4 - if: failure() - with: - name: ${{ steps.test-logs.outputs.test-logs }} - path: ${{ steps.test-logs.outputs.test-logs }}.txt - - go-test-single-db: - # These are tests that use go test to run (see sims.mk), and that we don't care about testing using different database backends. - needs: [setup, build-linux] - if: needs.setup.outputs.should-run - strategy: - fail-fast: false - matrix: - # The test-sim-nondeterminism test hard-codes the db backend to use memdb. - # The test-sim-benchmark-invariants test can use different db backends, but to save resources, is down here. - test: ["nondeterminism", "benchmark-invariants"] - os: ["ubuntu-latest"] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - - uses: actions/cache@v4 - name: Load go cache - with: - key: ${{ runner.os }}-${{ needs.setup.outputs.go-cache-key-suffix }} - path: ${{ needs.setup.outputs.go-cache-path }} - - name: Define test-logs - id: test-logs - run: | - test_logs="${{ needs.setup.outputs.file-prefix }}-${{ matrix.test }}-${{ matrix.os }}" - echo "Setting output: test-logs=$test_logs" - echo "test-logs=$test_logs" >> "$GITHUB_OUTPUT" - - uses: actions/setup-go@v5 - with: - go-version: ${{ needs.setup.outputs.go-version }} - - name: Display go version - run: go version - - name: Update provwasm contract - run: make download-smart-contracts - - name: Test - run: | - set -x - make test-sim-${{ matrix.test }} | tee "${{ steps.test-logs.outputs.test-logs }}.txt" - exit "${PIPESTATUS[0]}" - - uses: actions/upload-artifact@v4 - if: failure() - with: - name: ${{ steps.test-logs.outputs.test-logs }} - path: ${{ steps.test-logs.outputs.test-logs }}.txt - -# Sim tests that are not run in here: -# test-sim-multi-seed-long: runsim -# The short version does 50 blocks and takes 30-40 minutes. The long one does 500 blocks. Way too long. -# test-sim-custom-genesis-multi-seed: runsim -# This one requires a genesis file to be created, and I don't feel like doing that right now. -# test-sim-custom-genesis-fast: -# Same as test-sim-custom-genesis-multi-seed. 
-#   test-sim-profile:
-#     This is the exact same thing as test-sim-benchmark except with a couple extra output files.
-#     Unless I add an upload for them, it's not worth doing it again.
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
deleted file mode 100644
index 5ff8a4e850..0000000000
--- a/.github/workflows/test.yml
+++ /dev/null
@@ -1,195 +0,0 @@
-name: Tests / Code Coverage
-# Tests / Code Coverage workflow runs unit tests and uploads a code coverage report
-
-
-# This workflow is run on pushes to master & every Pull Request,
-# if no *.go, go.mod or go.sum file is changed it will pass without running as these are required checks
-on:
-  pull_request:
-  push:
-    branches:
-      - main
-
-# Set concurrency for this workflow to cancel in-progress jobs if retriggered.
-# The github.ref is only available when triggered by a PR so fall back to github.run_id for other cases.
-# The github.run_id is unique for each run, giving each such invocation it's own unique concurrency group.
-# Basically, if you push to a PR branch, jobs that are still running for that PR will be cancelled.
-# But jobs started because of a merge to main or a release tag push are not cancelled.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  setup-tests:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: technote-space/get-diff-action@v6.1.2
-        with:
-          PATTERNS: |
-            **/**.go
-            go.mod
-            go.sum
-            .github/workflows/test.yml
-      - name: Define Variables
-        id: def-vars
-        run: |
-          file_prefix="${GITHUB_SHA:0:7}-${GITHUB_RUN_ATTEMPT}"
-          echo "Setting output: file-prefix=$file_prefix"
-          echo "file-prefix=$file_prefix" >> "$GITHUB_OUTPUT"
-      - name: Create a file with all the pkgs
-        run: go list ./... > pkgs.txt
-      - uses: actions/upload-artifact@v4
-        with:
-          name: "${{ steps.def-vars.outputs.file-prefix }}-pkgs.txt"
-          path: ./pkgs.txt
-      - name: Split pkgs into parts
-        # The x/marker/simulation test-race takes around 6ish minutes and is by far the longest running one.
-        # The next longest running is x/metadata/client/cli at 2.5ish minutes.
-        # So take x/marker/simulation out of the list, split the list into 3 parts and create a 4th part
-        # with just the x/marker/simulation test.
-        run: |
-          grep -vF -e 'github.com/provenance-io/provenance/x/marker/simulation' pkgs.txt > pkgs.txt.tmp
-          split -d -n l/3 pkgs.txt.tmp pkgs.txt.part.
-          printf 'github.com/provenance-io/provenance/x/marker/simulation\n' > pkgs.txt.part.03
-      - uses: actions/upload-artifact@v4
-        with:
-          name: "${{ steps.def-vars.outputs.file-prefix }}-pkgs.txt.part.00"
-          path: ./pkgs.txt.part.00
-      - uses: actions/upload-artifact@v4
-        with:
-          name: "${{ steps.def-vars.outputs.file-prefix }}-pkgs.txt.part.01"
-          path: ./pkgs.txt.part.01
-      - uses: actions/upload-artifact@v4
-        with:
-          name: "${{ steps.def-vars.outputs.file-prefix }}-pkgs.txt.part.02"
-          path: ./pkgs.txt.part.02
-      - uses: actions/upload-artifact@v4
-        with:
-          name: "${{ steps.def-vars.outputs.file-prefix }}-pkgs.txt.part.03"
-          path: ./pkgs.txt.part.03
-    outputs:
-      should-run: ${{ env.GIT_DIFF }}
-      go-version: '1.20'
-      file-prefix: ${{ steps.def-vars.outputs.file-prefix }}
-
-
-  tests:
-    needs: setup-tests
-    # Note: There's a required check on this, and it must pass. A skip doesn't count as a pass.
-    # So instead of a job-level if: needs.setup-tests.outputs.should-run on this job,
-    # it's in the steps below (except the checkout step).
-    strategy:
-      fail-fast: false
-      matrix:
-        part: ["00", "01", "02", "03"]
-    runs-on: ubuntu-latest
-    env:
-      LD_LIBRARY_PATH: /usr/local/lib:/usr/local/lib/x86_64-linux-gnu
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          # CodeCov requires fetch-depth > 1
-          fetch-depth: 2
-      - uses: actions/setup-go@v5
-        if: needs.setup-tests.outputs.should-run
-        with:
-          go-version: ${{ needs.setup-tests.outputs.go-version }}
-      - uses: actions/download-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-pkgs.txt.part.${{ matrix.part }}"
-      - name: test & coverage report creation
-        if: needs.setup-tests.outputs.should-run
-        run: |
-          cat pkgs.txt.part.${{ matrix.part }} | xargs go test -mod=readonly -timeout 30m -coverprofile=${{ matrix.part }}profile.out -covermode=atomic -tags='norace ledger test_ledger_mock'
-      - uses: actions/upload-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-${{ matrix.part }}-coverage"
-          path: ./${{ matrix.part }}profile.out
-
-
-  # This action performs a code coverage assessment but filters out generated code from proto based types
-  # and grpc services
-  upload-coverage-report:
-    needs: [setup-tests, tests]
-    # Note: There's a required check on this, and it must pass. A skip doesn't count as a pass.
-    # So instead of a job-level if: needs.setup-tests.outputs.should-run on this job,
-    # it's in the steps below (except the checkout step).
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          # CodeCov requires fetch-depth > 1
-          fetch-depth: 2
-      - uses: actions/download-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-00-coverage"
-      - uses: actions/download-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-01-coverage"
-      - uses: actions/download-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-02-coverage"
-      - uses: actions/download-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-03-coverage"
-      - name: Combine profiles
-        if: needs.setup-tests.outputs.should-run
-        run: |
-          cat ./*profile.out | grep -v "mode: atomic" >> coverage.txt
-      - name: filter out DONTCOVER
-        if: needs.setup-tests.outputs.should-run
-        run: |
-          excludelist="$(find ./ -type f -name '*.go' | xargs grep -l 'DONTCOVER')"
-          excludelist+=" $(find ./ -type f -name '*.pb.go')"
-          excludelist+=" $(find ./ -type f -name '*.pb.gw.go')"
-          excludelist+=" $(find ./ -type f -path './tests/mocks/*.go')"
-          for filename in ${excludelist}; do
-            filename=$(echo $filename | sed 's/^./github.com\/cosmos\/cosmos-sdk/g')
-            echo "Excluding ${filename} from coverage report..."
-            sed -i.bak "/$(echo $filename | sed 's/\//\\\//g')/d" coverage.txt
-          done
-      - uses: codecov/codecov-action@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          file: ./coverage.txt
-
-
-  test-race:
-    needs: setup-tests
-    # Note: There's a required check on this, and it must pass. A skip doesn't count as a pass.
-    # So instead of a job-level if: needs.setup-tests.outputs.should-run on this job,
-    # it's in the steps below (except the checkout step).
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        part: ["00", "01", "02", "03"]
-    env:
-      LD_LIBRARY_PATH: /usr/local/lib:/usr/local/lib/x86_64-linux-gnu
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
-        if: needs.setup-tests.outputs.should-run
-        with:
-          go-version: ${{ needs.setup-tests.outputs.go-version }}
-      - uses: actions/download-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-pkgs.txt.part.${{ matrix.part }}"
-      - name: test & coverage report creation
-        if: needs.setup-tests.outputs.should-run
-        run: |
-          xargs --arg-file=pkgs.txt.part.${{ matrix.part }} go test -mod=readonly -timeout 30m -race -tags='cgo ledger test_ledger_mock' | tee ${{ matrix.part }}-race-output.txt
-          exit "${PIPESTATUS[0]}"
-      - uses: actions/upload-artifact@v4
-        if: needs.setup-tests.outputs.should-run
-        with:
-          name: "${{ needs.setup-tests.outputs.file-prefix }}-${{ matrix.part }}-race-output"
-          path: ./${{ matrix.part }}-race-output.txt
diff --git a/.gitignore b/.gitignore
index e0a56f83ad..f03ab43450 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,7 +18,7 @@ keyring-*
 vendor/
 *.account
 .go.mod.bak
-
+tmp_daemon_storage/
 # VS Code
 .history/
 .vscode/
diff --git a/app/app.go b/app/app.go
index 7ffbf424aa..19d9e942e6 100644
--- a/app/app.go
+++ b/app/app.go
@@ -173,6 +173,8 @@ import (
 	triggertypes "github.com/provenance-io/provenance/x/trigger/types"
 
 	_ "github.com/provenance-io/provenance/client/docs/statik" // registers swagger-ui files with statik
+
+	"github.com/provenance-io/provenance/mamoru_cosmos_sdk"
 )
 
 var (
@@ -1096,6 +1098,13 @@ func New(
 	app.ScopedICQKeeper = scopedICQKeeper
 	app.ScopedICAHostKeeper = scopedICAHostKeeper
 
+	////////////////////////// MAMORU SNIFFER //////////////////////////
+	listener := mamoru_cosmos_sdk.NewStreamingService(logger, mamoru_cosmos_sdk.NewSniffer(logger))
+	streamingManager := storetypes.StreamingManager{AbciListeners: []storetypes.ABCIListener{listener}}
+
+	bApp.SetStreamingManager(streamingManager)
+	////////////////////////// MAMORU SNIFFER //////////////////////////
+
 	return app
 }
 
diff --git a/docker/blockchain/Dockerfile b/docker/blockchain/Dockerfile
index 832d56cb5d..80c5a78005 100644
--- a/docker/blockchain/Dockerfile
+++ b/docker/blockchain/Dockerfile
@@ -5,12 +5,13 @@ WORKDIR /go/src/github.com/provenance-io/provenance
 
 RUN apt-get update && \
     apt-get upgrade -y && \
-    apt-get install -y libleveldb-dev
+    apt-get install -y libleveldb-dev make
 
 COPY client/ ./client/
 COPY app/ ./app/
 COPY go.* ./
 COPY cmd/ ./cmd/
+COPY mamoru_cosmos_sdk/ ./mamoru_cosmos_sdk/
 COPY internal/ ./internal/
 COPY x/ ./x/
 COPY vendor/ ./vendor/
@@ -23,12 +24,8 @@ COPY Makefile sims.mk ./
 
 # Build and install provenanced
 ENV VERSION=$VERSION
 RUN ARCH=$(uname -m) && \
-    if [ "$ARCH" != "x86_64" ] && [ "$ARCH" != "aarch64" ]; then \
-        echo "Unsupported architecture (required: x86_64 or aarch64): $ARCH"; \
-        exit 1; \
-    fi && \
     echo "Building and installing provenance for Arch: $ARCH"; \
-    make VERSION=${VERSION} install
+    make VERSION=$VERSION install
 
 ###
 FROM debian:bullseye-slim as run
@@ -38,16 +35,12 @@ RUN apt-get update && \
     apt-get install -y curl jq libleveldb-dev && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/
-
+
 COPY --from=build /go/src/github.com/provenance-io/provenance/vendor/github.com/CosmWasm/wasmvm/internal/api/libwasmvm.*.so /tmp
 
 COPY --from=build /go/bin/provenanced /usr/bin/provenanced
 
-RUN ARCH=$(uname -m) && \
-    if [ "$ARCH" != "x86_64" ] && [ "$ARCH" != "aarch64" ]; then \
"aarch64" ]; then \ - echo "Unsupported architecture (required: x86_64 or aarch64): $ARCH"; \ - exit 1; \ - fi && \ +RUN ARCH=$(uname -m) && \ cp /tmp/libwasmvm.$ARCH.so /usr/local/lib/. && \ rm /tmp/libwasmvm.*.so diff --git a/go.mod b/go.mod index 64876868c0..53e927b68f 100644 --- a/go.mod +++ b/go.mod @@ -82,7 +82,6 @@ require ( github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/go-kit/kit v0.12.0 // indirect - github.com/go-kit/log v0.2.1 // indirect github.com/go-logfmt/logfmt v0.5.1 // indirect github.com/go-logr/logr v1.3.0 // indirect github.com/go-logr/stdr v1.2.2 // indirect @@ -207,3 +206,11 @@ replace github.com/gin-gonic/gin => github.com/gin-gonic/gin v1.7.0 // TODO: This is also required for https://github.com/provenance-io/provenance/issues/1414 replace github.com/CosmWasm/wasmd => github.com/provenance-io/wasmd v0.30.0-pio-7 + +require ( + github.com/Mamoru-Foundation/mamoru-sniffer-go v0.13.0 + github.com/go-kit/log v0.2.1 + gotest.tools/v3 v3.5.1 +) + +//replace github.com/Mamoru-Foundation/mamoru-sniffer-go => ../mamoru-sniffer-go diff --git a/go.sum b/go.sum index ed1a800bff..0c44704303 100644 --- a/go.sum +++ b/go.sum @@ -213,6 +213,8 @@ github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/DataDog/zstd v1.5.0/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Mamoru-Foundation/mamoru-sniffer-go v0.13.0 h1:9Eo+umr0Z+c8qRbCb8DzcJqMOMkBqF8pFj9GIi4grKM= +github.com/Mamoru-Foundation/mamoru-sniffer-go v0.13.0/go.mod h1:u2UBuNW7Wxz5sL533/hygPYIt25EDGmWzoUuZ9XqtGo= github.com/Microsoft/go-winio v0.6.0 h1:slsWYD/zyx7lCXoZVlvQrj0hPTM1HI4+v1sIda2yDvg= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= @@ -1864,6 +1866,8 @@ gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/mamoru_cosmos_sdk/mock_streaming.go b/mamoru_cosmos_sdk/mock_streaming.go new file mode 100644 index 0000000000..f8b51e7dd5 --- /dev/null +++ b/mamoru_cosmos_sdk/mock_streaming.go @@ -0,0 +1,52 @@ +package mamoru_cosmos_sdk + +import ( + "context" + + "github.com/cosmos/cosmos-sdk/store/types" + + tmabci "github.com/tendermint/tendermint/abci/types" + tmlog "github.com/tendermint/tendermint/libs/log" + //"github.com/Mamoru-Foundation/mamoru-sniffer-go/mamoru_sniffer" +) + +var _ types.ABCIListener = (*MockStreamingService)(nil) + +// MockStreamingService mock streaming service +type 
+	logger             tmlog.Logger
+	currentBlockNumber int64
+}
+
+func NewMockStreamingService(logger tmlog.Logger) *MockStreamingService {
+	logger.Info("Mamoru MockStreamingService start")
+
+	return &MockStreamingService{
+		logger: logger,
+	}
+}
+
+func (ss *MockStreamingService) ListenBeginBlock(ctx context.Context, req tmabci.RequestBeginBlock, res tmabci.ResponseBeginBlock) error {
+	ss.currentBlockNumber = req.Header.Height
+	ss.logger.Info("Mamoru Mock ListenBeginBlock", "height", ss.currentBlockNumber)
+
+	return nil
+}
+
+func (ss *MockStreamingService) ListenDeliverTx(ctx context.Context, req tmabci.RequestDeliverTx, res tmabci.ResponseDeliverTx) error {
+	ss.logger.Info("Mamoru Mock ListenDeliverTx", "height", ss.currentBlockNumber)
+
+	return nil
+}
+
+func (ss *MockStreamingService) ListenEndBlock(ctx context.Context, req tmabci.RequestEndBlock, res tmabci.ResponseEndBlock) error {
+	ss.logger.Info("Mamoru Mock ListenEndBlock", "height", ss.currentBlockNumber)
+
+	return nil
+}
+
+func (ss *MockStreamingService) ListenCommit(ctx context.Context, res tmabci.ResponseCommit, changeSet []*types.StoreKVPair) error {
+	ss.logger.Info("Mamoru Mock ListenCommit", "height", ss.currentBlockNumber)
+
+	return nil
+}
diff --git a/mamoru_cosmos_sdk/sniffer.go b/mamoru_cosmos_sdk/sniffer.go
new file mode 100644
index 0000000000..0e0add729d
--- /dev/null
+++ b/mamoru_cosmos_sdk/sniffer.go
@@ -0,0 +1,130 @@
+package mamoru_cosmos_sdk
+
+import (
+	"fmt"
+	"os"
+	"strconv"
+	"sync"
+
+	"github.com/go-kit/log/level"
+	"github.com/go-kit/log/term"
+	"github.com/tendermint/tendermint/libs/log"
+
+	"github.com/Mamoru-Foundation/mamoru-sniffer-go/mamoru_sniffer"
+	"github.com/Mamoru-Foundation/mamoru-sniffer-go/mamoru_sniffer/cosmos"
+	"github.com/provenance-io/provenance/mamoru_cosmos_sdk/sync_state"
+)
+
+const (
+	PolishTimeSec   = 10
+	DefaultTNApiURL = "http://localhost:26657/status"
+)
+
+var snifferConnectFunc = cosmos.CosmosConnect
+
+func InitConnectFunc(f func() (*cosmos.SnifferCosmos, error)) {
+	snifferConnectFunc = f
+}
+
+func init() {
+	mamoru_sniffer.InitLogger(func(entry mamoru_sniffer.LogEntry) {
+		kvs := mapToInterfaceSlice(entry.Ctx)
+		msg := "Mamoru core: " + entry.Message
+		var tmLogger = log.NewTMLoggerWithColorFn(os.Stdout, func(keyvals ...interface{}) term.FgBgColor {
+			if keyvals[0] != level.Key() {
+				panic(fmt.Sprintf("expected level key to be first, got %v", keyvals[0]))
+			}
+			switch keyvals[1].(level.Value).String() {
+			case "debug":
+				return term.FgBgColor{Fg: term.Green}
+			case "error":
+				return term.FgBgColor{Fg: term.DarkRed}
+			default:
+				return term.FgBgColor{}
+			}
+		})
+
+		switch entry.Level {
+		case mamoru_sniffer.LogLevelDebug:
+			tmLogger.Debug(msg, kvs...)
+		case mamoru_sniffer.LogLevelInfo:
+			tmLogger.Info(msg, kvs...)
+		case mamoru_sniffer.LogLevelWarning:
+			tmLogger.With("Warn").Error(msg, kvs...)
+		case mamoru_sniffer.LogLevelError:
+			tmLogger.Error(msg, kvs...)
+		}
+	})
+}
+
+func mapToInterfaceSlice(m map[string]string) []interface{} {
+	var result []interface{}
+	for key, value := range m {
+		result = append(result, key, value)
+	}
+
+	return result
+}
+
+type Sniffer struct {
+	mu     sync.Mutex
+	logger log.Logger
+	client *cosmos.SnifferCosmos
+	sync   *sync_state.Client
+}
+
+func NewSniffer(logger log.Logger) *Sniffer {
+	tmAPIURL := getEnv("MAMORU_TM_API_URL", DefaultTNApiURL)
+	httpClient := sync_state.NewHTTPRequest(logger, tmAPIURL, PolishTimeSec, isSnifferEnabled())
+
+	return &Sniffer{
+		logger: logger,
+		sync:   httpClient,
+	}
+}
+
+// IsSynced returns true if the sniffer is synced with the chain
+func (s *Sniffer) IsSynced() bool {
+	s.logger.Info("Mamoru Sniffer sync", "sync", s.sync.GetSyncData().IsSync(),
+		"block", s.sync.GetSyncData().GetCurrentBlockNumber())
+
+	return s.sync.GetSyncData().IsSync()
+}
+
+func (s *Sniffer) CheckRequirements() bool {
+	return isSnifferEnabled() && s.IsSynced() && s.connect()
+}
+
+func (s *Sniffer) Client() *cosmos.SnifferCosmos {
+	return s.client
+}
+
+func (s *Sniffer) connect() bool {
+	if s.client != nil {
+		return true
+	}
+
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	var err error
+	s.client, err = snifferConnectFunc()
+	if err != nil {
+		s.logger.Error("Mamoru Sniffer connect", "err", err)
+		return false
+	}
+
+	return true
+}
+
+func isSnifferEnabled() bool {
+	val, _ := strconv.ParseBool(getEnv("MAMORU_SNIFFER_ENABLE", "false"))
+	return val
+}
+
+func getEnv(key, fallback string) string {
+	if value, ok := os.LookupEnv(key); ok {
+		return value
+	}
+	return fallback
+}
diff --git a/mamoru_cosmos_sdk/sniffer_test.go b/mamoru_cosmos_sdk/sniffer_test.go
new file mode 100644
index 0000000000..2a2cc8c3dd
--- /dev/null
+++ b/mamoru_cosmos_sdk/sniffer_test.go
@@ -0,0 +1,78 @@
+package mamoru_cosmos_sdk
+
+import (
+	sdk "github.com/cosmos/cosmos-sdk/types"
+	tmprototypes "github.com/tendermint/tendermint/proto/tendermint/types"
+	"gotest.tools/v3/assert"
+	"os"
+	"testing"
+
+	abci "github.com/tendermint/tendermint/abci/types"
+	"github.com/tendermint/tendermint/libs/log"
+)
+
+// TestNewSniffer tests the NewSniffer function
+func TestNewSniffer(t *testing.T) {
+	snifferTest := NewSniffer(log.NewTMLogger(log.NewSyncWriter(os.Stdout)))
+	if snifferTest == nil {
+		t.Error("NewSniffer returned nil")
+	}
+}
+
+// TestIsSnifferEnable tests the isSnifferEnable method
+func TestIsSnifferEnable(t *testing.T) {
+
+	// Set environment variable for testing
+	t.Setenv("MAMORU_SNIFFER_ENABLE", "true")
+	logger := log.NewTMLogger(log.NewSyncWriter(os.Stdout))
+	_ = NewSniffer(logger)
+	if !isSnifferEnabled() {
+		t.Error("Expected sniffer to be enabled")
+	}
+
+	// Test with invalid value
+	t.Setenv("MAMORU_SNIFFER_ENABLE", "not_a_bool")
+	if isSnifferEnabled() {
+		t.Error("Expected sniffer to be disabled with invalid env value")
+	}
+}
+
+// smoke test for the sniffer
+func TestSnifferSmoke(t *testing.T) {
+	t.Skip()
+	t.Setenv("MAMORU_SNIFFER_ENABLE", "true")
+	t.Setenv("MAMORU_CHAIN_TYPE", "ETH_TESTNET")
+	t.Setenv("MAMORU_CHAIN_ID", "validationchain")
+	t.Setenv("MAMORU_STATISTICS_SEND_INTERVAL_SECS", "1")
+	t.Setenv("MAMORU_ENDPOINT", "http://localhost:9090")
+	t.Setenv("MAMORU_PRIVATE_KEY", "6Hi8mqAFp14m3pySNYDjXhUysZok0X6jaMWvwZGdd8=")
+	//InitConnectFunc(func() (*cosmos.SnifferCosmos, error) {
+	//	return nil, nil
+	//})
+	logger := log.TestingLogger()
+	sniffer := NewSniffer(logger)
+	if sniffer == nil {
+		t.Error("NewSniffer returned nil")
+	}
+	header := tmprototypes.Header{}
+	ischeck := true
+	ctx := sdk.NewContext(nil, header, ischeck, logger)
diff --git a/mamoru_cosmos_sdk/streaming.go b/mamoru_cosmos_sdk/streaming.go
new file mode 100644
index 0000000000..66ba6c30c0
--- /dev/null
+++ b/mamoru_cosmos_sdk/streaming.go
@@ -0,0 +1,254 @@
+package mamoru_cosmos_sdk
+
+import (
+    "context"
+    "encoding/hex"
+    "strconv"
+    "strings"
+
+    "github.com/cosmos/cosmos-sdk/store/types"
+    sdktypes "github.com/cosmos/cosmos-sdk/types"
+    abci "github.com/tendermint/tendermint/abci/types"
+    "github.com/tendermint/tendermint/libs/bytes"
+    "github.com/tendermint/tendermint/libs/log"
+    tmtypes "github.com/tendermint/tendermint/types"
+
+    "github.com/Mamoru-Foundation/mamoru-sniffer-go/mamoru_sniffer/cosmos"
+)
+
+var _ types.ABCIListener = (*StreamingService)(nil)
+
+type StreamingService struct {
+    logger log.Logger
+
+    blockMetadata      types.BlockMetadata
+    currentBlockNumber int64
+
+    sniffer *Sniffer
+}
+
+func NewStreamingService(logger log.Logger, sniffer *Sniffer) *StreamingService {
+    logger.Info("Mamoru StreamingService start")
+
+    return &StreamingService{
+        sniffer: sniffer,
+        logger:  logger,
+    }
+}
+
+func (ss *StreamingService) ListenBeginBlock(ctx context.Context, req abci.RequestBeginBlock, res abci.ResponseBeginBlock) error {
+    // A new block starts: reset the metadata accumulated for the previous one.
+    ss.blockMetadata = types.BlockMetadata{}
+    ss.blockMetadata.RequestBeginBlock = &req
+    ss.blockMetadata.ResponseBeginBlock = &res
+    ss.currentBlockNumber = req.Header.Height
+    ss.logger.Info("Mamoru ListenBeginBlock", "height", ss.currentBlockNumber)
+
+    return nil
+}
+
+func (ss *StreamingService) ListenDeliverTx(ctx context.Context, req abci.RequestDeliverTx, res abci.ResponseDeliverTx) error {
+    ss.logger.Info("Mamoru ListenDeliverTx", "height", ss.currentBlockNumber)
+    ss.blockMetadata.DeliverTxs = append(ss.blockMetadata.DeliverTxs, &types.BlockMetadata_DeliverTx{
+        Request:  &req,
+        Response: &res,
+    })
+
+    return nil
+}
+
+func (ss *StreamingService) ListenEndBlock(ctx context.Context, req abci.RequestEndBlock, res abci.ResponseEndBlock) error {
+    ss.blockMetadata.RequestEndBlock = &req
+    ss.blockMetadata.ResponseEndBlock = &res
+    ss.logger.Info("Mamoru ListenEndBlock", "height", ss.currentBlockNumber)
+
+    return nil
+}
+
+func (ss *StreamingService) ListenCommit(ctx context.Context, res abci.ResponseCommit, changeSet []*types.StoreKVPair) error {
+    // Nothing is sent unless the sniffer is enabled, synced, and connected.
+    if ss.sniffer == nil || !ss.sniffer.CheckRequirements() {
+        return nil
+    }
+
+    ss.blockMetadata.ResponseCommit = &res
+    ss.logger.Info("Mamoru ListenCommit", "height", ss.currentBlockNumber)
+
+    var eventCount uint64
+    var txCount uint64
+    var callTracesCount uint64
+    builder := cosmos.NewCosmosCtxBuilder()
+
+    blockHeight := uint64(ss.blockMetadata.RequestEndBlock.Height)
+    block := cosmos.Block{
+        Seq:                           blockHeight,
+        Height:                        ss.blockMetadata.RequestEndBlock.Height,
+        Hash:                          hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Hash),
+        VersionBlock:                  ss.blockMetadata.RequestBeginBlock.Header.Version.Block,
+        VersionApp:                    ss.blockMetadata.RequestBeginBlock.Header.Version.App,
+        ChainId:                       ss.blockMetadata.RequestBeginBlock.Header.ChainID,
+        Time:                          ss.blockMetadata.RequestBeginBlock.Header.Time.Unix(),
+        LastBlockIdHash:               hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.LastBlockId.Hash),
+        LastBlockIdPartSetHeaderTotal: ss.blockMetadata.RequestBeginBlock.Header.LastBlockId.PartSetHeader.Total,
+        LastBlockIdPartSetHeaderHash:  hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.LastBlockId.PartSetHeader.Hash),
+        LastCommitHash:                hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.LastCommitHash),
+        DataHash:                      hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.DataHash),
+        ValidatorsHash:                hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.ValidatorsHash),
+        NextValidatorsHash:            hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.NextValidatorsHash),
+        ConsensusHash:                 hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.ConsensusHash),
+        AppHash:                       hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.AppHash),
+        LastResultsHash:               hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.LastResultsHash),
+        EvidenceHash:                  hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.EvidenceHash),
+        ProposerAddress:               hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Header.ProposerAddress),
+        LastCommitInfoRound:           ss.blockMetadata.RequestBeginBlock.LastCommitInfo.Round,
+    }
+
+    if ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates != nil {
+        block.ConsensusParamUpdatesBlockMaxBytes = ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates.Block.MaxBytes
+        block.ConsensusParamUpdatesBlockMaxGas = ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates.Block.MaxGas
+        block.ConsensusParamUpdatesEvidenceMaxAgeNumBlocks = ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates.Evidence.MaxAgeNumBlocks
+        block.ConsensusParamUpdatesEvidenceMaxAgeDuration = ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates.Evidence.MaxAgeDuration.Milliseconds()
+        block.ConsensusParamUpdatesEvidenceMaxBytes = ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates.Evidence.MaxBytes
+        block.ConsensusParamUpdatesValidatorPubKeyTypes = strings.Join(ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates.Validator.PubKeyTypes, ",")
+        block.ConsensusParamUpdatesVersionApp = ss.blockMetadata.ResponseEndBlock.ConsensusParamUpdates.Version.GetAppVersion()
+    }
+
+    builder.SetBlock(block)
+
+    for _, beginBlock := range ss.blockMetadata.ResponseBeginBlock.Events {
+        eventCount++
+        builder.AppendEvents([]cosmos.Event{
+            {
+                Seq:       blockHeight,
+                EventType: beginBlock.Type,
+            },
+        })
+        for _, attribute := range beginBlock.Attributes {
+            builder.AppendEventAttributes([]cosmos.EventAttribute{
+                {
+                    Seq:      blockHeight,
+                    EventSeq: blockHeight,
+                    Key:      string(attribute.Key),
+                    Value:    string(attribute.Value),
+                    Index:    attribute.Index,
+                },
+            })
+        }
+    }
+
+    for _, validatorUpdate := range ss.blockMetadata.ResponseEndBlock.ValidatorUpdates {
+        builder.AppendValidatorUpdates([]cosmos.ValidatorUpdate{
+            {
+                Seq:    blockHeight,
+                PubKey: validatorUpdate.PubKey.GetEd25519(),
+                Power:  validatorUpdate.Power,
+            },
+        })
+    }
+
+    for _, voteInfo := range ss.blockMetadata.RequestBeginBlock.LastCommitInfo.Votes {
+        builder.AppendVoteInfos([]cosmos.VoteInfo{
+            {
+                Seq:              blockHeight,
+                BlockSeq:         blockHeight,
+                ValidatorAddress: sdktypes.ValAddress(voteInfo.Validator.Address).String(),
+                ValidatorPower:   voteInfo.Validator.Power,
+                SignedLastBlock:  voteInfo.SignedLastBlock,
+            },
+        })
+    }
+
+    for _, misbehavior := range ss.blockMetadata.RequestBeginBlock.ByzantineValidators {
+        builder.AppendMisbehaviors([]cosmos.Misbehavior{
+            {
+                Seq:              blockHeight,
+                BlockSeq:         blockHeight,
+                Typ:              misbehavior.Type.String(),
+                ValidatorPower:   misbehavior.Validator.Power,
+                ValidatorAddress: sdktypes.ValAddress(misbehavior.Validator.Address).String(),
+                Height:           misbehavior.Height,
+                Time:             misbehavior.Time.Unix(),
+                TotalVotingPower: misbehavior.TotalVotingPower,
+            },
+        })
+    }
+
+    for txIndex, tx := range ss.blockMetadata.DeliverTxs {
+        txHash := bytes.HexBytes(tmtypes.Tx(tx.Request.Tx).Hash()).String()
+        builder.AppendTxs([]cosmos.Transaction{
+            {
+                Seq:       blockHeight,
+                Tx:        tx.Request.Tx,
+                TxHash:    txHash,
+                TxIndex:   uint32(txIndex),
+                Code:      tx.Response.Code,
+                Data:      tx.Response.Data,
+                Log:       tx.Response.Log,
+                Info:      tx.Response.Info,
+                GasWanted: tx.Response.GasWanted,
+                GasUsed:   tx.Response.GasUsed,
+                Codespace: tx.Response.Codespace,
+            },
+        })
+
+        for _, event := range tx.Response.Events {
+            eventCount++
+            builder.AppendEvents([]cosmos.Event{
+                {
+                    Seq:       blockHeight,
+                    EventType: event.Type,
+                },
+            })
+
+            for _, attribute := range event.Attributes {
+                builder.AppendEventAttributes([]cosmos.EventAttribute{
+                    {
+                        Seq:      blockHeight,
+                        EventSeq: blockHeight,
+                        Key:      string(attribute.Key),
+                        Value:    string(attribute.Value),
+                        Index:    attribute.Index,
+                    },
+                })
+            }
+        }
+
+        txCount++
+    }
+
+    for _, event := range ss.blockMetadata.ResponseEndBlock.Events {
+        eventCount++
+        builder.AppendEvents([]cosmos.Event{
+            {
+                Seq:       blockHeight,
+                EventType: event.Type,
+            },
+        })
+        for _, attribute := range event.Attributes {
+            builder.AppendEventAttributes([]cosmos.EventAttribute{
+                {
+                    Seq:      blockHeight,
+                    EventSeq: blockHeight,
+                    Key:      string(attribute.Key),
+                    Value:    string(attribute.Value),
+                    Index:    attribute.Index,
+                },
+            })
+        }
+    }
+
+    builder.SetBlockData(strconv.FormatUint(blockHeight, 10), hex.EncodeToString(ss.blockMetadata.RequestBeginBlock.Hash))
+
+    // One block per commit, plus the transaction, event, and call-trace tallies.
+    builder.SetStatistics(1, txCount, eventCount, callTracesCount)
+
+    cosmosCtx := builder.Finish()
+
+    ss.logger.Info("Mamoru Send", "height", ss.currentBlockNumber, "txs", txCount, "events", eventCount, "callTraces", callTracesCount)
+
+    if client := ss.sniffer.Client(); client != nil {
+        client.ObserveCosmosData(cosmosCtx)
+    }
+
+    return nil
+}
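ListenCommit funnels everything through the sniffer SDK's builder and hands the result to a single ObserveCosmosData call. Below is a stripped-down sketch of that builder sequence for one empty block; the method and field names come from the code above, but the placeholder values and the assumption that zero-valued fields are acceptable to the validation chain are mine:

```go
package mamoru_cosmos_sdk

import (
	"github.com/Mamoru-Foundation/mamoru-sniffer-go/mamoru_sniffer/cosmos"
)

// buildEmptyBlock shows the minimal builder sequence ListenCommit performs:
// set the block, attach block data, record statistics, then finish.
func buildEmptyBlock() {
	builder := cosmos.NewCosmosCtxBuilder()

	builder.SetBlock(cosmos.Block{
		Seq:    1,
		Height: 1,
		Hash:   "00", // placeholder, not a real block hash
	})
	builder.SetBlockData("1", "00")
	builder.SetStatistics(1, 0, 0, 0) // one block; no txs, events, or call traces

	cosmosCtx := builder.Finish()
	_ = cosmosCtx // a connected client would send this via ObserveCosmosData
}
```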
diff --git a/mamoru_cosmos_sdk/streaming_test.go b/mamoru_cosmos_sdk/streaming_test.go
new file mode 100644
index 0000000000..0fdcfa01fa
--- /dev/null
+++ b/mamoru_cosmos_sdk/streaming_test.go
@@ -0,0 +1,56 @@
+package mamoru_cosmos_sdk
+
+import (
+    "testing"
+    "time"
+
+    sdk "github.com/cosmos/cosmos-sdk/types"
+    tmabci "github.com/tendermint/tendermint/abci/types"
+    "github.com/tendermint/tendermint/libs/log"
+    tmprototypes "github.com/tendermint/tendermint/proto/tendermint/types"
+    tmversion "github.com/tendermint/tendermint/proto/tendermint/version"
+    "gotest.tools/v3/assert"
+)
+
+func TestListenBeginBlock(t *testing.T) {
+    t.Run("TestListenBeginBlock", func(t *testing.T) {
+        logger := log.TestingLogger()
+        header := tmprototypes.Header{}
+        isCheck := true
+        ctx := sdk.NewContext(nil, header, isCheck, logger)
+
+        ss := NewStreamingService(logger.With("module", "mamoru"), nil)
+
+        req := tmabci.RequestBeginBlock{Header: tmprototypes.Header{
+            Version:            tmversion.Consensus{},
+            ChainID:            "",
+            Height:             1234,
+            Time:               time.Time{},
+            LastBlockId:        tmprototypes.BlockID{},
+            LastCommitHash:     []byte{'a', 'b', 'c'},
+            DataHash:           []byte{'a', 'b', 'c'},
+            ValidatorsHash:     []byte{'a', 'b', 'c'},
+            NextValidatorsHash: []byte{'a', 'b', 'c'},
+            ConsensusHash:      []byte{'a', 'b', 'c'},
+            AppHash:            []byte{'a', 'b', 'c'},
+            LastResultsHash:    []byte{'a', 'b', 'c'},
+            EvidenceHash:       []byte{'a', 'b', 'c'},
+            ProposerAddress:    []byte{'a', 'b', 'c'},
+        }}
+        res := tmabci.ResponseBeginBlock{}
+
+        err := ss.ListenBeginBlock(ctx, req, res)
+        assert.NilError(t, err)
+        assert.Equal(t, ss.blockMetadata.RequestBeginBlock.Header.Height, req.Header.Height)
+        assert.Equal(t, ss.currentBlockNumber, req.Header.Height)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.LastCommitHash, req.Header.LastCommitHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.DataHash, req.Header.DataHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.ValidatorsHash, req.Header.ValidatorsHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.NextValidatorsHash, req.Header.NextValidatorsHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.ConsensusHash, req.Header.ConsensusHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.AppHash, req.Header.AppHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.LastResultsHash, req.Header.LastResultsHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.EvidenceHash, req.Header.EvidenceHash)
+        assert.DeepEqual(t, ss.blockMetadata.RequestBeginBlock.Header.ProposerAddress, req.Header.ProposerAddress)
+    })
+}
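The test above passes a nil sniffer, which works because ListenCommit short-circuits before touching the builder. A companion sketch (hypothetical test name, same package conventions) making that guard explicit:

```go
package mamoru_cosmos_sdk

import (
	"testing"

	sdk "github.com/cosmos/cosmos-sdk/types"
	tmabci "github.com/tendermint/tendermint/abci/types"
	"github.com/tendermint/tendermint/libs/log"
	tmprototypes "github.com/tendermint/tendermint/proto/tendermint/types"
	"gotest.tools/v3/assert"
)

// Hypothetical test: with a nil sniffer, ListenCommit is a no-op, so calling
// it without any preceding BeginBlock/EndBlock is safe.
func TestListenCommitNoopWithoutSniffer(t *testing.T) {
	logger := log.TestingLogger()
	ctx := sdk.NewContext(nil, tmprototypes.Header{}, true, logger)

	ss := NewStreamingService(logger, nil)
	err := ss.ListenCommit(ctx, tmabci.ResponseCommit{}, nil)
	assert.NilError(t, err)
}
```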
diff --git a/mamoru_cosmos_sdk/sync_state/client.go b/mamoru_cosmos_sdk/sync_state/client.go
new file mode 100644
index 0000000000..4086ca1afc
--- /dev/null
+++ b/mamoru_cosmos_sdk/sync_state/client.go
@@ -0,0 +1,197 @@
+package sync_state
+
+import (
+    "context"
+    "encoding/json"
+    "fmt"
+    "io"
+    "net/http"
+    "os"
+    "os/signal"
+    "strconv"
+    "sync"
+    "syscall"
+    "time"
+
+    "github.com/tendermint/tendermint/libs/log"
+)
+
+type JSONRPCResponse struct {
+    Jsonrpc string `json:"jsonrpc"`
+    ID      int    `json:"id"`
+    Result  Result `json:"result"`
+}
+
+type Result struct {
+    NodeInfo      NodeInfo      `json:"node_info"`
+    SyncInfo      SyncInfo      `json:"sync_info"`
+    ValidatorInfo ValidatorInfo `json:"validator_info"`
+}
+
+type NodeInfo struct {
+    ProtocolVersion ProtocolVersion `json:"protocol_version"`
+    ID              string          `json:"id"`
+    ListenAddr      string          `json:"listen_addr"`
+    Network         string          `json:"network"`
+    Version         string          `json:"version"`
+    Channels        string          `json:"channels"`
+    Moniker         string          `json:"moniker"`
+    Other           Other           `json:"other"`
+}
+
+type ProtocolVersion struct {
+    P2P   string `json:"p2p"`
+    Block string `json:"block"`
+    App   string `json:"app"`
+}
+
+type Other struct {
+    TxIndex    string `json:"tx_index"`
+    RPCAddress string `json:"rpc_address"`
+}
+
+type SyncInfo struct {
+    LatestBlockHash     string    `json:"latest_block_hash"`
+    LatestAppHash       string    `json:"latest_app_hash"`
+    LatestBlockHeight   string    `json:"latest_block_height"`
+    LatestBlockTime     time.Time `json:"latest_block_time"`
+    EarliestBlockHash   string    `json:"earliest_block_hash"`
+    EarliestAppHash     string    `json:"earliest_app_hash"`
+    EarliestBlockHeight string    `json:"earliest_block_height"`
+    EarliestBlockTime   time.Time `json:"earliest_block_time"`
+    CatchingUp          bool      `json:"catching_up"`
+}
+
+type ValidatorInfo struct {
+    Address     string `json:"address"`
+    PubKey      PubKey `json:"pub_key"`
+    VotingPower string `json:"voting_power"`
+}
+
+type PubKey struct {
+    Type  string `json:"type"`
+    Value string `json:"value"`
+}
+
+// GetCurrentBlockNumber returns the latest block height, or 0 if the status
+// response is missing or its height field is malformed.
+func (resp *JSONRPCResponse) GetCurrentBlockNumber() uint64 {
+    if resp == nil {
+        return 0
+    }
+    // convert the string height to uint64
+    blockHeight, err := strconv.ParseUint(resp.Result.SyncInfo.LatestBlockHeight, 10, 64)
+    if err != nil {
+        return 0
+    }
+    return blockHeight
+}
+
+// IsSync reports whether the node has finished catching up.
+func (resp *JSONRPCResponse) IsSync() bool {
+    if resp == nil {
+        return false
+    }
+
+    return !resp.Result.SyncInfo.CatchingUp
+}
+
+type Client struct {
+    logger         log.Logger
+    mu             sync.RWMutex
+    syncData       *JSONRPCResponse
+    URL            string
+    PollingTimeSec uint
+
+    quit    chan struct{}
+    signals chan os.Signal
+}
+
+func NewHTTPRequest(logger log.Logger, url string, pollingTimeSec uint, enable bool) *Client {
+    c := &Client{
+        logger:         logger,
+        URL:            url,
+        PollingTimeSec: pollingTimeSec,
+        quit:           make(chan struct{}),
+        signals:        make(chan os.Signal, 1),
+    }
+
+    if enable {
+        // Register for SIGINT (Ctrl+C) and SIGTERM (kill) so the poll loop can shut down cleanly.
+        signal.Notify(c.signals, syscall.SIGINT, syscall.SIGTERM)
+        go c.loop()
+    }
+
+    return c
+}
+
+// GetSyncData returns the most recent status response; it is nil until the
+// first successful poll completes.
+func (c *Client) GetSyncData() *JSONRPCResponse {
+    c.mu.RLock()
+    defer c.mu.RUnlock()
+    return c.syncData
+}
+
+func (c *Client) loop() {
+    // wait for 2 minutes for the node to start
+    time.Sleep(2 * time.Minute)
+    ticker := time.NewTicker(time.Duration(c.PollingTimeSec) * time.Second)
+    defer ticker.Stop()
+    // Perform the first fetch immediately rather than waiting for the first tick.
+    c.fetchSyncStatus()
+
+    for {
+        select {
+        case <-ticker.C:
+            c.fetchSyncStatus()
+        case <-c.quit:
+            c.logger.Info("Mamoru SyncProcess Shutting down...")
+            return
+        case <-c.signals:
+            c.logger.Info("Signal received, initiating shutdown...")
+            c.Close()
+        }
+    }
+}
+
+func (c *Client) fetchSyncStatus() {
+    // Create a context with timeout
+    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+    defer cancel()
+
+    c.logger.Info("Mamoru requesting syncData ...")
+    // Send the request and get the response
+    response, err := sendJSONRPCRequest(ctx, c.URL)
+    if err != nil {
+        c.logger.Error("Mamoru Sync", "error", err)
+        return
+    }
+    c.logger.Info("Mamoru Sync", "response", response != nil)
+
+    // Guard the write: the loop goroutine updates syncData while callers read it.
+    c.mu.Lock()
+    c.syncData = response
+    c.mu.Unlock()
+}
+
+func (c *Client) Close() {
+    close(c.quit)
+}
+
+func sendJSONRPCRequest(ctx context.Context, url string) (*JSONRPCResponse, error) {
+    req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
+    if err != nil {
+        return nil, err
+    }
+
+    resp, err := http.DefaultClient.Do(req)
+    if err != nil {
+        return nil, err
+    }
+    defer resp.Body.Close()
+
+    if resp.StatusCode != http.StatusOK {
+        return nil, fmt.Errorf("HTTP request returned status code %d", resp.StatusCode)
+    }
+
+    responseBody, err := io.ReadAll(resp.Body)
+    if err != nil {
+        return nil, err
+    }
+
+    var response JSONRPCResponse
+    if err = json.Unmarshal(responseBody, &response); err != nil {
+        return nil, err
+    }
+
+    return &response, nil
+}
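The polling client is self-contained, so it can gate other work on node sync state outside the sniffer as well. A minimal sketch, assuming a local node serving the status endpoint on the default port (the three-minute sleep only accounts for the loop's built-in two-minute warm-up):

```go
package main

import (
	"fmt"
	"time"

	"github.com/provenance-io/provenance/mamoru_cosmos_sdk/sync_state"
	"github.com/tendermint/tendermint/libs/log"
)

func main() {
	logger := log.NewNopLogger()
	// Poll http://localhost:26657/status every 10 seconds; enable=true starts the loop.
	client := sync_state.NewHTTPRequest(logger, "http://localhost:26657/status", 10, true)
	defer client.Close()

	// The loop sleeps two minutes before its first poll, so early reads return nil
	// (which IsSync treats as "not synced").
	time.Sleep(3 * time.Minute)
	if data := client.GetSyncData(); data.IsSync() {
		fmt.Println("node is synced at height", data.GetCurrentBlockNumber())
	}
}
```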
diff --git a/mamoru_cosmos_sdk/sync_state/client_test.go b/mamoru_cosmos_sdk/sync_state/client_test.go
new file mode 100644
index 0000000000..5aed4a57f8
--- /dev/null
+++ b/mamoru_cosmos_sdk/sync_state/client_test.go
@@ -0,0 +1,146 @@
+package sync_state
+
+import (
+    "context"
+    "net/http"
+    "net/http/httptest"
+    "testing"
+    "time"
+
+    "github.com/stretchr/testify/assert"
+    "github.com/tendermint/tendermint/libs/log"
+)
+
+var mockSyncStatusResponse = `{
+  "jsonrpc": "2.0",
+  "id": -1,
+  "result": {
+    "node_info": {
+      "protocol_version": {
+        "p2p": "8",
+        "block": "11",
+        "app": "0"
+      },
+      "id": "adde11e52e960a6b204ec0fdfbfbf65049d325bf",
+      "listen_addr": "tcp://0.0.0.0:26655",
+      "network": "kava_2222-10",
+      "version": "0.34.27",
+      "channels": "40202122233038606100",
+      "moniker": "mamoru-kava",
+      "other": {
+        "tx_index": "on",
+        "rpc_address": "tcp://127.0.0.1:26658"
+      }
+    },
+    "sync_info": {
+      "latest_block_hash": "320B1CBF4D15D5E5BA89E3D36121385874251C3BD1847C2D6CE47BFCFD4F4D09",
+      "latest_app_hash": "FA397677181078430BEC3E10F829111D554963A94618DDCC2B3DAD73F3FFA54D",
+      "latest_block_height": "9042629",
+      "latest_block_time": "2024-03-18T10:54:16.728631892Z",
+      "earliest_block_hash": "72CD24385249F6BF6F1ECD92E9B9EDA6A5DD241D74A7501EC818BCE132D32E0F",
+      "earliest_app_hash": "785C3EBA43E200377C68E907ADD843EC529B55A931F39C38B9035D34E1C6A1F0",
+      "earliest_block_height": "9037588",
+      "earliest_block_time": "2024-03-18T02:08:51.032328852Z",
+      "catching_up": false
+    },
+    "validator_info": {
+      "address": "60321514488E8840E437FE6E991D76ABFB15C5A7",
+      "pub_key": {
+        "type": "tendermint/PubKeyEd25519",
+        "value": "BFVaUzOk0pmh4hIilFF+9A20fgtUq3o5ngyAUgcWanc="
+      },
+      "voting_power": "0"
+    }
+  }
+}`
+
+func TestNewHTTPRequest(t *testing.T) {
+    logger := log.NewNopLogger()
+    client := NewHTTPRequest(logger, "http://localhost", 10, true)
+    assert.NotNil(t, client)
+}
+
+func TestGetCurrentBlockNumber(t *testing.T) {
+    sync := &JSONRPCResponse{
+        Result: Result{
+            SyncInfo: SyncInfo{
+                LatestBlockHeight: "100",
+            },
+        },
+    }
+    assert.Equal(t, uint64(100), sync.GetCurrentBlockNumber())
+}
+
+func TestGetCurrentBlockNumberWithInvalidHeight(t *testing.T) {
+    sync := &JSONRPCResponse{
+        Result: Result{
+            SyncInfo: SyncInfo{
+                LatestBlockHeight: "invalid",
+            },
+        },
+    }
+    assert.Equal(t, uint64(0), sync.GetCurrentBlockNumber())
+}
+
+func TestIsSync(t *testing.T) {
+    sync := &JSONRPCResponse{
+        Result: Result{
+            SyncInfo: SyncInfo{
+                CatchingUp: false,
+            },
+        },
+    }
+    assert.True(t, sync.IsSync())
+}
+func TestSendJSONRPCRequestSuccess(t *testing.T) {
+    server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+        _, _ = rw.Write([]byte(mockSyncStatusResponse))
+    }))
+    defer server.Close()
+
+    ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
+    defer cancel()
+
+    response, err := sendJSONRPCRequest(ctx, server.URL)
+    assert.NoError(t, err)
+    assert.NotNil(t, response)
+}
+
+func TestSendJSONRPCRequestFailure(t *testing.T) {
+    ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
+    defer cancel()
+
+    _, err := sendJSONRPCRequest(ctx, "http://invalid-url")
+    assert.Error(t, err)
+}
"rpc_address": "tcp://127.0.0.1:26658" + } + }, + "sync_info": { + "latest_block_hash": "320B1CBF4D15D5E5BA89E3D36121385874251C3BD1847C2D6CE47BFCFD4F4D09", + "latest_app_hash": "FA397677181078430BEC3E10F829111D554963A94618DDCC2B3DAD73F3FFA54D", + "latest_block_height": "9042629", + "latest_block_time": "2024-03-18T10:54:16.728631892Z", + "earliest_block_hash": "72CD24385249F6BF6F1ECD92E9B9EDA6A5DD241D74A7501EC818BCE132D32E0F", + "earliest_app_hash": "785C3EBA43E200377C68E907ADD843EC529B55A931F39C38B9035D34E1C6A1F0", + "earliest_block_height": "9037588", + "earliest_block_time": "2024-03-18T02:08:51.032328852Z", + "catching_up": false + }, + "validator_info": { + "address": "60321514488E8840E437FE6E991D76ABFB15C5A7", + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": "BFVaUzOk0pmh4hIilFF+9A20fgtUq3o5ngyAUgcWanc=" + }, + "voting_power": "0" + } + } +}` + +func TestNewHTTPRequest(t *testing.T) { + logger := log.NewNopLogger() + client := NewHTTPRequest(logger, "http://localhost", 10, true) + assert.NotNil(t, client) +} + +func TestGetCurrentBlockNumber(t *testing.T) { + sync := &JSONRPCResponse{ + Result: Result{ + SyncInfo: SyncInfo{ + LatestBlockHeight: "100", + }, + }, + } + assert.Equal(t, uint64(100), sync.GetCurrentBlockNumber()) +} + +func TestGetCurrentBlockNumberWithInvalidHeight(t *testing.T) { + sync := &JSONRPCResponse{ + Result: Result{ + SyncInfo: SyncInfo{ + LatestBlockHeight: "invalid", + }, + }, + } + assert.Equal(t, uint64(0), sync.GetCurrentBlockNumber()) +} + +func TestIsSync(t *testing.T) { + sync := &JSONRPCResponse{ + Result: Result{ + SyncInfo: SyncInfo{ + CatchingUp: false, + }, + }, + } + assert.True(t, sync.IsSync()) +} + +func TestSendJSONRPCRequestSuccess(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + _, _ = rw.Write([]byte(mockSyncStatusResponse)) + })) + defer server.Close() + + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + defer cancel() + + response, err := sendJSONRPCRequest(ctx, server.URL) + assert.NoError(t, err) + assert.NotNil(t, response) +} + +func TestSendJSONRPCRequestFailure(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + defer cancel() + + _, err := sendJSONRPCRequest(ctx, "http://invalid-url") + assert.Error(t, err) +} + +func TestFetchSyncStatus(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + _, _ = rw.Write([]byte(mockSyncStatusResponse)) + })) + defer server.Close() + + logger := log.NewNopLogger() + client := NewHTTPRequest(logger, server.URL, 10, true) + client.fetchSyncStatus() + + assert.True(t, client.GetSyncData().IsSync()) +} + +func TestFetchSyncStatusWithInvalidURL(t *testing.T) { + logger := log.NewNopLogger() + client := NewHTTPRequest(logger, "http://invalid-url", 10, true) + client.fetchSyncStatus() + + assert.Nil(t, client.GetSyncData()) +} + +func TestClose(t *testing.T) { + logger := log.NewNopLogger() + client := NewHTTPRequest(logger, "http://localhost", 10, true) + client.Close() + + _, ok := <-client.quit + assert.False(t, ok) +}