diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file mode 100644 index ea3f1419e8..0000000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1,165 +0,0 @@ -# CODEOWNERS Best Practices -# 1. Per GitHub docs: "Order is important; the last matching pattern takes the most precedence." -# Please define less specific codeowner paths before more specific codeowner paths in order for the more specific rule to have priority - -# Root -* @smartcontractkit/ccip - -# Chains -/common @smartcontractkit/bix-framework -/core/chains/ @smartcontractkit/bix-framework - -# Services -/core/services/directrequest @smartcontractkit/keepers -/core/services/feeds @smartcontractkit/FMS -/core/services/synchronization/telem @smartcontractkit/realtime - -# To be deprecated in Chainlink V3 -/core/services/fluxmonitorv2 @smartcontractkit/foundations -/core/services/job @smartcontractkit/ccip -/core/services/keystore @smartcontractkit/keepers -/core/services/ocr* @smartcontractkit/foundations -/core/services/periodicbackup @smartcontractkit/foundations -/core/services/pg @smartcontractkit/foundations @samsondav -/core/services/pipeline @smartcontractkit/foundations @smartcontractkit/bix-framework -/core/services/telemetry @smartcontractkit/realtime -/core/services/relay/evm/mercury @smartcontractkit/mercury-team -/core/services/webhook @smartcontractkit/foundations @smartcontractkit/bix-framework -/core/services/llo @smartcontractkit/mercury-team - -# VRF-related services -/core/services/vrf @smartcontractkit/vrf-team -/core/services/blockhashstore @smartcontractkit/vrf-team -/core/services/blockheaderfeeder @smartcontractkit/vrf-team -/core/services/pipeline/task.vrf.go @smartcontractkit/vrf-team -/core/services/pipeline/task.vrfv2.go @smartcontractkit/vrf-team -/core/services/pipeline/task.vrfv2plus.go @smartcontractkit/vrf-team - -# Keeper/Automation-related services -/core/services/keeper @smartcontractkit/keepers -/core/services/ocr2/plugins/ocr2keeper @smartcontractkit/keepers - -# Chainlink Functions -core/services/functions @smartcontractkit/functions -core/services/ocr2/plugins/functions @smartcontractkit/functions -core/services/s4 @smartcontractkit/functions -core/services/ocr2/plugins/s4 @smartcontractkit/functions -core/services/ocr2/plugins/threshold @smartcontractkit/functions -core/services/relay/evm/functions @smartcontractkit/functions -core/scripts/functions @smartcontractkit/functions -core/scripts/gateway @smartcontractkit/functions - -# Contracts -/contracts/ @RensR @matYang @RayXpub @elatoskinas - -# First we match on project names to catch files like the compilation scripts, -# gas snapshots and other files not placed in the project directories. -# This could give some false positives, so afterwards we match on the project directories -# to ensure the entire directory is always owned by the correct team.
- -/contracts/**/*keeper* @smartcontractkit/keepers -/contracts/**/*upkeep* @smartcontractkit/keepers -/contracts/**/*automation* @smartcontractkit/keepers -/contracts/gas-snapshots/automation.gas-snapshot @smartcontractkit/keepers -/contracts/**/ccip/ @smartcontractkit/ccip-onchain @makramkd -/contracts/**/*Functions* @smartcontractkit/functions - -/contracts/src/v0.8/functions @smartcontractkit/functions -/contracts/**/*functions* @smartcontractkit/functions -/contracts/**/*llo-feeds* @smartcontractkit/mercury-team -/contracts/**/*vrf* @smartcontractkit/vrf-team -/contracts/**/*l2ep* @smartcontractkit/bix-ship -# TODO: replace with a team tag when ready -/contracts/**/*keystone* @archseer @bolekk @patrick-dowell - -/contracts/src/v0.8/automation @smartcontractkit/keepers -/contracts/src/v0.8/functions @smartcontractkit/functions -# TODO: interfaces folder, folder should be removed and files moved to the correct folders -/contracts/src/v0.8/l2ep @chris-de-leon-cll -/contracts/src/v0.8/llo-feeds @smartcontractkit/mercury-team -# TODO: mocks folder, folder should be removed and files moved to the correct folders -/contracts/src/v0.8/operatorforwarder @smartcontractkit/data-feeds-engineers -/contracts/src/v0.8/shared @RensR @matYang @RayXpub @elatoskinas -# TODO: tests folder, folder should be removed and files moved to the correct folders -# TODO: transmission folder, owner should be found -/contracts/src/v0.8/vrf @smartcontractkit/vrf-team - - -# At the end, match any files missed by the patterns above -/contracts/scripts/native_solc_compile_all_events_mock @smartcontractkit/functions -# Remove changeset files from the codeowners -/contracts/.changeset - - -# Tests -/integration-tests/**/*ccip* @smartcontractkit/test-tooling-team @jasonmci @smartcontractkit/ccip -/integration-tests/**/*keeper* @smartcontractkit/keepers -/integration-tests/**/*automation* @smartcontractkit/keepers -/integration-tests/**/*lm_* @smartcontractkit/liquidity-manager - -# CI/CD -/.github/** @smartcontractkit/releng @smartcontractkit/test-tooling-team @jasonmci @smartcontractkit/ccip -/.github/workflows/integration-tests.yml @smartcontractkit/test-tooling-team @jasonmci -/.github/workflows/**-tests.yml @smartcontractkit/test-tooling-team @jasonmci -/.github/workflows/integration-chaos-tests.yml @smartcontractkit/test-tooling-team @jasonmci -/.github/workflows/integration-tests-publish.yml @smartcontractkit/test-tooling-team @jasonmci -/.github/workflows/performance-tests.yml @smartcontractkit/test-tooling-team @jasonmci - -/.github/workflows/automation-ondemand-tests.yml @smartcontractkit/keepers -/.github/workflows/automation-benchmark-tests.yml @smartcontractkit/keepers -/.github/workflows/automation-load-tests.yml @smartcontractkit/keepers -/.github/workflows/automation-nightly-tests.yml @smartcontractkit/keepers - -/core/chainlink.Dockerfile @smartcontractkit/prodsec-public - -# Dependencies -contracts/scripts/requirements.txt @smartcontractkit/prodsec-public -.tool-versions @smartcontractkit/prodsec-public -.nvmrc @smartcontractkit/prodsec-public -contracts/package.json @smartcontractkit/prodsec-public -contracts/pnpm-lock.yaml @smartcontractkit/prodsec-public -go.mod @smartcontractkit/prodsec-public @smartcontractkit/releng @smartcontractkit/foundations -go.sum @smartcontractkit/prodsec-public @smartcontractkit/releng @smartcontractkit/foundations -integration-tests/go.mod @smartcontractkit/prodsec-public -integration-tests/go.sum @smartcontractkit/prodsec-public -flake.nix @smartcontractkit/prodsec-public
-flake.lock @smartcontractkit/prodsec-public - -# Config -/core/config @samsondav @jmank88 - -# LOOP Plugins -/plugins @jmank88 @krehermann - -# Config -/docs/CONFIG.md @smartcontractkit/foundations @smartcontractkit/devrel -/internal/config/docs.toml @smartcontractkit/foundations @smartcontractkit/devrel - - -# CCIP override -/core/ @smartcontractkit/ccip -/core/scripts/ccip/manual-execution @smartcontractkit/ccip-offchain -/contracts/ @smartcontractkit/ccip-onchain @makramkd @elatoskinas @RayXpub -go.mod @smartcontractkit/ccip @smartcontractkit/prodsec-public @smartcontractkit/releng @smartcontractkit/foundations -go.sum @smartcontractkit/ccip @smartcontractkit/prodsec-public @smartcontractkit/releng @smartcontractkit/foundations -integration-tests/go.mod @smartcontractkit/ccip @smartcontractkit/prodsec-public -integration-tests/go.sum @smartcontractkit/ccip @smartcontractkit/prodsec-public - -# leave snapshots & changeset as ownerless -/contracts/gas-snapshots/ -/contracts/.changeset/ - -# CCIP LM -/core/**/liquiditymanager/ @smartcontractkit/liquidity-manager -/core/services/relay/evm/liquidity_manager.go @smartcontractkit/liquidity-manager -/contracts/**/liquiditymanager/ @smartcontractkit/liquidity-manager - -# CCIP RMN -/contracts/src/v0.8/ccip/RMN.sol @smartcontractkit/rmn -/contracts/src/v0.8/ccip/ARMProxy.sol @smartcontractkit/rmn -/contracts/src/v0.8/ccip/interfaces/IRMN.sol @smartcontractkit/rmn -/contracts/src/v0.8/ccip/test/arm @smartcontractkit/rmn - -# CCIP Capabilities -/core/capabilities/ccip @smartcontractkit/ccip-offchain diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index c934b6b945..0000000000 --- a/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,5 +0,0 @@ -blank_issues_enabled: true -contact_links: - - name: Question - url: https://stackoverflow.com/questions/tagged/chainlink - about: Please ask and answer questions here. diff --git a/.github/ISSUE_TEMPLATE/development.md b/.github/ISSUE_TEMPLATE/development.md deleted file mode 100644 index 45baaccf24..0000000000 --- a/.github/ISSUE_TEMPLATE/development.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -name: Development Issue -about: Report an issue encountered while working on code found in this repository. -title: '[DEVEL] ' -labels: 'Development' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the development issue you are experiencing] - -**Your Environment** -[replace this line with basic information about your environment, such as your operating system and the versions of any relevant tools you are using (e.g. Go, Docker)] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to the names of the files you are working with and any relevant error messages] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/ISSUE_TEMPLATE/faucet.md b/.github/ISSUE_TEMPLATE/faucet.md deleted file mode 100644 index 47d82b0148..0000000000 --- a/.github/ISSUE_TEMPLATE/faucet.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Faucet Issue -about: Report an issue with a Chainlink LINK Faucet.
-title: '[FAUC] ' -labels: 'Faucet' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the Chainlink LINK Faucet issue you are experiencing] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to your testnet address, the name and version of your web browser and wallet, and the link to the faucet transaction on Etherscan] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md deleted file mode 100644 index 0535501239..0000000000 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Feature Request -about: Request a feature. Help us make Chainlink better! -title: '[FEAT] ' -labels: 'Feature Request' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the feature you are requesting] - -**Motivation** -[replace this line with a clear and concise explanation of _why_ you are requesting this feature] - -**Justification** -[replace this line with a clear and concise explanation of _why_ the feature you are requesting is the best way to approach this issue and list other approaches you considered] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as examples or screenshots of similar features] diff --git a/.github/ISSUE_TEMPLATE/node-operator.md b/.github/ISSUE_TEMPLATE/node-operator.md deleted file mode 100644 index 5857679e5e..0000000000 --- a/.github/ISSUE_TEMPLATE/node-operator.md +++ /dev/null @@ -1,30 +0,0 @@ ---- -name: Node Operator Issue -about: Report an issue encountered while operating a Chainlink node. -title: '[NODE] ' -labels: 'Node Operator' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the issue you are experiencing] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to all relevant logs and any other relevant information, such as if you are using a Docker container to run the node, job specification, oracle contract address, transaction IDs, etc.] - -- Network: [e.g. Ethereum Mainnet, Ropsten] -- Blockchain Client: [name and version of blockchain client e.g. Geth v1.9.6] -- Go Version: [e.g. v1.12] -- Operating System: [name and version of operating system running Chainlink node] -- Commit: [log INFO line when starting node] -- Hosting Provider: [e.g. AWS, GCP, self-hosted] -- Startup Command: [e.g. 
`docker run smartcontract/chainlink local n`] - -**Environment Variables** -[replace this line with the output of the environment variables when running the node in debug mode] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/ISSUE_TEMPLATE/smart-contract.md b/.github/ISSUE_TEMPLATE/smart-contract.md deleted file mode 100644 index e4b9b97bf7..0000000000 --- a/.github/ISSUE_TEMPLATE/smart-contract.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Smart Contract Issue -about: Report an issue with smart contracts found in this repository. -title: '[SMRT] ' -labels: 'Smart Contract' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the smart contract issue you are experiencing] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to the names of the smart contract files and the version of the Chainlink software repository in which they are found, contract addresses, transaction IDs, etc.] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/actions/build-chainlink-image/action.yml b/.github/actions/build-chainlink-image/action.yml deleted file mode 100644 index cfbf0acfff..0000000000 --- a/.github/actions/build-chainlink-image/action.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: Build Chainlink Image -description: A composite action that allows building and publishing the Chainlink image for integration testing - -inputs: - tag_suffix: - description: The suffix to append to the image tag (usually blank or "-plugins") - default: "" - dockerfile: - description: The path to the Dockerfile to use (usually core/chainlink.Dockerfile or plugins/chainlink.Dockerfile) - default: core/chainlink.Dockerfile - git_commit_sha: - description: The git commit sha to use for the image tag - default: ${{ github.sha }} - AWS_REGION: - description: "AWS region to use for ECR" - AWS_ROLE_TO_ASSUME: - description: "AWS role to assume for ECR" - dep_evm_sha: - description: The chainlink-evm commit sha to use in go deps - required: false - check_image_exists: - description: "Check if the image exists in ECR before building" - required: false - default: 'false' - -runs: - using: composite - steps: - - name: Check if image exists - if: ${{ inputs.dep_evm_sha != '' || inputs.check_image_exists == 'true'}} - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: chainlink - tag: ${{ inputs.git_commit_sha }}${{ inputs.tag_suffix }} - AWS_REGION: ${{ inputs.AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists != 'true' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - cl_repo: smartcontractkit/ccip - cl_ref: ${{ inputs.git_commit_sha }} - cl_dockerfile: ${{ inputs.dockerfile }} - push_tag: ${{ env.CHAINLINK_IMAGE }}:${{ inputs.git_commit_sha }}${{ inputs.tag_suffix }} - 
QA_AWS_REGION: ${{ inputs.AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ inputs.AWS_ROLE_TO_ASSUME }} - GO_COVER_FLAG: true - dep_evm_sha: ${{ inputs.dep_evm_sha }} - - name: Print Chainlink Image Built - shell: sh - run: | - echo "### Chainlink node image tag used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY diff --git a/.github/actions/build-sign-publish-chainlink/action.yml b/.github/actions/build-sign-publish-chainlink/action.yml deleted file mode 100644 index 67ca3068ef..0000000000 --- a/.github/actions/build-sign-publish-chainlink/action.yml +++ /dev/null @@ -1,312 +0,0 @@ -name: Build and Publish Chainlink - -description: A composite action that allows building and publishing signed chainlink images. - -inputs: - # Inputs for publishing - publish: - description: When set to the string boolean value of "true", the resulting built image will be published - default: "false" - required: false - - dockerfile: - description: Path to the Dockerfile (relative to the repo root) - default: core/chainlink.Dockerfile - required: false - dockerhub_username: - description: Username for Docker Hub to avoid rate limits when pulling public images - required: false - dockerhub_password: - description: Password for Docker Hub to avoid rate limits when pulling public images - required: false - ecr-hostname: - description: The ECR registry scope - default: public.ecr.aws - required: false - ecr-image-name: - description: | - The image name with path, in the format of `[registry]/repository`. For private ECR repos the registry name is optional, where for public repos, it is required. - Eg. Public ECR repo `chainlink` and registry alias `chainlinklabs` should be `chainlinklabs/chainlink`. For a private ECR repo `chainlink` the image name should be `chainlink` - default: chainlink/chainlink - required: false - ecr-tag-suffix: - description: Docker image tag suffix - required: false - git-commit-sha: - description: Git commit SHA used as metadata when building the application (appears in logs) - default: ${{ github.event.pull_request.head.sha || github.sha }} - required: false - aws-role-to-assume: - description: The AWS role to assume as the CD user, if any. 
Used in configuring the docker/login-action - required: false - aws-role-duration-seconds: - description: The duration of the role assumed - required: false - aws-region: - description: The AWS region the ECR repository is located in, should only be needed for public ECR repositories, used in configuring docker/login-action - required: false - - # Inputs for signing - sign-images: - description: When set to the string boolean value of "true", the resulting build image will be signed - default: "false" - required: false - cosign-private-key: - description: The private key to be used with cosign to sign the image - required: false - cosign-public-key: - description: The public key to be used with cosign for verification - required: false - cosign-password: - description: The password to decrypt the cosign private key needed to sign the image - required: false - sign-method: - description: Build image will be signed using keypair or keyless methods - default: "keypair" - required: true - verify-signature: - description: When set to the string boolean value of "true", the resulting build image signature will be verified - default: "false" - required: false -outputs: - docker-image-tag: - description: The docker image tag that was built and pushed - value: ${{ steps.save-non-root-image-name-env.outputs.image-tag }} - docker-image-digest: - description: The docker image digest that was built and pushed - value: ${{ steps.save-non-root-image-name-env.outputs.image-digest }} - -runs: - using: composite - steps: - - name: Set shared variables - shell: bash - # See https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings - run: | - SHARED_IMAGES=${{ inputs.ecr-hostname }}/${{ inputs.ecr-image-name }} - - SHARED_TAG_LIST=$(cat << EOF - type=ref,event=branch,suffix=${{ inputs.ecr-tag-suffix }} - type=semver,pattern={{version}},suffix=${{ inputs.ecr-tag-suffix }} - type=sha,format=short,suffix=${{ inputs.ecr-tag-suffix }} - EOF - ) - - SHARED_BUILD_ARGS=$(cat << EOF - COMMIT_SHA=${{ inputs.git-commit-sha }} - EOF - ) - - echo "shared-images<<EOF" >> $GITHUB_ENV - echo "$SHARED_IMAGES" >> $GITHUB_ENV - echo "EOF" >> $GITHUB_ENV - - echo "shared-tag-list<<EOF" >> $GITHUB_ENV - echo "$SHARED_TAG_LIST" >> $GITHUB_ENV - echo "EOF" >> $GITHUB_ENV - - echo "shared-build-args<<EOF" >> $GITHUB_ENV - echo "$SHARED_BUILD_ARGS" >> $GITHUB_ENV - echo "EOF" >> $GITHUB_ENV - - - if: inputs.publish == 'true' - # Log in to AWS for publish to ECR - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - with: - role-to-assume: ${{ inputs.aws-role-to-assume }} - role-duration-seconds: ${{ inputs.aws-role-duration-seconds }} - aws-region: ${{ inputs.aws-region }} - mask-aws-account-id: true - role-session-name: build-sign-publish-chainlink - - - if: inputs.publish == 'true' - name: Login to ECR - uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # v3.1.0 - with: - registry: ${{ inputs.ecr-hostname }} - - - name: Setup Docker Buildx - uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3.3.0 - - - name: Generate docker metadata for root image - id: meta-root - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1 - env: - DOCKER_METADATA_PR_HEAD_SHA: "true" - with: - # list of Docker images to use as base name for tags - images: ${{ env.shared-images }} - # XXX: DO NOT USE SHARED TAGS HERE - tags: | - type=ref,event=branch,suffix=${{
inputs.ecr-tag-suffix }}-root - type=semver,pattern={{version}},suffix=${{ inputs.ecr-tag-suffix }}-root - type=sha,format=short,suffix=${{ inputs.ecr-tag-suffix }}-root - - # To avoid rate limiting from Docker Hub, we login with a paid user account. - - name: Login to Docker Hub - if: inputs.dockerhub_username && inputs.dockerhub_password - uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # v3.1.0 - with: - username: ${{ inputs.dockerhub_username }} - password: ${{ inputs.dockerhub_password }} - - - name: Build and push root docker image - id: buildpush-root - uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0 - with: - push: ${{ inputs.publish }} - context: . - load: ${{ contains(inputs.publish, false) }} - tags: ${{ steps.meta-root.outputs.tags }} - labels: ${{ steps.meta-root.outputs.labels }} - file: ${{ inputs.dockerfile }} - build-args: | - CHAINLINK_USER=root - ${{ env.shared-build-args }} - - - name: Save root image name in GITHUB_ENV - id: save-root-image-name-env - shell: sh - run: | - IMAGES_NAME_RAW=${{ fromJSON(steps.buildpush-root.outputs.metadata)['image.name'] }} - IMAGE_NAME=$(echo "$IMAGES_NAME_RAW" | cut -d"," -f1) - echo "root_image_name=${IMAGE_NAME}" >> $GITHUB_ENV - - - name: Generate docker metadata for non-root image - id: meta-nonroot - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1 - env: - DOCKER_METADATA_PR_HEAD_SHA: "true" - with: - flavor: | - latest=auto - prefix= - suffix= - images: ${{ env.shared-images }} - tags: ${{ env.shared-tag-list }} - - # To avoid rate limiting from Docker Hub, we login with a paid user account. - - name: Login to Docker Hub - if: inputs.dockerhub_username && inputs.dockerhub_password - uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 # v3.1.0 - with: - username: ${{ inputs.dockerhub_username }} - password: ${{ inputs.dockerhub_password }} - - - name: Build and push non-root docker image - id: buildpush-nonroot - uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0 - with: - push: ${{ inputs.publish }} - context: . 
- load: ${{ contains(inputs.publish, false) }} - tags: ${{ steps.meta-nonroot.outputs.tags }} - labels: ${{ steps.meta-nonroot.outputs.labels }} - file: ${{ inputs.dockerfile }} - build-args: | - CHAINLINK_USER=chainlink - ${{ env.shared-build-args }} - - - name: Save non-root image name in GITHUB_ENV and GITHUB_STEP_SUMMARY - id: save-non-root-image-name-env - shell: sh - run: | - IMAGES_NAME_RAW=${{ fromJSON(steps.buildpush-nonroot.outputs.metadata)['image.name'] }} - IMAGE_DIGEST=${{ fromJSON(steps.buildpush-nonroot.outputs.metadata)['containerimage.digest'] }} - IMAGE_NAME=$(echo "$IMAGES_NAME_RAW" | cut -d"," -f1) - IMAGE_TAG=$(echo "$IMAGES_NAME_RAW" | cut -d":" -f2) - echo "nonroot_image_name=${IMAGE_NAME}" >> $GITHUB_ENV - echo '### Docker Image' >> $GITHUB_STEP_SUMMARY - echo "Image Name: ${IMAGE_NAME}" >> $GITHUB_STEP_SUMMARY - echo "Image Digest: ${IMAGE_DIGEST}" >> $GITHUB_STEP_SUMMARY - echo "image-tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT - echo "image-digest=${IMAGE_DIGEST}" >> $GITHUB_OUTPUT - - - name: Check if non-root image runs as root - id: check-nonroot-runs-root - shell: sh - env: - PUBLISH: ${{ inputs.publish }} - run: | - echo "Fail build if non-root image runs as user: root" - # if we're publishing the image, it doesn't get loaded into the local docker daemon - # so we need to pull the image into our daemon - if [ $PUBLISH = "true" ]; then - docker pull "${nonroot_image_name}" - fi - docker inspect "${nonroot_image_name}" | jq -r '.[].Config.User' | ( ! grep "root" ) - - - if: inputs.sign-images == 'true' - name: Install cosign - uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4 # v3.4.0 - with: - cosign-release: "v1.6.0" - - - if: inputs.sign-images == 'true' && inputs.sign-method == 'keypair' - name: Sign the published root Docker image using keypair method - shell: sh - env: - COSIGN_PASSWORD: "${{ inputs.cosign-password }}" - run: | - echo "${{ inputs.cosign-private-key }}" > cosign.key - cosign sign --key cosign.key "${{ env.root_image_name }}" - rm -f cosign.key - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keypair' - name: Verify the signature of the published root Docker image using keypair - shell: sh - run: | - echo "${{ inputs.cosign-public-key }}" > cosign.key - cosign verify --key cosign.key "${{ env.root_image_name }}" - rm -f cosign.key - - - if: inputs.sign-images == 'true' && inputs.sign-method == 'keyless' - name: Sign the published root Docker image using keyless method - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign sign "${{ env.root_image_name }}" - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keyless' - name: Verify the signature of the published root Docker image using keyless - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign verify "${{ env.root_image_name }}" - - - if: inputs.sign-images == 'true' && inputs.sign-method == 'keypair' - name: Sign the published non-root Docker image using keypair method - shell: sh - env: - COSIGN_PASSWORD: "${{ inputs.cosign-password }}" - run: | - echo "${{ inputs.cosign-private-key }}" > cosign.key - cosign sign --key cosign.key "${{ env.nonroot_image_name }}" - rm -f cosign.key - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keypair' - name: Verify the signature of the published non-root Docker image using keypair - shell: sh - run: | - echo "${{ inputs.cosign-public-key }}" > cosign.key - cosign verify --key cosign.key "${{ env.nonroot_image_name }}" - rm -f cosign.key - - - if: 
inputs.sign-images == 'true' && inputs.sign-method == 'keyless' - name: Sign the published non-root Docker image using keyless method - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign sign "${{ env.nonroot_image_name }}" - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keyless' - name: Verify the signature of the published non-root Docker image using keyless - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign verify "${{ env.nonroot_image_name }}" diff --git a/.github/actions/build-test-image/action.yml b/.github/actions/build-test-image/action.yml deleted file mode 100644 index b719e296f8..0000000000 --- a/.github/actions/build-test-image/action.yml +++ /dev/null @@ -1,123 +0,0 @@ -name: Build Test Image -description: A composite action that allows building and publishing the test remote runner image - -inputs: - repository: - description: The docker repository for the image - default: chainlink-ccip-tests - required: false - tag: - description: The tag to use by default and to use for checking image existence - default: ${{ github.sha }} - required: false - other_tags: - description: Other tags to push if needed - required: false - suites: - description: The test suites to build into the image - default: chaos migration reorg smoke soak benchmark load ccip-tests/load ccip-tests/smoke ccip-tests/chaos - required: false - QA_AWS_ROLE_TO_ASSUME: - description: The AWS role to assume as the CD user, if any. Used in configuring the docker/login-action - required: true - QA_AWS_REGION: - description: The AWS region the ECR repository is located in, should only be needed for public ECR repositories, used in configuring docker/login-action - required: true - QA_AWS_ACCOUNT_NUMBER: - description: The AWS account number for the ECR registry, used to construct the full image names - required: true - -runs: - using: composite - steps: - - # Base Test Image Logic - - name: Get CTF Version - id: version - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/mod-version@fc3e0df622521019f50d772726d6bf8dc919dd38 # v2.3.19 - with: - go-project-path: ./integration-tests - module-name: github.com/smartcontractkit/chainlink-testing-framework/lib - enforce-semantic-tag: false - - name: Get CTF sha - if: steps.version.outputs.is_semantic == 'false' - id: short_sha - env: - VERSION: ${{ steps.version.outputs.version }} - shell: bash - run: | - short_sha="${VERSION##*-}" - echo "short sha is: ${short_sha}" - echo "short_sha=${short_sha}" >> "$GITHUB_OUTPUT" - - name: Checkout chainlink-testing-framework - if: steps.version.outputs.is_semantic == 'false' - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink-testing-framework - ref: main - fetch-depth: 0 - path: ctf - - name: Get long sha - if: steps.version.outputs.is_semantic == 'false' - id: long_sha - env: - SHORT_SHA: ${{ steps.short_sha.outputs.short_sha }} - shell: bash - run: | - cd ctf - long_sha=$(git rev-parse ${SHORT_SHA}) - echo "sha is: ${long_sha}" - echo "long_sha=${long_sha}" >> "$GITHUB_OUTPUT" - - name: Check if test base image exists - if: steps.version.outputs.is_semantic == 'false' - id: check-base-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: test-base-image - tag: ${{ steps.long_sha.outputs.long_sha }} - AWS_REGION: ${{ inputs.QA_AWS_REGION }} -
AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Base Image - if: steps.version.outputs.is_semantic == 'false' && steps.check-base-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/docker/build-push@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - env: - BASE_IMAGE_NAME: ${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/test-base-image:${{ steps.long_sha.outputs.long_sha }} - with: - tags: ${{ env.BASE_IMAGE_NAME }} - file: ctf/k8s/Dockerfile.base - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - # End Base Image Logic - - # Test Runner Logic - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: ${{ inputs.repository }} - tag: ${{ inputs.tag }} - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Build and Publish Test Runner - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/docker/build-push@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - tags: | - ${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/${{ inputs.repository }}:${{ inputs.tag }} - ${{ inputs.other_tags }} - file: ./integration-tests/test.Dockerfile - build-args: | - BASE_IMAGE=${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/test-base-image - IMAGE_VERSION=${{ steps.long_sha.outputs.long_sha || steps.version.outputs.version }} - SUITES="${{ inputs.suites }}" - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Print Image Built - shell: sh - env: - INPUTS_REPOSITORY: ${{ inputs.repository }} - INPUTS_TAG: ${{ inputs.tag }} - run: | - echo "### ${INPUTS_REPOSITORY} image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${INPUTS_TAG}\`" >>$GITHUB_STEP_SUMMARY - # End Test Runner Logic diff --git a/.github/actions/delete-deployments/action.yml b/.github/actions/delete-deployments/action.yml deleted file mode 100644 index eaf7e0f61b..0000000000 --- a/.github/actions/delete-deployments/action.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Delete Deployments -description: Delete deployments by env and ref -inputs: - environment: - required: true - description: The Github environment to filter deployments by - ref: - required: true - description: The ref to filter deployments by - dry-run: - required: false - description: Whether to actually delete deployments or not - github-token: - description: "The Github token to use for authentication" - required: true - default: ${{ github.token }} - num-of-pages: - required: false - description: The number of pages (of 100 per page) to fetch deployments from, set to 'all' to fetch all deployments - default: "all" - starting-page: - required: false - description: The page to start fetching deployments from, only valid if num-of-pages is set to a number - repository: - required: false - description: The owner and repository name to delete deployments from, defaults to the current repository, ex. 
'smartcontractkit/chainlink' - default: ${{ github.repository }} - -runs: - using: composite - steps: - - uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0 - with: - version: ^9.0.0 - - - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 - with: - node-version: "20" - cache: "pnpm" - cache-dependency-path: "./.github/actions/delete-deployments/pnpm-lock.yaml" - - - name: Install dependencies - shell: bash - run: pnpm i --prod - working-directory: "./.github/actions/delete-deployments" - - - name: Run deployment deleter - shell: bash - run: pnpm start - env: - NUM_OF_PAGES: ${{ inputs.num-of-pages }} - STARTING_PAGE: ${{ inputs.starting-page }} - GITHUB_TOKEN: ${{ inputs.github-token }} - ENVIRONMENT: ${{ inputs.environment }} - REF: ${{ inputs.ref }} - DRY_RUN: ${{ inputs.dry-run }} - OWNER: ${{ inputs.owner }} - REPOSITORY: ${{ inputs.repository }} - working-directory: "./.github/actions/delete-deployments" diff --git a/.github/actions/delete-deployments/index.ts b/.github/actions/delete-deployments/index.ts deleted file mode 100644 index e38f1957d2..0000000000 --- a/.github/actions/delete-deployments/index.ts +++ /dev/null @@ -1,232 +0,0 @@ -import { Octokit } from "@octokit/action"; -import { info, warning, isDebug } from "@actions/core"; -import { throttling } from "@octokit/plugin-throttling"; -import { retry } from "@octokit/plugin-retry"; - -async function main() { - const { - dryRun, - environment, - numOfPages, - owner, - ref, - repo, - debug, - startingPage, - } = getInputs(); - const octokit = getOctokit(debug); - - const deployments = await getDeployments({ - octokit, - owner, - repo, - environment, - ref, - paginateOptions: { - numOfPages, - startingPage, - }, - }); - const deploymentIds = deployments.map((d) => d.id); - if (dryRun) { - info(`Dry run: would delete deployments (${deploymentIds.length})`); - return; - } - - info(`Deleting deployments (${deploymentIds.length})`); - const deleteDeployments = deploymentIds.map(async (id) => { - const sharedArgs = { - owner, - repo, - deployment_id: id, - request: { - retries: 0, - }, - }; - - const setStatus = await octokit.repos - .createDeploymentStatus({ - ...sharedArgs, - state: "inactive", - }) - .then(() => true) - .catch((e) => { - warning( - `Marking deployment id ${id} to "inactive" failed: ${e.message}` - ); - return false; - }); - if (!setStatus) return false; - - return octokit.repos - .deleteDeployment({ - ...sharedArgs, - }) - .then(() => true) - .catch((e) => { - warning(`Deleting deployment id ${id} failed: ${e.message}`); - return false; - }); - }); - - const processed = await Promise.all(deleteDeployments); - const succeeded = processed.filter((p) => !!p); - info( - `Successfully deleted ${succeeded.length}/${processed.length} deployments` - ); -} -main(); - -function getInputs() { - const debug = !!(process.env.DEBUG || isDebug()); - - const dryRun = process.env.DRY_RUN === "true"; - - const environment = process.env.ENVIRONMENT; - if (!environment) throw new Error("ENVIRONMENT not set"); - - const ref = process.env.REF; - - const repository = process.env.REPOSITORY; - if (!repository) throw new Error("REPOSITORY not set"); - const [owner, repo] = repository.split("/"); - - const rawStartingPage = process.env.STARTING_PAGE; - - let startingPage: number | undefined; - if (rawStartingPage) { - startingPage = parseInt(rawStartingPage); - if (isNaN(startingPage)) { - throw new Error(`STARTING_PAGE is not a number: ${rawStartingPage}`); - } - if (startingPage < 0) { - 
throw new Error( - `STARTING_PAGE must be a positive integer or zero: ${rawStartingPage}` - ); - } - info(`Starting from page ${startingPage}`); - } - - const rawNumOfPages = process.env.NUM_OF_PAGES; - let numOfPages: "all" | number = "all"; - if (rawNumOfPages === "all") { - info("Fetching all pages of deployments"); - } else { - const parsedPages = parseInt(rawNumOfPages || ""); - if (isNaN(parsedPages)) { - throw new Error(`NUM_OF_PAGES is not a number: ${rawNumOfPages}`); - } - if (parsedPages < 1) { - throw new Error(`NUM_OF_PAGES must be greater than 0: ${rawNumOfPages}`); - } - numOfPages = parsedPages; - } - - if (numOfPages === "all" && startingPage) { - throw new Error(`Cannot use STARTING_PAGE with NUM_OF_PAGES=all`); - } - - const parsedInputs = { - environment, - ref, - owner, - repo, - numOfPages, - startingPage, - dryRun, - debug, - }; - info(`Configuration: ${JSON.stringify(parsedInputs)}`); - return parsedInputs; -} - -function getOctokit(debug: boolean) { - const OctokitAPI = Octokit.plugin(throttling, retry); - const octokit = new OctokitAPI({ - log: debug ? console : undefined, - throttle: { - onRateLimit: (retryAfter, options, octokit, retryCount) => { - octokit.log.warn( - // Types are busted from octokit - //@ts-expect-error - `Request quota exhausted for request ${options.method} ${options.url}` - ); - - octokit.log.info(`Retrying after ${retryAfter} seconds!`); - return true; - }, - onSecondaryRateLimit: (_retryAfter, options, octokit) => { - octokit.log.warn( - // Types are busted from octokit - //@ts-expect-error - `SecondaryRateLimit detected for request ${options.method} ${options.url}` - ); - return true; - }, - }, - }); - - return octokit; -} - -async function getDeployments({ - octokit, - owner, - repo, - environment, - ref, - paginateOptions, -}: { - octokit: ReturnType<typeof getOctokit>; - owner: string; - repo: string; - environment: string; - ref?: string; - paginateOptions: { - numOfPages: number | "all"; - startingPage?: number; - }; -}) { - const listDeploymentsSharedArgs: Parameters< - typeof octokit.repos.listDeployments - >[0] = { - owner, - repo, - environment, - ref, - per_page: 100, - request: { - retries: 20, - }, - }; - - if (paginateOptions.numOfPages === "all") { - info(`Fetching all deployments`); - const deployments = await octokit.paginate(octokit.repos.listDeployments, { - ...listDeploymentsSharedArgs, - }); - - return deployments; - } else { - info( - `Fetching ${ - paginateOptions.numOfPages * listDeploymentsSharedArgs.per_page!
- } deployments` - ); - const deployments: Awaited< - ReturnType<typeof octokit.repos.listDeployments> - >["data"] = []; - - const offset = paginateOptions.startingPage || 0; - for (let i = offset; i < paginateOptions.numOfPages + offset; i++) { - const deploymentPage = await octokit.repos.listDeployments({ - ...listDeploymentsSharedArgs, - page: i, - }); - - deployments.push(...deploymentPage.data); - } - - return deployments; - } -} diff --git a/.github/actions/delete-deployments/package.json b/.github/actions/delete-deployments/package.json deleted file mode 100644 index a2d6f83908..0000000000 --- a/.github/actions/delete-deployments/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "delete-deployments", - "version": "1.0.0", - "description": "", - "main": "index.ts", - "scripts": { - "start": "ts-node -T .", - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "ISC", - "engines": { - "node": ">=18", - "pnpm": ">=9" - }, - "dependencies": { - "@actions/core": "^1.10.1", - "@octokit/action": "^6.1.0", - "@octokit/plugin-retry": "^6.0.1", - "@octokit/plugin-throttling": "^7.0.0", - "ts-node": "^10.9.2" - }, - "devDependencies": { - "@octokit/types": "^11.1.0", - "@types/node": "^20.12.10", - "typescript": "^5.4.5" - } -} diff --git a/.github/actions/delete-deployments/pnpm-lock.yaml b/.github/actions/delete-deployments/pnpm-lock.yaml deleted file mode 100644 index d9f25b3f80..0000000000 --- a/.github/actions/delete-deployments/pnpm-lock.yaml +++ /dev/null @@ -1,408 +0,0 @@ -lockfileVersion: '9.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - dependencies: - '@actions/core': - specifier: ^1.10.1 - version: 1.10.1 - '@octokit/action': - specifier: ^6.1.0 - version: 6.1.0 - '@octokit/plugin-retry': - specifier: ^6.0.1 - version: 6.0.1(@octokit/core@5.0.0) - '@octokit/plugin-throttling': - specifier: ^7.0.0 - version: 7.0.0(@octokit/core@5.0.0) - ts-node: - specifier: ^10.9.2 - version: 10.9.2(@types/node@20.12.10)(typescript@5.4.5) - devDependencies: - '@octokit/types': - specifier: ^11.1.0 - version: 11.1.0 - '@types/node': - specifier: ^20.12.10 - version: 20.12.10 - typescript: - specifier: ^5.4.5 - version: 5.4.5 - -packages: - - '@actions/core@1.10.1': - resolution: {integrity: sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==} - - '@actions/http-client@2.1.1': - resolution: {integrity: sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==} - - '@cspotcode/source-map-support@0.8.1': - resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} - engines: {node: '>=12'} - - '@jridgewell/resolve-uri@3.1.1': - resolution: {integrity: sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==} - engines: {node: '>=6.0.0'} - - '@jridgewell/sourcemap-codec@1.4.15': - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - - '@jridgewell/trace-mapping@0.3.9': - resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - - '@octokit/action@6.1.0': - resolution: {integrity: sha512-lo+nHx8kAV86bxvOVOI3vFjX3gXPd/L7guAUbvs3pUvnR2KC+R7yjBkA1uACt4gYhs4LcWP3AXSGQzsbeN2XXw==} - engines: {node: '>= 18'} - - '@octokit/auth-action@4.0.0': - resolution: {integrity:
sha512-sMm9lWZdiX6e89YFaLrgE9EFs94k58BwIkvjOtozNWUqyTmsrnWFr/M5LolaRzZ7Kmb5FbhF9hi7FEeE274SoQ==} - engines: {node: '>= 18'} - - '@octokit/auth-token@4.0.0': - resolution: {integrity: sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==} - engines: {node: '>= 18'} - - '@octokit/core@5.0.0': - resolution: {integrity: sha512-YbAtMWIrbZ9FCXbLwT9wWB8TyLjq9mxpKdgB3dUNxQcIVTf9hJ70gRPwAcqGZdY6WdJPZ0I7jLaaNDCiloGN2A==} - engines: {node: '>= 18'} - - '@octokit/endpoint@9.0.0': - resolution: {integrity: sha512-szrQhiqJ88gghWY2Htt8MqUDO6++E/EIXqJ2ZEp5ma3uGS46o7LZAzSLt49myB7rT+Hfw5Y6gO3LmOxGzHijAQ==} - engines: {node: '>= 18'} - - '@octokit/graphql@7.0.1': - resolution: {integrity: sha512-T5S3oZ1JOE58gom6MIcrgwZXzTaxRnxBso58xhozxHpOqSTgDS6YNeEUvZ/kRvXgPrRz/KHnZhtb7jUMRi9E6w==} - engines: {node: '>= 18'} - - '@octokit/openapi-types@18.0.0': - resolution: {integrity: sha512-V8GImKs3TeQRxRtXFpG2wl19V7444NIOTDF24AWuIbmNaNYOQMWRbjcGDXV5B+0n887fgDcuMNOmlul+k+oJtw==} - - '@octokit/openapi-types@20.0.0': - resolution: {integrity: sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==} - - '@octokit/plugin-paginate-rest@9.2.1': - resolution: {integrity: sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': '5' - - '@octokit/plugin-rest-endpoint-methods@10.4.1': - resolution: {integrity: sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': '5' - - '@octokit/plugin-retry@6.0.1': - resolution: {integrity: sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': '>=5' - - '@octokit/plugin-throttling@7.0.0': - resolution: {integrity: sha512-KL2k/d0uANc8XqP5S64YcNFCudR3F5AaKO39XWdUtlJIjT9Ni79ekWJ6Kj5xvAw87udkOMEPcVf9xEge2+ahew==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': ^5.0.0 - - '@octokit/request-error@5.0.0': - resolution: {integrity: sha512-1ue0DH0Lif5iEqT52+Rf/hf0RmGO9NWFjrzmrkArpG9trFfDM/efx00BJHdLGuro4BR/gECxCU2Twf5OKrRFsQ==} - engines: {node: '>= 18'} - - '@octokit/request@8.1.1': - resolution: {integrity: sha512-8N+tdUz4aCqQmXl8FpHYfKG9GelDFd7XGVzyN8rc6WxVlYcfpHECnuRkgquzz+WzvHTK62co5di8gSXnzASZPQ==} - engines: {node: '>= 18'} - - '@octokit/types@11.1.0': - resolution: {integrity: sha512-Fz0+7GyLm/bHt8fwEqgvRBWwIV1S6wRRyq+V6exRKLVWaKGsuy6H9QFYeBVDV7rK6fO3XwHgQOPxv+cLj2zpXQ==} - - '@octokit/types@12.6.0': - resolution: {integrity: sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==} - - '@tsconfig/node10@1.0.9': - resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} - - '@tsconfig/node12@1.0.11': - resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} - - '@tsconfig/node14@1.0.3': - resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} - - '@tsconfig/node16@1.0.4': - resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} - - '@types/node@20.12.10': - resolution: {integrity: sha512-Eem5pH9pmWBHoGAT8Dr5fdc5rYA+4NAovdM4EktRPVAAiJhmWWfQrA0cFhAbOsQdSfIHjAud6YdkbL69+zSKjw==} - - 
acorn-walk@8.2.0: - resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} - engines: {node: '>=0.4.0'} - - acorn@8.10.0: - resolution: {integrity: sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==} - engines: {node: '>=0.4.0'} - hasBin: true - - arg@4.1.3: - resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - - before-after-hook@2.2.3: - resolution: {integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==} - - bottleneck@2.19.5: - resolution: {integrity: sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==} - - create-require@1.1.1: - resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - - deprecation@2.3.1: - resolution: {integrity: sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==} - - diff@4.0.2: - resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} - engines: {node: '>=0.3.1'} - - is-plain-object@5.0.0: - resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} - engines: {node: '>=0.10.0'} - - make-error@1.3.6: - resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - - once@1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - - ts-node@10.9.2: - resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} - hasBin: true - peerDependencies: - '@swc/core': '>=1.2.50' - '@swc/wasm': '>=1.2.50' - '@types/node': '*' - typescript: '>=2.7' - peerDependenciesMeta: - '@swc/core': - optional: true - '@swc/wasm': - optional: true - - tunnel@0.0.6: - resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==} - engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'} - - typescript@5.4.5: - resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} - engines: {node: '>=14.17'} - hasBin: true - - undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - - undici@6.16.0: - resolution: {integrity: sha512-HQfVddOTb5PJtfLnJ1Px8bNGyIg/z7WTj1hjUSna1Itsv59Oca9JdclIU08ToNqvWWXjFLRzc9rqjnpfw5UWcQ==} - engines: {node: '>=18.17'} - - universal-user-agent@6.0.0: - resolution: {integrity: sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==} - - uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - - v8-compile-cache-lib@3.0.1: - resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - - wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - - yn@3.1.1: - resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} - engines: {node: '>=6'} - 
-snapshots: - - '@actions/core@1.10.1': - dependencies: - '@actions/http-client': 2.1.1 - uuid: 8.3.2 - - '@actions/http-client@2.1.1': - dependencies: - tunnel: 0.0.6 - - '@cspotcode/source-map-support@0.8.1': - dependencies: - '@jridgewell/trace-mapping': 0.3.9 - - '@jridgewell/resolve-uri@3.1.1': {} - - '@jridgewell/sourcemap-codec@1.4.15': {} - - '@jridgewell/trace-mapping@0.3.9': - dependencies: - '@jridgewell/resolve-uri': 3.1.1 - '@jridgewell/sourcemap-codec': 1.4.15 - - '@octokit/action@6.1.0': - dependencies: - '@octokit/auth-action': 4.0.0 - '@octokit/core': 5.0.0 - '@octokit/plugin-paginate-rest': 9.2.1(@octokit/core@5.0.0) - '@octokit/plugin-rest-endpoint-methods': 10.4.1(@octokit/core@5.0.0) - '@octokit/types': 12.6.0 - undici: 6.16.0 - - '@octokit/auth-action@4.0.0': - dependencies: - '@octokit/auth-token': 4.0.0 - '@octokit/types': 11.1.0 - - '@octokit/auth-token@4.0.0': {} - - '@octokit/core@5.0.0': - dependencies: - '@octokit/auth-token': 4.0.0 - '@octokit/graphql': 7.0.1 - '@octokit/request': 8.1.1 - '@octokit/request-error': 5.0.0 - '@octokit/types': 11.1.0 - before-after-hook: 2.2.3 - universal-user-agent: 6.0.0 - - '@octokit/endpoint@9.0.0': - dependencies: - '@octokit/types': 11.1.0 - is-plain-object: 5.0.0 - universal-user-agent: 6.0.0 - - '@octokit/graphql@7.0.1': - dependencies: - '@octokit/request': 8.1.1 - '@octokit/types': 11.1.0 - universal-user-agent: 6.0.0 - - '@octokit/openapi-types@18.0.0': {} - - '@octokit/openapi-types@20.0.0': {} - - '@octokit/plugin-paginate-rest@9.2.1(@octokit/core@5.0.0)': - dependencies: - '@octokit/core': 5.0.0 - '@octokit/types': 12.6.0 - - '@octokit/plugin-rest-endpoint-methods@10.4.1(@octokit/core@5.0.0)': - dependencies: - '@octokit/core': 5.0.0 - '@octokit/types': 12.6.0 - - '@octokit/plugin-retry@6.0.1(@octokit/core@5.0.0)': - dependencies: - '@octokit/core': 5.0.0 - '@octokit/request-error': 5.0.0 - '@octokit/types': 12.6.0 - bottleneck: 2.19.5 - - '@octokit/plugin-throttling@7.0.0(@octokit/core@5.0.0)': - dependencies: - '@octokit/core': 5.0.0 - '@octokit/types': 11.1.0 - bottleneck: 2.19.5 - - '@octokit/request-error@5.0.0': - dependencies: - '@octokit/types': 11.1.0 - deprecation: 2.3.1 - once: 1.4.0 - - '@octokit/request@8.1.1': - dependencies: - '@octokit/endpoint': 9.0.0 - '@octokit/request-error': 5.0.0 - '@octokit/types': 11.1.0 - is-plain-object: 5.0.0 - universal-user-agent: 6.0.0 - - '@octokit/types@11.1.0': - dependencies: - '@octokit/openapi-types': 18.0.0 - - '@octokit/types@12.6.0': - dependencies: - '@octokit/openapi-types': 20.0.0 - - '@tsconfig/node10@1.0.9': {} - - '@tsconfig/node12@1.0.11': {} - - '@tsconfig/node14@1.0.3': {} - - '@tsconfig/node16@1.0.4': {} - - '@types/node@20.12.10': - dependencies: - undici-types: 5.26.5 - - acorn-walk@8.2.0: {} - - acorn@8.10.0: {} - - arg@4.1.3: {} - - before-after-hook@2.2.3: {} - - bottleneck@2.19.5: {} - - create-require@1.1.1: {} - - deprecation@2.3.1: {} - - diff@4.0.2: {} - - is-plain-object@5.0.0: {} - - make-error@1.3.6: {} - - once@1.4.0: - dependencies: - wrappy: 1.0.2 - - ts-node@10.9.2(@types/node@20.12.10)(typescript@5.4.5): - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.9 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.4 - '@types/node': 20.12.10 - acorn: 8.10.0 - acorn-walk: 8.2.0 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.4.5 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - - tunnel@0.0.6: {} - - typescript@5.4.5: {} - - undici-types@5.26.5: {} 
- - undici@6.16.0: {} - - universal-user-agent@6.0.0: {} - - uuid@8.3.2: {} - - v8-compile-cache-lib@3.0.1: {} - - wrappy@1.0.2: {} - - yn@3.1.1: {} diff --git a/.github/actions/delete-deployments/test.sh b/.github/actions/delete-deployments/test.sh deleted file mode 100755 index 18b7726088..0000000000 --- a/.github/actions/delete-deployments/test.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh -export NUM_OF_PAGES=all -export ENVIRONMENT=integration -export DRY_RUN=false -export REPOSITORY=smartcontractkit/chainlink -export REF=fix/golint -export GITHUB_ACTION=true - -pnpm start diff --git a/.github/actions/delete-deployments/tsconfig.json b/.github/actions/delete-deployments/tsconfig.json deleted file mode 100644 index 4b36d4a178..0000000000 --- a/.github/actions/delete-deployments/tsconfig.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - - /* Modules */ - "module": "NodeNext" /* Specify what module code is generated. */, - // "rootDir": "./", /* Specify the root folder within your source files. */ - "moduleResolution": "NodeNext" /* Specify how TypeScript looks up a file from a given module specifier. */, - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. 
*/ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ - - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ - - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - // "outDir": "./", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - "noEmit": true /* Disable emitting files from a compilation. */, - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. 
*/ - - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, - - /* Type Checking */ - "strict": true /* Enable all strict type-checking options. */, - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": false /* Skip type checking all .d.ts files. */ - }, - "include": ["src", "test"] -} diff --git a/.github/actions/detect-solidity-file-changes/action.yml b/.github/actions/detect-solidity-file-changes/action.yml deleted file mode 100644 index b86c91dbb4..0000000000 --- a/.github/actions/detect-solidity-file-changes/action.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: 'Detect Solidity File Changes Composite Action' -description: 'Detects changes in solidity files and outputs the result.' 
-outputs: - changes: - description: 'Whether or not changes were detected' - value: ${{ steps.changed_files.outputs.src }} -runs: - using: 'composite' - steps: - - - name: Filter paths - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changed_files - with: - list-files: 'csv' - filters: | - src: - - 'contracts/**/*' - - '.github/workflows/solidity.yml' - - '.github/workflows/solidity-foundry.yml' - - '.github/workflows/solidity-wrappers.yml' diff --git a/.github/actions/detect-solidity-foundry-version/action.yml b/.github/actions/detect-solidity-foundry-version/action.yml deleted file mode 100644 index b37f1e2509..0000000000 --- a/.github/actions/detect-solidity-foundry-version/action.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: 'Detect Foundry version in GNUmakefile' -description: 'Detects Foundry version in GNUmakefile' -inputs: - working-directory: - description: 'The GNUmakefile directory' - required: false - default: 'contracts' -outputs: - foundry-version: - description: 'Foundry version found in GNUmakefile' - value: ${{ steps.extract-foundry-version.outputs.foundry-version }} -runs: - using: 'composite' - steps: - - name: Extract Foundry version - id: extract-foundry-version - shell: bash - working-directory: ${{ inputs.working-directory }} - run: | - foundry_version=$(grep -Eo "foundryup --version [^ ]+" GNUmakefile | awk '{print $3}') - if [ -z "$foundry_version" ]; then - echo "::error::Foundry version not found in GNUmakefile" - exit 1 - fi - echo "Foundry version found: $foundry_version" - echo "foundry-version=$foundry_version" >> $GITHUB_OUTPUT diff --git a/.github/actions/detect-solidity-readonly-file-changes/action.yml b/.github/actions/detect-solidity-readonly-file-changes/action.yml deleted file mode 100644 index faca16d53f..0000000000 --- a/.github/actions/detect-solidity-readonly-file-changes/action.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: 'Detect Solidity Readonly Files Changes Composite Action' -description: 'Detects changes in readonly solidity files and fails if they are modified.' -outputs: - changes: - description: 'Whether or not changes were detected' - value: ${{ steps.changed_files.outputs.read_only_sol }} -runs: - using: 'composite' - steps: - - - name: Filter paths - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changed_files - with: - list-files: 'csv' - filters: | - read_only_sol: - - 'contracts/src/v0.8/interfaces/**/*' - - 'contracts/src/v0.8/automation/v1_2/**/*' - - 'contracts/src/v0.8/automation/v1_3/**/*' - - 'contracts/src/v0.8/automation/v2_0/**/*' - - - name: Fail if read-only files have changed - if: ${{ steps.changed_files.outputs.read_only_sol == 'true' }} - shell: bash - run: | - echo "One or more read-only Solidity file(s) have changed." - for file in $(echo "${{ steps.changed_files.outputs.read_only_sol_files }}" | tr ',' ' '); do - echo "$file was changed" - done - exit 1 diff --git a/.github/actions/golangci-lint/action.yml b/.github/actions/golangci-lint/action.yml deleted file mode 100644 index f3697ed719..0000000000 --- a/.github/actions/golangci-lint/action.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: CI lint for Golang -description: Runs CI lint for Golang -inputs: - # general inputs - id: - description: Unique metrics collection id - required: true - name: - description: Name of the lint action - required: true - go-directory: - description: Go directory to run commands from - default: "."
- # setup-go inputs - only-modules: - description: Set to 'true' to only cache modules - default: "false" - cache-version: - description: Set this to cache bust - default: "1" - go-version-file: - description: Set where the go version file is located at - default: "go.mod" - go-module-file: - description: Set where the go module file is located at - default: "go.sum" - # grafana inputs - gc-host: - description: "grafana hostname" - gc-basic-auth: - description: "grafana basic auth" - gc-org-id: - description: "grafana org id" - -runs: - using: composite - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup Go - uses: ./.github/actions/setup-go - with: - only-modules: ${{ inputs.only-modules }} - cache-version: ${{ inputs.cache-version }} - go-version-file: ${{ inputs.go-version-file }} - go-module-file: ${{ inputs.go-module-file }} - - name: Touching core/web/assets/index.html - shell: bash - run: mkdir -p core/web/assets && touch core/web/assets/index.html - - name: Build binary - working-directory: ${{ inputs.go-directory }} - shell: bash - run: go build ./... - - name: golangci-lint - uses: golangci/golangci-lint-action@3cfe3a4abbb849e10058ce4af15d205b6da42804 # v4.0.0 - with: - version: v1.59.1 - # We already cache these directories in setup-go - skip-pkg-cache: true - skip-build-cache: true - # only-new-issues is only applicable to PRs, otherwise it is always set to false - only-new-issues: false # disabled for PRs due to unreliability - args: --out-format colored-line-number,checkstyle:golangci-lint-report.xml - working-directory: ${{ inputs.go-directory }} - - name: Print lint report artifact - if: failure() - shell: bash - run: cat ${{ inputs.go-directory }}/golangci-lint-report.xml - - name: Store lint report artifact - if: always() - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - with: - name: golangci-lint-report - path: ${{ inputs.go-directory }}/golangci-lint-report.xml - - name: Collect Metrics - if: always() - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: chainlink-golang-ci-${{ inputs.id }} - basic-auth: ${{ inputs.gc-basic-auth }} - hostname: ${{ inputs.gc-host }} - org-id: ${{ inputs.gc-org-id }} - this-job-name: ${{ inputs.name }} - continue-on-error: true diff --git a/.github/actions/goreleaser-build-sign-publish/README.md b/.github/actions/goreleaser-build-sign-publish/README.md deleted file mode 100644 index d6bf7e6fd4..0000000000 --- a/.github/actions/goreleaser-build-sign-publish/README.md +++ /dev/null @@ -1,131 +0,0 @@ -# goreleaser-build-sign-publish - -> goreleaser wrapper action - -## workflows - -### build publish - -```yaml -name: goreleaser - -on: - push: - tags: - - "v*" - -jobs: - goreleaser: - runs-on: ubuntu-latest - environment: release - permissions: - id-token: write - contents: read - env: - MACOS_SDK_VERSION: 12.3 - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Configure aws credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 - with: - role-to-assume: ${{ secrets.aws-role-arn }} - role-duration-seconds: ${{ secrets.aws-role-dur-sec }} - aws-region: ${{ secrets.aws-region }} - - name: Cache macos sdk - id: sdk-cache - uses: actions/cache@v3 - with: - path: ${{ format('MacOSX{0}.sdk', env.MACOS_SDK_VERSION) }} - key: ${{ runner.OS }}-${{ env.MACOS_SDK_VERSION
}}-macos-sdk-cache-${{ hashFiles('**/SDKSettings.json') }} - restore-keys: | - ${{ runner.OS }}-${{ env.MACOS_SDK_VERSION }}-macos-sdk-cache- - - name: Get macos sdk - if: steps.sdk-cache.outputs.cache-hit != 'true' - run: | - curl -L https://github.com/joseluisq/macosx-sdks/releases/download/${MACOS_SDK_VERSION}/MacOSX${MACOS_SDK_VERSION}.sdk.tar.xz > MacOSX${MACOS_SDK_VERSION}.sdk.tar.xz - tar -xf MacOSX${MACOS_SDK_VERSION}.sdk.tar.xz - - name: Build, sign, and publish - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - enable-goreleaser-snapshot: "false" - docker-registry: ${{ secrets.aws-ecr-registry }} - goreleaser-exec: goreleaser - goreleaser-config: .goreleaser.yaml - macos-sdk-dir: ${{ format('MacOSX{0}.sdk', env.MACOS_SDK_VERSION) }} - env: - GITHUB_TOKEN: ${{ secrets.gh-token }} -``` - -### snapshot release - -```yaml -- name: Build, sign, and publish image - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - enable-goreleaser-snapshot: "true" - docker-registry: ${{ secrets.aws-ecr-registry }} - goreleaser-exec: goreleaser - goreleaser-config: .goreleaser.yaml -``` - -### image signing - -```yaml -- name: Build, sign, and publish - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - enable-goreleaser-snapshot: "false" - enable-cosign: "true" - docker-registry: ${{ secrets.aws-ecr-registry }} - goreleaser-exec: goreleaser - goreleaser-config: .goreleaser.yaml - cosign-password: ${{ secrets.cosign-password }} - cosign-public-key: ${{ secrets.cosign-public-key }} - cosign-private-key: ${{ secrets.cosign-private-key }} - macos-sdk-dir: MacOSX12.3.sdk -``` - -## customizing - -### inputs - -The following inputs can be used as `step.with` keys - -| Name | Type | Default | Description | -| ---------------------------- | ------ | ------------------ | ----------------------------------------------------------------------- | -| `goreleaser-version` | String | `1.13.1` | `goreleaser` version | -| `zig-version` | String | `0.10.0` | `zig` version | -| `cosign-version` | String | `v1.13.1` | `cosign` version | -| `macos-sdk-dir` | String | `MacOSX12.3.sdk` | MacOSX sdk directory | -| `enable-docker-publish` | Bool | `true` | Enable publishing of Docker images / manifests | -| `docker-registry` | String | `localhost:5001` | Docker registry | -| `enable-goreleaser-snapshot` | Bool | `false` | Enable goreleaser build / release snapshot | -| `goreleaser-exec` | String | `goreleaser` | The goreleaser executable, can invoke wrapper script | -| `goreleaser-config` | String | `.goreleaser.yaml` | The goreleaser configuration yaml | -| `enable-cosign` | Bool | `false` | Enable signing of Docker images | -| `cosign-public-key` | String | `""` | The public key to be used with cosign for verification | -| `cosign-private-key` | String | `""` | The private key to be used with cosign to sign the image | -| `cosign-password` | String | `""` | The password to decrypt the cosign private key needed to sign the image | - -## testing - -- bring up local docker registry - -```sh -docker run -d --restart=always -p "127.0.0.1:5001:5000" --name registry registry:2 -``` - -- run snapshot release, publish to local docker registry - -```sh -GORELEASER_EXEC="…" -``` [the rest of this README example, the diff header for the companion script .github/actions/goreleaser-build-sign-publish/action_utils, and the top of that script were lost in extraction; the deleted lines below are from that script] -# publish snapshot docker images -# must have label=org.opencontainers.image.revision=<full_sha> set -_publish_snapshot_images() { - local full_sha=$(git rev-parse HEAD) - local images=$(docker images --filter "label=org.opencontainers.image.revision=$full_sha" --format "{{.Repository}}:{{.Tag}}") - for image in $images; do -
docker push "$image" - done -} - -# publish snapshot docker manifest lists -# must have label=org.opencontainers.image.revision= set -_publish_snapshot_manifests() { - local docker_manifest_extra_args=$DOCKER_MANIFEST_EXTRA_ARGS - local full_sha=$(git rev-parse HEAD) - local images=$(docker images --filter "label=org.opencontainers.image.revision=$full_sha" --format "{{.Repository}}:{{.Tag}}" | sort) - local arches=(amd64 arm64) - local raw_manifest_lists="" - for image in $images; do - for arch in "${arches[@]}"; do - image=${image%"-$arch"} - done - raw_manifest_lists+="$image"$'\n' - done - local manifest_lists=$(echo "$raw_manifest_lists" | sort | uniq) - for manifest_list in $manifest_lists; do - manifests="" - for arch in "${arches[@]}"; do - archExists=$(echo "$images" | grep -c "$manifest_lists-$arch") - if [[ $archExists -ne 0 ]]; then - manifests+="$manifest_list-$arch " - fi - done - docker manifest create $manifest_list $manifests $docker_manifest_extra_args - docker manifest push "$manifest_list" - done -} - -# wrapper function to invoke goreleaser release -goreleaser_release() { - if [[ $ENABLE_COSIGN == "true" ]]; then - echo "$COSIGN_PUBLIC_KEY" > cosign.pub - echo "$COSIGN_PRIVATE_KEY" > cosign.key - fi - if [[ -n $MACOS_SDK_DIR ]]; then - MACOS_SDK_DIR=$(echo "$(cd "$(dirname "$MACOS_SDK_DIR")" || exit; pwd)/$(basename "$MACOS_SDK_DIR")") - fi - if [[ $ENABLE_GORELEASER_SNAPSHOT == "true" ]]; then - $GORELEASER_EXEC release --snapshot --clean --config "$GORELEASER_CONFIG" "$@" - if [[ $ENABLE_DOCKER_PUBLISH == "true" ]]; then - _publish_snapshot_images - _publish_snapshot_manifests - fi - else - $GORELEASER_EXEC release --clean --config "$GORELEASER_CONFIG" "$@" - fi - if [[ $ENABLE_COSIGN == "true" ]]; then - rm -rf cosign.pub - rm -rf cosign.key - fi -} - -"$@" diff --git a/.github/actions/notify-slack-jobs-result/README.md b/.github/actions/notify-slack-jobs-result/README.md deleted file mode 100644 index 298930c0d9..0000000000 --- a/.github/actions/notify-slack-jobs-result/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# Notify Slack Jobs Result - -Sends a Slack message to a specified channel detailing the results of one to many GHA job results using a regex. The job results will be grouped by the `github_job_name_regex` and displayed underneath the `message_title`, with the regex matching group displayed as an individual result. This is primarily designed for when you have test groups running in a matrix, and would like condensed reporting on their status by group. It's often accompanied by posting a Slack message before to start a thread, then attaching all the results to that thread like we do in the reporting section of the [live-testnet-test.yml workflow](../../workflows/live-testnet-tests.yml). Check out the example below, where we post an initial summary message, then use this action to thread together specific results: - -```yaml -message_title: Optimism Goerli -github_job_name_regex: ^Optimism Goerli (?.*?) 
Tests$ # Note that the regex MUST have a capturing group named "cap" -``` - -![example](image.png) - -## Inputs - -```yaml -inputs: - github_token: - description: "The GitHub token to use for authentication (usually ${{ github.token }})" - required: true - github_repository: - description: "The GitHub owner/repository to use for authentication (usually ${{ github.repository }}))" - required: true - workflow_run_id: - description: "The workflow run ID to get the results from (usually ${{ github.run_id }})" - required: true - github_job_name_regex: - description: "The regex to use to match 1..many job name(s) to collect results from. Should include a capture group named 'cap' for the part of the job name you want to display in the Slack message (e.g. ^Client Compatability Test (?.*?)$)" - required: true - message_title: - description: "The title of the Slack message" - required: true - slack_channel_id: - description: "The Slack channel ID to post the message to" - required: true - slack_thread_ts: - description: "The Slack thread timestamp to post the message to, handy for keeping multiple related results in a single thread" - required: false -``` diff --git a/.github/actions/notify-slack-jobs-result/action.yml b/.github/actions/notify-slack-jobs-result/action.yml deleted file mode 100644 index d8fda4fb90..0000000000 --- a/.github/actions/notify-slack-jobs-result/action.yml +++ /dev/null @@ -1,126 +0,0 @@ -name: Notify Slack Jobs Result -description: Will send a notification in Slack for the result of a GitHub action run, typically for test results -inputs: - github_token: - description: "The GitHub token to use for authentication (usually github.token)" - required: true - github_repository: - description: "The GitHub owner/repository to use for authentication (usually github.repository))" - required: true - workflow_run_id: - description: "The workflow run ID to get the results from (usually github.run_id)" - required: true - github_job_name_regex: - description: "The regex to use to match 1..many job name(s) to collect results from. Should include a capture group named 'cap' for the part of the job name you want to display in the Slack message (e.g. ^Client Compatability Test (?.*?)$)" - required: true - message_title: - description: "The title of the Slack message" - required: true - slack_channel_id: - description: "The Slack channel ID to post the message to" - required: true - slack_bot_token: - description: "The Slack bot token to use for authentication which needs permission and an installed app in the channel" - required: true - slack_thread_ts: - description: "The Slack thread timestamp to post the message to, handy for keeping multiple related results in a single thread" - required: false - base64_parsed_results: - description: "Base64 encoded parsed results to use" - required: false - -runs: - using: composite - steps: - - name: Get Results - shell: bash - id: test-results - run: | - if [ -n "${{ inputs.base64_parsed_results }}" ]; then - echo "Using base64 parsed results" - PARSED_RESULTS=$(echo "${{ inputs.base64_parsed_results }}" | base64 -d) - else - go install github.com/smartcontractkit/chainlink-testing-framework/tools/workflowresultparser@v1.0.0 - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - workflowresultparser -workflowRunID ${{ inputs.workflow_run_id }} -githubToken ${{ inputs.github_token }} -githubRepo "${{ inputs.github_repository }}" -jobNameRegex "${{ inputs.github_job_name_regex }}" -outputFile=output.json - - if [ ! 
-f output.json ]; then - PARSED_RESULTS='""' - else - PARSED_RESULTS=$(cat output.json | jq -c "select(has(\"results\")) | .results[]") - fi - - fi - - echo "Parsed Results:" - echo $PARSED_RESULTS - - ALL_SUCCESS=true - - if [ "$PARSED_RESULTS" != '""' ]; then - echo "Checking for failures" - echo "$PARSED_RESULTS" | jq -s | jq -r '.[] | select(.conclusion != ":white_check_mark:")' - for row in $(echo "$PARSED_RESULTS" | jq -s | jq -r '.[] | select(.conclusion != ":white_check_mark:")'); do - ALL_SUCCESS=false - break - done - echo "Success: $ALL_SUCCESS" - - echo all_success=$ALL_SUCCESS >> $GITHUB_OUTPUT - - FORMATTED_RESULTS=$(echo $PARSED_RESULTS | jq -s '[.[] - | { - conclusion: .conclusion, - cap: .cap, - html_url: .html_url - } - ] - | map("{\"type\": \"section\", \"text\": {\"type\": \"mrkdwn\", \"text\": \"<\(.html_url)|\(.cap)>: \(.conclusion)\"}}") - | join(",")') - else - echo "Nothing to post, no results found" - exit 0 - fi - - echo "Formatted Results:" - echo $FORMATTED_RESULTS - - # Cleans out backslashes and quotes from jq - CLEAN_RESULTS=$(echo "$FORMATTED_RESULTS" | sed 's/\\\"/"/g' | sed 's/^"//;s/"$//') - - echo "Clean Results" - echo $CLEAN_RESULTS - - echo results=$CLEAN_RESULTS >> $GITHUB_OUTPUT - - name: Post Results - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - if: steps.test-results.outputs.results != '' - env: - SLACK_BOT_TOKEN: ${{ inputs.slack_bot_token }} - with: - channel-id: ${{ inputs.slack_channel_id }} - payload: | - { - "thread_ts": "${{ inputs.slack_thread_ts }}", - "attachments": [ - { - "color": "${{ steps.test-results.outputs.all_success == 'true' && '#2E7D32' || '#C62828' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "${{ inputs.message_title }} ${{ steps.test-results.outputs.all_success == 'true' && ':white_check_mark:' || ':x:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - ${{ steps.test-results.outputs.results }} - ] - } - ] - } diff --git a/.github/actions/notify-slack-jobs-result/image.png b/.github/actions/notify-slack-jobs-result/image.png deleted file mode 100644 index 3bd398101f..0000000000 Binary files a/.github/actions/notify-slack-jobs-result/image.png and /dev/null differ diff --git a/.github/actions/setup-create-base64-config-ccip/action.yml b/.github/actions/setup-create-base64-config-ccip/action.yml deleted file mode 100644 index e741a2dac0..0000000000 --- a/.github/actions/setup-create-base64-config-ccip/action.yml +++ /dev/null @@ -1,151 +0,0 @@ -name: Create Base64 Config for CCIP Tests -description: A composite action that creates a base64-encoded config to be used by ccip integration tests - -inputs: - runId: - description: The run id - existingNamespace: - description: If test needs to run against already deployed namespace - testLogCollect: - description: Whether to always collect logs, even for passing tests - default: "false" - selectedNetworks: - description: The networks to run tests against - chainlinkVersion: - description: The git commit sha to use for the image tag - upgradeVersion: - description: The git commit sha to use for the image tag - logstreamLogTargets: - description: Where to send logs (e.g. file, loki) - customEvmNodes: - description: Custom EVM nodes to use in key=value format, where key is chain id and value is docker image to use. 
If provided, the number of selectedNetworks entries must equal the number of customEvmNodes - evmNodeLogLevel: - description: Log level for the custom EVM nodes - default: "info" -outputs: - base64_config: - description: The base64-encoded config - value: ${{ steps.base64_config_override.outputs.base64_config }} - -runs: - using: composite - steps: - - name: Prepare Base64 TOML override - shell: bash - id: base64_config_override - env: - RUN_ID: ${{ inputs.runId }} - SELECTED_NETWORKS: ${{ inputs.selectedNetworks }} - EXISTING_NAMESPACE: ${{ inputs.existingNamespace }} - TEST_LOG_COLLECT: ${{ inputs.testLogCollect }} - CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }} - UPGRADE_VERSION: ${{ inputs.upgradeVersion }} - LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }} - CUSTOM_EVM_NODES: ${{ inputs.customEvmNodes }} - EVM_NODE_LOG_LEVEL: ${{ inputs.evmNodeLogLevel }} - run: | - function convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS") - log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS") - - if [ "$TEST_LOG_COLLECT" = "true" ]; then - test_log_collect=true - else - test_log_collect=false - fi - - # make sure the number of networks and nodes match - IFS=',' read -r -a networks_array <<< "$SELECTED_NETWORKS" - IFS=',' read -r -a nodes_array <<< "$CUSTOM_EVM_NODES" - - networks_count=${#networks_array[@]} - nodes_count=${#nodes_array[@]} - - # Initialize or clear CONFIG_TOML environment variable - custom_nodes_toml="" - - # Check if the number of CUSTOM_EVM_NODES is zero - if [ $nodes_count -eq 0 ]; then - echo "The number of CUSTOM_EVM_NODES is zero, won't output any custom private Ethereum network configurations." - else - if [ $networks_count -ne $nodes_count ]; then - echo "The number of elements in SELECTED_NETWORKS (${networks_count}) and CUSTOM_EVM_NODES does not match (${nodes_count})."
- exit 1 - else - for i in "${!networks_array[@]}"; do - IFS='=' read -r chain_id docker_image <<< "${nodes_array[i]}" - custom_nodes_toml+=" - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}] - ethereum_version=\"\" - execution_layer=\"\" - - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}.EthereumChainConfig] - seconds_per_slot=3 - slots_per_epoch=2 - genesis_delay=15 - validator_count=4 - chain_id=${chain_id} - addresses_to_fund=[\"0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266\", \"0x70997970C51812dc3A010C7d01b50e0d17dc79C8\"] - node_log_level=\"${EVM_NODE_LOG_LEVEL}\" - - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}.EthereumChainConfig.HardForkEpochs] - Deneb=500 - - [CCIP.Env.PrivateEthereumNetworks.${networks_array[i]}.CustomDockerImages] - execution_layer=\"${docker_image}\" - " - done - fi - fi - - cat << EOF > config.toml - [CCIP] - [CCIP.Env] - EnvToConnect="$EXISTING_NAMESPACE" - [CCIP.Env.Network] - selected_networks = $selected_networks - [CCIP.Env.NewCLCluster] - [CCIP.Env.NewCLCluster.Common] - [CCIP.Env.NewCLCluster.Common.ChainlinkImage] - version="$CHAINLINK_VERSION" - - $custom_nodes_toml - - [CCIP.Env.Logging] - test_log_collect=$test_log_collect - run_id="$RUN_ID" - - [CCIP.Env.Logging.LogStream] - log_targets=$log_targets - - [CCIP.Groups.load] - TestRunName = '$EXISTING_NAMESPACE' - - [CCIP.Groups.smoke] - TestRunName = '$EXISTING_NAMESPACE' - - EOF - - # Check if UPGRADE_VERSION is not empty and append to config.toml - if [ -n "$UPGRADE_VERSION" ]; then - cat << EOF >> config.toml - [CCIP.Env.NewCLCluster.Common.ChainlinkUpgradeImage] - version="$UPGRADE_VERSION" - EOF - fi - - BASE64_CONFIG=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG - echo "base64_config=$BASE64_CONFIG" >> $GITHUB_OUTPUT diff --git a/.github/actions/setup-create-base64-config-live-testnets/action.yml b/.github/actions/setup-create-base64-config-live-testnets/action.yml deleted file mode 100644 index 64fc134b46..0000000000 --- a/.github/actions/setup-create-base64-config-live-testnets/action.yml +++ /dev/null @@ -1,144 +0,0 @@ -name: Create Base64 Config -description: A composite action that creates a base64-encoded config to be used by integration tests - -inputs: - runId: - description: The run id - testLogCollect: - description: Whether to always collect logs, even for passing tests - default: "false" - chainlinkImage: - description: The chainlink image to use - default: "public.ecr.aws/chainlink/chainlink" - chainlinkVersion: - description: The git commit sha to use for the image tag - chainlinkPostgresVersion: - description: The postgres version to use with the chainlink node - default: "15.6" - pyroscopeServer: - description: URL of Pyroscope server - pyroscopeEnvironment: - description: Name of Pyroscope environment - pyroscopeKey: - description: Pyroscope server key - lokiEndpoint: - description: Loki push endpoint - lokiTenantId: - description: Loki tenant id - lokiBasicAuth: - description: Loki basic auth - logstreamLogTargets: - description: Where to send logs (e.g.
file, loki) - grafanaUrl: - description: Grafana URL - grafanaDashboardUrl: - description: Grafana dashboard URL - grafanaBearerToken: - description: Grafana bearer token - network: - description: Network to run tests on - httpEndpoints: - description: HTTP endpoints to use for network - wsEndpoints: - description: WS endpoints to use for network - fundingKeys: - description: Funding keys to use for network - -runs: - using: composite - steps: - - name: Prepare Base64 TOML override - shell: bash - id: base64-config-override - env: - RUN_ID: ${{ inputs.runId }} - PYROSCOPE_SERVER: ${{ inputs.pyroscopeServer }} - PYROSCOPE_ENVIRONMENT: ${{ inputs.pyroscopeEnvironment }} - PYROSCOPE_KEY: ${{ inputs.pyroscopeKey }} - CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }} - CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }} - CHAINLINK_POSTGRES_VERSION: ${{ inputs.chainlinkPostgresVersion }} - LOKI_ENDPOINT: ${{ inputs.lokiEndpoint }} - LOKI_TENANT_ID: ${{ inputs.lokiTenantId }} - LOKI_BASIC_AUTH: ${{ inputs.lokiBasicAuth }} - LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }} - GRAFANA_URL: ${{ inputs.grafanaUrl }} - GRAFANA_DASHBOARD_URL: ${{ inputs.grafanaDashboardUrl }} - GRAFANA_BEARER_TOKEN: ${{ inputs.grafanaBearerToken }} - NETWORK: ${{ inputs.network }} - HTTP_ENDPOINTS: ${{ inputs.httpEndpoints }} - WS_ENDPOINTS: ${{ inputs.wsEndpoints }} - FUNDING_KEYS: ${{ inputs.fundingKeys }} - run: | - convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - if [ -n "$PYROSCOPE_SERVER" ]; then - pyroscope_enabled=true - else - pyroscope_enabled=false - fi - - grafana_bearer_token="" - if [ -n "$GRAFANA_BEARER_TOKEN" ]; then - grafana_bearer_token="bearer_token_secret=\"$GRAFANA_BEARER_TOKEN\"" - fi - - cat << EOF > config.toml - [Common] - chainlink_node_funding=0.5 - - [ChainlinkImage] - image="$CHAINLINK_IMAGE" - version="$CHAINLINK_VERSION" - postgres_version="$CHAINLINK_POSTGRES_VERSION" - - [Pyroscope] - enabled=$pyroscope_enabled - server_url="$PYROSCOPE_SERVER" - environment="$PYROSCOPE_ENVIRONMENT" - key_secret="$PYROSCOPE_KEY" - - [Logging] - run_id="$RUN_ID" - - [Logging.LogStream] - log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS") - - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_ENDPOINT" - basic_auth_secret="$LOKI_BASIC_AUTH" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="$GRAFANA_DASHBOARD_URL" - $grafana_bearer_token - - [Network] - selected_networks=["$NETWORK"] - - [Network.RpcHttpUrls] - "$NETWORK" = $(convert_to_toml_array "$HTTP_ENDPOINTS") - - [Network.RpcWsUrls] - "$NETWORK" = $(convert_to_toml_array "$WS_ENDPOINTS") - - [Network.WalletKeys] - "$NETWORK" = $(convert_to_toml_array "$FUNDING_KEYS") - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - touch .root_dir diff --git a/.github/actions/setup-create-base64-upgrade-config/action.yml b/.github/actions/setup-create-base64-upgrade-config/action.yml deleted file mode 100644 index c2d0bc19f3..0000000000 --- a/.github/actions/setup-create-base64-upgrade-config/action.yml +++ /dev/null @@ -1,125 +0,0 @@ -name: Create Base64 Upgrade Config -description: A composite action that creates a base64-encoded config to be used by Chainlink version upgrade tests -
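
All three base64-config composite actions in this diff (the CCIP one, the live-testnets one above, and the upgrade one below) inline the same `convert_to_toml_array` helper. For reviewers, here is a standalone sketch of its behavior, runnable outside CI; the sample inputs are illustrative only:

```sh
#!/usr/bin/env bash
# Standalone copy of the convert_to_toml_array helper inlined by these
# actions: splits a comma-separated string and emits a TOML array literal.
convert_to_toml_array() {
  local IFS=','
  local input_array=($1)                        # word-split the argument on commas
  local toml_array_format="["
  for element in "${input_array[@]}"; do
    toml_array_format+="\"$element\","          # quote each element
  done
  toml_array_format="${toml_array_format%,}]"   # drop trailing comma, close bracket
  echo "$toml_array_format"
}

convert_to_toml_array "file,loki"     # prints ["file","loki"]
convert_to_toml_array "SIMULATED"     # prints ["SIMULATED"]
```
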
-inputs: - selectedNetworks: - description: The networks to run tests against - chainlinkImage: - description: The chainlink image to upgrade from - default: "public.ecr.aws/chainlink/chainlink" - chainlinkVersion: - description: The git commit sha to use for the image tag - chainlinkPostgresVersion: - description: The postgres version to use with the chainlink node - default: "15.6" - upgradeImage: - description: The chainlink image to upgrade to - default: "public.ecr.aws/chainlink/chainlink" - upgradeVersion: - description: The git commit sha to use for the image tag - runId: - description: The run id - testLogCollect: - description: Whether to always collect logs, even for passing tests - default: "false" - lokiEndpoint: - description: Loki push endpoint - lokiTenantId: - description: Loki tenant id - lokiBasicAuth: - description: Loki basic auth - logstreamLogTargets: - description: Where to send logs (e.g. file, loki) - grafanaUrl: - description: Grafana URL - grafanaDashboardUrl: - description: Grafana dashboard URL - grafanaBearerToken: - description: Grafana bearer token - -runs: - using: composite - steps: - - name: Prepare Base64 TOML override - shell: bash - id: base64-config-override - env: - SELECTED_NETWORKS: ${{ inputs.selectedNetworks }} - CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }} - CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }} - CHAINLINK_POSTGRES_VERSION: ${{ inputs.chainlinkPostgresVersion }} - UPGRADE_IMAGE: ${{ inputs.upgradeImage }} - UPGRADE_VERSION: ${{ inputs.upgradeVersion }} - RUN_ID: ${{ inputs.runId }} - TEST_LOG_COLLECT: ${{ inputs.testLogCollect }} - LOKI_ENDPOINT: ${{ inputs.lokiEndpoint }} - LOKI_TENANT_ID: ${{ inputs.lokiTenantId }} - LOKI_BASIC_AUTH: ${{ inputs.lokiBasicAuth }} - LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }} - GRAFANA_URL: ${{ inputs.grafanaUrl }} - GRAFANA_DASHBOARD_URL: ${{ inputs.grafanaDashboardUrl }} - GRAFANA_BEARER_TOKEN: ${{ inputs.grafanaBearerToken }} - run: | - function convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS") - - if [ "$TEST_LOG_COLLECT" = "true" ]; then - test_log_collect=true - else - test_log_collect=false - fi - - log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS") - - grafana_bearer_token="" - if [ -n "$GRAFANA_BEARER_TOKEN" ]; then - grafana_bearer_token="bearer_token_secret=\"$GRAFANA_BEARER_TOKEN\"" - fi - - cat << EOF > config.toml - [Network] - selected_networks=$selected_networks - - [ChainlinkImage] - image="$CHAINLINK_IMAGE" - version="$CHAINLINK_VERSION" - postgres_version="$CHAINLINK_POSTGRES_VERSION" - - [ChainlinkUpgradeImage] - image="$UPGRADE_IMAGE" - version="$UPGRADE_VERSION" - postgres_version="$CHAINLINK_POSTGRES_VERSION" - - [Logging] - test_log_collect=$test_log_collect - run_id="$RUN_ID" - - [Logging.LogStream] - log_targets=$log_targets - - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_ENDPOINT" - basic_auth_secret="$LOKI_BASIC_AUTH" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="$GRAFANA_DASHBOARD_URL" - $grafana_bearer_token - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV diff --git a/.github/actions/setup-go/action.yml
b/.github/actions/setup-go/action.yml deleted file mode 100644 index 6514f533ef..0000000000 --- a/.github/actions/setup-go/action.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Setup Go -description: Setup Golang with efficient caching -inputs: - only-modules: - description: Set to 'true' to only cache modules - default: "false" - cache-version: - description: Set this to cache bust - default: "1" - go-version-file: - description: Set where the go version file is located at - default: "go.mod" - go-module-file: - description: Set where the go module file is located at - default: "go.sum" - -runs: - using: composite - steps: - - name: Set up Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: ${{ inputs.go-version-file }} - cache: false - - - name: Get branch name - if: ${{ inputs.only-modules == 'false' }} - id: branch-name - uses: tj-actions/branch-names@6871f53176ad61624f978536bbf089c574dc19a2 # v8.0.1 - - - name: Set go cache keys - shell: bash - id: go-cache-dir - run: | - echo "gomodcache=$(go env GOMODCACHE)" >> $GITHUB_OUTPUT - echo "gobuildcache=$(go env GOCACHE)" >> $GITHUB_OUTPUT - - - name: Set go module path - id: go-module-path - shell: bash - run: echo "path=./${{ inputs.go-module-file }}" >> $GITHUB_OUTPUT - - - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - name: Cache Go Modules - with: - path: | - ${{ steps.go-cache-dir.outputs.gomodcache }} - # The lifetime of go modules is much higher than the build outputs, so we increase cache efficiency - # here by not having the primary key contain the branch name - key: ${{ runner.os }}-gomod-${{ inputs.cache-version }}-${{ hashFiles(steps.go-module-path.outputs.path) }} - restore-keys: | - ${{ runner.os }}-gomod-${{ inputs.cache-version }}- - - - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - if: ${{ inputs.only-modules == 'false' }} - name: Cache Go Build Outputs - with: - path: | - ${{ steps.go-cache-dir.outputs.gobuildcache }} - # The lifetime of go build outputs is pretty short, so we make our primary cache key be the branch name - key: ${{ runner.os }}-gobuild-${{ inputs.cache-version }}-${{ hashFiles(steps.go-module-path.outputs.path) }}-${{ steps.branch-name.outputs.current_branch }} - restore-keys: | - ${{ runner.os }}-gobuild-${{ inputs.cache-version }}-${{ hashFiles(steps.go-module-path.outputs.path) }}- - ${{ runner.os }}-gobuild-${{ inputs.cache-version }}- diff --git a/.github/actions/setup-hardhat/action.yaml b/.github/actions/setup-hardhat/action.yaml deleted file mode 100644 index 189c821002..0000000000 --- a/.github/actions/setup-hardhat/action.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: Setup Hardhat -inputs: - namespace: - required: true - description: A cache namespace to add - cache-version: - default: "6" - description: Change to bust cache -description: Setup pnpm for contracts -runs: - using: composite - steps: - - name: Cache Compilers - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - with: - path: ~/.cache/hardhat-nodejs/ - key: contracts-compilers-${{ runner.os }}-${{ inputs.cache-version }}-${{ hashFiles('contracts/pnpm-lock.yaml', 'contracts/hardhat.config.ts') }} - - - name: Cache contracts build outputs - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - with: - path: | - contracts/cache/ - contracts/artifacts/ - contracts/typechain/ - key: ${{ format('contracts-{0}-{1}-{2}-{3}', runner.os, inputs.cache-version, inputs.namespace,
hashFiles('contracts/pnpm-lock.yaml', 'contracts/hardhat.config.ts', 'contracts/src/**/*.sol')) }} - - - name: Compile contracts - shell: bash - run: pnpm compile - working-directory: contracts diff --git a/.github/actions/setup-merge-base64-config/action.yml b/.github/actions/setup-merge-base64-config/action.yml deleted file mode 100644 index 79dc875831..0000000000 --- a/.github/actions/setup-merge-base64-config/action.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Merge Base64 Config -description: A composite action that merges user-provided Base64-encoded config with repository's secrets - -inputs: - base64Config: - description: Base64-encoded config to decode - -runs: - using: composite - steps: - - name: Install dasel - shell: bash - run: | - if ! which dasel > /dev/null; then - curl -L -o dasel "https://github.com/TomWright/dasel/releases/download/v2.6.0/dasel_linux_amd64" && chmod +x dasel && sudo mv dasel /usr/local/bin/ - else - echo "Dasel is already installed." - fi - - name: Add masks and export base64 config - shell: bash - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d) - CHAINLINK_IMAGE=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.image' 2>/dev/null || echo ''; }) - echo ::add-mask::$CHAINLINK_IMAGE - CHAINLINK_VERSION=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.version' 2>/dev/null || echo ''; }) - NETWORKS=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*selected_networks[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - - if [ -n "$CHAINLINK_IMAGE" ]; then - echo "CHAINLINK_IMAGE=$CHAINLINK_IMAGE" >> $GITHUB_ENV - else - echo "No Chainlink Image found in base64-ed config" - fi - if [ -n "$CHAINLINK_VERSION" ]; then - echo "CHAINLINK_VERSION=$CHAINLINK_VERSION" >> $GITHUB_ENV - else - echo "No Chainlink Version found in base64-ed config" - fi - if [ -n "$NETWORKS" ]; then - echo "NETWORKS=$NETWORKS" >> $GITHUB_ENV - fi - - grafana_bearer_token="" - if [ -n "$GRAFANA_BEARER_TOKEN" ]; then - grafana_bearer_token="bearer_token_secret=\"$GRAFANA_BEARER_TOKEN\"" - fi - - # use Loki config from GH secrets and merge it with base64 input - cat << EOF > config.toml - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_URL" - basic_auth_secret="$LOKI_BASIC_AUTH" - # legacy, you only need this to access the cloud version - # bearer_token_secret="bearer_token" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="$GRAFANA_DASHBOARD_URL" - $grafana_bearer_token - EOF - - echo "$decoded_toml" >> final_config.toml - cat config.toml >> final_config.toml - BASE64_CONFIG_OVERRIDE=$(cat final_config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV \ No newline at end of file diff --git a/.github/actions/setup-nodejs/action.yaml b/.github/actions/setup-nodejs/action.yaml index e0de9b0138..eab985db9a 100644 --- a/.github/actions/setup-nodejs/action.yaml +++ b/.github/actions/setup-nodejs/action.yaml @@ -1,12 +1,8 @@ name: Setup NodeJS inputs: prod: - default: "false" + default: 'false' description: Set to 'true' to do a prod only install - base-path: - description: Path to the base of the repo - required: false - default: . 
description: Setup pnpm for contracts runs: using: composite @@ -17,18 +13,18 @@ runs: - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 with: - node-version: "20" - cache: "pnpm" - cache-dependency-path: "${{ inputs.base-path }}/contracts/pnpm-lock.yaml" + node-version: '20' + cache: 'pnpm' + cache-dependency-path: 'contracts/pnpm-lock.yaml' - if: ${{ inputs.prod == 'false' }} name: Install dependencies shell: bash run: pnpm i - working-directory: ${{ inputs.base-path }}/contracts + working-directory: contracts - if: ${{ inputs.prod == 'true' }} name: Install prod dependencies shell: bash run: pnpm i --prod - working-directory: ${{ inputs.base-path }}/contracts + working-directory: contracts diff --git a/.github/actions/setup-parse-base64-config/action.yml b/.github/actions/setup-parse-base64-config/action.yml deleted file mode 100644 index 72e8982e6d..0000000000 --- a/.github/actions/setup-parse-base64-config/action.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Parse Base64 Config -description: A composite action that extracts the chainlink image, version and network from a base64-encoded config - -inputs: - base64Config: - description: Base64-encoded config to decode - -runs: - using: composite - steps: - - name: Install dasel - shell: bash - run: | - if ! which dasel > /dev/null; then - curl -L -o dasel "https://github.com/TomWright/dasel/releases/download/v2.6.0/dasel_linux_amd64" && chmod +x dasel && sudo mv dasel /usr/local/bin/ - else - echo "Dasel is already installed." - fi - - name: Add masks and export base64 config - shell: bash - run: | - decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d) - CHAINLINK_IMAGE=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.image' 2>/dev/null || echo ''; }) - echo ::add-mask::$CHAINLINK_IMAGE - CHAINLINK_VERSION=$(echo "$decoded_toml" | { dasel -r toml 'ChainlinkImage.version' 2>/dev/null || echo ''; }) - NETWORKS=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*selected_networks[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - ETH2_EL_CLIENT=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*execution_layer[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - - if [ -n "$CHAINLINK_IMAGE" ]; then - echo "CHAINLINK_IMAGE=$CHAINLINK_IMAGE" >> $GITHUB_ENV - else - echo "No Chainlink Image found in base64-ed config" - fi - if [ -n "$CHAINLINK_VERSION" ]; then - echo "CHAINLINK_VERSION=$CHAINLINK_VERSION" >> $GITHUB_ENV - else - echo "No Chainlink Version found in base64-ed config. 
Exiting" - fi - if [ -n "$NETWORKS" ]; then - echo "NETWORKS=$NETWORKS" >> $GITHUB_ENV - fi - if [ -n "$ETH2_EL_CLIENT" ]; then - echo "ETH2_EL_CLIENT=$ETH2_EL_CLIENT" >> $GITHUB_ENV - fi \ No newline at end of file diff --git a/.github/actions/setup-postgres/.env b/.github/actions/setup-postgres/.env deleted file mode 100644 index 47ed8d9bcd..0000000000 --- a/.github/actions/setup-postgres/.env +++ /dev/null @@ -1,5 +0,0 @@ -POSTGRES_USER=postgres -POSTGRES_OPTIONS="-c max_connections=1000 -c shared_buffers=2GB -c log_lock_waits=true" -POSTGRES_PASSWORD=postgres -POSTGRES_DB=chainlink_test -POSTGRES_HOST_AUTH_METHOD=trust diff --git a/.github/actions/setup-postgres/action.yml b/.github/actions/setup-postgres/action.yml deleted file mode 100644 index 45bfba5965..0000000000 --- a/.github/actions/setup-postgres/action.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Setup Postgresql -description: Setup postgres docker container via docker-compose, allowing usage of a custom command, see https://github.com/orgs/community/discussions/26688 -inputs: - base-path: - description: Path to the base of the repo - required: false - default: . -runs: - using: composite - steps: - - name: Start postgres service - run: docker compose up -d - shell: bash - working-directory: ${{ inputs.base-path }}/.github/actions/setup-postgres - - name: Wait for postgres service to be healthy - run: ./wait-for-healthy-postgres.sh - shell: bash - working-directory: ${{ inputs.base-path }}/.github/actions/setup-postgres diff --git a/.github/actions/setup-postgres/bin/pg_dump b/.github/actions/setup-postgres/bin/pg_dump deleted file mode 100755 index d8135ad824..0000000000 --- a/.github/actions/setup-postgres/bin/pg_dump +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash -# -# This script acts as a docker replacement around pg_dump so that developers can -# run DB involved tests locally without having postgres installed. -# -# Installation: -# - Make sure that your PATH doesn't already contain a postgres installation -# - Add this script to your PATH -# -# Usage: -# You should be able to setup your test db via: -# - go build -o chainlink.test . # Build the chainlink binary to run test db prep commands from -# - export CL_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/chainlink_test?sslmode=disable" -# - pushd .github/actions/setup-postgres/ # Navigate to the setup-postgres action so we can spin up a docker postgres -# instance -# - docker compose up # Spin up postgres -# - ./chainlink.test local db preparetest # Run the db migration, which will shell out to our pg_dump wrapper too. -# - popd -# - go test -timeout 30s ./core/services/workflows/... 
-v # Run tests that use the database - -cd "$(dirname "$0")" || exit - -docker compose exec -T postgres pg_dump "$@" diff --git a/.github/actions/setup-postgres/docker-compose.yml b/.github/actions/setup-postgres/docker-compose.yml deleted file mode 100644 index 23f8d82b91..0000000000 --- a/.github/actions/setup-postgres/docker-compose.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: gha_postgres -services: - postgres: - ports: - - "5432:5432" - container_name: cl_pg - image: postgres:14-alpine - command: postgres ${POSTGRES_OPTIONS} - env_file: - - .env - healthcheck: - test: "pg_isready -d ${POSTGRES_DB} -U ${POSTGRES_USER}" - interval: 2s - timeout: 5s - retries: 5 diff --git a/.github/actions/setup-postgres/wait-for-healthy-postgres.sh b/.github/actions/setup-postgres/wait-for-healthy-postgres.sh deleted file mode 100755 index 438cfbaff3..0000000000 --- a/.github/actions/setup-postgres/wait-for-healthy-postgres.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -RETRIES=10 - -until [ $RETRIES -eq 0 ]; do - DOCKER_OUTPUT=$(docker compose ps postgres --status running --format json) - JSON_TYPE=$(echo "$DOCKER_OUTPUT" | jq -r 'type') - - if [ "$JSON_TYPE" == "array" ]; then - HEALTH_STATUS=$(echo "$DOCKER_OUTPUT" | jq -r '.[0].Health') - elif [ "$JSON_TYPE" == "object" ]; then - HEALTH_STATUS=$(echo "$DOCKER_OUTPUT" | jq -r '.Health') - else - HEALTH_STATUS="Unknown JSON type: $JSON_TYPE" - fi - - echo "postgres health status: $HEALTH_STATUS" - if [ "$HEALTH_STATUS" == "healthy" ]; then - exit 0 - fi - - echo "Waiting for postgres server, $((RETRIES--)) remaining attempts..." - sleep 2 -done - -exit 1 diff --git a/.github/actions/setup-slither/action.yaml b/.github/actions/setup-slither/action.yaml deleted file mode 100644 index b8bef38575..0000000000 --- a/.github/actions/setup-slither/action.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: Setup Slither -description: Installs Slither 0.10.3 for contract analysis. Requires Python 3.6 or higher. -runs: - using: composite - steps: - - name: Install Slither - shell: bash - run: | - python -m pip install --upgrade pip - pip install slither-analyzer==0.10.3 diff --git a/.github/actions/setup-solana/action.yml b/.github/actions/setup-solana/action.yml deleted file mode 100644 index 02a0b85ca8..0000000000 --- a/.github/actions/setup-solana/action.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: Setup Solana CLI -description: Setup solana CLI -inputs: - base-path: - description: Path to the base of the repo - required: false - default: . -runs: - using: composite - steps: - - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - id: cache - name: Cache solana CLI - with: - path: | - ~/.local/share/solana/install/active_release/bin - key: ${{ runner.os }}-solana-cli-${{ hashFiles('${{ inputs.base-path }}/tools/ci/install_solana') }} - - - if: ${{ steps.cache.outputs.cache-hit != 'true' }} - name: Install solana cli - shell: bash - working-directory: ${{ inputs.base-path }} - run: ./tools/ci/install_solana - - - name: Export solana path to env - shell: bash - run: echo "PATH=$HOME/.local/share/solana/install/active_release/bin:$PATH" >> $GITHUB_ENV diff --git a/.github/actions/setup-solc-select/action.yaml b/.github/actions/setup-solc-select/action.yaml deleted file mode 100644 index b74ffae018..0000000000 --- a/.github/actions/setup-solc-select/action.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: Setup Solc Select -description: Installs Solc Select, required versions and selects the version to use. Requires Python 3.6 or higher.
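
For reviewers, the steps of this solc-select action boil down to a short manual session; a hypothetical sketch of the equivalent flow (the version numbers are illustrative, not taken from this diff):

```sh
# Hypothetical manual equivalent of the setup-solc-select steps below;
# version numbers are examples only.
pip3 install solc-select
solc-select install 0.8.19 0.8.24   # install every compiler version the job needs
solc-select use 0.8.24              # pin the active compiler
solc --version                      # now resolves through the solc-select shim
```
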
-inputs: - to_install: - description: Comma-separated list of solc versions to install - required: true - to_use: - description: Solc version to use - required: true - -runs: - using: composite - steps: - - name: Install solc-select and solc - shell: bash - run: | - pip3 install solc-select - sudo ln -s /usr/local/bin/solc-select /usr/bin/solc-select - - IFS=',' read -ra versions <<< "${{ inputs.to_install }}" - for version in "${versions[@]}"; do - solc-select install $version - if [ $? -ne 0 ]; then - echo "Failed to install Solc $version" - exit 1 - fi - done - - solc-select install ${{ inputs.to_use }} - solc-select use ${{ inputs.to_use }} diff --git a/.github/actions/setup-wasmd/action.yml b/.github/actions/setup-wasmd/action.yml deleted file mode 100644 index ae31cf2395..0000000000 --- a/.github/actions/setup-wasmd/action.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Setup Cosmos wasmd -description: Setup Cosmos wasmd, used for integration tests -inputs: - base-path: - description: Path to the base of the repo - required: false - default: . -runs: - using: composite - steps: - - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - id: cache - name: Cache wasmd-build - with: - path: ~/wasmd-build - # this caching works without cloning the repo because the install_wasmd contains - # the commit hash. - key: ${{ runner.os }}-wasmd-cli-${{ hashFiles('${{ inputs.base-path }}/tools/ci/install_wasmd') }} - - - if: ${{ steps.cache.outputs.cache-hit != 'true' }} - name: Install wasmd - shell: bash - working-directory: ${{ inputs.base-path }} - run: ./tools/ci/install_wasmd - - - name: Export wasmd path to env - shell: bash - run: echo "PATH=$HOME/wasmd-build/bin:$PATH" >> $GITHUB_ENV diff --git a/.github/actions/validate-artifact-scope/action.yaml b/.github/actions/validate-artifact-scope/action.yaml deleted file mode 100644 index 7440efc63a..0000000000 --- a/.github/actions/validate-artifact-scope/action.yaml +++ /dev/null @@ -1,103 +0,0 @@ -name: Validate Artifact Scope -description: Checks there are any modified Solidity files outside of the specified scope. If so, it prints a warning message, but does not fail the workflow. -inputs: - product: - description: The product for which the artifacts are being generated - required: true - sol_files: - description: Comma-separated (CSV) or space-separated (shell) list of Solidity files to check - required: true - -runs: - using: composite - steps: - - name: Transform input array - id: transform_input_array - shell: bash - run: | - is_csv_format() { - local input="$1" - if [[ "$input" =~ "," ]]; then - return 0 - else - return 1 - fi - } - - is_space_separated_string() { - local input="$1" - if [[ "$input" =~ ^[^[:space:]]+([[:space:]][^[:space:]]+)*$ ]]; then - return 0 - else - return 1 - fi - } - - array="${{ inputs.sol_files }}" - - if is_csv_format "$array"; then - echo "::debug::CSV format detected, nothing to do" - echo "sol_files=$array" >> $GITHUB_OUTPUT - exit 0 - fi - - if is_space_separated_string "$array"; then - echo "::debug::Space-separated format detected, converting to CSV" - csv_array="${array// /,}" - echo "sol_files=$csv_array" >> $GITHUB_OUTPUT - exit 0 - fi - - echo "::error::Invalid input format for sol_files. 
Please provide a comma-separated (CSV) or space-separated (shell) list of Solidity files" - exit 1 - - - name: Check for changes outside of artifact scope - shell: bash - run: | - echo "::debug::All modified contracts:" - echo "${{ steps.transform_input_array.outputs.sol_files }}" | tr ',' '\n' - if [ "${{ inputs.product }}" = "shared" ]; then - excluded_paths_pattern="!/^contracts\/src\/v0\.8\/interfaces/ && !/^contracts\/src\/v0\.8\/${{ inputs.product }}/ && !/^contracts\/src\/v0\.8\/[^\/]+\.sol$/" - else - excluded_paths_pattern="!/^contracts\/src\/v0\.8\/${{ inputs.product }}/" - fi - echo "::debug::Excluded paths: $excluded_paths_pattern" - unexpected_files=$(echo "${{ steps.transform_input_array.outputs.sol_files }}" | tr ',' '\n' | awk "$excluded_paths_pattern") - missing_files="" - set -e - set -o pipefail - if [[ -n "$unexpected_files" ]]; then - products=() - productsStr="" - IFS=$'\n' read -r -d '' -a files <<< "$unexpected_files" || true - echo "Files: ${files[@]}" - - for file in "${files[@]}"; do - missing_files+="$file," - - product=$(echo "$file" | awk -F'src/v0.8/' '{if ($2 ~ /\//) print substr($2, 1, index($2, "/")-1); else print "shared"}') - if [[ ! " ${products[@]} " =~ " ${product} " ]]; then - products+=("$product") - productsStr+="$product, " - fi - done - productsStr=${productsStr%, } - - set +e - set +o pipefail - - missing_files=$(echo $missing_files | tr ',' '\n') - - echo "Error: Found modified contracts outside of the expected scope: ${{ inputs.product }}" - echo "Files:" - echo "$missing_files" - echo "Action required: If you want to generate artifacts for other products ($productsStr) run this workflow again with updated configuration" - - echo "# Warning!" >> $GITHUB_STEP_SUMMARY - echo "## Reason: Found modified contracts outside of the expected scope: ${{ inputs.product }}" >> $GITHUB_STEP_SUMMARY - echo "### Files:" >> $GITHUB_STEP_SUMMARY - echo "$missing_files" >> $GITHUB_STEP_SUMMARY - echo "## Action required: If you want to generate artifacts for other products ($productsStr) run this workflow again with updated configuration" >> $GITHUB_STEP_SUMMARY - else - echo "No unexpected files found." - fi diff --git a/.github/actions/validate-solidity-artifacts/action.yaml b/.github/actions/validate-solidity-artifacts/action.yaml deleted file mode 100644 index 5357a87f96..0000000000 --- a/.github/actions/validate-solidity-artifacts/action.yaml +++ /dev/null @@ -1,115 +0,0 @@ -name: Validate Solidity Artifacts -description: Checks whether Slither reports and UML diagrams were generated for all necessary files. If not, a warning is printed in job summary, but the job is not marked as failed. 
-inputs: - slither_reports_path: - description: Path to the Slither reports directory (without trailing slash) - required: true - uml_diagrams_path: - description: Path to the UML diagrams directory (without trailing slash) - required: true - validate_slither_reports: - description: Whether Slither reports should be validated - required: true - validate_uml_diagrams: - description: Whether UML diagrams should be validated - required: true - sol_files: - description: Comma-separated (CSV) or space-separated (shell) list of Solidity files to check - required: true - -runs: - using: composite - steps: - - name: Transform input array - id: transform_input_array - shell: bash - run: | - is_csv_format() { - local input="$1" - if [[ "$input" =~ "," ]]; then - return 0 - else - return 1 - fi - } - - is_space_separated_string() { - local input="$1" - if [[ "$input" =~ ^[^[:space:]]+([[:space:]][^[:space:]]+)*$ ]]; then - return 0 - else - return 1 - fi - } - - array="${{ inputs.sol_files }}" - - if is_csv_format "$array"; then - echo "::debug::CSV format detected, nothing to do" - echo "sol_files=$array" >> $GITHUB_OUTPUT - exit 0 - fi - - if is_space_separated_string "$array"; then - echo "::debug::Space-separated format detected, converting to CSV" - csv_array="${array// /,}" - echo "sol_files=$csv_array" >> $GITHUB_OUTPUT - exit 0 - fi - - echo "::error::Invalid input format for sol_files. Please provide a comma-separated (CSV) or space-separated (shell) list of Solidity files" - exit 1 - - - name: Validate UML diagrams - if: ${{ inputs.validate_uml_diagrams == 'true' }} - shell: bash - run: | - echo "Validating UML diagrams" - IFS=',' read -r -a modified_files <<< "${{ steps.transform_input_array.outputs.sol_files }}" - missing_svgs=() - for file in "${modified_files[@]}"; do - svg_file="$(basename "${file%.sol}").svg" - if [ ! -f "${{ inputs.uml_diagrams_path }}/$svg_file" ]; then - echo "Error: UML diagram for $file not found" - missing_svgs+=("$file") - fi - done - - if [ ${#missing_svgs[@]} -gt 0 ]; then - echo "Error: Missing UML diagrams for files: ${missing_svgs[@]}" - echo "# Warning!" >> $GITHUB_STEP_SUMMARY - echo "## Reason: Missing UML diagrams for files:" >> $GITHUB_STEP_SUMMARY - for file in "${missing_svgs[@]}"; do - echo " $file" >> $GITHUB_STEP_SUMMARY - done - echo "## Action required: Please try to generate artifacts for them locally or using a different tool" >> $GITHUB_STEP_SUMMARY - else - echo "All UML diagrams generated successfully" - fi - - - name: Validate Slither reports - if: ${{ inputs.validate_slither_reports == 'true' }} - shell: bash - run: | - echo "Validating Slither reports" - IFS=',' read -r -a modified_files <<< "${{ steps.transform_input_array.outputs.sol_files }}" - missing_reports=() - for file in "${modified_files[@]}"; do - report_file="$(basename "${file%.sol}")-slither-report.md" - if [ ! -f "${{ inputs.slither_reports_path }}/$report_file" ]; then - echo "Error: Slither report for $file not found" - missing_reports+=("$file") - fi - done - - if [ ${#missing_reports[@]} -gt 0 ]; then - echo "Error: Missing Slither reports for files: ${missing_reports[@]}" - echo "# Warning!" 
>> $GITHUB_STEP_SUMMARY - echo "## Reason: Missing Slither reports for files:" >> $GITHUB_STEP_SUMMARY - for file in "${missing_reports[@]}"; do - echo " $file" >> $GITHUB_STEP_SUMMARY - done - echo "## Action required: Please try to generate artifacts for them locally" >> $GITHUB_STEP_SUMMARY - else - echo "All Slither reports generated successfully" - fi diff --git a/.github/actions/version-file-bump/action.yml b/.github/actions/version-file-bump/action.yml deleted file mode 100644 index eb8d5c1742..0000000000 --- a/.github/actions/version-file-bump/action.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: version-file-bump -description: "Ensure that the VERSION file has been bumped since the last release." -inputs: - github-token: - description: "GitHub access token" - default: ${{ github.token }} - required: true -outputs: - result: - value: ${{ steps.compare.outputs.result }} - description: "Result of the comparison" -runs: - using: composite - steps: - - name: Get latest release version - id: get-latest-version - shell: bash - run: | - untrimmed_ver=$( - curl --header "Authorization: token ${{ inputs.github-token }}" \ - --request GET \ - "https://api.github.com/repos/${{ github.repository }}/releases/latest?draft=false&prerelease=false" \ - | jq -r .name - ) - latest_version="${untrimmed_ver:1}" - echo "latest_version=${latest_version}" | tee -a "$GITHUB_OUTPUT" - - name: Get current version - id: get-current-version - shell: bash - run: | - current_version=$(jq -r '.version' ./package.json) - echo "current_version=${current_version}" | tee -a "$GITHUB_OUTPUT" - - name: Compare semantic versions - uses: smartcontractkit/chainlink-github-actions/semver-compare@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - id: compare - with: - version1: ${{ steps.get-current-version.outputs.current_version }} - operator: eq - version2: ${{ steps.get-latest-version.outputs.latest_version }} - - name: Fail if version not bumped - # XXX: The reason we are not checking if the current is greater than the - # latest release is to account for hot fixes which may have been branched - # from a previous tag. - shell: bash - env: - VERSION_NOT_BUMPED: ${{ steps.compare.outputs.result }} - run: | - if [[ "${VERSION_NOT_BUMPED:-}" = "true" ]]; then - echo "The version in \`package.json\` has not been bumped since the last release. Please fix by running \`pnpm changeset version\`." - exit 1 - fi diff --git a/.github/e2e-tests.yml b/.github/e2e-tests.yml deleted file mode 100644 index 0d92d1900d..0000000000 --- a/.github/e2e-tests.yml +++ /dev/null @@ -1,824 +0,0 @@ -# This file specifies the GitHub runner for each E2E test and is utilized by all E2E CI workflows. -# -# Each entry in this file includes the following: -# - The GitHub runner (runs_on field) that will execute tests. -# - The tests that will be run by the runner. -# - The workflows (e.g., Run PR E2E Tests, Run Nightly E2E Tests) that should trigger these tests. -# -runner-test-matrix: - - # START: OCR tests - - # Example of 1 runner for all tests in integration-tests/smoke/ocr_test.go - - id: integration-tests/smoke/ocr_test.go:* - path: integration-tests/smoke/ocr_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/ocr_test.go -timeout 30m -count=1 -test.parallel=2 -json - pyroscope_env: ci-smoke-ocr-evm-simulated - - # Example of 2 separate runners for the same test file but different tests.
Can be used if tests are too heavy to run on the same runner - - id: integration-tests/smoke/ocr2_test.go:^TestOCRv2Request$ - path: integration-tests/smoke/ocr2_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/ocr2_test.go -test.run ^TestOCRv2Request$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-ocr2-evm-simulated-nightly - - - id: integration-tests/smoke/ocr2_test.go:^TestOCRv2Basic$ - path: integration-tests/smoke/ocr2_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/ocr2_test.go -test.run ^TestOCRv2Basic$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-ocr2-evm-simulated-nightly - - # Example of a configuration for running a single soak test in Kubernetes Remote Runner - - id: integration-tests/soak/ocr_test.go:^TestOCRv1Soak$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv1Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestOCRv2Soak$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv2Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestForwarderOCRv1Soak$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestForwarderOCRv1Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestForwarderOCRv2Soak$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestForwarderOCRv2Soak$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id:
integration-tests/soak/ocr_test.go:^TestOCRSoak_GasSpike$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GasSpike$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestOCRSoak_ChangeBlockGasLimit$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_ChangeBlockGasLimit$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestOCRSoak_RPCDownForAllCLNodes$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForAllCLNodes$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/soak/ocr_test.go:^TestOCRSoak_RPCDownForHalfCLNodes$ - path: integration-tests/soak/ocr_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForHalfCLNodes$ -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: soak - - - id: integration-tests/smoke/forwarder_ocr_test.go:* - path: integration-tests/smoke/forwarder_ocr_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/forwarder_ocr_test.go -timeout 30m -count=1 -test.parallel=2 -json - pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated - - - id: integration-tests/smoke/forwarders_ocr2_test.go:* - path: integration-tests/smoke/forwarders_ocr2_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/forwarders_ocr2_test.go -timeout 30m -count=1 -test.parallel=2 -json - pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated - - - id: integration-tests/smoke/ocr2_test.go:* - path: integration-tests/smoke/ocr2_test.go - test_env_type: docker - runs_on: ubuntu22.04-16cores-64GB - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/ocr2_test.go -timeout 30m -count=1 -test.parallel=6 -json - pyroscope_env: ci-smoke-ocr2-evm-simulated - - - id: integration-tests/smoke/ocr2_test.go:*-plugins - path: integration-tests/smoke/ocr2_test.go - test_env_type: docker - runs_on: ubuntu22.04-16cores-64GB - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/ocr2_test.go -timeout 30m -count=1 -test.parallel=6 -json - pyroscope_env: ci-smoke-ocr2-plugins-evm-simulated - test_inputs: - # chainlink_version: '{{ env.GITHUB_SHA_PLUGINS }}' # This is the chainlink version that has the plugins - chainlink_version: develop-plugins - - # END: OCR tests - - # START: Automation tests - - - id: 
integration-tests/smoke/automation_test.go:^TestAutomationBasic/registry_2_0|TestAutomationBasic/registry_2_1_conditional|TestAutomationBasic/registry_2_1_logtrigger$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_0|TestAutomationBasic/registry_2_1_conditional|TestAutomationBasic/registry_2_1_logtrigger$" -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationBasic/registry_2_1_with_mercury_v02|TestAutomationBasic/registry_2_1_with_mercury_v03|TestAutomationBasic/registry_2_1_with_logtrigger_and_mercury_v02$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_1_with_mercury_v02|TestAutomationBasic/registry_2_1_with_mercury_v03|TestAutomationBasic/registry_2_1_with_logtrigger_and_mercury_v02$" -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationBasic/registry_2_2_conditional|TestAutomationBasic/registry_2_2_logtrigger|TestAutomationBasic/registry_2_2_with_mercury_v02$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_2_conditional|TestAutomationBasic/registry_2_2_logtrigger|TestAutomationBasic/registry_2_2_with_mercury_v02$" -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationBasic/registry_2_2_with_mercury_v03|TestAutomationBasic/registry_2_2_with_logtrigger_and_mercury_v02$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_2_with_mercury_v03|TestAutomationBasic/registry_2_2_with_logtrigger_and_mercury_v02$" -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationBasic/registry_2_3_conditional_native|TestAutomationBasic/registry_2_3_conditional_link$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_3_conditional_native|TestAutomationBasic/registry_2_3_conditional_link$" -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationBasic/registry_2_3_logtrigger_native|TestAutomationBasic/registry_2_3_logtrigger_link$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run 
"^TestAutomationBasic/registry_2_3_logtrigger_native|TestAutomationBasic/registry_2_3_logtrigger_link$" -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationBasic/registry_2_3_with_mercury_v03_link|TestAutomationBasic/registry_2_3_with_logtrigger_and_mercury_v02_link$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_3_with_mercury_v03_link|TestAutomationBasic/registry_2_3_with_logtrigger_and_mercury_v02_link$" -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestSetUpkeepTriggerConfig$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestSetUpkeepTriggerConfig$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationAddFunds$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationAddFunds$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationPauseUnPause$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPauseUnPause$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationRegisterUpkeep$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationRegisterUpkeep$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationPauseRegistry$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPauseRegistry$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationKeeperNodesDown$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationKeeperNodesDown$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationPerformSimulation$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - 
runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPerformSimulation$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestAutomationCheckPerformGasLimit$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationCheckPerformGasLimit$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestUpdateCheckData$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestUpdateCheckData$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/automation_test.go:^TestSetOffchainConfigWithMaxGasPrice$ - path: integration-tests/smoke/automation_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestSetOffchainConfigWithMaxGasPrice$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperBasicSmoke$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperBasicSmoke$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperBlockCountPerTurn$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperBlockCountPerTurn$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperSimulation$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperSimulation$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperCheckPerformGasLimit$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperCheckPerformGasLimit$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperRegisterUpkeep$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperRegisterUpkeep$ 
-test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperAddFunds$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperAddFunds$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperRemove$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperRemove$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperPauseRegistry$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperPauseRegistry$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperMigrateRegistry$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperMigrateRegistry$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperNodeDown$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperNodeDown$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperPauseUnPauseUpkeep$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperPauseUnPauseUpkeep$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperUpdateCheckData$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperUpdateCheckData$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/smoke/keeper_test.go:^TestKeeperJobReplacement$ - path: integration-tests/smoke/keeper_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperJobReplacement$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - - - id: integration-tests/load/automationv2_1/automationv2_1_test.go:TestLogTrigger - path: integration-tests/load/automationv2_1/automationv2_1_test.go - runs_on: ubuntu-latest - test_env_type: 
k8s-remote-runner - test_cmd: cd integration-tests/load/automationv2_1 && go test -test.run TestLogTrigger -test.parallel=1 -timeout 60m -count=1 -json - remote_runner_memory: 4Gi - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_suite: automationv2_1 - workflows: - - Automation Load Test - pyroscope_env: automation-load-test - - - id: integration-tests/smoke/automation_upgrade_test.go:^TestAutomationNodeUpgrade/registry_2_0 - path: integration-tests/smoke/automation_upgrade_test.go - test_env_type: docker - runs_on: ubuntu22.04-8cores-32GB - workflows: - - Run Automation Product Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationNodeUpgrade/registry_2_0 -test.parallel=1 -timeout 60m -count=1 -json - test_inputs: - chainlink_image: public.ecr.aws/chainlink/chainlink - chainlink_version: latest - chainlink_upgrade_image: '{{ env.QA_CHAINLINK_IMAGE }}' - chainlink_upgrade_version: develop - pyroscope_env: ci-smoke-automation-upgrade-tests - - - id: integration-tests/smoke/automation_upgrade_test.go:^TestAutomationNodeUpgrade/registry_2_1 - path: integration-tests/smoke/automation_upgrade_test.go - test_env_type: docker - runs_on: ubuntu22.04-8cores-32GB - workflows: - - Run Automation Product Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationNodeUpgrade/registry_2_1 -test.parallel=5 -timeout 60m -count=1 -json - test_inputs: - chainlink_image: public.ecr.aws/chainlink/chainlink - chainlink_version: latest - chainlink_upgrade_image: '{{ env.QA_CHAINLINK_IMAGE }}' - chainlink_upgrade_version: develop - pyroscope_env: ci-smoke-automation-upgrade-tests - - - id: integration-tests/smoke/automation_upgrade_test.go:^TestAutomationNodeUpgrade/registry_2_2 - path: integration-tests/smoke/automation_upgrade_test.go - test_env_type: docker - runs_on: ubuntu22.04-8cores-32GB - workflows: - - Run Automation Product Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationNodeUpgrade/registry_2_2 -test.parallel=5 -timeout 60m -count=1 -json - test_inputs: - chainlink_image: public.ecr.aws/chainlink/chainlink - chainlink_version: latest - chainlink_upgrade_image: '{{ env.QA_CHAINLINK_IMAGE }}' - chainlink_upgrade_version: develop - pyroscope_env: ci-smoke-automation-upgrade-tests - - - id: integration-tests/reorg/automation_reorg_test.go - path: integration-tests/reorg/automation_reorg_test.go - runs_on: ubuntu-latest - test_env_type: k8s-remote-runner - test_inputs: - test_suite: reorg - workflows: - - Run Automation On Demand Tests (TEST WORKFLOW) - test_cmd: cd integration-tests/reorg && DETACH_RUNNER=false go test -v -test.run ^TestAutomationReorg$ -test.parallel=7 -timeout 60m -count=1 -json - pyroscope_env: ci-automation-on-demand-reorg - - - id: integration-tests/chaos/automation_chaos_test.go - path: integration-tests/chaos/automation_chaos_test.go - test_env_type: k8s-remote-runner - runs_on: ubuntu-latest - workflows: - - Run Automation On Demand Tests (TEST WORKFLOW) - test_cmd: cd integration-tests/chaos && DETACH_RUNNER=false go test -v -test.run ^TestAutomationChaos$ -test.parallel=15 -timeout 60m -count=1 -json - pyroscope_env: ci-automation-on-demand-chaos - test_inputs: - test_suite: chaos - - - id: integration-tests/benchmark/keeper_test.go:^TestAutomationBenchmark$ - path: integration-tests/benchmark/keeper_test.go - test_env_type: k8s-remote-runner - remote_runner_memory: 4Gi - runs_on: ubuntu-latest - # workflows: - # - Run 
Nightly E2E Tests - test_cmd: cd integration-tests/benchmark && go test -v -test.run ^TestAutomationBenchmark$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-benchmark-automation-nightly - test_inputs: - test_suite: benchmark - - # END: Automation tests - - # START: VRF tests - - - id: integration-tests/smoke/vrfv2_test.go:TestVRFv2Basic - path: integration-tests/smoke/vrfv2_test.go - runs_on: ubuntu22.04-8cores-32GB - test_env_type: docker - test_cmd: cd integration-tests/smoke && go test -v -test.run TestVRFv2Basic -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - workflows: - - On Demand VRFV2 Smoke Test (Ethereum clients) - - - id: integration-tests/load/vrfv2plus/vrfv2plus_test.go:^TestVRFV2PlusPerformance$Smoke - path: integration-tests/load/vrfv2plus/vrfv2plus_test.go - runs_on: ubuntu22.04-8cores-32GB - test_env_type: docker - test_cmd: cd integration-tests/load/vrfv2plus && go test -v -test.run ^TestVRFV2PlusPerformance$ -test.parallel=1 -timeout 24h -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_type: Smoke - workflows: - - On Demand VRFV2 Plus Performance Test - - - id: integration-tests/load/vrfv2plus/vrfv2plus_test.go:^TestVRFV2PlusBHSPerformance$Smoke - path: integration-tests/load/vrfv2plus/vrfv2plus_test.go - runs_on: ubuntu22.04-8cores-32GB - test_env_type: docker - test_cmd: cd integration-tests/load/vrfv2plus && go test -v -test.run ^TestVRFV2PlusBHSPerformance$ -test.parallel=1 -timeout 24h -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_type: Smoke - workflows: - - On Demand VRFV2 Plus Performance Test - - - id: integration-tests/load/vrfv2/vrfv2_test.go:^TestVRFV2Performance$Smoke - path: integration-tests/load/vrfv2/vrfv2_test.go - runs_on: ubuntu22.04-8cores-32GB - test_env_type: docker - test_cmd: cd integration-tests/load/vrfv2 && go test -v -test.run ^TestVRFV2Performance$ -test.parallel=1 -timeout 24h -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_type: Smoke - workflows: - - On Demand VRFV2 Performance Test - - - id: integration-tests/load/vrfv2/vrfv2_test.go:^TestVRFV2PlusBHSPerformance$Smoke - path: integration-tests/load/vrfv2/vrfv2_test.go - runs_on: ubuntu22.04-8cores-32GB - test_env_type: docker - test_cmd: cd integration-tests/load/vrfv2 && go test -v -test.run ^TestVRFV2PlusBHSPerformance$ -test.parallel=1 -timeout 24h -count=1 -json - test_config_override_required: true - test_secrets_required: true - test_inputs: - test_type: Smoke - workflows: - - On Demand VRFV2 Performance Test - - - id: integration-tests/smoke/vrfv2plus_test.go:^TestVRFv2Plus$/^Link_Billing$ - path: integration-tests/smoke/vrfv2plus_test.go - runs_on: ubuntu22.04-8cores-32GB - test_env_type: docker - test_cmd: cd integration-tests && go test -v -test.run ^TestVRFv2Plus$/^Link_Billing$ smoke/vrfv2plus_test.go -test.parallel=1 -timeout 30m -count=1 -json - test_config_override_required: true - test_secrets_required: true - workflows: - - On Demand VRFV2Plus Smoke Test (Ethereum clients) - - - id: integration-tests/smoke/vrf_test.go:* - path: integration-tests/smoke/vrf_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/vrf_test.go -timeout 30m -count=1 -test.parallel=2 -json - pyroscope_env: 
ci-smoke-vrf-evm-simulated - - - id: integration-tests/smoke/vrfv2_test.go:* - path: integration-tests/smoke/vrfv2_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/vrfv2_test.go -timeout 30m -count=1 -test.parallel=6 -json - pyroscope_env: ci-smoke-vrf2-evm-simulated - - - id: integration-tests/smoke/vrfv2plus_test.go:* - path: integration-tests/smoke/vrfv2plus_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/vrfv2plus_test.go -timeout 30m -count=1 -test.parallel=9 -json - pyroscope_env: ci-smoke-vrf2plus-evm-simulated - - # END: VRF tests - - # START: LogPoller tests - - - id: integration-tests/smoke/log_poller_test.go:^TestLogPollerFewFiltersFixedDepth$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerFewFiltersFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - - id: integration-tests/smoke/log_poller_test.go:^TestLogPollerFewFiltersFinalityTag$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerFewFiltersFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - - id: integration-tests/smoke/log_poller_test.go:^TestLogPollerWithChaosFixedDepth$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - - id: integration-tests/smoke/log_poller_test.go:^TestLogPollerWithChaosFinalityTag$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - - id: integration-tests/smoke/log_poller_test.go:^TestLogPollerWithChaosPostgresFinalityTag$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosPostgresFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - - id: integration-tests/smoke/log_poller_test.go:^TestLogPollerWithChaosPostgresFixedDepth$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerWithChaosPostgresFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - - id: 
integration-tests/smoke/log_poller_test.go:^TestLogPollerReplayFixedDepth$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerReplayFixedDepth$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - - id: integration-tests/smoke/log_poller_test.go:^TestLogPollerReplayFinalityTag$ - path: integration-tests/smoke/log_poller_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/smoke && go test -test.run ^TestLogPollerReplayFinalityTag$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-log_poller-evm-simulated - - # END: LogPoller tests - - # START: Other tests - - - id: integration-tests/smoke/runlog_test.go:* - path: integration-tests/smoke/runlog_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/runlog_test.go -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-runlog-evm-simulated - - - id: integration-tests/smoke/cron_test.go:* - path: integration-tests/smoke/cron_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/cron_test.go -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-cron-evm-simulated - - - id: integration-tests/smoke/flux_test.go:* - path: integration-tests/smoke/flux_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/flux_test.go -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-flux-evm-simulated - - - id: integration-tests/smoke/reorg_above_finality_test.go:* - path: integration-tests/smoke/reorg_above_finality_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/ && go test smoke/reorg_above_finality_test.go -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-reorg-above-finality-evm-simulated - - - id: integration-tests/migration/upgrade_version_test.go:* - path: integration-tests/migration/upgrade_version_test.go - test_env_type: docker - runs_on: ubuntu-latest - workflows: - - Run PR E2E Tests - - Run Nightly E2E Tests - test_cmd: cd integration-tests/migration && go test upgrade_version_test.go -timeout 30m -count=1 -test.parallel=2 -json - test_inputs: - chainlink_image: public.ecr.aws/chainlink/chainlink - chainlink_version: '{{ env.LATEST_CHAINLINK_RELEASE_VERSION }}' - chainlink_upgrade_image: '{{ env.QA_CHAINLINK_IMAGE }}' - chainlink_upgrade_version: develop - - # END: Other tests diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index e77921d0dd..0000000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,4 +0,0 @@ -## Motivation - - -## Solution diff --git a/.github/scripts/check-changeset-tags.sh b/.github/scripts/check-changeset-tags.sh deleted file mode 100755 index 5447cb7915..0000000000 --- a/.github/scripts/check-changeset-tags.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash - -# This checks whether at least one tag from the list below exists in a changeset file -# -# TAG LIST: -# #nops : For
any feature that is NOP facing and needs to be in the official Release Notes for the release. -# #added : For any new functionality added. -# #changed : For any change to the existing functionality. -# #removed : For any functionality/config that is removed. -# #updated : For any functionality that is updated. -# #deprecation_notice : For any upcoming deprecation of functionality. -# #breaking_change : For any functionality that requires manual action for the node to boot. -# #db_update : For any feature that introduces updates to database schema. -# #wip : For any change that is not ready yet and external communication about it should be held off till it is feature complete. -# #bugfix : For bug fixes. -# #internal : For changesets that need to be excluded from the final changelog. - -if [ $# -eq 0 ]; then - echo "Error: No changeset file path provided." - exit 1 -fi - -CHANGESET_FILE_PATH=$1 -tags_list=( "#nops" "#added" "#changed" "#removed" "#updated" "#deprecation_notice" "#breaking_change" "#db_update" "#wip" "#bugfix" "#internal" ) -has_tags=false -found_tags=() - -if [[ ! -f "$CHANGESET_FILE_PATH" ]]; then - echo "Error: File '$CHANGESET_FILE_PATH' does not exist." - exit 1 -fi - -changeset_content=$(sed -n '/^---$/,/^---$/{ /^---$/!p; }' "$CHANGESET_FILE_PATH") -semver_value=$(echo "$changeset_content" | awk -F": " '/"ccip"/ {print $2}') - -if [[ "$semver_value" != "major" && "$semver_value" != "minor" && "$semver_value" != "patch" ]]; then - echo "Invalid changeset semver value for 'ccip'. Must be 'major', 'minor', or 'patch'." - exit 1 -fi - -while IFS= read -r line; do - for tag in "${tags_list[@]}"; do - if [[ "$line" == *"$tag"* ]]; then - found_tags+=("$tag") - echo "Found tag: $tag in $CHANGESET_FILE_PATH" - has_tags=true - fi - done -done < "$CHANGESET_FILE_PATH" - -if [[ "$has_tags" == false ]]; then - echo "Error: No tags found in $CHANGESET_FILE_PATH" -fi - -echo "has_tags=$has_tags" >> $GITHUB_OUTPUT -echo "found_tags=$(jq -jR 'split(" ") | join(",")' <<< "${found_tags[*]}")" >> $GITHUB_OUTPUT diff --git a/.github/scripts/functions.sh b/.github/scripts/functions.sh deleted file mode 100644 index 53b5339226..0000000000 --- a/.github/scripts/functions.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -# Function to convert a comma-separated list into a TOML array format.
-# Usage: convert_to_toml_array "elem1,elem2,elem3" -# Effect: "a,b,c" -> ["a","b","c"] -function convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" -} \ No newline at end of file diff --git a/.github/scripts/jira/package.json b/.github/scripts/jira/package.json deleted file mode 100644 index 2c57d35a64..0000000000 --- a/.github/scripts/jira/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "jira", - "version": "0.1.0", - "description": "Updates Jira issue with release information like the version and tags for a PR.", - "main": "update-jira-issue.js", - "type": "module", - "private": true, - "keywords": [], - "author": "", - "license": "MIT", - "engines": { - "node": ">=18", - "pnpm": ">=9" - }, - "dependencies": { - "@actions/core": "^1.10.1", - "node-fetch": "^2.7.0" - } -} diff --git a/.github/scripts/jira/pnpm-lock.yaml b/.github/scripts/jira/pnpm-lock.yaml deleted file mode 100644 index 09ba8c749c..0000000000 --- a/.github/scripts/jira/pnpm-lock.yaml +++ /dev/null @@ -1,93 +0,0 @@ -lockfileVersion: '9.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - dependencies: - '@actions/core': - specifier: ^1.10.1 - version: 1.10.1 - node-fetch: - specifier: ^2.7.0 - version: 2.7.0 - -packages: - - '@actions/core@1.10.1': - resolution: {integrity: sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==} - - '@actions/http-client@2.2.1': - resolution: {integrity: sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==} - - '@fastify/busboy@2.1.1': - resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} - engines: {node: '>=14'} - - node-fetch@2.7.0: - resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - - tr46@0.0.3: - resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - - tunnel@0.0.6: - resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==} - engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'} - - undici@5.28.4: - resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} - engines: {node: '>=14.0'} - - uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - - webidl-conversions@3.0.1: - resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - - whatwg-url@5.0.0: - resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - -snapshots: - - '@actions/core@1.10.1': - dependencies: - '@actions/http-client': 2.2.1 - uuid: 8.3.2 - - '@actions/http-client@2.2.1': - dependencies: - tunnel: 0.0.6 - undici: 5.28.4 - - '@fastify/busboy@2.1.1': {} - - node-fetch@2.7.0: - dependencies: - whatwg-url: 5.0.0 - - tr46@0.0.3: {} - - tunnel@0.0.6: {} - - undici@5.28.4: - dependencies: - '@fastify/busboy': 
2.1.1 - - uuid@8.3.2: {} - - webidl-conversions@3.0.1: {} - - whatwg-url@5.0.0: - dependencies: - tr46: 0.0.3 - webidl-conversions: 3.0.1 diff --git a/.github/scripts/jira/update-jira-issue.js b/.github/scripts/jira/update-jira-issue.js deleted file mode 100644 index 77d5b81bff..0000000000 --- a/.github/scripts/jira/update-jira-issue.js +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env node - -import * as core from "@actions/core"; -import fetch from "node-fetch"; - -function parseIssueNumber(prTitle, commitMessage, branchName) { - const jiraIssueRegex = /[A-Z]{2,}-\d+/; - if (!!branchName && jiraIssueRegex.test(branchName.toUpperCase())) { - return branchName.toUpperCase().match(jiraIssueRegex)[0]; - } else if ( - !!commitMessage && - jiraIssueRegex.test(commitMessage.toUpperCase()) - ) { - return commitMessage.toUpperCase().match(jiraIssueRegex)[0]; - } else if (!!prTitle && jiraIssueRegex.test(prTitle.toUpperCase())) { - return prTitle.toUpperCase().match(jiraIssueRegex)[0]; - } else { - return null; - } -} - -function getLabels(tags) { - const labelPrefix = "core-release"; - return tags.map((tag) => { - return { - add: `${labelPrefix}/${tag.substring(1)}`, - }; - }); -} - -async function updateJiraIssue( - jiraHost, - jiraUserName, - jiraApiToken, - issueNumber, - tags, - fixVersionName -) { - const token = Buffer.from(`${jiraUserName}:${jiraApiToken}`).toString( - "base64" - ); - const bodyData = { - update: { - labels: getLabels(tags), - fixVersions: [{ set: [{ name: fixVersionName }] }], - }, - }; - - fetch(`https://${jiraHost}/rest/api/3/issue/${issueNumber}`, { - method: "PUT", - headers: { - Authorization: `Basic ${token}`, - Accept: "application/json", - "Content-Type": "application/json", - }, - body: JSON.stringify(bodyData), - }) - .then((response) => { - console.log(`Response: ${JSON.stringify(response)}`); - return response.text(); - }) - .then((text) => console.log(text)) - .catch((err) => console.error(err)); -} - -async function run() { - try { - const jiraHost = process.env.JIRA_HOST; - const jiraUserName = process.env.JIRA_USERNAME; - const jiraApiToken = process.env.JIRA_API_TOKEN; - const chainlinkVersion = process.env.CHAINLINK_VERSION; - const prTitle = process.env.PR_TITLE; - const commitMessage = process.env.COMMIT_MESSAGE; - const branchName = process.env.BRANCH_NAME; - // tags are not getting used at the current moment so will always default to [] - const tags = process.env.FOUND_TAGS - ? process.env.FOUND_TAGS.split(",") - : []; - - // Check for the existence of JIRA_HOST and JIRA_USERNAME and JIRA_API_TOKEN - if (!jiraHost || !jiraUserName || !jiraApiToken) { - core.setFailed( - "Error: Missing required environment variables: JIRA_HOST and JIRA_USERNAME and JIRA_API_TOKEN." - ); - return; - } - - // Checks for the Jira issue number and exit if it can't find it - const issueNumber = parseIssueNumber(prTitle, commitMessage, branchName); - if (!issueNumber) { - core.info( - "No JIRA issue number found in: PR title, commit message, or branch name. Please include the issue ID in one of these." - ); - core.notice( - "No JIRA issue number found in: PR title, commit message, or branch name. Please include the issue ID in one of these." - ); - core.setOutput( - "jiraComment", - "> :medal_military: No JIRA issue number found - Please include it in the PR title or in a commit message." 
- ); - return; - } - const fixVersionName = `chainlink-v${chainlinkVersion}`; - await updateJiraIssue( - jiraHost, - jiraUserName, - jiraApiToken, - issueNumber, - tags, - fixVersionName - ); - core.setOutput("jiraComment", ""); - } catch (error) { - core.setFailed(error.message); - } -} - -run(); diff --git a/.github/tracing/README.md b/.github/tracing/README.md deleted file mode 100644 index 06b2eef665..0000000000 --- a/.github/tracing/README.md +++ /dev/null @@ -1,112 +0,0 @@ -# Distributed Tracing - -As part of the LOOP plugin effort, we've added distributed tracing to the core node. This is helpful for initial development and maintenance of LOOPs, but will also empower product teams building on top of core. - -## Dev environment - -One way to generate traces locally today is with the OCR2 basic smoke test. - -1. navigate to `.github/tracing/` and then run `docker compose --file local-smoke-docker-compose.yaml up` -2. set up a local docker registry at `127.0.0.1:5000` (https://www.docker.com/blog/how-to-use-your-own-registry-2/) -3. run `make build_push_plugin_docker_image` in `chainlink/integration-tests/Makefile` -4. prepare your `overrides.toml` file with the selected network and CL image name and version, and place it anywhere -inside the `integration-tests` directory. Sample `overrides.toml` file: -```toml -[ChainlinkImage] -image="127.0.0.1:5000/chainlink" -version="develop" - -[Network] -selected_networks=["simulated"] -``` -5. run `go test -run TestOCRv2Basic ./smoke/ocr2_test.go` -6. navigate to `localhost:3000/explore` in a web browser to query for traces - -Core and the median plugins are instrumented with OpenTelemetry traces, which are sent to the OTEL collector and forwarded to the Tempo backend. The Grafana UI can then read the trace data from the Tempo backend. - - - -## CI environment - -Another way to generate traces is by enabling traces for PRs. This will instrument traces for `TestOCRv2Basic` in the CI run. - -1. Cut a PR in the core repo -2. Add the `enable tracing` label to the PR -3. Navigate to `Integration Tests / ETH Smoke Tests ocr2-plugins (pull_request)` details -4. Navigate to the summary of the integration tests -5. After the test completes, the generated trace data will be saved as an artifact, currently called `trace-data` -6. Download the artifact to this directory (`chainlink/.github/tracing`) -7. `docker compose --file local-smoke-docker-compose.yaml up` -8. Run `sh replay.sh` to replay those traces to the otel-collector container that was spun up in the last step. -9. navigate to `localhost:3000/explore` in a web browser to query for traces - -The artifact as a whole is not JSON encoded; each individual line is a well-formed and complete JSON object. - - -## Production and NOPs environments - -In a production environment, we suggest coupling the lifecycle of nodes and otel-collectors. A best practice is to deploy the otel-collector alongside your node, using infrastructure as code (IAC) to automate deployments and certificate lifecycles. While there are valid use cases for using `Tracing.Mode = unencrypted`, we have set the default encryption setting to `Tracing.Mode = tls`. Externally deployed otel-collectors cannot be used with `Tracing.Mode = unencrypted`. i.e., if `Tracing.Mode = unencrypted` and an external URI is detected for `Tracing.CollectorTarget`, node configuration will fail to validate and the node will not boot. The node requires a valid encryption mode and collector target to send traces.
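-
-For reference, a minimal sketch of the relevant node TOML settings (`Tracing.Mode` and `Tracing.CollectorTarget` are discussed above; the `Enabled` and `TLSCertPath` keys shown here are illustrative assumptions, so check the config reference for your node version):
-
-```toml
-[Tracing]
-Enabled = true                           # assumed key; turns on span export
-CollectorTarget = 'otel-collector:4317'  # gRPC endpoint of the otel-collector
-Mode = 'tls'                             # the default; 'unencrypted' is rejected for external URIs
-TLSCertPath = '/etc/ssl/certs/otel.pem'  # assumed key; certificate used to verify the collector
-```
-
-With `Mode = 'tls'`, the node verifies the collector's certificate before exporting spans, which is why the certificate lifecycle should be automated alongside the node, as suggested above.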
-
-Once traces reach the otel-collector, the rest of the observability pipeline is flexible. We recommend deploying (through automation) centrally managed Grafana Tempo and Grafana UI instances to receive from one or many otel-collector instances. Always use networking best practices and encrypt trace data, especially at network boundaries.
-
-## Configuration
-This folder contains the following config files:
-* otel-collector-ci.yaml
-* otel-collector-dev.yaml
-* tempo.yaml
-* grafana-datasources.yaml
-
-These config files are for an OTEL collector, Grafana Tempo, and a Grafana UI instance to run as containers on the same network.
-`otel-collector-dev.yaml` is the configuration for dev environments (i.e. your local machine), and forwards traces from the otel-collector to the Grafana Tempo instance on the same network.
-`otel-collector-ci.yaml` is the configuration for CI runs, and exports the trace data to the artifact from the GitHub run.
-
-## Adding Traces to Plugins and to core
-
-Adding traces requires identifying an observability gap in a related group of code executions or a critical path in your application. This is intuitive for the developer:
-
-- "What's the flow of component interaction in this distributed system?"
-- "What's the behavior of the JobProcessorOne component when jobs with [x, y, z] attributes are processed?"
-- "Is this critical path workflow behaving the way we expect?"
-
-The developer will measure a flow of execution from end to end in one trace. Each logically separate measure of this flow is called a span. A span has at most one parent span and may have many child spans; in aggregate, the parent-child relationships form a directed acyclic graph. The trace begins at the root of this graph.
-
-The most trivial application of a span is measuring top-level performance in one critical path. There is much more you can do, including creating human-readable, timestamped events within a span (useful for monitoring concurrent access to resources), recording errors, linking parent and child spans through large parts of an application, and even extending a span beyond a single process.
-
-Spans are created by `tracer`s and passed through Go applications via `Context`s. A tracer must be initialized first. Both core and plugin developers will initialize a tracer from the globally registered trace provider:
-
-```
-tracer := otel.GetTracerProvider().Tracer("example.com/foo")
-```
-
-The globally registered tracer provider is available for plugins after they are initialized, and available in core after configuration is processed (`initGlobals`).
-
-Add spans by:
-```
-  func interestingFunc() {
-    // Assuming there is an appropriate parentContext
-    ctx, span := tracer.Start(parentContext, "hello-span")
-    defer span.End()
-
-    // do some work to track with hello-span
-  }
-```
-As implied by the example, `span` is a child of its parent span captured by `parentContext`.
-
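-The snippet above only measures duration. Spans can also carry the events, attributes, and error state mentioned earlier. A minimal sketch using the OpenTelemetry Go API (`tracer` is the tracer initialized above; the span name, attribute key, and `doWork` helper are illustrative assumptions, not taken from this codebase):
-
-```
-import (
-    "context"
-
-    "go.opentelemetry.io/otel/attribute"
-    "go.opentelemetry.io/otel/codes"
-    "go.opentelemetry.io/otel/trace"
-)
-
-func processJob(ctx context.Context, jobID string) error {
-    // Attach searchable metadata to the span at creation time.
-    ctx, span := tracer.Start(ctx, "process-job",
-        trace.WithAttributes(attribute.String("job.id", jobID)))
-    defer span.End()
-
-    // A human-readable, timestamped event within the span.
-    span.AddEvent("acquired-resource")
-
-    if err := doWork(ctx); err != nil {
-        // Record the error and mark the span as failed.
-        span.RecordError(err)
-        span.SetStatus(codes.Error, "job failed")
-        return err
-    }
-    return nil
-}
-```
-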
-Note that in certain situations, there are 3rd-party libraries that will set up spans for you. For instance:
-
-```
-import (
-    "github.com/gin-gonic/gin"
-    "go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin"
-)
-
-router := gin.Default()
-router.Use(otelgin.Middleware("service-name"))
-```
-
-The developer aligns with best practices when they:
-- Start with critical paths
-- Measure paths from end to end (Context is wired all the way through)
-- Emphasize broadness of measurement over depth
-- Use automatic instrumentation if possible
\ No newline at end of file
diff --git a/.github/tracing/grafana-datasources.yaml b/.github/tracing/grafana-datasources.yaml
deleted file mode 100644
index 098b06ec76..0000000000
--- a/.github/tracing/grafana-datasources.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-apiVersion: 1
-
-datasources:
-- name: Tempo
-  type: tempo
-  access: proxy
-  orgId: 1
-  url: http://tempo:3200
-  basicAuth: false
-  isDefault: true
-  version: 1
-  editable: false
-  apiVersion: 1
-  uid: tempo
-  jsonData:
-    httpMethod: GET
-    serviceMap:
-      datasourceUid: prometheus
\ No newline at end of file
diff --git a/.github/tracing/local-smoke-docker-compose.yaml b/.github/tracing/local-smoke-docker-compose.yaml
deleted file mode 100644
index 744ba88ef6..0000000000
--- a/.github/tracing/local-smoke-docker-compose.yaml
+++ /dev/null
@@ -1,48 +0,0 @@
-version: "3"
-services:
-
-  # ... the OpenTelemetry Collector configured to receive traces and export to Tempo ...
-  otel-collector:
-    image: otel/opentelemetry-collector:0.61.0
-    command: [ "--config=/etc/otel-collector.yaml" ]
-    volumes:
-      - ./otel-collector-dev.yaml:/etc/otel-collector.yaml
-      - ../../integration-tests/smoke/traces/trace-data.json:/etc/trace-data.json # local trace data stored consistent with smoke/logs
-    ports:
-      - "4317:4317" # otlp grpc
-      - "3100:3100"
-    depends_on:
-      - tempo
-    networks:
-      - tracing-network
-
-  # .. Which accepts requests from grafana ...
- tempo: - image: grafana/tempo:latest - command: [ "-config.file=/etc/tempo.yaml" ] - volumes: - - ./tempo.yaml:/etc/tempo.yaml - - ./tempo-data:/tmp/tempo - ports: - - "4317" # otlp grpc - networks: - - tracing-network - - grafana: - image: grafana/grafana:9.4.3 - volumes: - - ./grafana-datasources.yaml:/etc/grafana/provisioning/datasources/datasources.yaml - environment: - - GF_AUTH_ANONYMOUS_ENABLED=true - - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin - - GF_AUTH_DISABLE_LOGIN_FORM=true - - GF_FEATURE_TOGGLES_ENABLE=traceqlEditor - ports: - - "3000:3000" - networks: - - tracing-network - -networks: - tracing-network: - name: tracing - driver: bridge \ No newline at end of file diff --git a/.github/tracing/otel-collector-ci.yaml b/.github/tracing/otel-collector-ci.yaml deleted file mode 100644 index 0bf123d29b..0000000000 --- a/.github/tracing/otel-collector-ci.yaml +++ /dev/null @@ -1,22 +0,0 @@ -receivers: - otlp: - protocols: - grpc: - endpoint: "0.0.0.0:4317" - http: - endpoint: "0.0.0.0:3100" -exporters: - file: - path: /tracing/trace-data.json - otlp: - endpoint: tempo:4317 - tls: - insecure: true -service: - telemetry: - logs: - level: "debug" # Set log level to debug - pipelines: - traces: - receivers: [otlp] - exporters: [file,otlp] \ No newline at end of file diff --git a/.github/tracing/otel-collector-dev.yaml b/.github/tracing/otel-collector-dev.yaml deleted file mode 100644 index dd059127b8..0000000000 --- a/.github/tracing/otel-collector-dev.yaml +++ /dev/null @@ -1,20 +0,0 @@ -receivers: - otlp: - protocols: - grpc: - endpoint: "0.0.0.0:4317" - http: - endpoint: "0.0.0.0:3100" -exporters: - otlp: - endpoint: tempo:4317 - tls: - insecure: true -service: - telemetry: - logs: - level: "debug" # Set log level to debug - pipelines: - traces: - receivers: [otlp] - exporters: [otlp] \ No newline at end of file diff --git a/.github/tracing/replay.sh b/.github/tracing/replay.sh deleted file mode 100644 index b2e564567c..0000000000 --- a/.github/tracing/replay.sh +++ /dev/null @@ -1,6 +0,0 @@ -# Read JSON file and loop through each trace -while IFS= read -r trace; do - curl -X POST http://localhost:3100/v1/traces \ - -H "Content-Type: application/json" \ - -d "$trace" -done < "trace-data" diff --git a/.github/tracing/tempo.yaml b/.github/tracing/tempo.yaml deleted file mode 100644 index aa8c0ecbf0..0000000000 --- a/.github/tracing/tempo.yaml +++ /dev/null @@ -1,24 +0,0 @@ -server: - http_listen_port: 3200 - -distributor: - receivers: - otlp: - protocols: - http: - grpc: - -ingester: - max_block_duration: 5m # cut the headblock when this much time passes. this is being set for demo purposes and should probably be left alone normally - -compactor: - compaction: - block_retention: 1h # overall Tempo trace retention. set for demo purposes - -storage: - trace: - backend: local # backend configuration to use - wal: - path: /tmp/tempo/wal # where to store the wal locally - local: - path: /tmp/tempo/blocks \ No newline at end of file diff --git a/.github/workflows/auto-update.yml b/.github/workflows/auto-update.yml deleted file mode 100644 index 963145c404..0000000000 --- a/.github/workflows/auto-update.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Auto Update -on: - push: - branches: - - develop -jobs: - autoupdate: - name: Auto Update - runs-on: ubuntu-latest - steps: - - uses: docker://chinthakagodawita/autoupdate-action:v1 - env: - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - PR_FILTER: "labelled" - PR_LABELS: "auto-update" - MERGE_MSG: "Branch was auto-updated." 
- MERGE_CONFLICT_ACTION: "ignore" diff --git a/.github/workflows/automation-benchmark-tests.yml b/.github/workflows/automation-benchmark-tests.yml deleted file mode 100644 index c21171a83d..0000000000 --- a/.github/workflows/automation-benchmark-tests.yml +++ /dev/null @@ -1,101 +0,0 @@ -name: Automation Benchmark Test -on: - workflow_dispatch: - inputs: - testType: - description: Type of test to run (benchmark, soak) - required: true - default: benchmark - type: string - base64Config: - description: base64-ed config - required: true - type: string - slackMemberID: - description: Notifies test results (Not your @) - required: true - default: U02Q14G80TY - type: string -jobs: - automation_benchmark: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - name: Automation Benchmark Test - runs-on: ubuntu22.04-16cores-64GB - env: - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: C03KJ5S7KEK - CHAINLINK_ENV_USER: ${{ github.actor }} - REF_NAME: ${{ github.head_ref || github.ref_name }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Get Slack config and mask base64 config - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV - - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - suites: benchmark chaos reorg load - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - env: - DETACH_RUNNER: true - TEST_SUITE: benchmark - TEST_ARGS: -test.timeout 720h - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - TEST_TYPE: ${{ github.event.inputs.testType }} - TEST_TEST_TYPE: ${{ github.event.inputs.testType }} - RR_MEM: 4Gi - TEST_LOG_LEVEL: info - with: - test_command_to_run: cd integration-tests && go test -timeout 30m -v -run ^TestAutomationBenchmark$ ./benchmark -count=1 - test_download_vendor_packages_command: make gomod - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION 
}}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1
-        with:
-          id: automation-benchmark-build-test-image
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Automation Benchmark Test
-        continue-on-error: true
diff --git a/.github/workflows/automation-load-tests.yml b/.github/workflows/automation-load-tests.yml
deleted file mode 100644
index 187ca3b5b6..0000000000
--- a/.github/workflows/automation-load-tests.yml
+++ /dev/null
@@ -1,122 +0,0 @@
-name: Automation Load Test
-on:
-  workflow_dispatch:
-    inputs:
-      base64Config:
-        description: base64-ed config
-        required: true
-        type: string
-      slackMemberID:
-        description: Notifies test results (Not your @)
-        required: true
-        default: U02Q14G80TY
-        type: string
-      test_secrets_override_key:
-        description: 'Key to run tests with custom test secrets'
-        required: false
-        type: string
-
-jobs:
-  automation_load:
-    environment: integration
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    name: Automation Load Test
-    runs-on: ubuntu22.04-16cores-64GB
-    env:
-      SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
-      SLACK_CHANNEL: C03KJ5S7KEK
-      CHAINLINK_ENV_USER: ${{ github.actor }}
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          ref: ${{ env.REF_NAME }}
-      - name: Get Slack config and mask base64 config
-        run: |
-          SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$SLACK_USER
-          echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV
-
-          BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-      - name: Merge Pyroscope config
-        env:
-          PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }}
-          PYROSCOPE_ENVIRONMENT: "automation-load-test"
-          PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }}
-        run: |
-          decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d)

-          # use Pyroscope config from GH secrets and merge it with the base64 input
-          cat << EOF > config.toml
-          server_url="$PYROSCOPE_SERVER"
-          environment="$PYROSCOPE_ENVIRONMENT"
-          key_secret="$PYROSCOPE_KEY"
-          EOF

-          echo "$decoded_toml" >> final_config.toml
-          cat config.toml >> final_config.toml
-          BASE64_CONFIG_OVERRIDE=$(cat final_config.toml | base64 -w 0)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-      - name: Parse base64 config
-        uses: ./.github/actions/setup-parse-base64-config
-        with:
-          base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }}
-      - name: Send details to Step Summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY
-      - name: Build Test Image
-        uses: ./.github/actions/build-test-image
-        with:
-
QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - suites: benchmark chaos reorg load - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - env: - RR_CPU: 4000m - RR_MEM: 4Gi - DETACH_RUNNER: true - TEST_SUITE: automationv2_1 - TEST_ARGS: -test.timeout 720h - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - with: - test_command_to_run: cd integration-tests/load && go test -timeout 1h -v -run TestLogTrigger ./automationv2_1 -count=1 - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - test_download_vendor_packages_command: make gomod - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-load-test - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation Load Test - continue-on-error: true diff --git a/.github/workflows/automation-nightly-tests.yml b/.github/workflows/automation-nightly-tests.yml deleted file mode 100644 index f018124624..0000000000 --- a/.github/workflows/automation-nightly-tests.yml +++ /dev/null @@ -1,300 +0,0 @@ -name: Automation Nightly Tests -on: - schedule: - - cron: "0 0 * * *" # Run nightly - push: - tags: - - "*" - workflow_dispatch: - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu22.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-nightly-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - automation-upgrade-tests: - environment: integration - permissions: - checks: write - 
pull-requests: write - id-token: write - contents: read - needs: [build-chainlink] - env: - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: info - SELECTED_NETWORKS: "SIMULATED" - strategy: - fail-fast: false - matrix: - tests: - - name: Upgrade 2.0 - id: upgrade-2-0 - suite: smoke - nodes: 1 - os: ubuntu22.04-8cores-32GB - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_0 ./smoke - - name: Upgrade 2.1 - id: upgrade-2-1 - suite: smoke - nodes: 5 - os: ubuntu22.04-8cores-32GB - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_1 ./smoke - - name: Upgrade 2.2 - id: upgrade-2-2 - suite: smoke - nodes: 5 - os: ubuntu22.04-8cores-32GB - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_2 ./smoke - runs-on: ${{ matrix.tests.os }} - name: Automation ${{ matrix.tests.name }} Test - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-upgrade-config - with: - selectedNetworks: ${{ env.SELECTED_NETWORKS }} - chainlinkImage: "public.ecr.aws/chainlink/chainlink" - chainlinkVersion: "latest" - upgradeImage: ${{ env.CHAINLINK_IMAGE }} - upgradeVersion: ${{ github.sha }} - runId: ${{ github.run_id }} - testLogCollect: "true" - lokiEndpoint: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - lokiTenantId: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - lokiBasicAuth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - env: - TEST_SUITE: ${{ matrix.tests.suite }} - with: - test_command_to_run: cd ./integration-tests && go test -timeout 60m -count=1 -json -test.parallel=${{ matrix.tests.nodes }} ${{ matrix.tests.command }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - cl_repo: 'public.ecr.aws/chainlink/chainlink' - cl_image_tag: 'latest' - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_location: ./integration-tests/${{ matrix.tests.suite }}/logs - artifacts_name: testcontainers-logs-${{ matrix.tests.name }} - publish_check_name: Automation Results ${{ matrix.tests.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Upload test log - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - if: failure() - with: - 
name: gotest-logs-${{ matrix.tests.name }} - path: /tmp/gotest.log - retention-days: 7 - continue-on-error: true - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-nightly-upgrade-tests-${{ matrix.tests.id }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation ${{ matrix.tests.name }} Test - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - - test-notify: - name: Start Slack Thread - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [ automation-upgrade-tests ] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: C03KJ5S7KEK - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "Automation Nightly Tests ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - test-results: - name: Post Test Results for ${{ matrix.name }} - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: test-notify - strategy: - fail-fast: false - matrix: - name: [ Upgrade 2.0, Upgrade 2.1, Upgrade 2.2 ] - steps: - - name: Get Results - id: test-results - run: | - # I feel like there's some clever, fully jq way to do this, but I ain't got the motivation to figure it out - echo "Querying test results" - - PARSED_RESULTS=$(curl \ - -H "Authorization: Bearer ${{ github.token }}" \ - 'https://api.github.com/repos/${{github.repository}}/actions/runs/${{ github.run_id }}/jobs' \ - | jq -r --arg pattern "${{ matrix.name }} Test" '.jobs[] - | select(.name | test($pattern)) as $job - | $job.steps[] - | select(.name == "Run Tests") - | { conclusion: (if .conclusion == "success" then ":white_check_mark:" else ":x:" end), product: ("*" + ($job.name | capture($pattern).product) + "*") }') - - echo "Parsed Results:" - echo $PARSED_RESULTS - - ALL_SUCCESS=true - for row in $(echo "$PARSED_RESULTS" | jq -s | jq -r '.[] | select(.conclusion 
!= ":white_check_mark:")'); do
-            # If any parsed step was not a success, flag the whole matrix as failed
-            ALL_SUCCESS=false
-            break
-          done

-          echo all_success=$ALL_SUCCESS >> $GITHUB_OUTPUT

-          FORMATTED_RESULTS=$(echo $PARSED_RESULTS | jq -s '[.[]
-          | {
-              conclusion: .conclusion,
-              product: .product
-            }
-          ]
-          | map("{\"type\": \"section\", \"text\": {\"type\": \"mrkdwn\", \"text\": \"\(.product): \(.conclusion)\"}}")
-          | join(",")')

-          echo "Formatted Results:"
-          echo $FORMATTED_RESULTS

-          # Cleans out backslashes and quotes from jq
-          CLEAN_RESULTS=$(echo "$FORMATTED_RESULTS" | sed 's/\\\"/"/g' | sed 's/^"//;s/"$//')

-          echo "Clean Results"
-          echo $CLEAN_RESULTS

-          echo results=$CLEAN_RESULTS >> $GITHUB_OUTPUT

-      - name: Test Details
-        uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
-        with:
-          channel-id: C03KJ5S7KEK
-          payload: |
-            {
-              "thread_ts": "${{ needs.test-notify.outputs.thread_ts }}",
-              "attachments": [
-                {
-                  "color": "${{ steps.test-results.outputs.all_success && '#2E7D32' || '#C62828' }}",
-                  "blocks": [
-                    {
-                      "type": "header",
-                      "text": {
-                        "type": "plain_text",
-                        "text": "${{ matrix.name }} ${{ steps.test-results.outputs.all_success && ':white_check_mark:' || ':x: Notifying <@U02Q14G80TY>'}}",
-                        "emoji": true
-                      }
-                    },
-                    {
-                      "type": "divider"
-                    },
-                    ${{ steps.test-results.outputs.results }}
-                  ]
-                }
-              ]
-            }
-        env:
-          SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }}
\ No newline at end of file
diff --git a/.github/workflows/automation-ondemand-tests.yml b/.github/workflows/automation-ondemand-tests.yml
deleted file mode 100644
index 7514743fa8..0000000000
--- a/.github/workflows/automation-ondemand-tests.yml
+++ /dev/null
@@ -1,318 +0,0 @@
-name: Automation On Demand Tests
-on:
-  workflow_dispatch:
-    inputs:
-      chainlinkVersionUpdate:
-        description: Chainlink image version to upgrade to
-        required: false
-        type: string
-      chainlinkImageUpdate:
-        description: Chainlink image repo to upgrade to (Leave empty to build from head/ref)
-        options:
-          - public.ecr.aws/chainlink/chainlink
-          - QA_ECR
-        type: choice
-      chainlinkVersion:
-        description: Chainlink image version to use initially for upgrade test
-        default: latest
-        required: true
-        type: string
-      chainlinkImage:
-        description: Chainlink image repo to use initially for upgrade test
-        required: true
-        options:
-          - public.ecr.aws/chainlink/chainlink
-          - QA_ECR
-        type: choice
-      enableChaos:
-        description: Check to enable chaos tests
-        type: boolean
-        default: false
-        required: true
-      enableReorg:
-        description: Check to enable reorg tests
-        type: boolean
-        default: false
-        required: true
-
-env:
-  ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }}
-  CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-
-jobs:
-  build-chainlink:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    strategy:
-      matrix:
-        image:
-          - name: ""
-            dockerfile: core/chainlink.Dockerfile
-            tag-suffix: ""
-          - name: (plugins)
-            dockerfile: plugins/chainlink.Dockerfile
-            tag-suffix: -plugins
-    name: Build Chainlink Image ${{ matrix.image.name }}
-    runs-on: ubuntu22.04-16cores-64GB
-    steps:
-      - name: Collect Metrics
-        if: inputs.chainlinkImage == ''
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1
-        with:
-          id: automation-on-demand-build-chainlink
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-
hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Check if image exists - if: inputs.chainlinkImage == '' - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: chainlink - tag: ${{ github.sha }}${{ matrix.image.tag-suffix }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' && inputs.chainlinkImage == '' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - cl_repo: smartcontractkit/chainlink - cl_ref: ${{ github.sha }} - cl_dockerfile: ${{ matrix.image.dockerfile }} - push_tag: ${{ env.CHAINLINK_IMAGE }}:${{ github.sha }}${{ matrix.image.tag-suffix }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Print Chainlink Image Built - if: inputs.chainlinkImage == '' - run: | - echo "### chainlink node image tag used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - - build-test-image: - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu22.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-on-demand-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Build Test Image - if: inputs.enableChaos || inputs.enableReorg - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - automation-on-demand-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-test-image] - env: - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: info - strategy: - fail-fast: false - matrix: - tests: - - name: chaos - id: chaos - suite: chaos - nodes: 20 - os: ubuntu-latest - enabled: ${{ inputs.enableChaos }} - pyroscope_env: ci-automation-on-demand-chaos - network: SIMULATED - command: -run ^TestAutomationChaos$ ./chaos - - name: reorg 2.0 - id: reorg-2.0 - suite: reorg - nodes: 1 - os: ubuntu-latest - enabled: ${{ inputs.enableReorg }} - pyroscope_env: ci-automation-on-demand-reorg - network: SIMULATED - command: -run ^TestAutomationReorg/registry_2_0 ./reorg - - name: reorg 2.1 - id: reorg-2.1 - suite: reorg - nodes: 2 - os: ubuntu-latest - enabled: ${{ inputs.enableReorg }} - pyroscope_env: ci-automation-on-demand-reorg - network: SIMULATED - command: -run 
^TestAutomationReorg/registry_2_1 ./reorg - - name: reorg 2.2 - id: reorg-2.2 - suite: reorg - nodes: 2 - os: ubuntu-latest - enabled: ${{ inputs.enableReorg }} - pyroscope_env: ci-automation-on-demand-reorg - network: SIMULATED - command: -run ^TestAutomationReorg/registry_2_2 ./reorg - - name: reorg 2.3 - id: reorg-2.3 - suite: reorg - nodes: 2 - os: ubuntu-latest - enabled: ${{ inputs.enableReorg }} - pyroscope_env: ci-automation-on-demand-reorg - network: SIMULATED - command: -run ^TestAutomationReorg/registry_2_3 ./reorg - - name: upgrade 2.0 - id: upgrade-2.0 - type: upgrade - suite: smoke - nodes: 1 - os: ubuntu22.04-8cores-32GB - enabled: true - pyroscope_env: ci-automation-on-demand-upgrade - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_0 ./smoke - - name: upgrade 2.1 - id: upgrade-2.1 - type: upgrade - suite: smoke - nodes: 5 - os: ubuntu22.04-8cores-32GB - enabled: true - pyroscope_env: ci-automation-on-demand-upgrade - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_1 ./smoke - - name: upgrade 2.2 - id: upgrade-2.2 - type: upgrade - suite: smoke - nodes: 5 - os: ubuntu22.04-8cores-32GB - enabled: true - pyroscope_env: ci-automation-on-demand-upgrade - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade/registry_2_2 ./smoke - runs-on: ${{ matrix.tests.os }} - name: Automation On Demand ${{ matrix.tests.name }} Test - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Determine build to use - id: determine-build - shell: bash - run: | - if [[ "${{ inputs.chainlinkImage }}" == "QA_ECR" ]]; then - echo "image=${{ env.CHAINLINK_IMAGE }}" >>$GITHUB_OUTPUT - else - echo "image=${{ inputs.chainlinkImage }}" >>$GITHUB_OUTPUT - fi - if [[ "${{ inputs.chainlinkImageUpdate }}" == "QA_ECR" ]]; then - echo "upgrade_image=${{ env.CHAINLINK_IMAGE }}" >>$GITHUB_OUTPUT - else - echo "upgrade_image=${{ inputs.chainlinkImageUpdate }}" >>$GITHUB_OUTPUT - fi - if [[ -z "${{ inputs.chainlinkVersion }}" ]] && [[ "${{ inputs.chainlinkImage }}" == "QA_ECR" ]]; then - echo "version=${{ github.sha }}" >>$GITHUB_OUTPUT - else - echo "version=${{ inputs.chainlinkVersion }}" >>$GITHUB_OUTPUT - fi - if [[ -z "${{ inputs.chainlinkVersionUpdate }}" ]] && [[ "${{ inputs.chainlinkImageUpdate }}" == "QA_ECR" ]]; then - echo "upgrade_version=${{ github.sha }}" >>$GITHUB_OUTPUT - else - echo "upgrade_version=${{ inputs.chainlinkVersionUpdate }}" >>$GITHUB_OUTPUT - fi - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@aa8eea635029ab8d95abd3c206f56dae1e22e623 # v2.3.28 - if: ${{ matrix.tests.enabled == true }} - with: - test_config_override_base64: ${{ env.BASE64_CONFIG_OVERRIDE }} - test_command_to_run: cd ./integration-tests && go test -timeout 60m -count=1 -json -test.parallel=${{ matrix.tests.nodes }} ${{ matrix.tests.command }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd 
./integration-tests && go mod download - test_suite: ${{ matrix.tests.suite }} - test_config_chainlink_version: ${{ steps.determine-build.outputs.version }} - test_config_chainlink_upgrade_version: ${{ steps.determine-build.outputs.upgrade_version }} - test_config_selected_networks: ${{ matrix.tests.network }} - test_config_logging_run_id: ${{ github.run_id }} - test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_config_test_log_collect: "true" - cl_repo: ${{ steps.determine-build.outputs.image }} - cl_image_tag: ${{ steps.determine-build.outputs.version }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_location: ./integration-tests/${{ matrix.tests.suite }}/logs - publish_check_name: Automation On Demand Results ${{ matrix.tests.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - DEFAULT_CHAINLINK_IMAGE: ${{ steps.determine-build.outputs.image }} - DEFAULT_CHAINLINK_UPGRADE_IMAGE: ${{ steps.determine-build.outputs.upgrade_image }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_SERVER_URL: ${{ matrix.tests.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - DEFAULT_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }} - DEFAULT_PYROSCOPE_ENABLED: ${{ matrix.tests.pyroscope_env == '' || !startsWith(github.ref, 'refs/tags/') && 'false' || 'true' }} - - - name: Upload test log - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - if: failure() - with: - name: test-log-${{ matrix.tests.name }} - path: /tmp/gotest.log - retention-days: 7 - continue-on-error: true - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: automation-on-demand-tests-${{ matrix.tests.id }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation On Demand ${{ matrix.tests.name }} Test - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true diff --git a/.github/workflows/bash-scripts.yml b/.github/workflows/bash-scripts.yml deleted file mode 100644 index b27def4b5e..0000000000 --- a/.github/workflows/bash-scripts.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Bash Scripts - -on: - push: - branches: - - this-workflow-is-disabled-for-ccip - -jobs: - changes: - name: detect changes - runs-on: ubuntu-latest - outputs: - bash-scripts-src: ${{ steps.bash-scripts.outputs.src }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # 
v3.0.2
-        id: bash-scripts
-        with:
-          filters: |
-            src:
-              - 'tools/bin/**'
-              - '.github/workflows/bash-scripts.yml'
-  shellcheck:
-    name: ShellCheck Lint
-    runs-on: ubuntu-latest
-    needs: [changes]
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-      - name: Run ShellCheck
-        if: needs.changes.outputs.bash-scripts-src == 'true'
-        uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # v2.0.0
-        with:
-          scandir: "./tools/bin"
-          # Consider changing this to check for warnings once all warnings are fixed.
-          severity: error
diff --git a/.github/workflows/build-publish-develop.yml b/.github/workflows/build-publish-develop.yml
deleted file mode 100644
index 6e8e5ba3f5..0000000000
--- a/.github/workflows/build-publish-develop.yml
+++ /dev/null
@@ -1,68 +0,0 @@
-name: "Push develop to private ECR"
-
-on:
-  push:
-    branches:
-      - ccip-develop
-  workflow_dispatch:
-    inputs:
-      git_ref:
-        description: "Git ref (commit SHA, branch name, tag name, etc.) to checkout"
-        required: true
-env:
-  GIT_REF: ${{ github.event.inputs.git_ref || github.ref }}
-
-jobs:
-  push-ccip-develop:
-    runs-on: ubuntu-20.04
-    environment: build-develop
-    permissions:
-      id-token: write
-      contents: read
-    strategy:
-      matrix:
-        image:
-          - name: ""
-            dockerfile: core/chainlink.Dockerfile
-            tag-suffix: ""
-          - name: (plugins)
-            dockerfile: plugins/chainlink.Dockerfile
-            tag-suffix: -plugins
-    name: push-ccip-develop ${{ matrix.image.name }}
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
-        with:
-          ref: ${{ env.GIT_REF }}
-      # When this is run from a manual workflow_dispatch, the github.sha may be
-      # different from the checked-out commit SHA. The core build uses this
-      # commit SHA as build metadata, so we need to make sure it's correct.
-      - name: Get checked out git ref
-        if: github.event.inputs.git_ref
-        id: git-ref
-        run: echo "checked-out=$(git rev-parse HEAD)" | tee -a "${GITHUB_OUTPUT}"
-      - name: Build, sign and publish ccip image
-        uses: ./.github/actions/build-sign-publish-chainlink
-        with:
-          publish: true
-          aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_ARN }}
-          aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }}
-          aws-region: ${{ secrets.AWS_REGION }}
-          ecr-hostname: ${{ secrets.AWS_DEVELOP_ECR_HOSTNAME }}
-          ecr-image-name: ccip-develop
-          ecr-tag-suffix: ${{ matrix.image.tag-suffix }}
-          dockerfile: ${{ matrix.image.dockerfile }}
-          dockerhub_username: ${{ secrets.DOCKER_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKER_READONLY_PASSWORD }}
-          git-commit-sha: ${{ steps.git-ref.outputs.checked-out || github.sha }}
-
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: push-ccip-develop ${{ matrix.image.name }}
-        continue-on-error: true
diff --git a/.github/workflows/build-publish-pr.yml b/.github/workflows/build-publish-pr.yml
deleted file mode 100644
index fd62739376..0000000000
--- a/.github/workflows/build-publish-pr.yml
+++ /dev/null
@@ -1,66 +0,0 @@
-name: "Build and Publish from PR"
-
-##
-# This workflow builds and publishes a Docker image for Chainlink from a PR.
-# It has its own special IAM role, does not sign the image, and publishes to
-# a special ECR repo.
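-# The image is tagged with the short SHA of the PR head commit (sha-<short-sha>).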
-## - -on: - pull_request: - -jobs: - build-publish-untrusted: - if: ${{ ! startsWith(github.ref_name, 'release/') || (! startsWith(github.head_ref, 'release/') && ! startsWith(github.ref_name, 'chore/'))}} - runs-on: ubuntu-20.04 - environment: sdlc - permissions: - id-token: write - contents: read - env: - ECR_IMAGE_NAME: crib-ccip-untrusted - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Git Short SHA - shell: bash - env: - GIT_PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} - run: | - echo "GIT_SHORT_SHA=${GIT_PR_HEAD_SHA:0:7}" | tee -a "$GITHUB_ENV" - - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: ${{ env.ECR_IMAGE_NAME}} - tag: sha-${{ env.GIT_SHORT_SHA }} - AWS_REGION: ${{ secrets.AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.AWS_OIDC_IAM_ROLE_PUBLISH_PR_ARN }} - - - name: Build and publish chainlink image - if: steps.check-image.outputs.exists == 'false' - uses: ./.github/actions/build-sign-publish-chainlink - with: - publish: true - aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_PUBLISH_PR_ARN }} - aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS_DEFAULT }} - aws-region: ${{ secrets.AWS_REGION }} - sign-images: false - ecr-hostname: ${{ secrets.AWS_SDLC_ECR_HOSTNAME }} - ecr-image-name: ${{ env.ECR_IMAGE_NAME }} - dockerhub_username: ${{ secrets.DOCKER_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKER_READONLY_PASSWORD }} - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: build-chainlink-pr - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: build-publish-untrusted - continue-on-error: true diff --git a/.github/workflows/build-publish.yml b/.github/workflows/build-publish.yml deleted file mode 100644 index 4a477a0e14..0000000000 --- a/.github/workflows/build-publish.yml +++ /dev/null @@ -1,113 +0,0 @@ -name: "Build Chainlink and Publish" - -on: - # Mimics old circleci behaviour - push: - tags: - - "v*" - branches: - - "release/**" - -env: - ECR_HOSTNAME: public.ecr.aws - ECR_IMAGE_NAME: chainlink/chainlink - -jobs: - checks: - name: "Checks" - runs-on: ubuntu-20.04 - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check for VERSION file bump on tags - # Avoids checking VERSION file bump on forks. - if: ${{ github.repository == 'smartcontractkit/chainlink' && startsWith(github.ref, 'refs/tags/v') }} - uses: ./.github/actions/version-file-bump - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - - build-sign-publish-chainlink: - needs: [checks] - if: ${{ ! 
startsWith(github.ref_name, 'release/') }} - runs-on: ubuntu-20.04 - environment: build-publish - permissions: - id-token: write - contents: read - outputs: - docker-image-tag: ${{ steps.build-sign-publish.outputs.docker-image-tag }} - docker-image-digest: ${{ steps.build-sign-publish.outputs.docker-image-digest }} - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Build, sign and publish chainlink image - id: build-sign-publish - uses: ./.github/actions/build-sign-publish-chainlink - with: - publish: true - aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_ARN }} - aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} - aws-region: ${{ secrets.AWS_REGION }} - ecr-hostname: ${{ env.ECR_HOSTNAME }} - ecr-image-name: ${{ env.ECR_IMAGE_NAME }} - sign-images: true - sign-method: "keypair" - cosign-private-key: ${{ secrets.COSIGN_PRIVATE_KEY }} - cosign-public-key: ${{ secrets.COSIGN_PUBLIC_KEY }} - cosign-password: ${{ secrets.COSIGN_PASSWORD }} - dockerhub_username: ${{ secrets.DOCKER_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKER_READONLY_PASSWORD }} - verify-signature: true - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: build-chainlink-publish - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: build-sign-publish-chainlink - continue-on-error: true - - # Notify Slack channel for new git tags. - slack-notify: - if: github.ref_type == 'tag' - needs: [build-sign-publish-chainlink] - runs-on: ubuntu-24.04 - environment: build-publish - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Notify Slack - uses: smartcontractkit/.github/actions/slack-notify-git-ref@31e00facdd8f57a2bc7868b5e4c8591bf2aa3727 # slack-notify-git-ref@0.1.2 - with: - slack-channel-id: ${{ secrets.SLACK_CHANNEL_RELEASE_NOTIFICATIONS }} - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN_RELENG }} # Releng Bot - git-ref: ${{ github.ref_name }} - git-ref-type: ${{ github.ref_type }} - changelog-url: >- - ${{ - github.ref_type == 'tag' && - format( - 'https://github.com/{0}/blob/{1}/CHANGELOG.md', - github.repository, - github.ref_name - ) || '' - }} - docker-image-name: >- - ${{ - github.ref_type == 'tag' && - format( - '{0}/{1}:{2}', - env.ECR_HOSTNAME, - env.ECR_IMAGE_NAME, - needs.build-sign-publish-chainlink.outputs.docker-image-tag - ) || '' - }} - docker-image-digest: >- - ${{ - github.ref_type == 'tag' && - needs.build-sign-publish-chainlink.outputs.docker-image-digest || '' - }} diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index b82b5a8203..0000000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: "Build Chainlink" - -on: - pull_request: - -jobs: - build-chainlink: - runs-on: ubuntu-20.04 - if: ${{ ! startsWith(github.head_ref, 'release/') && ! 
startsWith(github.ref_name, 'chore/') }} - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: change - with: - predicate-quantifier: every - filters: | - changelog-only: - - 'CHANGELOG.md' - - '!common/**' - - '!contracts/**' - - '!core/**' - - '!crib/**' - - '!dashboard-lib/**' - - '!fuzz/**' - - '!integration-tests/**' - - '!internal/**' - - '!operator_ui/**' - - '!plugins/**' - - '!tools/**' - - - name: Build chainlink image - if: ${{ steps.change.outputs.changelog-only == 'false' }} - uses: ./.github/actions/build-sign-publish-chainlink - with: - dockerhub_username: ${{ secrets.DOCKER_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKER_READONLY_PASSWORD }} - publish: false - sign-images: false - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: build-chainlink - continue-on-error: true diff --git a/.github/workflows/ccip-chaos-tests.yml b/.github/workflows/ccip-chaos-tests.yml deleted file mode 100644 index da868886f1..0000000000 --- a/.github/workflows/ccip-chaos-tests.yml +++ /dev/null @@ -1,259 +0,0 @@ -name: CCIP Chaos Tests -on: - workflow_run: - workflows: [ CCIP Load Test ] - types: [ completed ] - branches: [ ccip-develop ] - workflow_dispatch: - - - -# Only run 1 of this workflow at a time per PR -concurrency: - group: chaos-ccip-tests-chainlink-${{ github.ref }} - cancel-in-progress: true - -env: - CL_ECR: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ github.sha }} - MOD_CACHE_VERSION: 1 - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - with: - repository: chainlink - tag: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - env: - GH_TOKEN: ${{ github.token }} - with: - cl_repo: smartcontractkit/chainlink-ccip - cl_ref: ${{ github.sha }} - push_tag: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink:${{ github.sha }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-chaos-tests-build-chainlink-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ 
secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - build-test-image: - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-chaos-tests-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - ccip-chaos-tests: - environment: integration - permissions: - issues: read - checks: write - pull-requests: write - id-token: write - contents: read - name: CCIP Chaos Tests - runs-on: ubuntu-latest - needs: [ build-chainlink, build-test-image ] - env: - TEST_SUITE: chaos - TEST_ARGS: -test.timeout 30m - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_TRIGGERED_BY: ccip-cron-chaos-eth - TEST_LOG_LEVEL: debug - DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - GH_TOKEN: ${{ github.token }} - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-chaos-tests - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: CCIP Chaos Tests - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ github.sha }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Run Chaos Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@d38226be720c5ccc1ff4d3cee40608ebf264cd59 # v2.3.26 - env: - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - with: - test_command_to_run: cd ./integration-tests && go test -timeout 1h -count=1 -json -test.parallel 11 -run 'TestChaosCCIP' ./chaos 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci - test_download_vendor_packages_command: make gomod - artifacts_location: ./integration-tests/chaos/logs - publish_check_name: CCIP Chaos Test Results - publish_report_paths: ./tests-chaos-report.xml - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ 
secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - CGO_ENABLED: "1" - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - DEFAULT_LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - DEFAULT_CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - DEFAULT_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - - ## Notify in Slack if the job fails - - name: Notify Slack - if: failure() && github.event_name != 'workflow_dispatch' - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - with: - channel-id: "#ccip-testing" - slack-message: ":x: :mild-panic-intensifies: CCIP chaos tests failed: \n${{ format('https://github.com/{0}/actions/runs/{1}', github.repository, github.run_id) }}" - ## Always run cleanup, regardless of test outcome - - name: cleanup - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/cleanup@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - with: - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - - ccip-chaos-with-load-tests: - environment: integration - permissions: - issues: read - checks: write - pull-requests: write - id-token: write - contents: read - name: CCIP Load With Chaos Tests - if: false # Disabled until CCIP-2555 is resolved - runs-on: ubuntu-latest - needs: [ build-chainlink, build-test-image ] - env: - TEST_SUITE: load - TEST_ARGS: -test.timeout 1h - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_TRIGGERED_BY: ccip-cron-chaos-and-load-eth - TEST_LOG_LEVEL: debug - DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - GH_TOKEN: ${{ github.token }} - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-chaos-tests-with-load-test - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: CCIP load with chaos test - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ github.sha }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Run Load With Chaos Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@d38226be720c5ccc1ff4d3cee40608ebf264cd59 # v2.3.26 - env: - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - with: - test_command_to_run: cd ./integration-tests/ccip-tests && go test -timeout 2h -count=1 -json -test.parallel 4 -run '^TestLoadCCIPStableWithPodChaosDiffCommitAndExec' ./load 2>&1 | tee /tmp/gotest.log | gotestfmt -
test_download_vendor_packages_command: make gomod - artifacts_location: ./integration-tests/load/logs - publish_check_name: CCIP Chaos With Load Test Results - publish_report_paths: ./tests-chaos-with-load-report.xml - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - CGO_ENABLED: "1" - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - DEFAULT_LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - DEFAULT_CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - DEFAULT_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" - ## Notify in Slack if the job fails - - name: Notify Slack - if: failure() && github.event_name != 'workflow_dispatch' - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - with: - channel-id: "#ccip-testing" - slack-message: ":x: :mild-panic-intensifies: CCIP chaos with load tests failed: \n${{ format('https://github.com/{0}/actions/runs/{1}', github.repository, github.run_id) }}" - ## Always run cleanup, regardless of test outcome - - name: cleanup - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/cleanup@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - with: - triggered_by: ${{ env.TEST_TRIGGERED_BY }} diff --git a/.github/workflows/ccip-client-compatibility-tests.yml b/.github/workflows/ccip-client-compatibility-tests.yml deleted file mode 100644 index 58c05f2f72..0000000000 --- a/.github/workflows/ccip-client-compatibility-tests.yml +++ /dev/null @@ -1,739 +0,0 @@ -name: CCIP Client Compatibility Tests -on: - schedule: - - cron: "30 5 * * TUE,FRI" # Run every Tuesday and Friday at midnight + 30min EST - push: - tags: - - "*" - merge_group: - pull_request: - workflow_dispatch: - inputs: - chainlinkVersion: - description: commit SHA or tag of the Chainlink version to test - required: true - type: string - evmImplementations: - description: comma separated list of EVM implementations to test (ignored if base64TestList is used) - required: true - type: string - default: "geth,besu,nethermind,erigon" - latestVersionsNumber: - description: how many of latest images of EVM implementations to test with (ignored if base64TestList is used) - required: true - type: number - default: 3 - base64TestList: - description: base64 encoded list of tests to run (same as base64-ed output of testlistgenerator tool) - required: false - type: string - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - -jobs: - # Build Test Dependencies - - check-dependency-bump: - name: Check for go-ethereum dependency bump - if: github.event_name == 'pull_request' || github.event_name == 'merge_group' - runs-on: ubuntu-latest - outputs: - dependency_changed: ${{ steps.changes.outputs.dependency_changed }} - steps: - - name: Checkout code - uses:
actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - fetch-depth: 0 - - name: Check for go.mod changes - id: changes - run: | - git fetch origin ${{ github.base_ref }} - # if no match is found then grep exits with code 1, but if there is a match it exits with code 0 - # this will return a match if there are any changes on that corresponding line, for example if spacing was changed - DEPENDENCY_CHANGED=$(git diff -U0 origin/${{ github.base_ref }}...HEAD -- go.mod | grep -q 'github.com/ethereum/go-ethereum'; echo $?) - PR_VERSION=$(grep 'github.com/ethereum/go-ethereum' go.mod | awk '{print $2}') - - # here 0 means a match was found, 1 means no match was found - if [ "$DEPENDENCY_CHANGED" -eq 0 ]; then - # Dependency was changed in the PR, now compare with the base branch - BASE_VERSION=$(git show origin/${{ github.base_ref }}:go.mod | grep 'github.com/ethereum/go-ethereum' | awk '{print $2}') - - echo "Base branch version: $BASE_VERSION" - echo "PR branch version: $PR_VERSION" - - echo "Dependency version changed in the PR compared to the base branch." - echo "dependency_changed=true" >> $GITHUB_OUTPUT - else - echo "No changes to ethereum/go-ethereum dependency in the PR." - echo "PR branch version: $PR_VERSION" - echo "dependency_changed=false" >> $GITHUB_OUTPUT - fi - - should-run: - if: always() - name: Check if the job should run - needs: check-dependency-bump - runs-on: ubuntu-latest - outputs: - should_run: ${{ steps.should-run.outputs.should_run }} - eth_implementations: ${{ steps.should-run.outputs.eth_implementations }} - env: - GITHUB_REF_TYPE: ${{ github.ref_type }} - steps: - - name: Check if the job should run - id: should-run - run: | - if [ "${{ needs.check-dependency-bump.outputs.dependency_changed }}" == "true" ]; then - echo "Will run tests, because go-ethereum dependency was bumped" - echo "should_run=true" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "schedule" ]; then - echo "Will run tests, because trigger event was $GITHUB_EVENT_NAME" - echo "should_run=true" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - echo "Will run tests, because trigger event was $GITHUB_EVENT_NAME" - echo "should_run=true" >> $GITHUB_OUTPUT - elif [ "$GITHUB_REF_TYPE" = "tag" ]; then - echo "Will run tests, because new tag was created" - echo "should_run=true" >> $GITHUB_OUTPUT - else - echo "Will not run tests" - echo "should_run=false" >> $GITHUB_OUTPUT - fi - - select-versions: - if: always() && needs.should-run.outputs.should_run == 'true' - name: Select Versions - needs: should-run - runs-on: ubuntu-latest - env: - RELEASED_DAYS_AGO: 4 - GITHUB_REF_TYPE: ${{ github.ref_type }} - outputs: - evm_implementations: ${{ steps.select-implementations.outputs.evm_implementations }} - chainlink_version: ${{ steps.select-chainlink-version.outputs.chainlink_version }} - latest_image_count: ${{ steps.get-image-count.outputs.image_count }} - steps: - # ghlatestreleasechecker is a tool to check if a new release is available for a given repo - - name: Set Up ghlatestreleasechecker - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/ghlatestreleasechecker@v1.0.0 - - name: Select EVM implementations to test - id: select-implementations - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - if [ "$GITHUB_EVENT_NAME" = "schedule" ]; then - echo "Checking for new releases" - implementations_arr=() -
new_geth=$(ghlatestreleasechecker "ethereum/go-ethereum" $RELEASED_DAYS_AGO) - if [ "$new_geth" != "none" ]; then - echo "New geth release found: $new_geth" - implementations_arr+=("geth") - fi - new_besu=$(ghlatestreleasechecker "hyperledger/besu" $RELEASED_DAYS_AGO) - if [ "$new_besu" != "none" ]; then - echo "New besu release found: $new_besu" - implementations_arr+=("besu") - fi - new_erigon=$(ghlatestreleasechecker "ledgerwatch/erigon" $RELEASED_DAYS_AGO) - if [ "$new_erigon" != "none" ]; then - echo "New erigon release found: $new_erigon" - implementations_arr+=("erigon") - fi - new_nethermind=$(ghlatestreleasechecker "nethermindEth/nethermind" $RELEASED_DAYS_AGO) - if [ "$new_nethermind" != "none" ]; then - echo "New nethermind release found: $new_nethermind" - implementations_arr+=("nethermind") - fi - IFS=',' - eth_implementations="${implementations_arr[*]}" - echo "Found new releases for: $eth_implementations" - echo "evm_implementations=$eth_implementations" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - if [ -n "${{ github.event.inputs.base64TestList }}" ]; then - echo "Base64-ed Test Input provided, ignoring EVM implementations" - else - echo "Will test following EVM implementations: ${{ github.event.inputs.evmImplementations }}" - echo "evm_implementations=${{ github.event.inputs.evmImplementations }}" >> $GITHUB_OUTPUT - fi - else - echo "Will test all EVM implementations" - echo "evm_implementations=geth,besu,nethermind,erigon" >> $GITHUB_OUTPUT - fi - - name: Select Chainlink CCIP version - id: select-chainlink-version - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - if [ "$GITHUB_EVENT_NAME" = "schedule" ]; then - echo "Fetching latest Chainlink CCIP stable version" - # we use 100 days since we really want the latest one, and it's highly improbable there won't be a release in the last 100 days - chainlink_version=$(ghlatestreleasechecker "smartcontractkit/ccip" 100) - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - echo "Fetching Chainlink version from input" - if [ -n "${{ github.event.inputs.chainlinkVersion }}" ]; then - echo "Chainlink version provided in input" - chainlink_version="${{ github.event.inputs.chainlinkVersion }}" - else - echo "Chainlink version not provided in input. Using latest commit SHA." - chainlink_version=${{ github.sha }} - fi - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "pull_request" ]; then - echo "Fetching Chainlink version from PR's head commit" - chainlink_version="${{ github.event.pull_request.head.sha }}" - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "merge_group" ]; then - echo "Fetching Chainlink version from merge queue's head commit" - chainlink_version="${{ github.event.merge_group.head_sha }}" - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - elif [ "$GITHUB_REF_TYPE" = "tag" ]; then - echo "Fetching Chainlink version from tag" - chainlink_version="${{ github.ref_name }}" - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - else - echo "Unsupported trigger event. It's probably an issue with the pipeline definition. Please reach out to the Test Tooling team."
- exit 1 - fi - echo "Will use the following Chainlink version: $chainlink_version" - - name: Get image count - id: get-image-count - run: | - if [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - echo "Fetching latest image count from input" - if [ -n "${{ github.event.inputs.base64TestList }}" ]; then - echo "Base64-ed Test Input provided, ignoring latest image count" - else - image_count="${{ github.event.inputs.latestVersionsNumber }}" - echo "image_count=$image_count" >> $GITHUB_OUTPUT - fi - else - echo "Fetching default latest image count" - image_count=3 - echo "image_count=$image_count" >> $GITHUB_OUTPUT - fi - echo "Will use the following latest image count: $image_count" - - check-ecr-images-exist: - name: Check images used as test dependencies exist in ECR - if: always() && needs.should-run.outputs.should_run == 'true' - environment: integration - permissions: - id-token: write - contents: read - needs: [should-run] - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - mirror: - - name: ethereum/client-go - expression: '^(alltools-v|v)[0-9]\.[0-9]+\.[0-9]+$' - - name: hyperledger/besu - expression: '^[0-9]+\.[0-9]+(\.[0-9]+)?$' - page_size: 300 - - name: thorax/erigon - expression: '^v[0-9]+\.[0-9]+\.[0-9]+$' - - name: nethermind/nethermind - expression: '^[0-9]+\.[0-9]+\.[0-9]+$' - - name: tofelb/ethereum-genesis-generator - expression: '^[0-9]+\.[0-9]+\.[0-9]+(\-slots\-per\-epoch)?' - steps: - - name: Update internal ECR if the latest Ethereum client image does not exist - uses: smartcontractkit/chainlink-testing-framework/.github/actions/update-internal-mirrors@5eea86ee4f7742b4e944561a570a6b268e712d9e # v1.30.3 - with: - aws_region: ${{ secrets.QA_AWS_REGION }} - role_to_assume: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - aws_account_number: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - image_name: ${{matrix.mirror.name}} - expression: ${{matrix.mirror.expression}} - page_size: ${{matrix.mirror.page_size}} - - build-chainlink: - if: always() && needs.should-run.outputs.should_run == 'true' - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - needs: [should-run, select-versions] - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: client-compatibility-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ needs.select-versions.outputs.chainlink_version }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ needs.select-versions.outputs.chainlink_version }} - check_image_exists: 'true' - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - get-latest-available-images: - name: Get Latest EVM Implementation's Images - if: always() && needs.should-run.outputs.should_run == 'true' - environment: integration - runs-on: ubuntu-latest - needs: [check-ecr-images-exist, should-run, select-versions] - permissions: - id-token: write - contents: read - env: - LATEST_IMAGE_COUNT: ${{
needs.select-versions.outputs.latest_image_count }} - outputs: - geth_images: ${{ env.GETH_IMAGES }} - nethermind_images: ${{ env.NETHERMIND_IMAGES }} - besu_images: ${{ env.BESU_IMAGES }} - erigon_images: ${{ env.ERIGON_IMAGES }} - steps: - # Setup AWS creds - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - with: - aws-region: ${{ secrets.QA_AWS_REGION }} - role-to-assume: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - role-duration-seconds: 3600 - # Login to ECR - - name: Login to Amazon ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1 - with: - mask-password: "true" - env: - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - # ecrimagefetcher is a tool to get latest images from ECR - - name: Set Up ecrimagefetcher - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/ecrimagefetcher@v1.0.1 - - name: Get latest docker images from ECR - if: ${{ github.event.inputs.base64TestList == '' }} - env: - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - ETH_IMPLEMENTATIONS: ${{ needs.select-versions.outputs.evm_implementations }} - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - if [[ "$ETH_IMPLEMENTATIONS" == *"geth"* ]]; then - geth_images=$(ecrimagefetcher 'ethereum/client-go' '^v[0-9]+\.[0-9]+\.[0-9]+$' ${{ env.LATEST_IMAGE_COUNT }}) - echo "GETH_IMAGES=$geth_images" >> $GITHUB_ENV - echo "Geth latest images: $geth_images" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"nethermind"* ]]; then - nethermind_images=$(ecrimagefetcher 'nethermind/nethermind' '^[0-9]+\.[0-9]+\.[0-9]+$' ${{ env.LATEST_IMAGE_COUNT }}) - echo "NETHERMIND_IMAGES=$nethermind_images" >> $GITHUB_ENV - echo "Nethermind latest images: $nethermind_images" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"besu"* ]]; then - # 24.3.3 is ignored as it doesn't support data & input fields in eth_call - besu_images=$(ecrimagefetcher 'hyperledger/besu' '^[0-9]+\.[0-9]+(\.[0-9]+)?$' ${{ env.LATEST_IMAGE_COUNT }} ">=24.5.1") - echo "BESU_IMAGES=$besu_images" >> $GITHUB_ENV - echo "Besu latest images: $besu_images" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"erigon"* ]]; then - # 2.60.0 and 2.60.1 are ignored as they stopped working with CL node - erigon_images=$(ecrimagefetcher 'thorax/erigon' '^v[0-9]+\.[0-9]+\.[0-9]+$' ${{ env.LATEST_IMAGE_COUNT }} "<v2.60.0") - echo "ERIGON_IMAGES=$erigon_images" >> $GITHUB_ENV - echo "Erigon latest images: $erigon_images" - fi - - # End Build Test Dependencies - - prepare-compatibility-matrix: - name: Prepare Compatibility Matrix - if: always() && needs.should-run.outputs.should_run == 'true' - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [get-latest-available-images,should-run,select-versions] - runs-on: ubuntu-latest - env: - ETH_IMPLEMENTATIONS: ${{ needs.select-versions.outputs.evm_implementations }} - BASE64_TEST_LIST: ${{ github.event.inputs.base64TestList }} - outputs: - matrix: ${{ env.JOB_MATRIX_JSON }} - steps: - - name: Decode Base64 Test List Input if Set - if: env.BASE64_TEST_LIST != '' - run: | - echo "Decoding base64 tests list from the input" - DECODED_BASE64_TEST_LIST=$(echo $BASE64_TEST_LIST | base64 -d) - echo "Decoded input:" - echo "$DECODED_BASE64_TEST_LIST" - is_valid=$(echo "$DECODED_BASE64_TEST_LIST" | jq . > /dev/null 2>&1; echo $?) - if [ "$is_valid" -ne 0 ]; then - echo "Invalid base64 input. Please provide a valid base64 encoded JSON list of tests."
- echo "Here is an example of valid JSON:" - cat <> $GITHUB_ENV - # testlistgenerator is a tool that builds a matrix of tests to run - - name: Set Up testlistgenerator - if: env.BASE64_TEST_LIST == '' - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/testlistgenerator@v1.1.0 - - name: Prepare matrix input - if: env.BASE64_TEST_LIST == '' - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - if [[ "$ETH_IMPLEMENTATIONS" == *"geth"* ]]; then - echo "Will test compatibility with geth" - testlistgenerator -o compatibility_test_list.json -p ccip -r TestSmokeCCIPForBidirectionalLane -f './ccip-tests/smoke/ccip_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "ccip-geth-compatibility-test" -w "SIMULATED_1,SIMULATED_2" -c 1337,2337 -n ubuntu-latest - else - echo "Will not test compatibility with geth" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"besu"* ]]; then - echo "Will test compatibility with besu" - testlistgenerator -o compatibility_test_list.json -p ccip -r TestSmokeCCIPForBidirectionalLane -f './ccip-tests/smoke/ccip_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "ccip-besu-compatibility-test" -w "SIMULATED_BESU_NONDEV_1,SIMULATED_BESU_NONDEV_2" -c 1337,2337 -n ubuntu-latest - else - echo "Will not test compatibility with besu" - fi - - # TODO: Waiting for CCIP-2255 to be resolved - if [[ "$ETH_IMPLEMENTATIONS" == *"erigon"* ]]; then - echo "Will test compatibility with erigon" - testlistgenerator -o compatibility_test_list.json -p ccip -r TestSmokeCCIPForBidirectionalLane -f './ccip-tests/smoke/ccip_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "ccip-erigon-compatibility-test" -w "SIMULATED_1,SIMULATED_2" -c 1337,2337 -n ubuntu-latest - else - echo "Will not test compatibility with erigon" - fi - - # TODO: uncomment when nethermind flake reason is addressed - if [[ "$ETH_IMPLEMENTATIONS" == *"nethermind"* ]]; then - echo "Will not test compatibility with nethermind due to flakiness" - # echo "Will test compatibility with nethermind" - # testlistgenerator -o compatibility_test_list.json -p ccip -r TestSmokeCCIPForBidirectionalLane -f './ccip-tests/smoke/ccip_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "ccip-nethermind-compatibility-test" -w "SIMULATED_1,SIMULATED_2" -c 1337,2337 -n ubuntu-latest - else - echo "Will not test compatibility with nethermind" - fi - - jq . compatibility_test_list.json - echo "Adding human-readable name" - jq 'map(. + {visible_name: (.docker_image | split(",")[0] | split("=")[1])})' compatibility_test_list.json > compatibility_test_list_modified.json - jq . compatibility_test_list_modified.json - JOB_MATRIX_JSON=$(jq -c . 
compatibility_test_list_modified.json) - echo "JOB_MATRIX_JSON=${JOB_MATRIX_JSON}" >> $GITHUB_ENV - - run-client-compatibility-matrix: - name: CCIP Compatibility with ${{ matrix.evm_node.visible_name }} - if: always() && needs.should-run.outputs.should_run == 'true' - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, prepare-compatibility-matrix, should-run, select-versions] - env: - CHAINLINK_COMMIT_SHA: ${{ needs.select-versions.outputs.chainlink_version }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - matrix: - evm_node: ${{fromJson(needs.prepare-compatibility-matrix.outputs.matrix)}} - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ needs.select-versions.outputs.chainlink_version }} - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - selectedNetworks: ${{ matrix.evm_node.networks }} - chainlinkVersion: ${{ needs.select-versions.outputs.chainlink_version }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - selectedNetworks: ${{ matrix.evm_node.networks }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ needs.select-versions.outputs.chainlink_version }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - customEvmNodes: ${{ matrix.evm_node.docker_image }} - - name: Prepare test log name - run: | - replace_special_chars() { - if [ -z "$1" ]; then - echo "Please provide a string as an argument." - return 1 - fi - - local input_string="$1" - - # Replace '/' with '-' - local modified_string="${input_string//\//-}" - - # Replace ':' with '-' - modified_string="${modified_string//:/-}" - - # Replace '.' 
with '-' - modified_string="${modified_string//./-}" - - echo "$modified_string" - } - echo "TEST_LOG_NAME=$(replace_special_chars "ccip-${{ matrix.evm_node.name }}-test-logs")" >> $GITHUB_ENV - - name: Print Test details - ${{ matrix.evm_node.docker_image }} - run: | - echo "EVM Implementation Docker Image: ${{ matrix.evm_node.docker_image }}" - echo "EVM Implementation Networks: ${{ matrix.evm_node.networks }}" - echo "Test identifier: ${{ matrix.evm_node.name }}" - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@d38226be720c5ccc1ff4d3cee40608ebf264cd59 # v2.3.26 - env: - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -test.parallel=2 ${{ matrix.evm_node.run }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci - test_download_vendor_packages_command: cd ./integration-tests && go mod download - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ env.TEST_LOG_NAME }} - artifacts_location: | - ./integration-tests/smoke/logs/ - ./integration-tests/ccip-tests/smoke/logs/* - /tmp/gotest.log - publish_check_name: ${{ matrix.evm_node.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - should_tidy: "false" - DEFAULT_LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: ${{ secrets.LOKI_URL_CI }} - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_PYROSCOPE_SERVER_URL: ${{ !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - DEFAULT_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ci-ccip-bidirectional-lane-${{ matrix.evm_node.name }} - DEFAULT_PYROSCOPE_ENABLED: 'true' - - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@1587f59bfd626b668d303abbc90fee41b12397e6 # v2.3.23 - with: - test_directories: ./integration-tests/smoke/,./integration-tests/ccip-tests/smoke/ - - start-slack-thread: - name: Start Slack Thread - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [run-client-compatibility-matrix,should-run,select-versions] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: ${{ secrets.QA_CCIP_SLACK_CHANNEL }} - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && 
'#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "CCIP Compatibility Test Results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "${{ contains(join(needs.*.result, ','), 'failure') && 'Some tests failed! Notifying ' || 'All Good!' }}" - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ needs.select-versions.outputs.chainlink_version }}|${{ needs.select-versions.outputs.chainlink_version }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - parse-test-results: - name: Parse Test Results - if: always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [run-client-compatibility-matrix,should-run] - outputs: - base64_parsed_results: ${{ steps.get-test-results.outputs.base64_parsed_results }} - steps: - # workflowresultparser is a tool to get job results from a workflow run - - name: Set Up workflowresultparser - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/workflowresultparser@v1.0.0 - - name: Get and parse Test Results - shell: bash - id: get-test-results - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^CCIP Compatibility with (.*)$" -namedKey="CCIP" -outputFile=output.json - - echo "base64_parsed_results=$(base64 -w 0 output.json)" >> $GITHUB_OUTPUT - - display-test-results: - name: Aggregated test results - if: always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' && needs.parse-test-results.result == 'success' - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [start-slack-thread, should-run, select-versions, parse-test-results] - steps: - # asciitable is a tool that prints results in a nice ASCII table - - name: Set Up asciitable - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/asciitable@v1.0.2 - - name: Print aggregated test results - shell: bash - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - raw_results="$(echo ${{ needs.parse-test-results.outputs.base64_parsed_results }} | base64 -d)" - echo $raw_results > input.json - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "CCIP" --namedKey "CCIP" - - echo - echo "AGGREGATED RESULTS" - cat output.txt - - echo "## Aggregated EVM Implementations compatibility results summary" >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - cat output.txt >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - - post-test-results-to-slack: - name: Post Test Results - if: ${{ always() 
&& needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [start-slack-thread,parse-test-results,should-run,select-versions] - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ needs.select-versions.outputs.chainlink_version }} - - name: Get test results for CCIP - id: get-product-results - shell: bash - run: | - raw_results="$(echo ${{ needs.parse-test-results.outputs.base64_parsed_results }} | base64 -d)" - product_result=$(echo "$raw_results" | jq -c "select(has(\"CCIP\")) | .CCIP[]") - if [ -n "$product_result" ]; then - base64_result=$(echo $product_result | base64 -w 0) - echo "base64_result=$base64_result" >> $GITHUB_OUTPUT - else - echo "No results found for CCIP" - echo "base64_result=" >> $GITHUB_OUTPUT - fi - - name: Post Test Results to Slack - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^CCIP Compatibility with (.*?)$ - message_title: CCIP Compatibility Test Results - slack_channel_id: ${{ secrets.QA_CCIP_SLACK_CHANNEL }} - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - base64_parsed_results: ${{ steps.get-product-results.outputs.base64_result }} diff --git a/.github/workflows/ccip-live-network-tests.yml b/.github/workflows/ccip-live-network-tests.yml deleted file mode 100644 index d1789fca74..0000000000 --- a/.github/workflows/ccip-live-network-tests.yml +++ /dev/null @@ -1,480 +0,0 @@ -name: CCIP On-Demand Live Network Tests -on: - schedule: - - cron: '0 */6 * * *' - workflow_dispatch: - inputs: - base64_test_input: # base64 encoded toml for test input - description: 'Base64 encoded toml test input' - required: false - slackMemberID: - description: 'Slack member ID to notify' - required: false - test_type: - description: 'Type of test to run' - required: false - type: choice - options: - - 'smoke' - - 'load' - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string - -# Only run 1 of this workflow at a time -concurrency: - group: live-testnet-tests - cancel-in-progress: true - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - CHAINLINK_VERSION: ${{ github.sha }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws - E2E_TEST_CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - E2E_TEST_LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - E2E_TEST_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - # Default private key test secret loaded from Github Secret as only security team has access to it. - # this key secrets.QA_SHARED_803C_KEY has a story behind it. To know more, see CCIP-2875 and SECHD-16575 tickets.
- E2E_TEST_ARBITRUM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_AVALANCHE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_BASE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_BLAST_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_BSC_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_CELO_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_ETHEREUM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_GNOSIS_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_KROMA_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_LINEA_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_METIS_ANDROMEDA_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_MODE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_OPTIMISM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_POLYGON_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_SCROLL_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_WEMIX_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_ZKSYNC_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - with: - repository: chainlink - tag: ${{ env.CHAINLINK_VERSION }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - env: - GH_TOKEN: ${{ github.token }} - with: - cl_repo: smartcontractkit/chainlink-ccip - cl_ref: ${{ env.CHAINLINK_VERSION }} - push_tag: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink:${{ env.CHAINLINK_VERSION }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-on-demand-live-testnet-tests-build-chainlink-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - build-test-image: - environment: integration - if: ${{ github.event_name == 'workflow_dispatch' && inputs.test_type == 'load' }} - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-on-demand-live-testnet-tests-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image 
- continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - tag: ${{ env.CHAINLINK_TEST_VERSION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - ccip-load-test: - name: CCIP Load Test - environment: integration - runs-on: ubuntu-latest - strategy: - matrix: - config: [mainnet.toml] - needs: [ build-chainlink, build-test-image ] - # if the event is a workflow_dispatch event and the test type is load and no previous job failed - if: ${{ github.event_name == 'workflow_dispatch' && inputs.test_type == 'load' && !contains(needs.*.result, 'failure') }} - permissions: - issues: read - checks: write - pull-requests: write - id-token: write - contents: read - env: - CHAINLINK_ENV_USER: ${{ github.actor }} - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }} - TEST_LOG_LEVEL: info - REF_NAME: ${{ github.head_ref || github.ref_name }} - CHAINLINK_TEST_VERSION: ${{ github.sha }} - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ github.sha }} - ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests - - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-on-demand-live-testnet-tests-load-tests - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: CCIP Load Test - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Prepare Base64 TOML override - id: set_override_config - shell: bash - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo "SLACK_USER=$SLACK_USER" >> "$GITHUB_ENV" - if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then - BASE64_CCIP_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - fi - if [[ "${{ github.event_name }}" == "schedule" ]]; then - BASE64_CCIP_CONFIG_OVERRIDE=$(base64 -w 0 -i ./integration-tests/ccip-tests/testconfig/override/${{ matrix.config }}) - echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - echo "SLACK_USER=${{ secrets.QA_SLACK_USER }}" >> $GITHUB_ENV - fi - - name: step summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_TEST_VERSION }}\`" >> $GITHUB_STEP_SUMMARY - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ 
env.CHAINLINK_VERSION }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - TEST_SUITE: load - TEST_ARGS: -test.timeout 900h - DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - RR_MEM: 8Gi - RR_CPU: 4 - DETACH_RUNNER: true - TEST_TRIGGERED_BY: ccip-load-test-ci - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" - with: - test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 70m -count=1 -json -run ^TestLoadCCIPStableRPS$ ./load 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false - test_download_vendor_packages_command: cd ./integration-tests && go mod download - # Other default test secrets loaded from dotenv Github Secret. - test_secrets_defaults_base64: ${{ secrets.CCIP_DEFAULT_TEST_SECRETS }} - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - artifacts_location: ./integration-tests/load/logs/payload_ccip.json - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - should_cleanup: false - - ccip-smoke-test: - name: 'CCIP Smoke - Source: ${{ matrix.lanes.name }}' - environment: integration - runs-on: ubuntu-latest - needs: [ build-chainlink ] - # if the event is a scheduled event or the test type is smoke and no previous job failed - if: ${{ (github.event_name == 'schedule' || inputs.test_type == 'smoke') && !contains(needs.*.result, 'failure') }} - permissions: - issues: read - checks: write - pull-requests: write - id-token: write - contents: read - env: - CHAINLINK_ENV_USER: ${{ github.actor }} - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }} - TEST_LOG_LEVEL: info - REF_NAME: ${{ github.head_ref || github.ref_name }} - strategy: - fail-fast: false - matrix: - lanes: - - name: 'ARBITRUM_MAINNET' - pairs: 'ARBITRUM_MAINNET,BSC_MAINNET;ARBITRUM_MAINNET,OPTIMISM_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'AVALANCHE_MAINNET' - pairs: 'AVALANCHE_MAINNET,ARBITRUM_MAINNET;AVALANCHE_MAINNET,BASE_MAINNET;AVALANCHE_MAINNET,BSC_MAINNET;AVALANCHE_MAINNET,OPTIMISM_MAINNET;AVALANCHE_MAINNET,POLYGON_MAINNET;AVALANCHE_MAINNET,WEMIX_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'BASE_MAINNET' - pairs: 'BASE_MAINNET,ARBITRUM_MAINNET;BASE_MAINNET,BSC_MAINNET;BASE_MAINNET,OPTIMISM_MAINNET;BASE_MAINNET,POLYGON_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'BLAST_MAINNET' - pairs: 'BLAST_MAINNET,ARBITRUM_MAINNET;BLAST_MAINNET,BASE_MAINNET;BLAST_MAINNET,BSC_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'BSC_MAINNET' - pairs: 'BSC_MAINNET,OPTIMISM_MAINNET;BSC_MAINNET,POLYGON_MAINNET;BSC_MAINNET,WEMIX_MAINNET' - enabled: true - 
phaseTimeout: 20m - - name: 'ETHEREUM_MAINNET 1' - pairs: 'ETHEREUM_MAINNET,ARBITRUM_MAINNET;ETHEREUM_MAINNET,AVALANCHE_MAINNET;ETHEREUM_MAINNET,BASE_MAINNET;ETHEREUM_MAINNET,BLAST_MAINNET;ETHEREUM_MAINNET,BSC_MAINNET;ETHEREUM_MAINNET,CELO_MAINNET;ETHEREUM_MAINNET,GNOSIS_MAINNET;ETHEREUM_MAINNET,OPTIMISM_MAINNET;ETHEREUM_MAINNET,POLYGON_MAINNET;ETHEREUM_MAINNET,WEMIX_MAINNET' - enabled: true - phaseTimeout: 40m - - name: 'ETHEREUM_MAINNET 2' - pairs: 'ETHEREUM_MAINNET,METIS_ANDROMEDA;ETHEREUM_MAINNET,ZKSYNC_MAINNET' - enabled: true - phaseTimeout: 90m - - name: 'GNOSIS_MAINNET' - pairs: 'GNOSIS_MAINNET,ARBITRUM_MAINNET;GNOSIS_MAINNET,AVALANCHE_MAINNET;GNOSIS_MAINNET,BASE_MAINNET;GNOSIS_MAINNET,BSC_MAINNET;GNOSIS_MAINNET,OPTIMISM_MAINNET;GNOSIS_MAINNET,POLYGON_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'METIS_ANDROMEDA' - pairs: 'METIS_ANDROMEDA,ARBITRUM_MAINNET' - enabled: true - phaseTimeout: 60m - - name: 'MODE_MAINNET' - pairs: 'MODE_MAINNET,OPTIMISM_MAINNET;MODE_MAINNET,ARBITRUM_MAINNET;MODE_MAINNET,BASE_MAINNET;MODE_MAINNET,BSC_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'OPTIMISM_MAINNET' - pairs: 'OPTIMISM_MAINNET,POLYGON_MAINNET;OPTIMISM_MAINNET,WEMIX_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'POLYGON_MAINNET' - pairs: 'POLYGON_MAINNET,ARBITRUM_MAINNET;POLYGON_MAINNET,WEMIX_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'WEMIX_MAINNET' - pairs: 'WEMIX_MAINNET,ARBITRUM_MAINNET;WEMIX_MAINNET,KROMA_MAINNET' - enabled: true - phaseTimeout: 20m - - name: 'ZKSYNC_MAINNET' - pairs: 'ZKSYNC_MAINNET,ARBITRUM_MAINNET' - enabled: true - phaseTimeout: 90m - steps: - - name: Collect Metrics - if: ${{ matrix.lanes.enabled == true }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-on-demand-live-testnet-tests-smoke-test - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: 'CCIP Smoke - Source: ${{ matrix.lanes.name }}' - continue-on-error: true - - name: Checkout the repo - if: ${{ matrix.lanes.enabled == true }} - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Prepare Base64 TOML override - if: ${{ matrix.lanes.enabled == true }} - id: set_override_config - shell: bash - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo "SLACK_USER=$SLACK_USER" >> "$GITHUB_ENV" - if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then - BASE64_CCIP_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - fi - if [[ "${{ github.event_name }}" == "schedule" ]]; then - BASE64_CCIP_CONFIG_OVERRIDE=$(base64 -w 0 -i ./integration-tests/ccip-tests/testconfig/override/mainnet.toml) - echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - echo "SLACK_USER=${{ secrets.QA_SLACK_USER }}" >> $GITHUB_ENV - fi - - name: step summary - if: ${{ matrix.lanes.enabled == true }} - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY - - name: Prepare Base64 TOML override for CCIP secrets - if: ${{ matrix.lanes.enabled == 
true }} - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ env.CHAINLINK_VERSION }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - if: ${{ matrix.lanes.enabled == true }} - env: - TEST_SUITE: smoke - TEST_ARGS: -test.timeout 900h - DETACH_RUNNER: false - DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - RR_MEM: 8Gi - RR_CPU: 4 - TEST_TRIGGERED_BY: ccip-smoke-test-ci - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - OVERRIDE_NETWORK_PAIRS: ${{ matrix.lanes.pairs }} - OVERRIDE_PHASE_TIMEOUT: ${{ matrix.lanes.phaseTimeout }} - with: - test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 3h -count=1 -p 30 -json -run ^TestSmokeCCIPForGivenNetworkPairs$ ./smoke 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false - test_download_vendor_packages_command: cd ./integration-tests && go mod download - # Other default test secrets loaded from dotenv Github Secret. - test_secrets_defaults_base64: ${{ secrets.CCIP_DEFAULT_TEST_SECRETS }} - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - artifacts_location: ./integration-tests/smoke/logs/payload_ccip.json - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - cache_key_id: ccip-smoke-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - should_cleanup: false - - # Custom reporting Jobs - start-slack-thread: - name: Start Slack Thread - if: ${{ always() && needs.ccip-smoke-test.result != 'skipped' && needs.ccip-smoke-test.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [ ccip-smoke-test ] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: "#test-run-notifications" - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "CCIP Mainnet Smoke tests ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/${{contains(github.ref_name, 'release') && 'releases/tag' || 'tree'}}/${{ github.ref_name 
}}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - post-test-results-to-slack: - name: Post Test Results - if: ${{ always() && needs.start-slack-thread.result != 'skipped' && needs.start-slack-thread.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: start-slack-thread - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Post Test Results - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^CCIP Smoke (.*)$ - message_title: CCIP Mainnet Smoke test - slack_channel_id: "#test-run-notifications" - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - - # End Reporting Jobs \ No newline at end of file diff --git a/.github/workflows/ccip-load-tests.yml b/.github/workflows/ccip-load-tests.yml deleted file mode 100644 index a9b1af2f30..0000000000 --- a/.github/workflows/ccip-load-tests.yml +++ /dev/null @@ -1,310 +0,0 @@ -name: CCIP Load Test -on: - push: - branches: - - ccip-develop - tags: - - '*' - workflow_dispatch: - inputs: - base64_test_input: # base64 encoded toml for test input - description: 'Base64 encoded toml test input' - required: false - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string - -# Only run 1 of this workflow at a time per PR -concurrency: - group: load-ccip-tests-chainlink-${{ github.ref }} - cancel-in-progress: true - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - CHAINLINK_VERSION: ${{ github.sha}} - INPUT_CHAINLINK_TEST_VERSION: ${{ github.sha}} - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ github.sha }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws - MOD_CACHE_VERSION: 1 - E2E_TEST_CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - E2E_TEST_LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - E2E_TEST_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - # Default private key test secret loaded from Github Secret as only security team has access to it. - # this key secrets.QA_SHARED_803C_KEY has a story behind it. To know more, see CCIP-2875 and SECHD-16575 tickets. 
- E2E_TEST_ETHEREUM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_ARBITRUM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_BASE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_WEMIX_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_AVALANCHE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_ZKSYNC_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_MODE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_METIS_ANDROMEDA_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_OPTIMISM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_KROMA_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_GNOSIS_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_POLYGON_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_BSC_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - with: - repository: chainlink - tag: ${{ env.CHAINLINK_VERSION }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@b49a9d04744b0237908831730f8553f26d73a94b # v2.3.17 - env: - GH_TOKEN: ${{ github.token }} - with: - cl_repo: smartcontractkit/chainlink-ccip - cl_ref: ${{ env.CHAINLINK_VERSION }} - push_tag: ${{ env.CHAINLINK_IMAGE }}:${{ env.CHAINLINK_VERSION }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-load-test-build-chainlink-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - build-test-image: - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-load-test-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - tag: ${{ env.INPUT_CHAINLINK_TEST_VERSION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - ccip-load-test: - environment: integration - 
needs: [ build-chainlink, build-test-image ] - if: ${{ always() && !contains(needs.*.result, 'failure') }} - permissions: - issues: read - checks: write - pull-requests: write - id-token: write - contents: read - env: - CHAINLINK_ENV_USER: ${{ github.actor }} - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }} - TEST_LOG_LEVEL: info - REF_NAME: ${{ github.head_ref || github.ref_name }} - strategy: - fail-fast: false - matrix: - type: - - name: stable-load - run: ^TestLoadCCIPStableRPS$ - os: ubuntu-latest - - name: load-with-arm-curse-uncurse - run: ^TestLoadCCIPStableRPSAfterARMCurseAndUncurse$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/load-with-arm-curse-uncurse.toml - os: ubuntu-latest - runs-on: ${{ matrix.type.os }} - name: CCIP ${{ matrix.type.name }} - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-load-test-${{ matrix.type.name }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: CCIP ${{ matrix.type.name }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Sets env vars - id: set_override_config - shell: bash - run: | - # if the matrix.type.config_path is set, use it as the override config - if [ -n "${{ matrix.type.config_path }}" ]; then - BASE64_CCIP_CONFIG_OVERRIDE=$(base64 -w 0 -i ${{ matrix.type.config_path }}) - echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - fi - if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then - BASE64_CCIP_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE - echo "base_64_override=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_OUTPUT - fi - - name: step summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.INPUT_CHAINLINK_TEST_VERSION }}\`" >> $GITHUB_STEP_SUMMARY - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ github.sha }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - TEST_SUITE: load - TEST_ARGS: -test.timeout 900h - DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - RR_MEM: 8Gi - RR_CPU: 4 - TEST_TRIGGERED_BY: ccip-load-test-ci-${{ matrix.type.name }} - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }},${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }},${{ 
steps.setup_create_base64_config_ccip.outputs.base64_config }} - E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" - with: - test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 70m -count=1 -json -run ${{ matrix.type.run }} ./load 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci - test_download_vendor_packages_command: cd ./integration-tests && go mod download - # Load default test secrets - test_secrets_defaults_base64: ${{ secrets.CCIP_DEFAULT_TEST_SECRETS }} - # Override default test secrets with custom test secrets if provided - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - publish_check_name: ${{ matrix.type.name }} - artifacts_location: ./integration-tests/load/logs/payload_ccip.json - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - should_cleanup: "true" - - # Reporting Jobs - start-slack-thread: - name: Start Slack Thread - if: ${{ failure() && needs.ccip-load-test.result != 'skipped' && needs.ccip-load-test.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [ccip-load-test] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: "#ccip-testing" - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "CCIP load tests results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/${{contains(github.ref_name, 'release') && 'releases/tag' || 'tree'}}/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - post-test-results-to-slack: - name: Post Test Results - if: ${{ failure() && needs.start-slack-thread.result != 'skipped' && needs.start-slack-thread.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: start-slack-thread - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Post Test Results - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - 
github_job_name_regex: ^CCIP (.*)$ - message_title: CCIP Jobs - slack_channel_id: "#ccip-testing" - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - - # End Reporting Jobs diff --git a/.github/workflows/ccip-offchain-upgrade-tests.yml b/.github/workflows/ccip-offchain-upgrade-tests.yml deleted file mode 100644 index 3fe3c79cdd..0000000000 --- a/.github/workflows/ccip-offchain-upgrade-tests.yml +++ /dev/null @@ -1,411 +0,0 @@ -name: CCIP Offchain Upgrade Compatibility Tests -on: - merge_group: - pull_request: - push: - tags: - - "*" - workflow_dispatch: - -concurrency: - group: upgrade-tests-ccip-${{ github.ref }} - cancel-in-progress: true - -env: - # for run-test variables and environment - ECR_TAG: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:ccip-develop - ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws - # Default private key test secret, loaded from a Github Secret as only the security team has access to it. - # This key, secrets.QA_SHARED_803C_KEY, has a story behind it. To learn more, see the CCIP-2875 and SECHD-16575 tickets. - E2E_TEST_ETHEREUM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_ARBITRUM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_BASE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_WEMIX_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_AVALANCHE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_ZKSYNC_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_MODE_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_METIS_ANDROMEDA_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_OPTIMISM_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_KROMA_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_GNOSIS_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_POLYGON_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - E2E_TEST_BSC_MAINNET_WALLET_KEY: ${{ secrets.QA_SHARED_803C_KEY }} - -jobs: - # Build Test Dependencies - changes: - environment: integration - name: Check Paths That Require Tests To Run - runs-on: ubuntu-latest - # We don't directly merge dependabot PRs, so let's not waste the resources - if: github.actor != 'dependabot[bot]' - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref }} - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes - with: - filters: | - src: - - '**/*.go' - - '**/*go.sum' - - '**/*go.mod' - - '.github/workflows/integration-tests.yml' - - '**/*Dockerfile' - - 'core/**/config/**/*.toml' - - 'integration-tests/**/*.toml' - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-offchain-upgrade-compatibility-tests-changes - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{
secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Check Paths That Require Tests To Run - continue-on-error: true - outputs: - src: ${{ inputs.set_changes_output || steps.changes.outputs.src }} - - build-chainlink: - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - needs: [ changes ] - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-offchain-upgrade-compatibility-tests-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - repository: smartcontractkit/ccip - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - build-test-image: - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - needs: [ changes ] - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-offchain-upgrade-compatibility-tests-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image with Current Sha - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - repository: smartcontractkit/ccip - ref: ${{ github.event.pull_request.head.sha || github.sha }} - - name: Build Test Image - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: ./.github/actions/build-test-image - with: - # we just want to build the load tests - suites: ccip-tests/load ccip-tests/smoke - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - last-release-info: - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - needs: [ changes ] - environment: integration - permissions: - id-token: write - contents: read - name: Fetch Info for Last Release - runs-on: ubuntu-latest - outputs: - release_name: ${{ steps.fetch_last_release.outputs.release_name }} - release_sha: ${{ steps.fetch_last_release.outputs.sha_ref }} - release_tag: ${{ steps.fetch_last_release.outputs.release_tag_name }} - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - 
uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-offchain-upgrade-compatibility-tests-last-release-info - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image for Last Release - continue-on-error: true - - name: find last release - id: fetch_last_release - shell: bash - run: | - release_name=$(curl --header "Authorization: token ${{ secrets.GITHUB_TOKEN }}" --request GET https://api.github.com/repos/${{ github.repository }}/releases | jq -r --arg SUFFIX "release" '.[] | select(.tag_name | endswith("release")) | .tag_name' | sort -V | tail -n 1) - release_tag_name="${release_name:1}" - echo "release_tag_name=${release_tag_name}" >> $GITHUB_OUTPUT - echo "release_name=${release_name}" >> $GITHUB_OUTPUT - sha_ref=$(curl -s --header "Authorization: token ${{ secrets.GITHUB_TOKEN }}" --request GET https://api.github.com/repos/${{ github.repository }}/git/refs/tags/${release_name} | jq -r '.object.sha' | sort -V | tail -n 1) - echo "sha_ref=${sha_ref}" >> $GITHUB_OUTPUT - # End Build Test Dependencies - - # run test with previous image - run-test-with-last-release: - environment: integration - permissions: - actions: read - checks: write - pull-requests: write - id-token: write - contents: read - needs: [ changes, last-release-info, build-test-image ] - outputs: - existing_namespace: ${{ steps.fetch_namespace.outputs.existing_namespace }} - triggered_by: ${{ steps.fetch_namespace.outputs.triggered_by }} - strategy: - fail-fast: false - matrix: - product: - - name: ccip-smoke-with-last-release - os: ubuntu-latest - run: ^TestSmokeCCIPForBidirectionalLane$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/node-pre-upgrade-compatibility.toml - runs-on: ubuntu-latest - name: CCIP Deployment with ${{ needs.last-release-info.outputs.release_tag }} - ${{ matrix.product.name }} - env: - RELEASE_TAG: ${{ needs.last-release-info.outputs.release_tag }} - RELEASE_SHA: ${{ needs.last-release-info.outputs.release_sha }} - RELEASE_NAME: ${{ needs.last-release-info.outputs.release_name }} - TEST_TRIGGERED_BY: ${{ matrix.product.name }}-${{ github.run_id }} - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-offchain-upgrade-compatibility-tests-run-tests-with-last-release - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: CCIP Deployment with ${{ needs.last-release-info.outputs.release_tag }} - ${{ matrix.product.name }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - repository: smartcontractkit/ccip - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Set Override Config - id: set_override_config - run: | - # if the matrix.product.config_path is set, use it as the override config - if [ "${{ matrix.product.config_path }}" != "" ]; then - echo "base_64_override=$(base64 -w 0 -i ${{ matrix.product.config_path }})" >> "$GITHUB_OUTPUT" - fi - - name: Prepare Base64 TOML override for CCIP secrets 
- uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ env.RELEASE_TAG }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - selectedNetworks: SIMULATED_1,SIMULATED_2 - - name: Run Tests - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }},${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }},${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - ENV_JOB_IMAGE: ${{ env.ENV_JOB_IMAGE_BASE }}:${{ github.sha }} - TEST_SUITE: smoke - TEST_ARGS: -test.timeout 30m - TEST_LOG_LEVEL: info - DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - RR_MEM: 8Gi - RR_CPU: 4 - E2E_TEST_CHAINLINK_IMAGE: ${{ env.AWS_ECR_REPO_PUBLIC_REGISTRY }}/w0i8p0z9/chainlink-ccip - E2E_TEST_LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL_CI }} - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - E2E_TEST_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - with: - test_command_to_run: cd ./integration-tests/ccip-tests && go test -timeout 30m -count=1 -json -run ${{ matrix.product.run }} ./smoke 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci - test_download_vendor_packages_command: cd ./integration-tests && go mod download - # Load default test secrets - test_secrets_defaults_base64: ${{ secrets.CCIP_DEFAULT_TEST_SECRETS }} - cl_repo: ${{ env.AWS_ECR_REPO_PUBLIC_REGISTRY }}/w0i8p0z9/chainlink-ccip # releases are published to public registry - cl_image_tag: ${{ env.RELEASE_TAG }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ matrix.product.name }}${{ matrix.product.tag_suffix }}-test-logs - artifacts_location: | - ./integration-tests/ccip-tests/smoke/logs/* - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: ccip-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - triggered_by: ${{ env.TEST_TRIGGERED_BY }} - should_tidy: "false" - should_cleanup: "false" - - - name: store laneconfig in artifacts - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - with: - name: lane-config - path: ./integration-tests/ccip-tests/smoke/tmp_*.json - if-no-files-found: error - - - name: fetch namespace - id: fetch_namespace - run: | - echo "looking for namespaces" - ITEMS=$(kubectl get ns -l=triggered-by=${{ env.TEST_TRIGGERED_BY }}-${{ github.event.pull_request.number || github.run_id }} -o jsonpath='{.items}') - COUNT=$(echo "${ITEMS}" | jq '. | length') - echo "found ${COUNT} namespaces. 
will set the env var with first one" - for ((i=0;i<${COUNT};i++)); do - name=$(echo "${ITEMS}" | jq -r ".[${i}].metadata.name") - echo "setting output var with namespace: ${name}" - echo "existing_namespace=${name}" >> $GITHUB_OUTPUT - break - done - echo "triggered_by=${{ env.TEST_TRIGGERED_BY }}" >> $GITHUB_OUTPUT - echo "completed env var set up" - - # run load test with current image - run-test-with-current-sha: - environment: integration - permissions: - actions: read - checks: write - pull-requests: write - id-token: write - contents: read - needs: [ build-chainlink, changes, build-test-image, run-test-with-last-release ] - strategy: - fail-fast: false - matrix: - product: - - name: ccip-load-after-upgrade - os: ubuntu-latest - run: ^TestLoadCCIPWithUpgradeNodeVersion$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/node-post-upgrade-compatibility.toml - runs-on: ubuntu-latest - name: Upgrade Nodes with Current SHA and Run ${{ matrix.product.name }} - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-offchain-upgrade-compatibility-tests-run-test-image-current-sha - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Upgrade Nodes with Current SHA and Run ${{ matrix.product.name }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - repository: smartcontractkit/ccip - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Download LaneConfig From Last Release - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: lane-config - path: ./integration-tests/ccip-tests/load/lane-config - - - name: Set Override Config - id: set_override_config - run: | - # if the matrix.product.config_path is set, use it as the override config - if [ "${{ matrix.product.config_path }}" != "" ]; then - echo "base_64_override=$(base64 -w 0 -i ${{ matrix.product.config_path }})" >> "$GITHUB_OUTPUT" - fi - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkVersion: ${{ github.sha }} - upgradeVersion: ${{ github.sha }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - existingNamespace: ${{ needs.run-test-with-last-release.outputs.existing_namespace }} - - name: Run Tests - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@94cb11f4bd545607a2f221c6685052b3abee723d # v2.3.32 - env: - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }},${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }},${{ steps.setup_create_base64_config_ccip.outputs.base64_config }} - ENV_JOB_IMAGE: ${{ env.ENV_JOB_IMAGE_BASE }}:${{ github.sha }} - TEST_SUITE: load - TEST_ARGS: -test.timeout 1h - DATABASE_URL: 
postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable - RR_MEM: 8Gi - RR_CPU: 4 - E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" - E2E_TEST_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - E2E_TEST_CHAINLINK_UPGRADE_IMAGE: ${{ env.CHAINLINK_IMAGE }} - E2E_TEST_LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - E2E_TEST_LOKI_ENDPOINT: ${{ secrets.LOKI_URL }} - E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - E2E_TEST_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - with: - test_command_to_run: cd ./integration-tests/ccip-tests && go test -timeout 1h -count=1 -json -run ${{ matrix.product.run }} ./load 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci - test_download_vendor_packages_command: cd ./integration-tests && go mod download - # Load default test secrets - test_secrets_defaults_base64: ${{ secrets.CCIP_DEFAULT_TEST_SECRETS }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ matrix.product.name }}${{ matrix.product.tag_suffix }}-test-logs - artifacts_location: | - ./integration-tests/ccip-tests/load/tmp_*.json - ./integration-tests/ccip-tests/load/logs/* - publish_check_name: ${{ matrix.product.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - should_tidy: "false" - should_cleanup: "true" - triggered_by: ${{ needs.run-test-with-last-release.outputs.triggered_by }} diff --git a/.github/workflows/certora.yml b/.github/workflows/certora.yml new file mode 100644 index 0000000000..58fdaaf3a5 --- /dev/null +++ b/.github/workflows/certora.yml @@ -0,0 +1,43 @@ +name: certora + +on: push + +jobs: + verify: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Install python + uses: actions/setup-python@v2 + with: { python-version: 3.9 } + + - name: Install java + uses: actions/setup-java@v1 + with: { java-version: '11', java-package: jre } + + - name: Install certora cli + run: pip install certora-cli==7.17.2 + + - name: Install solc + run: | + wget https://github.com/ethereum/solidity/releases/download/v0.8.20/solc-static-linux + chmod +x solc-static-linux + sudo mv solc-static-linux /usr/local/bin/solc8.20 + + - name: Verify rule ${{ matrix.rule }} + run: | + echo "key length" ${#CERTORAKEY} + certoraRun certora/confs/${{ matrix.rule }} + env: + CERTORAKEY: ${{ secrets.CERTORAKEY }} + + strategy: + fail-fast: false + max-parallel: 16 + matrix: + rule: + - ccip.conf diff --git a/.github/workflows/chain-selectors-check.yml b/.github/workflows/chain-selectors-check.yml deleted file mode 100644 index 633388b319..0000000000 --- a/.github/workflows/chain-selectors-check.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Chain Selectors Version Check - -on: - push: - branches: - - ccip-develop - - release/* - tags: - - v* - pull_request: - branches: - - release/* - - -jobs: - verify-version: - runs-on: ubuntu-latest - steps: - - name: Checkout Repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Setup Go - uses: ./.github/actions/setup-go - with: - only-modules: true - go-version-file: "go.mod" - - - name: Get chain-selectors version - id: get-chain-selectors-version - shell: bash - env: - GH_TOKEN: ${{ 
github.token }} - run: | - current_chain_selector_version=$(go list -m -f '{{.Version}}' github.com/smartcontractkit/chain-selectors) - latest_chain_selector_version=$(gh release view -R smartcontractkit/chain-selectors --json tagName --jq '.tagName') - if [[ "$current_chain_selector_version" != "$latest_chain_selector_version" ]]; then - echo "::error:: Chain Selectors version mismatch. Current version: $current_chain_selector_version, Latest version: $latest_chain_selector_version" - exit 1 - fi diff --git a/.github/workflows/changeset.yml b/.github/workflows/changeset.yml deleted file mode 100644 index e5853d374c..0000000000 --- a/.github/workflows/changeset.yml +++ /dev/null @@ -1,183 +0,0 @@ -# -# This action checks PRs to see if any changeset files were added when core files were changed. -# If none were, it adds a comment to the PR asking the author to run the changeset command to generate a changeset file. -# -name: Changeset - -on: - pull_request: - -jobs: - changeset: - env: - TAGS: | - - `#added` For any new functionality added. - - `#breaking_change` For any functionality that requires manual action for the node to boot. - - `#bugfix` For bug fixes. - - `#changed` For any change to the existing functionality. - - `#db_update` For any feature that introduces updates to database schema. - - `#deprecation_notice` For any upcoming deprecation of functionality. - - `#internal` For changesets that need to be excluded from the final changelog. - - `#nops` For any feature that is NOP-facing and needs to be in the official Release Notes for the release. - - `#removed` For any functionality/config that is removed. - - `#updated` For any functionality that is updated. - - `#wip` For any change that is not ready yet and external communication about it should be held off until it is feature-complete. - - # For security reasons, GITHUB_TOKEN is read-only on forks, so we cannot leave comments on PRs. - # This check skips the job if it detects we are running on a fork.
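The tag list above is enforced by .github/scripts/check-changeset-tags.sh, which the "Check for changeset tags for core" step below runs against the added changeset files. That script is not part of this diff; a minimal sketch of such a check, assuming it receives the changed changeset file paths as arguments and reports a has_tags step output, might look like:

#!/usr/bin/env bash
# Illustrative sketch only; the real check-changeset-tags.sh is not shown in this diff.
# Usage: check-changeset-tags.sh <changeset-file>...
set -euo pipefail

tags='#added|#breaking_change|#bugfix|#changed|#db_update|#deprecation_notice|#internal|#nops|#removed|#updated|#wip'
has_tags=true
for file in "$@"; do
  # grep -Eq exits non-zero when none of the known tags appear in the file
  if ! grep -Eq "$tags" "$file"; then
    has_tags=false
  fi
done
echo "has_tags=$has_tags" >> "$GITHUB_OUTPUT"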
- if: ${{ github.event.pull_request.head.repo.full_name == 'smartcontractkit/ccip' }} - name: Changeset checker - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: files-changed - with: - token: ${{ secrets.GITHUB_TOKEN }} - predicate-quantifier: every - list-files: shell - filters: | - shared: - - common/** - - '!common/**/*_test.go' - - plugins/** - - '!plugins/**/*_test.go' - core: - - core/** - - '!core/**/*_test.go' - - '!core/**/*.md' - - '!core/**/*.json' - - '!core/chainlink.goreleaser.Dockerfile' - - '!core/chainlink.Dockerfile' - contracts: - - contracts/**/*.sol - - '!contracts/**/*.t.sol' - core-changeset: - - added: '.changeset/**' - contracts-changeset: - - added: 'contracts/.changeset/**' - - - name: Check for changeset tags for core - id: changeset-tags - if: ${{ steps.files-changed.outputs.core-changeset == 'true' }} - shell: bash - run: bash ./.github/scripts/check-changeset-tags.sh ${{ steps.files-changed.outputs.core-changeset_files }} - - - name: Setup pnpm - uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0 - if: ${{ steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true' }} - with: - version: ^9.0.0 - - - name: Setup node - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 - if: ${{ steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true' }} - with: - node-version: 20 - cache: pnpm - cache-dependency-path: ./pnpm-lock.yaml - - - name: Get next chainlink core version - id: chainlink-version - if: ${{ steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true' }} - run: | - pnpm install && pnpm changeset version - echo "chainlink_version=$(jq -r '.version' package.json)" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Update Jira ticket for core - id: jira - if: ${{ steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true' }} - shell: bash - working-directory: ./.github/scripts/jira - run: | - echo "COMMIT_MESSAGE=$(git log -1 --pretty=format:'%s')" >> $GITHUB_ENV - pnpm install && node update-jira-issue.js - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JIRA_HOST: ${{ secrets.JIRA_HOST }} - JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} - CHAINLINK_VERSION: ${{ steps.chainlink-version.outputs.chainlink_version }} - PR_TITLE: ${{ github.event.pull_request.title }} - BRANCH_NAME: ${{ github.event.pull_request.head.ref }} - - - name: Make a comment - uses: thollander/actions-comment-pull-request@fabd468d3a1a0b97feee5f6b9e499eab0dd903f6 # v2.5.0 - if: ${{ steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JIRA_COMMENT: ${{ steps.jira.outputs.jiraComment }} - with: - message: | - I see you updated files related to `core`. 
Please run `pnpm changeset` in the root directory to add a changeset, and include at least one of the following tags in its text: - ${{ env.TAGS }} - ${{ env.JIRA_COMMENT }} - reactions: eyes - comment_tag: changeset-core - mode: ${{ steps.files-changed.outputs.core-changeset == 'false' && 'upsert' || 'delete' }} - create_if_not_exists: ${{ steps.files-changed.outputs.core-changeset == 'false' && 'true' || 'false' }} - - - name: Make a comment - uses: thollander/actions-comment-pull-request@fabd468d3a1a0b97feee5f6b9e499eab0dd903f6 # v2.5.0 - if: ${{ steps.files-changed.outputs.contracts == 'true' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - message: | - I see you updated files related to `contracts`. Please run `pnpm changeset` in the `contracts` directory to add a changeset. - reactions: eyes - comment_tag: changeset-contracts - mode: ${{ steps.files-changed.outputs.contracts-changeset == 'false' && 'upsert' || 'delete' }} - create_if_not_exists: ${{ steps.files-changed.outputs.contracts-changeset == 'false' && 'true' || 'false' }} - - - name: Check for new changeset for core - if: ${{ (steps.files-changed.outputs.core == 'true' || steps.files-changed.outputs.shared == 'true') && steps.files-changed.outputs.core-changeset == 'false' }} - shell: bash - run: | - echo "Please run pnpm changeset to add a changeset for core and include at least one tag in its text." - exit 1 - - - name: Check for new changeset for contracts - if: ${{ steps.files-changed.outputs.contracts == 'true' && steps.files-changed.outputs.contracts-changeset == 'false' }} - shell: bash - run: | - echo "Please run pnpm changeset to add a changeset for contracts." - exit 1 - - - name: Make a comment - uses: thollander/actions-comment-pull-request@fabd468d3a1a0b97feee5f6b9e499eab0dd903f6 # v2.5.0 - if: ${{ steps.files-changed.outputs.core-changeset == 'true' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JIRA_COMMENT: ${{ steps.jira.outputs.jiraComment }} - with: - message: | - I see you added a changeset file but it does not contain a tag. Please edit the text to include at least one of the following tags: - ${{ env.TAGS }} - ${{ env.JIRA_COMMENT }} - reactions: eyes - comment_tag: changeset-core-tags - mode: ${{ steps.changeset-tags.outputs.has_tags == 'false' && 'upsert' || 'delete' }} - create_if_not_exists: ${{ steps.changeset-tags.outputs.has_tags == 'false' && 'true' || 'false' }} - - - name: Check for new changeset tags for core - if: ${{ steps.files-changed.outputs.core-changeset == 'true' && steps.changeset-tags.outputs.has_tags == 'false' }} - shell: bash - run: | - echo "Please include at least one tag in the core changeset file." - exit 1 - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: chainlink-changesets - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Changeset checker - continue-on-error: true diff --git a/.github/workflows/changesets-preview-pr.yml b/.github/workflows/changesets-preview-pr.yml deleted file mode 100644 index 94dc1635c4..0000000000 --- a/.github/workflows/changesets-preview-pr.yml +++ /dev/null @@ -1,67 +0,0 @@ -# -# This action creates or updates a Release Preview PR that shows which changes are going to be part of the next release.
-# - -name: Release Preview - Changeset - -on: - push: - branches: - - develop - -jobs: - changesets-release-preview: - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: change - with: - token: ${{ secrets.GITHUB_TOKEN }} - filters: | - core-changeset: - - '.changeset/**' - - - name: Setup pnpm - uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0 - if: steps.change.outputs.core-changeset == 'true' - with: - version: ^9.0.0 - - - name: Setup node - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 - if: steps.change.outputs.core-changeset == 'true' - with: - node-version: 20 - cache: pnpm - cache-dependency-path: ./pnpm-lock.yaml - - - name: Generate new changelog - if: steps.change.outputs.core-changeset == 'true' - id: changelog - run: pnpm install && ./tools/ci/format_changelog - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Create release preview PR - if: steps.change.outputs.core-changeset == 'true' - uses: peter-evans/create-pull-request@6d6857d36972b65feb161a90e484f2984215f83e # v6.0.5 - with: - git-token: ${{ secrets.GITHUB_TOKEN }} - add-paths: | - .changeset/** - CHANGELOG.md - commit-message: "changeset: release preview" - committer: app-token-issuer-releng[bot] - branch: changesets/release-preview - title: "[DO NOT MERGE] Changeset Release Preview - v${{ steps.changelog.outputs.version }}" - body: ${{ steps.changelog.outputs.pr_body }} - draft: true - labels: | - release-preview - do-not-merge diff --git a/.github/workflows/ci-core.yml b/.github/workflows/ci-core.yml deleted file mode 100644 index ddb89e517b..0000000000 --- a/.github/workflows/ci-core.yml +++ /dev/null @@ -1,446 +0,0 @@ -name: CI Core -run-name: CI Core ${{ inputs.distinct_run_name && inputs.distinct_run_name || '' }} - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.distinct_run_name }} - cancel-in-progress: true - -# Run on key branches to make sure integration is good, otherwise run on all PRs -on: - push: - branches: - - develop - - ccip-develop - - "release/*" - - "ccip-release/*" - merge_group: - pull_request: - schedule: - - cron: "0 0 * * *" - workflow_dispatch: - inputs: - distinct_run_name: - description: "A unique identifier for this run, used when running from other repos" - required: false - type: string - evm-ref: - description: The chainlink-evm reference to use when testing against a specific version for compatibility - required: false - default: "" - type: string - -jobs: - filter: # No need to run core tests if there are only changes to the integration-tests - name: Detect Changes - permissions: - pull-requests: read - outputs: - changes: ${{ steps.ignore-filter.outputs.changes || steps.changes.outputs.changes }} - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes - with: - filters: | - changes: - - '!integration-tests/**' - - name: Ignore Filter On Workflow Dispatch - if: ${{ github.event_name == 'workflow_dispatch' }} - id: ignore-filter - run: echo "changes=true" >> $GITHUB_OUTPUT - - golangci: - # We don't directly merge dependabot PRs, so let's not waste
the resources - if: ${{ (github.event_name == 'pull_request' || github.event_name == 'schedule') && github.actor != 'dependabot[bot]' }} - name: lint - runs-on: ubuntu22.04-8cores-32GB - needs: [filter] - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Golang Lint - uses: ./.github/actions/golangci-lint - if: ${{ needs.filter.outputs.changes == 'true' }} - with: - id: core - name: lint - gc-basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - gc-host: ${{ secrets.GRAFANA_INTERNAL_HOST }} - gc-org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - - name: Notify Slack - if: ${{ failure() && github.event.schedule != '' }} - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - with: - channel-id: "#team-core" - slack-message: "golangci-lint failed: \n${{ format('https://github.com/{0}/actions/runs/{1}', github.repository, github.run_id) }}" - - core: - env: - # We explicitly have this env var not be "CL_DATABASE_URL" to avoid having it be used by core related tests - # when they should not be using it, while still allowing us to DRY up the setup - DB_URL: postgresql://postgres:postgres@localhost:5432/chainlink_test?sslmode=disable - strategy: - fail-fast: false - matrix: - type: - - cmd: go_core_tests - id: core_unit - os: ubuntu22.04-32cores-128GB - printResults: true - - cmd: go_core_race_tests - id: core_race - # use 64cores for overnight runs only due to massive number of runs from PRs - os: ${{ github.event_name == 'schedule' && 'ubuntu-latest-64cores-256GB' || 'ubuntu-latest-32cores-128GB' }} - - cmd: go_core_fuzz - id: core_fuzz - os: ubuntu22.04-8cores-32GB - name: Core Tests (${{ matrix.type.cmd }}) - # We don't directly merge dependabot PRs, so let's not waste the resources - if: github.actor != 'dependabot[bot]' - needs: [filter] - runs-on: ${{ matrix.type.os }} - permissions: - id-token: write - contents: read - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup node - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 - - name: Setup NodeJS - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-nodejs - with: - prod: "true" - - name: Setup Go - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-go - - name: Replace chainlink-evm deps - if: ${{ needs.filter.outputs.changes == 'true' && inputs.evm-ref != ''}} - shell: bash - run: go get github.com/smartcontractkit/chainlink-integrations/evm/relayer@${{ inputs.evm-ref }} - - name: Setup Solana - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-solana - - name: Setup wasmd - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-wasmd - - name: Setup Postgres - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-postgres - - name: Touching core/web/assets/index.html - if: ${{ needs.filter.outputs.changes == 'true' }} - run: mkdir -p core/web/assets && touch core/web/assets/index.html - - name: Download Go vendor packages - if: ${{ needs.filter.outputs.changes == 'true' }} - run: go mod download - - name: Build binary - if: ${{ needs.filter.outputs.changes == 'true' }} - run: go build -o chainlink.test . 
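The DB_URL comment above describes a deliberate indirection: the database URL is defined once at the job level under a neutral name, and only the steps that should touch the database (Setup DB and Run tests below) map it to CL_DATABASE_URL. A minimal standalone sketch of the pattern, with hypothetical step names:

jobs:
  example:
    env:
      # Defined once, under a name that no test code looks for
      DB_URL: postgresql://postgres:postgres@localhost:5432/chainlink_test?sslmode=disable
    steps:
      - name: Step that needs the database
        env:
          # Opt in explicitly, only where the database should be visible
          CL_DATABASE_URL: ${{ env.DB_URL }}
        run: ./chainlink.test local db preparetest
      - name: Step that must not see the database
        run: go test ./...  # CL_DATABASE_URL is unset here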
- - name: Setup DB - if: ${{ needs.filter.outputs.changes == 'true' }} - run: ./chainlink.test local db preparetest - env: - CL_DATABASE_URL: ${{ env.DB_URL }} - - name: Install LOOP Plugins - if: ${{ needs.filter.outputs.changes == 'true' }} - run: | - pushd $(go list -m -f "{{.Dir}}" github.com/smartcontractkit/chainlink-feeds) - go install ./cmd/chainlink-feeds - popd - pushd $(go list -m -f "{{.Dir}}" github.com/smartcontractkit/chainlink-data-streams) - go install ./mercury/cmd/chainlink-mercury - popd - pushd $(go list -m -f "{{.Dir}}" github.com/smartcontractkit/chainlink-solana) - go install ./pkg/solana/cmd/chainlink-solana - popd - pushd $(go list -m -f "{{.Dir}}" github.com/smartcontractkit/chainlink-starknet/relayer) - go install ./pkg/chainlink/cmd/chainlink-starknet - popd - - name: Increase Race Timeout - if: ${{ github.event.schedule != '' && needs.filter.outputs.changes == 'true' }} - run: | - echo "TIMEOUT=10m" >> $GITHUB_ENV - echo "COUNT=50" >> $GITHUB_ENV - - name: Install gotestloghelper - if: ${{ needs.filter.outputs.changes == 'true' }} - run: go install github.com/smartcontractkit/chainlink-testing-framework/tools/gotestloghelper@v1.1.1 - - name: Run tests - if: ${{ needs.filter.outputs.changes == 'true' }} - id: run-tests - env: - OUTPUT_FILE: ./output.txt - USE_TEE: false - CL_DATABASE_URL: ${{ env.DB_URL }} - run: ./tools/bin/${{ matrix.type.cmd }} ./... - - name: Print Filtered Test Results - if: ${{ failure() && needs.filter.outputs.changes == 'true' && steps.run-tests.conclusion == 'failure' }} - run: | - if [[ "${{ matrix.type.printResults }}" == "true" ]]; then - cat output.txt | gotestloghelper -ci - fi - - name: Print Races - id: print-races - if: ${{ failure() && matrix.type.cmd == 'go_core_race_tests' && needs.filter.outputs.changes == 'true' }} - run: | - find race.* | xargs cat > race.txt - if [[ -s race.txt ]]; then - cat race.txt - echo "post_to_slack=true" >> $GITHUB_OUTPUT - else - echo "post_to_slack=false" >> $GITHUB_OUTPUT - fi - echo "github.event_name: ${{ github.event_name }}" - echo "github.ref: ${{ github.ref }}" - - name: Print postgres logs - if: ${{ always() && needs.filter.outputs.changes == 'true' }} - run: docker compose logs postgres | tee ../../../postgres_logs.txt - working-directory: ./.github/actions/setup-postgres - - name: Store logs artifacts - if: ${{ needs.filter.outputs.changes == 'true' && always() }} - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - with: - name: ${{ matrix.type.cmd }}_logs - path: | - ./output.txt - ./output-short.txt - ./race.* - ./coverage.txt - ./postgres_logs.txt - - name: Notify Slack - if: ${{ failure() && steps.print-races.outputs.post_to_slack == 'true' && matrix.type.cmd == 'go_core_race_tests' && (github.event_name == 'merge_group' || github.event.branch == 'ccip-develop') && needs.filter.outputs.changes == 'true' }} - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - with: - channel-id: "#topic-data-races" - slack-message: "Race tests failed: \n${{ format('https://github.com/{0}/actions/runs/{1}', github.repository, github.run_id) }}" - - name: Collect Path Output - id: collect-path-output - env: - MATRIX_ID: ${{ matrix.type.id }} - run: | - # only push the test result file for the unit tests - if [[ "$MATRIX_ID" == "core_unit" ]]; then - resultsFile='{"testType":"go","filePath":"./output.txt"}' - echo "path_output=${resultsFile}" >> $GITHUB_OUTPUT - fi - - name: 
Collect Metrics - if: ${{ needs.filter.outputs.changes == 'true' && always() }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ matrix.type.id }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Core Tests (${{ matrix.type.cmd }}) - test-results-file: ${{ steps.collect-path-output.outputs.path_output }} - test-results-batch-split-size: "524288" # 512KB - continue-on-error: true - - detect-flakey-tests: - needs: [filter, core] - name: Flakey Test Detection - runs-on: ubuntu-latest - if: ${{ always() && github.actor != 'dependabot[bot]' }} - env: - CL_DATABASE_URL: postgresql://postgres:postgres@localhost:5432/chainlink_test?sslmode=disable - permissions: - id-token: write - contents: read - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup node - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 - - name: Setup NodeJS - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-nodejs - with: - prod: "true" - - name: Setup Go - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-go - - name: Setup Postgres - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: ./.github/actions/setup-postgres - - name: Touching core/web/assets/index.html - if: ${{ needs.filter.outputs.changes == 'true' }} - run: mkdir -p core/web/assets && touch core/web/assets/index.html - - name: Download Go vendor packages - if: ${{ needs.filter.outputs.changes == 'true' }} - run: go mod download - - name: Replace chainlink-evm deps - if: ${{ needs.filter.outputs.changes == 'true' && inputs.evm-ref != ''}} - shell: bash - run: go get github.com/smartcontractkit/chainlink-integrations/evm/relayer@${{ inputs.evm-ref }} - - name: Build binary - if: ${{ needs.filter.outputs.changes == 'true' }} - run: go build -o chainlink.test . 
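The detect-flakey-tests steps below download the core job's saved output.txt logs and hand them to the flakeytests runner, so failing tests can be re-run and flakes separated from genuine failures. Assuming those logs are in go test -json form, like the tee'd gotest.log files elsewhere in this diff, each line is a self-contained event record from which a failing package/test pair can be recovered; a hypothetical example (test name invented for illustration):

# Pull the first failure event out of a saved log:
grep -m1 '"Action":"fail"' ./artifacts/go_core_tests_logs/output.txt
# {"Time":"2024-05-01T12:00:00Z","Action":"fail","Package":"github.com/smartcontractkit/chainlink/v2/core/services/pipeline","Test":"TestTaskRun","Elapsed":0.42}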
- - name: Setup DB - if: ${{ needs.filter.outputs.changes == 'true' }} - run: ./chainlink.test local db preparetest - - name: Load test outputs - if: ${{ needs.filter.outputs.changes == 'true' }} - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: go_core_tests_logs - path: ./artifacts - - name: Delete go_core_tests_logs/coverage.txt - if: ${{ needs.filter.outputs.changes == 'true' }} - shell: bash - run: | - # Need to delete coverage.txt so the disk doesn't fill up - rm -f ./artifacts/go_core_tests_logs/coverage.txt - - name: Build flakey test runner - if: ${{ needs.filter.outputs.changes == 'true' }} - run: go build ./tools/flakeytests/cmd/runner - - name: Re-run tests - if: ${{ needs.filter.outputs.changes == 'true' }} - env: - GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} - GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - GITHUB_EVENT_PATH: ${{ github.event_path }} - GITHUB_EVENT_NAME: ${{ github.event_name }} - GITHUB_REPO: ${{ github.repository }} - GITHUB_RUN_ID: ${{ github.run_id }} - run: | - ./runner \ - -grafana_auth=$GRAFANA_INTERNAL_BASIC_AUTH \ - -grafana_host=$GRAFANA_INTERNAL_HOST \ - -grafana_org_id=$GRAFANA_INTERNAL_TENANT_ID \ - -gh_sha=$GITHUB_SHA \ - -gh_event_path=$GITHUB_EVENT_PATH \ - -gh_event_name=$GITHUB_EVENT_NAME \ - -gh_run_id=$GITHUB_RUN_ID \ - -gh_repo=$GITHUB_REPO \ - -command=./tools/bin/go_core_tests \ - `ls -R ./artifacts/go_core_tests*/output.txt` - - name: Store logs artifacts - if: ${{ needs.filter.outputs.changes == 'true' && always() }} - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - with: - name: flakey_test_runner_logs - path: | - ./output.txt - - scan: - name: SonarQube Scan - needs: [core] - if: ${{ always() && github.actor != 'dependabot[bot]' }} - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 # fetches all history for all tags and branches to provide more metadata for sonar reports - - name: Download all workflow run artifacts - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - - - name: Set SonarQube Report Paths - id: sonarqube_report_paths - shell: bash - run: | - echo "sonarqube_tests_report_paths=$(find go_core_tests_logs -name output.txt | paste -sd "," -)" >> $GITHUB_OUTPUT - echo "sonarqube_coverage_report_paths=$(find go_core_tests_logs -name coverage.txt | paste -sd "," -)" >> $GITHUB_OUTPUT - echo "sonarqube_lint_report_paths=$(find golangci-lint-report -name golangci-lint-report.xml | paste -sd "," -)" >> $GITHUB_OUTPUT - - - name: Check SonarQube Report Paths - id: check_sonarqube_paths - run: | - ARGS="" - - if [[ -z "${{ steps.sonarqube_report_paths.outputs.sonarqube_tests_report_paths }}" ]]; then - echo "::warning::No test report paths found, will not pass to sonarqube" - else - ARGS="$ARGS -Dsonar.go.tests.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_tests_report_paths }}" - fi - - if [[ -z "${{ steps.sonarqube_report_paths.outputs.sonarqube_coverage_report_paths }}" ]]; then - echo "::warning::No coverage report paths found, will not pass to sonarqube" - else - ARGS="$ARGS -Dsonar.go.coverage.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_coverage_report_paths }}" - fi - - if [[ -z "${{ steps.sonarqube_report_paths.outputs.sonarqube_lint_report_paths }}" 
]]; then - echo "::warning::No lint report paths found, will not pass to sonarqube" - else - ARGS="$ARGS -Dsonar.go.golangci-lint.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_lint_report_paths }}" - fi - - echo "SONARQUBE_ARGS=$ARGS" >> $GITHUB_ENV - - - name: SonarQube Scan - if: ${{ env.SONARQUBE_ARGS != '' }} - uses: sonarsource/sonarqube-scan-action@aecaf43ae57e412bd97d70ef9ce6076e672fe0a9 # v2.3.0 - with: - args: ${{ env.SONARQUBE_ARGS }} - env: - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }} - SONAR_SCANNER_OPTS: "-Xms6g -Xmx8g" - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ci-core-sonarqube - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: SonarQube Scan - continue-on-error: true - - clean: - name: Clean Go Tidy & Generate - if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') && github.actor != 'dependabot[bot]' }} - runs-on: ubuntu22.04-8cores-32GB - defaults: - run: - shell: bash - steps: - - name: Check for Skip Tests Label - if: contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') - run: | - echo "## \`skip-smoke-tests\` label is active, skipping E2E smoke tests" >>$GITHUB_STEP_SUMMARY - exit 0 - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup Go - uses: ./.github/actions/setup-go - with: - only-modules: "true" - - name: Install protoc-gen-go-wsrpc - run: curl https://github.com/smartcontractkit/wsrpc/raw/main/cmd/protoc-gen-go-wsrpc/protoc-gen-go-wsrpc --output $HOME/go/bin/protoc-gen-go-wsrpc && chmod +x $HOME/go/bin/protoc-gen-go-wsrpc - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - run: | - make rm-mocked - make generate - - name: Ensure clean after generate - run: git diff --stat --exit-code - - run: make gomodtidy - - name: Ensure clean after tidy - run: git diff --minimal --exit-code - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ci-core-generate - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Clean Go Tidy & Generate - continue-on-error: true diff --git a/.github/workflows/ci-protobuf.yml b/.github/workflows/ci-protobuf.yml deleted file mode 100644 index d832939ded..0000000000 --- a/.github/workflows/ci-protobuf.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: CI ProtoBuf - -on: - pull_request: - -jobs: - buf-breaking: - runs-on: ubuntu-latest - steps: - - name: Checkout repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Setup buf - uses: bufbuild/buf-setup-action@35c243d7f2a909b1d4e40399b348a7fdab27d78d # v1.34.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Run buf breaking - uses: bufbuild/buf-breaking-action@c57b3d842a5c3f3b454756ef65305a50a587c5ba # v1.1.4 - env: - REPO_URL: https://github.com/${{ github.repository }} - BASE_BRANCH: ${{ github.base_ref }} - with: - against: "${REPO_URL}.git#branch=${BASE_BRANCH}" - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: 
smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ci-protobuf - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: buf-breaking - continue-on-error: true diff --git a/.github/workflows/ci-scripts.yml b/.github/workflows/ci-scripts.yml deleted file mode 100644 index 8497262977..0000000000 --- a/.github/workflows/ci-scripts.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: CI Scripts - -on: - merge_group: - pull_request: - -jobs: - lint-scripts: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Golang Lint - uses: ./.github/actions/golangci-lint - with: - id: scripts - name: lint-scripts - version: v1.56 - go-directory: core/scripts/ccip - go-version-file: core/scripts/go.mod - go-module-file: core/scripts/go.sum - gc-basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - gc-host: ${{ secrets.GRAFANA_INTERNAL_HOST }} - gc-org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - - test-scripts: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup Go - uses: ./.github/actions/setup-go - with: - go-version-file: core/scripts/go.mod - go-module-file: core/scripts/go.sum - - name: Run Tests - shell: bash - working-directory: core/scripts/ccip - run: go test ./... - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ci-test-scripts - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: test-scripts - continue-on-error: true diff --git a/.github/workflows/client-compatibility-tests.yml b/.github/workflows/client-compatibility-tests.yml deleted file mode 100644 index 7f569168c0..0000000000 --- a/.github/workflows/client-compatibility-tests.yml +++ /dev/null @@ -1,843 +0,0 @@ -name: Client Compatibility Tests -on: - schedule: - - cron: "30 5 * * TUE,FRI" # Run every Tuesday and Friday at midnight + 30min EST - push: - tags: - - "*" - merge_group: - pull_request: - workflow_dispatch: - inputs: - chainlinkVersion: - description: commit SHA or tag of the Chainlink version to test - required: false - type: string - evmImplementations: - description: comma separated list of EVM implementations to test (ignored if base64TestList is used) - required: true - type: string - default: "geth,besu,nethermind,erigon" - latestVersionsNumber: - description: how many of latest images of EVM implementations to test with (ignored if base64TestList is used) - required: true - type: number - default: 3 - base64TestList: - description: base64 encoded list of tests to run (same as base64-ed output of testlistgenerator tool) - required: false - type: string - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - -concurrency: - group: ${{ github.ref }}-${{ github.repository }}-${{ github.event_name }}--evm-compatibility-tests - cancel-in-progress: true - -jobs: - # Build Test Dependencies - - check-dependency-bump: - name: Check for go-ethereum dependency bump - if: 
github.event_name == 'pull_request' || github.event_name == 'merge_group' - runs-on: ubuntu-latest - outputs: - dependency_changed: ${{ steps.changes.outputs.dependency_changed }} - steps: - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Check for go.mod changes - id: changes - run: | - if [ -z "${{ github.base_ref }}" ]; then - echo "No base branch found, this should not happen in a PR or MQ. Please reach out to the Test Tooling team." - echo "Github event that triggered the workflow: $GITHUB_EVENT_NAME" - echo "Github ref that triggered the workflow: $GITHUB_REF" - exit 1 - fi - git fetch origin ${{ github.base_ref }} - # if no match is found then grep exits with code 1, but if there is a match it exits with code 0 - # this will return a match if there are any changes on that corresponding line, for example if spacing was changed - DEPENDENCY_CHANGED=$(git diff -U0 origin/${{ github.base_ref }}...HEAD -- go.mod | grep -q 'github.com/ethereum/go-ethereum'; echo $?) - PR_VERSION=$(grep 'github.com/ethereum/go-ethereum' go.mod | awk '{print $2}') - - # here 0 means a match was found, 1 means no match was found - if [ "$DEPENDENCY_CHANGED" -eq 0 ]; then - # Dependency was changed in the PR, now compare with the base branch - git fetch origin ${{ github.base_ref }} - BASE_VERSION=$(git show origin/${{ github.base_ref }}:go.mod | grep 'github.com/ethereum/go-ethereum' | awk '{print $2}') - - echo "Base branch version: $BASE_VERSION" - echo "PR branch version: $PR_VERSION" - - echo "Dependency version changed in the PR compared to the base branch." - echo "dependency_changed=true" >> $GITHUB_OUTPUT - else - echo "No changes to ethereum/go-ethereum dependency in the PR." - echo "PR branch version: $PR_VERSION" - echo "dependency_changed=false" >> $GITHUB_OUTPUT - fi - - should-run: - if: always() - name: Check if the job should run - needs: check-dependency-bump - runs-on: ubuntu-latest - outputs: - should_run: ${{ steps.should-run.outputs.should_run }} - eth_implementations: ${{ steps.should-run.outputs.eth_implementations }} - env: - GITHUB_REF_TYPE: ${{ github.ref_type }} - steps: - - name: Check if the job should run - id: should-run - run: | - if [ "${{ needs.check-dependency-bump.outputs.dependency_changed }}" == "true" ]; then - echo "Will run tests, because go-ethereum dependency was bumped" - echo "should_run=true" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "schedule" ]; then - echo "Will run tests, because trigger event was $GITHUB_EVENT_NAME" - echo "should_run=true" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - echo "Will run tests, because trigger event was $GITHUB_EVENT_NAME" - echo "should_run=true" >> $GITHUB_OUTPUT - elif [ "$GITHUB_REF_TYPE" = "tag" ]; then - echo "Will run tests, because a new tag was created" - echo "should_run=true" >> $GITHUB_OUTPUT - else - echo "Will not run tests" - echo "should_run=false" >> $GITHUB_OUTPUT - fi - - select-versions: - if: always() && needs.should-run.outputs.should_run == 'true' - name: Select Versions - needs: should-run - runs-on: ubuntu-latest - env: - RELEASED_DAYS_AGO: 4 - GITHUB_REF_TYPE: ${{ github.ref_type }} - outputs: - evm_implementations: ${{ steps.select-implementations.outputs.evm_implementations }} - chainlink_version: ${{ steps.select-chainlink-version.outputs.chainlink_version }} - latest_image_count: ${{ steps.get-image-count.outputs.image_count }} - chainlink_ref_path: ${{
steps.select-chainlink-version.outputs.cl_ref_path }} - steps: - # ghlatestreleasechecker is a tool to check if a new release is available for a given repo - - name: Set Up ghlatestreleasechecker - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/ghlatestreleasechecker@v1.0.0 - - name: Select EVM implementations to test - id: select-implementations - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - if [ "$GITHUB_EVENT_NAME" = "schedule" ]; then - echo "Checking for new releases" - implementations_arr=() - new_geth=$(ghlatestreleasechecker "ethereum/go-ethereum" $RELEASED_DAYS_AGO) - if [ "$new_geth" != "none" ]; then - echo "New geth release found: $new_geth" - implementations_arr+=("geth") - fi - new_besu=$(ghlatestreleasechecker "hyperledger/besu" $RELEASED_DAYS_AGO) - if [ "$new_besu" != "none" ]; then - echo "New besu release found: $new_besu" - implementations_arr+=("besu") - fi - new_erigon=$(ghlatestreleasechecker "ledgerwatch/erigon" $RELEASED_DAYS_AGO) - if [ "$new_erigon" != "none" ]; then - echo "New erigon release found: $new_erigon" - implementations_arr+=("erigon") - fi - new_nethermind=$(ghlatestreleasechecker "nethermindEth/nethermind" $RELEASED_DAYS_AGO) - if [ "$new_nethermind" != "none" ]; then - echo "New nethermind release found: $new_nethermind" - implementations_arr+=("nethermind") - fi - IFS=',' - eth_implementations="${implementations_arr[*]}" - if [ -n "$eth_implementations" ]; then - echo "Found new releases for: $eth_implementations" - else - echo "No new releases found" - fi - echo "evm_implementations=$eth_implementations" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - if [ -n "${{ github.event.inputs.base64TestList }}" ]; then - echo "Base64-ed Test Input provided, ignoring EVM implementations" - else - echo "Will test the following EVM implementations: ${{ github.event.inputs.evmImplementations }}" - echo "evm_implementations=${{ github.event.inputs.evmImplementations }}" >> $GITHUB_OUTPUT - fi - else - echo "Will test all EVM implementations" - echo "evm_implementations=geth,besu,nethermind,erigon" >> $GITHUB_OUTPUT - fi - - name: Select Chainlink version - id: select-chainlink-version - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - if [ "$GITHUB_EVENT_NAME" = "schedule" ]; then - echo "Fetching latest Chainlink stable version" - implementations_arr=() - # we use 100 days since we really want the latest one, and it's highly improbable there won't be a release in the last 100 days - chainlink_version=$(ghlatestreleasechecker "smartcontractkit/chainlink" 100) - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - cl_ref_path="release" - elif [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - echo "Fetching Chainlink version from input" - if [ -n "${{ github.event.inputs.chainlinkVersion }}" ]; then - echo "Chainlink version provided in input" - chainlink_version="${{ github.event.inputs.chainlinkVersion }}" - if [[ "$chainlink_version" =~ ^[0-9a-f]{40}$ ]]; then - cl_ref_path="commit" - else - cl_ref_path="release" - fi - else - echo "Chainlink version not provided in input. Using latest commit SHA."
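The cl_ref_path selection above hinges on one regex: a ref made of exactly 40 hex characters is treated as a commit SHA, anything else as a release tag. A minimal sketch of that check, using an invented ref value:

    # Illustrative only; "ref" and its value are made up for the demo.
    ref="0123456789abcdef0123456789abcdef01234567"
    if [[ "$ref" =~ ^[0-9a-f]{40}$ ]]; then echo "commit"; else echo "release"; fi
    # -> commit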
- chainlink_version=${{ github.sha }} - cl_ref_path="commit" - fi - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - echo "cl_ref_path=$cl_ref_path" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "pull_request" ]; then - echo "Fetching Chainlink version from PR's head commit" - chainlink_version="${{ github.event.pull_request.head.sha }}" - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - echo "cl_ref_path=commit" >> $GITHUB_OUTPUT - elif [ "$GITHUB_EVENT_NAME" = "merge_group" ]; then - echo "Fetching Chainlink version from merge queue's head commit" - chainlink_version="${{ github.event.merge_group.head_sha }}" - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - echo "cl_ref_path=commit" >> $GITHUB_OUTPUT - elif [ "$GITHUB_REF_TYPE" = "tag" ]; then - echo "Fetching Chainlink version from tag" - chainlink_version="${{ github.ref_name }}" - echo "chainlink_version=$chainlink_version" >> $GITHUB_OUTPUT - echo "cl_ref_path=release" >> $GITHUB_OUTPUT - else - echo "Unsupported trigger event. It's probably an issue with the pipeline definition. Please reach out to the Test Tooling team." - exit 1 - fi - echo "Will use the following Chainlink version: $chainlink_version" - name: Get image count - id: get-image-count - run: | - if [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then - echo "Fetching latest image count from input" - if [ -n "${{ github.event.inputs.base64TestList }}" ]; then - echo "Base64-ed Test Input provided, ignoring latest image count" - else - image_count="${{ github.event.inputs.latestVersionsNumber }}" - echo "image_count=$image_count" >> $GITHUB_OUTPUT - fi - else - echo "Fetching default latest image count" - image_count=3 - echo "image_count=$image_count" >> $GITHUB_OUTPUT - fi - echo "Will use the following latest image count: $image_count" - - check-ecr-images-exist: - name: Check images used as test dependencies exist in ECR - if: always() && needs.should-run.outputs.should_run == 'true' && (needs.select-versions.outputs.evm_implementations != '' || github.event.inputs.base64TestList != '') - environment: integration - permissions: - id-token: write - contents: read - needs: [should-run] - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - mirror: - - name: ethereum/client-go - expression: '^(alltools-v|v)[0-9]\.[0-9]+\.[0-9]+$' - - name: hyperledger/besu - expression: '^[0-9]+\.[0-9]+(\.[0-9]+)?$' - page_size: 300 - - name: thorax/erigon - expression: '^v[0-9]+\.[0-9]+\.[0-9]+$' - - name: nethermind/nethermind - expression: '^[0-9]+\.[0-9]+\.[0-9]+$' - - name: tofelb/ethereum-genesis-generator - expression: '^[0-9]+\.[0-9]+\.[0-9]+(\-slots\-per\-epoch)?'
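Each mirror entry above pairs an ECR-mirrored image with a tag filter. As a quick local illustration of what such a filter admits (tag values invented for the demo; not part of the workflow), the ethereum/client-go expression keeps plain and alltools- release tags while rejecting pre-releases:

    printf 'v1.14.5\nalltools-v1.14.5\nv1.14.5-rc1\nlatest\n' \
      | grep -E '^(alltools-v|v)[0-9]\.[0-9]+\.[0-9]+$'
    # -> v1.14.5
    # -> alltools-v1.14.5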
- steps: - - name: Update internal ECR if the latest Ethereum client image does not exist - uses: smartcontractkit/chainlink-testing-framework/.github/actions/update-internal-mirrors@5eea86ee4f7742b4e944561a570a6b268e712d9e # v1.30.3 - with: - aws_region: ${{ secrets.QA_AWS_REGION }} - role_to_assume: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - aws_account_number: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - image_name: ${{matrix.mirror.name}} - expression: ${{matrix.mirror.expression}} - page_size: ${{matrix.mirror.page_size}} - - build-chainlink: - if: | - always() && - needs.should-run.outputs.should_run == 'true' && - ( - needs.select-versions.outputs.evm_implementations != '' || - github.event.inputs.base64TestList != '' - ) - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu-latest - needs: [should-run, select-versions] - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ needs.select-versions.outputs.chainlink_version }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ needs.select-versions.outputs.chainlink_version }} - check_image_exists: "true" - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: client-compatablility-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - get-latest-available-images: - name: Get Latest EVM Implementation's Images - if: always() && needs.should-run.outputs.should_run == 'true' && (needs.select-versions.outputs.evm_implementations != '' || github.event.inputs.base64TestList != '') - environment: integration - runs-on: ubuntu-latest - needs: [check-ecr-images-exist, should-run, select-versions] - permissions: - id-token: write - contents: read - env: - LATEST_IMAGE_COUNT: ${{ needs.select-versions.outputs.latest_image_count }} - outputs: - geth_images: ${{ env.GETH_IMAGES }} - nethermind_images: ${{ env.NETHERMIND_IMAGES }} - besu_images: ${{ env.BESU_IMAGES }} - erigon_images: ${{ env.ERIGON_IMAGES }} - steps: - # Setup AWS creds - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - with: - aws-region: ${{ secrets.QA_AWS_REGION }} - role-to-assume: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - role-duration-seconds: 3600 - # Login to ECR - - name: Login to Amazon ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1 - with: - mask-password: "true" - env: - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - # ecrimagefetcher is a tool to get latest images from ECR - - name: Set Up ecrimagefetcher - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/ecrimagefetcher@v1.0.1 - - name: Get latest docker images from ECR - if: ${{ github.event.inputs.base64TestList == '' }} - env: - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - ETH_IMPLEMENTATIONS: ${{ needs.select-versions.outputs.evm_implementations }} - run: | - PATH=$PATH:$(go env GOPATH)/bin - 
export PATH - if [[ "$ETH_IMPLEMENTATIONS" == *"geth"* ]]; then - geth_images=$(ecrimagefetcher 'ethereum/client-go' '^v[0-9]+\.[0-9]+\.[0-9]+$' ${{ env.LATEST_IMAGE_COUNT }}) - echo "GETH_IMAGES=$geth_images" >> $GITHUB_ENV - echo "Geth latest images: $geth_images" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"nethermind"* ]]; then - nethermind_images=$(ecrimagefetcher 'nethermind/nethermind' '^[0-9]+\.[0-9]+\.[0-9]+$' ${{ env.LATEST_IMAGE_COUNT }}) - echo "NETHERMIND_IMAGES=$nethermind_images" >> $GITHUB_ENV - echo "Nethermind latest images: $nethermind_images" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"besu"* ]]; then - # 24.3.3 is ignored as it doesn't support data & input fields in eth_call - besu_images=$(ecrimagefetcher 'hyperledger/besu' '^[0-9]+\.[0-9]+(\.[0-9]+)?$' ${{ env.LATEST_IMAGE_COUNT }} ">=24.5.1") - echo "BESU_IMAGES=$besu_images" >> $GITHUB_ENV - echo "Besu latest images: $besu_images" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"erigon"* ]]; then - # 2.60.0 and 2.60.1 are ignored as they stopped working with CL node - erigon_images=$(ecrimagefetcher 'thorax/erigon' '^v[0-9]+\.[0-9]+\.[0-9]+$' ${{ env.LATEST_IMAGE_COUNT }} "<v2.60.0") - echo "ERIGON_IMAGES=$erigon_images" >> $GITHUB_ENV - echo "Erigon latest images: $erigon_images" - fi - - # End Build Test Dependencies - - prepare-compatibility-matrix: - name: Prepare Compatibility Matrix - if: always() && needs.should-run.outputs.should_run == 'true' && (needs.select-versions.outputs.evm_implementations != '' || github.event.inputs.base64TestList != '') - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [get-latest-available-images, should-run, select-versions] - runs-on: ubuntu-latest - env: - ETH_IMPLEMENTATIONS: ${{ needs.select-versions.outputs.evm_implementations }} - BASE64_TEST_LIST: ${{ github.event.inputs.base64TestList }} - outputs: - matrix: ${{ env.JOB_MATRIX_JSON }} - steps: - - name: Decode Base64 Test List Input if Set - if: env.BASE64_TEST_LIST != '' - run: | - echo "Decoding base64 tests list from the input" - DECODED_BASE64_TEST_LIST=$(echo $BASE64_TEST_LIST | base64 -d) - echo "Decoded input:" - echo "$DECODED_BASE64_TEST_LIST" - is_valid=$(echo "$DECODED_BASE64_TEST_LIST" | jq . > /dev/null 2>&1; echo $?) - if [ "$is_valid" -ne 0 ]; then - echo "Invalid base64 input. Please provide a valid base64 encoded JSON list of tests."
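For the workflow_dispatch path, the base64TestList input is described as the base64-ed output of the same testlistgenerator tool installed a few steps below, so a plausible local recipe for producing it looks like this (the -d image tag is an invented example; the flags mirror the invocations below):

    go install github.com/smartcontractkit/chainlink-testing-framework/tools/testlistgenerator@v1.1.0
    testlistgenerator -o test_list.json -p ocr -r TestOCRBasic -f './smoke/ocr_test.go' \
      -e geth -d "ethereum/client-go:v1.14.0" -t "evm-implementation-compatibility-test" -n "ubuntu-latest"
    base64 -w 0 test_list.json   # paste the output into the base64TestList prompt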
- echo "Here is an example of valid JSON:" - cat <> $GITHUB_ENV - # testlistgenerator is a tool that builds a matrix of tests to run - - name: Set Up testlistgenerator - if: env.BASE64_TEST_LIST == '' - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/testlistgenerator@v1.1.0 - - name: Prepare matrix input - if: env.BASE64_TEST_LIST == '' - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - if [[ "$ETH_IMPLEMENTATIONS" == *"geth"* ]]; then - echo "Will test compatibility with geth" - testlistgenerator -o compatibility_test_list.json -p cron -r TestCronBasic -f './smoke/cron_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p flux -r TestFluxBasic -f './smoke/flux_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p runlog -r TestRunLogBasic -f './smoke/runlog_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p log_poller -r TestLogPollerFewFiltersFixedDepth -f './smoke/log_poller_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr -r TestOCRBasic -f './smoke/ocr_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr2 -r '^TestOCRv2Basic/plugins$' -f './smoke/ocr2_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p automation -r 'TestAutomationBasic/registry_2_1_logtrigger' -f './smoke/automation_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p keeper -r 'TestKeeperBasicSmoke/registry_1_3' -f './smoke/keeper_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrf -r '^TestVRFBasic/Request_Randomness$' -f './smoke/vrf_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrfv2 -r '^TestVRFv2Basic/Request_Randomness$' -f './smoke/vrfv2_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrfv2plus -r '^TestVRFv2Plus$/^Link_Billing$' -f './smoke/vrfv2plus_test.go' -e geth -d "${{ needs.get-latest-available-images.outputs.geth_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - else - echo "Will not test compatibility with geth" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"besu"* ]]; then - echo "Will test compatibility with besu" - testlistgenerator -o 
compatibility_test_list.json -p cron -r TestCronBasic -f './smoke/cron_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p flux -r TestFluxBasic -f './smoke/flux_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p runlog -r TestRunLogBasic -f './smoke/runlog_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p log_poller -r TestLogPollerFewFiltersFixedDepth -f './smoke/log_poller_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr -r TestOCRBasic -f './smoke/ocr_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr2 -r '^TestOCRv2Basic/plugins$' -f './smoke/ocr2_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p automation -r 'TestAutomationBasic/registry_2_1_logtrigger' -f './smoke/automation_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p keeper -r 'TestKeeperBasicSmoke/registry_1_3' -f './smoke/keeper_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrf -r '^TestVRFBasic/Request_Randomness$' -f './smoke/vrf_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - # VRFv2 and VRFV2Plus tests are disabled for besu until the functionalities they rely on are supported - # testlistgenerator -o compatibility_test_list.json -p vrfv2 -r '^TestVRFv2Basic/Request_Randomness$' -f './smoke/vrfv2_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - # testlistgenerator -o compatibility_test_list.json -p vrfv2plus -r '^TestVRFv2Plus$/^Link_Billing$' -f './smoke/vrfv2plus_test.go' -e besu -d "${{ needs.get-latest-available-images.outputs.besu_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - else - echo "Will not test compatibility with besu" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"erigon"* ]]; then - echo "Will test compatibility with erigon" - testlistgenerator -o compatibility_test_list.json -p cron -r TestCronBasic -f './smoke/cron_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p flux -r TestFluxBasic -f './smoke/flux_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" 
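A note on the -r filters used throughout these matrix entries: they end up as go test -run patterns, where a slash separates the top-level test regex from a subtest regex and bare names act as unanchored regexes. Illustrative invocations (run from the integration-tests module; package path assumed):

    go test -run '^TestVRFv2Plus$/^Link_Billing$' ./smoke/...   # only the Link_Billing subtest
    go test -run TestOCRBasic ./smoke/...   # unanchored: would also match a hypothetical TestOCRBasicV2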
- testlistgenerator -o compatibility_test_list.json -p runlog -r TestRunLogBasic -f './smoke/runlog_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p log_poller -r TestLogPollerFewFiltersFixedDepth -f './smoke/log_poller_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr -r TestOCRBasic -f './smoke/ocr_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr2 -r '^TestOCRv2Basic/plugins$' -f './smoke/ocr2_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p automation -r 'TestAutomationBasic/registry_2_1_logtrigger' -f './smoke/automation_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p keeper -r 'TestKeeperBasicSmoke/registry_1_3' -f './smoke/keeper_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrf -r '^TestVRFBasic/Request_Randomness$' -f './smoke/vrf_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrfv2 -r '^TestVRFv2Basic/Request_Randomness$' -f './smoke/vrfv2_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrfv2plus -r '^TestVRFv2Plus$/^Link_Billing$' -f './smoke/vrfv2plus_test.go' -e erigon -d "${{ needs.get-latest-available-images.outputs.erigon_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - else - echo "Will not test compatibility with erigon" - fi - - if [[ "$ETH_IMPLEMENTATIONS" == *"nethermind"* ]]; then - echo "Will test compatibility with nethermind" - testlistgenerator -o compatibility_test_list.json -p cron -r TestCronBasic -f './smoke/cron_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p flux -r TestFluxBasic -f './smoke/flux_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p runlog -r TestRunLogBasic -f './smoke/runlog_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p log_poller -r TestLogPollerFewFiltersFixedDepth -f './smoke/log_poller_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t 
"evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr -r TestOCRBasic -f './smoke/ocr_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p ocr2 -r '^TestOCRv2Basic/plugins$' -f './smoke/ocr2_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p automation -r 'TestAutomationBasic/registry_2_1_logtrigger' -f './smoke/automation_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p keeper -r 'TestKeeperBasicSmoke/registry_1_3' -f './smoke/keeper_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - testlistgenerator -o compatibility_test_list.json -p vrf -r '^TestVRFBasic/Request_Randomness$' -f './smoke/vrf_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - # VRFv2 and VRFV2Plus tests are disabled for nethermind until the functionalities they rely on are supported - # testlistgenerator -o compatibility_test_list.json -p vrfv2 -r '^TestVRFv2Basic/Request_Randomness$' -f './smoke/vrfv2_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - # testlistgenerator -o compatibility_test_list.json -p vrfv2plus -r '^TestVRFv2Plus$/^Link_Billing$' -f './smoke/vrfv2plus_test.go' -e nethermind -d "${{ needs.get-latest-available-images.outputs.nethermind_images }}" -t "evm-implementation-compatibility-test" -n "ubuntu-latest" - else - echo "Will not test compatibility with nethermind" - fi - - jq . compatibility_test_list.json - JOB_MATRIX_JSON=$(jq -c . 
compatibility_test_list.json) - echo "JOB_MATRIX_JSON=${JOB_MATRIX_JSON}" >> $GITHUB_ENV - - run-client-compatibility-matrix: - name: ${{ matrix.evm_node.product }} compatibility with ${{ matrix.evm_node.docker_image }} - if: always() && needs.should-run.outputs.should_run == 'true' && (needs.build-chainlink.result == 'success' || needs.build-chainlink.result == 'skipped') && needs.prepare-compatibility-matrix.outputs.matrix != '' - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: - - build-chainlink - - prepare-compatibility-matrix - - should-run - - select-versions - env: - SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2 - CHAINLINK_COMMIT_SHA: ${{ needs.select-versions.outputs.chainlink_version }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - max-parallel: 10 - matrix: - evm_node: ${{fromJson(needs.prepare-compatibility-matrix.outputs.matrix)}} - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ needs.select-versions.outputs.chainlink_version }} - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare test log name - run: | - replace_special_chars() { - if [ -z "$1" ]; then - echo "Please provide a string as an argument." - return 1 - fi - - local input_string="$1" - - # Replace '/' with '-' - local modified_string="${input_string//\//-}" - - # Replace ':' with '-' - modified_string="${modified_string//:/-}" - - # Replace '.' 
with '-' - modified_string="${modified_string//./-}" - - echo "$modified_string" - } - echo "TEST_LOG_NAME=$(replace_special_chars "${{ matrix.evm_node.product }}-${{ matrix.evm_node.docker_image }}-test-logs")" >> $GITHUB_ENV - # - name: Collect Workflow Telemetry - # uses: catchpoint/workflow-telemetry-action@v2 - # with: - # comment_on_pr: false - # theme: 'dark' - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@aa8eea635029ab8d95abd3c206f56dae1e22e623 # v2.3.28 - with: - test_command_to_run: cd ./integration-tests && touch .root_dir && go test -timeout 30m -count=1 -json ${{ matrix.evm_node.run }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_config_chainlink_version: ${{ needs.select-versions.outputs.chainlink_version }} - test_config_selected_networks: ${{ env.SELECTED_NETWORKS}} - test_config_logging_run_id: ${{ github.run_id }} - test_config_test_log_collect: ${{ vars.TEST_LOG_COLLECT }} - test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_config_private_ethereum_network_execution_layer: ${{ matrix.evm_node.eth_implementation || 'geth' }} - test_config_private_ethereum_network_custom_docker_image: ${{ matrix.evm_node.docker_image }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ needs.select-versions.outputs.chainlink_version }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ env.TEST_LOG_NAME }} - artifacts_location: | - ./integration-tests/smoke/logs/ - /tmp/gotest.log - publish_check_name: ${{ matrix.evm_node.product }}-${{ matrix.evm_node.eth_implementation }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - should_tidy: "false" - go_coverage_src_dir: /var/tmp/go-coverage - go_coverage_dest_dir: ${{ github.workspace }}/.covdata - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_SERVER_URL: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - DEFAULT_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ci-client-compatability-${{ matrix.eth_client }}-testnet - DEFAULT_PYROSCOPE_ENABLED: "true" - - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - - start-slack-thread: - name: Start Slack Thread - if: always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' && (needs.select-versions.outputs.evm_implementations != '' || github.event.inputs.base64TestList != '') - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write 
- pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [run-client-compatibility-matrix, should-run, select-versions] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: ${{ secrets.QA_SLACK_CHANNEL }} - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "EVM Implementation Compatibility Test Results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "${{ contains(join(needs.*.result, ','), 'failure') && format('Some tests failed, notifying <{0}>', secrets.COMPAT_SLACK_NOTIFICATION_HANDLE) || 'All Good!' }}" - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/${{ needs.select-versions.outputs.chainlink_ref_path }}/${{ needs.select-versions.outputs.chainlink_version }}|${{ needs.select-versions.outputs.chainlink_version }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - parse-test-results: - name: Parse Test Results - if: always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' && (needs.select-versions.outputs.evm_implementations != '' || github.event.inputs.base64TestList != '') - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [run-client-compatibility-matrix, should-run] - outputs: - base64_parsed_results: ${{ steps.get-test-results.outputs.base64_parsed_results }} - steps: - # workflowresultparser is a tool to get job results from a workflow run - - name: Set Up workflowresultparser - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/workflowresultparser@v1.0.0 - - name: Get and parse Test Results - shell: bash - id: get-test-results - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^automation compatibility with (.*?)$" -namedKey="automation" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^keeper compatibility with (.*?)$" -namedKey="keeper" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^log_poller compatibility with (.*?)$" -namedKey="log_poller" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^ocr compatibility with (.*?)$" -namedKey="ocr" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^ocr2 compatibility 
with (.*?)$" -namedKey="ocr2" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^vrf compatibility with (.*?)$" -namedKey="vrf" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^vrfv2 compatibility with (.*?)$" -namedKey="vrfv2" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^vrfv2plus compatibility with (.*?)$" -namedKey="vrfv2plus" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^flux compatibility with (.*?)$" -namedKey="flux" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^runlog compatibility with (.*?)$" -namedKey="runlog" -outputFile=output.json - workflowresultparser -workflowRunID ${{ github.run_id }} -githubToken ${{ github.token }} -githubRepo "${{ github.repository }}" -jobNameRegex "^cron compatibility with (.*?)$" -namedKey="cron" -outputFile=output.json - - echo "base64_parsed_results=$(base64 -w 0 output.json)" >> $GITHUB_OUTPUT - - display-test-results: - name: Aggregated test results - if: always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' && needs.parse-test-results.result == 'success' - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [start-slack-thread, should-run, select-versions, parse-test-results] - steps: - # asciitable is a tool that prints results in a nice ASCII table - - name: Set Up asciitable - shell: bash - run: | - go install github.com/smartcontractkit/chainlink-testing-framework/tools/asciitable@v1.0.2 - - name: Print aggregated test results - shell: bash - run: | - PATH=$PATH:$(go env GOPATH)/bin - export PATH - - raw_results="$(echo ${{ needs.parse-test-results.outputs.base64_parsed_results }} | base64 -d)" - echo $raw_results > input.json - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "automation" --namedKey "automation" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "keeper" --namedKey "keeper" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "log_poller" --namedKey "log_poller" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "ocr" --namedKey "ocr" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "ocr2" --namedKey "ocr2" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "vrf" --namedKey "vrf" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "vrfv2" --namedKey "vrfv2" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile 
input.json --outputFile output.txt --section "vrfv2plus" --namedKey "vrfv2plus" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "flux" --namedKey "flux" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "cron" --namedKey "cron" - asciitable --firstColumn "EVM Implementation" --secondColumn Result --jsonfile input.json --outputFile output.txt --section "runlog" --namedKey "runlog" - - echo - echo "AGGREGATED RESULTS" - cat output.txt - - echo "## Aggregated EVM Implementations compatibility results summary" >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - cat output.txt >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - - post-test-results-to-slack: - name: Post Test Results for ${{matrix.product}} - if: always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' && needs.should-run.outputs.should_run == 'true' && needs.parse-test-results.result == 'success' - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [start-slack-thread, should-run, parse-test-results] - strategy: - fail-fast: false - matrix: - product: - - automation - - keeper - - log_poller - - ocr - - ocr2 - - vrf - - vrfv2 - - vrfv2plus - - cron - - flux - - runlog - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ needs.select-versions.outputs.chainlink_version }} - - name: Get test results for ${{ matrix.product }} - id: get-product-results - shell: bash - run: | - raw_results="$(echo ${{ needs.parse-test-results.outputs.base64_parsed_results }} | base64 -d)" - product_result=$(echo "$raw_results" | jq -c "select(has(\"${{ matrix.product }}\")) | .${{ matrix.product }}[]") - if [ -n "$product_result" ]; then - base64_result=$(echo $product_result | base64 -w 0) - echo "base64_result=$base64_result" >> $GITHUB_OUTPUT - else - echo "No results found for ${{ matrix.product }}" - echo "base64_result=" >> $GITHUB_OUTPUT - fi - - name: Post Test Results to Slack - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^${{ matrix.product }} compatibility with (.*?)$ - message_title: ${{ matrix.product }} - slack_channel_id: ${{ secrets.QA_SLACK_CHANNEL }} - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - base64_parsed_results: ${{ steps.get-product-results.outputs.base64_result }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 88f5de5668..0000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: 'CodeQL' - -on: - push: - branches: - - develop - pull_request: - # The branches below must be a subset of the branches above - branches: [develop] - schedule: - - cron: '23 19 * * 4' - -jobs: - analyze: - name: Analyze ${{ matrix.language }} - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - language: ['go', 'javascript'] - - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Set up Go - if: ${{ matrix.language == 'go' }} - uses: 
actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: 'go.mod' - - - name: Touching core/web/assets/index.html - if: ${{ matrix.language == 'go' }} - run: mkdir -p core/web/assets && touch core/web/assets/index.html - - - name: Initialize CodeQL - uses: github/codeql-action/init@65c74964a9ed8c44ed9f19d4bbc5757a6a8e9ab9 # codeql-bundle-v2.16.1 - with: - languages: ${{ matrix.language }} - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@65c74964a9ed8c44ed9f19d4bbc5757a6a8e9ab9 # codeql-bundle-v2.16.1 - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: chainlink-codeql - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Analyze ${{ matrix.language }} - continue-on-error: true diff --git a/.github/workflows/crib-integration-test.yml b/.github/workflows/crib-integration-test.yml deleted file mode 100644 index 75b2215d2f..0000000000 --- a/.github/workflows/crib-integration-test.yml +++ /dev/null @@ -1,74 +0,0 @@ -# this is disabled because of GAP limitations, should be re-enabled when github-actions-controller will be installed - -#name: CRIB Integration Tests -#on: -# push: -# workflow_call: -#concurrency: -# group: ${{ github.workflow }}-${{ github.ref }} -# cancel-in-progress: true -#jobs: -# test: -# runs-on: ubuntu-latest -# environment: integration -# permissions: -# id-token: write -# contents: read -# actions: read -# steps: -# - name: Checkout repository -# uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 -# -# - name: Setup Nix + GATI environment -# uses: smartcontractkit/.github/actions/setup-nix-gati@514fe346780e2eddf7ea8b9f48120c2fba120d94 -# with: -# aws-role-arn: ${{ secrets.AWS_OIDC_CHAINLINK_AUTO_PR_TOKEN_ISSUER_ROLE_ARN }} -# aws-lambda-url: ${{ secrets.AWS_CORE_TOKEN_ISSUER_LAMBDA_URL }} # see https://github.com/smartcontractkit/ infra/blob/a79bcfb48315c4411023c182e98eb80ff9e9cda6/accounts/production/us-west-2/lambda/ github-app-token-issuer-production/teams/releng/config.json#L9 -# aws-region: ${{ secrets.AWS_REGION }} -# aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} -# enable-magic-cache: true -# -# - name: Nix Develop Action -# uses: nicknovitski/nix-develop@v1 -# with: -# arguments: "--accept-flake-config" -# - name: setup-gap -# uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 -# with: -# aws-role-arn: ${{ secrets.AWS_OIDC_CRIB_ROLE_ARN_STAGE }} -# api-gateway-host: ${{ secrets.AWS_API_GW_HOST_K8S_STAGE }} -# aws-region: ${{ secrets.AWS_REGION }} -# ecr-private-registry: ${{ secrets.AWS_ACCOUNT_ID_PROD }} -# k8s-cluster-name: ${{ secrets.AWS_K8S_CLUSTER_NAME_STAGE }} -# use-private-ecr-registry: true -# use-k8s: true -# metrics-job-name: "k8s" -# gc-basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} -# gc-host: ${{ secrets.GRAFANA_INTERNAL_HOST }} -# gc-org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} -# - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 -# name: Checkout CRIB repository -# with: -# repository: 'smartcontractkit/crib' -# ref: 'main' -# - name: Generate Short UUID -# id: uuid -# run: echo "CRIB_NAMESPACE=$(uuidgen | cut -c1-5)" >> $GITHUB_ENV -# - name: Create a new CRIB environment -# run: |- -# devspace use 
namespace $CRIB_NAMESPACE -# devspace deploy --profile local-dev-simulated-core-ocr1 -# - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 -# - name: Setup go -# uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 -# with: -# go-version-file: "go.mod" -# - name: Run CRIB integration test -# working-directory: integration-tests/crib -# env: -# K8S_STAGING_INGRESS_SUFFIX: ${{ secrets.K8S_STAGING_INGRESS_SUFFIX }} -# CRIB_NAMESPACE: ${{ env.CRIB_NAMESPACE }} -# CRIB_NETWORK: geth -# CRIB_NODES: 5 -# run: |- -# go test -v -run TestCRIB \ No newline at end of file diff --git a/.github/workflows/delete-deployments.yml b/.github/workflows/delete-deployments.yml deleted file mode 100644 index 1eb839e462..0000000000 --- a/.github/workflows/delete-deployments.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Cleanup integration deployments -on: - workflow_dispatch: - schedule: - # every 10 mins - - cron: "*/10 * * * *" - -jobs: - cleanup: - name: Clean up integration environment deployments - runs-on: ubuntu-latest - steps: - - name: Checkout repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Clean up integration environment - uses: ./.github/actions/delete-deployments - with: - environment: integration - # Delete 300 deployments at a time - num-of-pages: 3 - # We start with page 2 because usually the first 200 deployments are still active, so we cannot delete them - starting-page: 2 - - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: chainlink-delete-deployments - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Clean up integration environment deployments - continue-on-error: true diff --git a/.github/workflows/dependency-check.yml b/.github/workflows/dependency-check.yml deleted file mode 100644 index ede188de64..0000000000 --- a/.github/workflows/dependency-check.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: Dependency Vulnerability Check - -on: - push: - -jobs: - changes: - name: Detect changes - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.changes.outputs.src }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes - with: - filters: | - src: - - '**/*go.sum' - - '**/*go.mod' - - '.github/workflows/dependency-check.yml' - Go: - runs-on: ubuntu-latest - needs: [changes] - steps: - - name: Check out code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Set up Go - if: needs.changes.outputs.src == 'true' - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: 'go.mod' - id: go - - - name: Write Go Modules list - if: needs.changes.outputs.src == 'true' - run: go list -json -m all > go.list - - - name: Check vulnerabilities - if: needs.changes.outputs.src == 'true' - uses: sonatype-nexus-community/nancy-github-action@726e338312e68ecdd4b4195765f174d3b3ce1533 # v1.0.3 - with: - nancyVersion: "v1.0.39" - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: dependency-vulnerability-check - org-id: ${{ 
secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Go - continue-on-error: true diff --git a/.github/workflows/gha-workflow-validation.yml b/.github/workflows/gha-workflow-validation.yml deleted file mode 100644 index a265a4eef3..0000000000 --- a/.github/workflows/gha-workflow-validation.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: GHA Workflow Validation - -on: - pull_request: - -jobs: - - validate-worfklow-changes: - name: Validate Workflow Changes - permissions: - contents: read - pull-requests: write - actions: read - runs-on: ubuntu-latest - steps: - - name: GHA Workflow Validator - uses: smartcontractkit/.github/actions/gha-workflow-validator@d316f66b2990ea4daa479daa3de6fc92b00f863e # gha-workflow-validator@0.2.0 - env: - GITHUB_TOKEN: ${{ github.token }} - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: lint-gh-workflows - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Validate Workflow Changes - continue-on-error: true diff --git a/.github/workflows/goreleaser-build-publish-develop.yml b/.github/workflows/goreleaser-build-publish-develop.yml deleted file mode 100644 index 835d650f18..0000000000 --- a/.github/workflows/goreleaser-build-publish-develop.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: "Build publish Chainlink develop on private ECR" - -on: - push: - branches: - - develop - -jobs: - push-chainlink-develop-goreleaser: - runs-on: - labels: ubuntu22.04-16cores-64GB - outputs: - goreleaser-metadata: ${{ steps.build-sign-publish.outputs.goreleaser-metadata }} - goreleaser-artifacts: ${{ steps.build-sign-publish.outputs.goreleaser-artifacts }} - environment: build-develop - permissions: - id-token: write - contents: read - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Configure aws credentials - uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - with: - role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_ARN }} - role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} - aws-region: ${{ secrets.AWS_REGION }} - mask-aws-account-id: true - role-session-name: goreleaser-build-publish-chainlink.push-develop - - name: Build, sign, and publish image - id: build-sign-publish - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - docker-registry: ${{ secrets.AWS_DEVELOP_ECR_HOSTNAME }} - enable-goreleaser-snapshot: "true" - goreleaser-exec: ./tools/bin/goreleaser_wrapper - goreleaser-config: .goreleaser.develop.yaml - goreleaser-key: ${{ secrets.GORELEASER_KEY }} - zig-version: 0.11.0 - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: goreleaser-build-publish - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: push-chainlink-develop-goreleaser - continue-on-error: true - \ No newline at end of file diff --git a/.github/workflows/integration-chaos-tests.yml b/.github/workflows/integration-chaos-tests.yml deleted file mode 
100644 index 673614bf2c..0000000000 --- a/.github/workflows/integration-chaos-tests.yml +++ /dev/null @@ -1,154 +0,0 @@ -name: Integration Chaos Test -on: - schedule: - - cron: "0 0 * * *" - push: - tags: - - "*" - workflow_dispatch: - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - TEST_SUITE: chaos - TEST_ARGS: -test.timeout 1h - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - repository: chainlink - tag: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - cl_repo: smartcontractkit/chainlink - cl_ref: ${{ github.sha }} - push_tag: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink:${{ github.sha }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Print Chainlink Image Built - id: push - run: | - echo "### chainlink node image tag used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: e2e-chaos-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - build-test-runner: - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Runner Image - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: e2e-chaos-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Runner Image - continue-on-error: true - - chaos-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - name: EVM Pods Chaos Tests - runs-on: ubuntu-latest - needs: 
[build-test-runner, build-chainlink] - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: e2e-chaos-tests - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: EVM Pods Chaos Tests - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Prepare Base64 TOML config - env: - CHAINLINK_VERSION: ${{ github.sha }} - run: | - echo ::add-mask::$CHAINLINK_IMAGE - - cat << EOF > config.toml - [Network] - selected_networks=["SIMULATED"] - - [ChainlinkImage] - image="$CHAINLINK_IMAGE" - version="$CHAINLINK_VERSION" - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - with: - test_command_to_run: cd integration-tests && go test -timeout 1h -count=1 -json -test.parallel 11 ./chaos 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - artifacts_location: ./integration-tests/chaos/logs - publish_check_name: EVM Pods Chaos Test Results - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Upload test log - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - if: failure() - with: - name: Test Results Log - path: /tmp/gotest.log - retention-days: 7 diff --git a/.github/workflows/integration-staging-tests.yml b/.github/workflows/integration-staging-tests.yml deleted file mode 100644 index d092b2bca1..0000000000 --- a/.github/workflows/integration-staging-tests.yml +++ /dev/null @@ -1,132 +0,0 @@ -# NEEDS ADJUSTING TO TOML CONFIG BEFORE USING!! 
-name: E2E Functions staging tests - -on: -# TODO: enable when env will be stable -# schedule: -# - cron: "0 0 * * *" - workflow_dispatch: - inputs: - network: - description: Blockchain network (testnet) - type: choice - default: "MUMBAI" - options: - - "MUMBAI" - test_type: - description: Test type - type: choice - default: "mumbai_functions_soak_test_real" - options: - - "mumbai_functions_soak_test_http" - - "mumbai_functions_stress_test_http" - - "mumbai_functions_soak_test_only_secrets" - - "mumbai_functions_stress_test_only_secrets" - - "mumbai_functions_soak_test_real" - - "mumbai_functions_stress_test_real" -# TODO: disabled, need GATI access -# - "gateway_secrets_set_soak_test" -# - "gateway_secrets_list_soak_test" - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - e2e-soak-test: - environment: sdlc - runs-on: ubuntu22.04-8cores-32GB - permissions: - contents: read - id-token: write - env: - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_TOKEN: ${{ secrets.LOKI_TOKEN }} - SELECTED_NETWORKS: ${{ inputs.network }} - SELECTED_TEST: ${{ inputs.test_type }} - MUMBAI_URLS: ${{ secrets.FUNCTIONS_STAGING_MUMBAI_URLS }} - MUMBAI_KEYS: ${{ secrets.FUNCTIONS_STAGING_MUMBAI_KEYS }} - WASP_LOG_LEVEL: info - steps: - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - env: - PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - PYROSCOPE_ENVIRONMENT: ci-smoke-${{ matrix.product }}-sepolia - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - run: | - convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - if [ -n "$PYROSCOPE_SERVER" ]; then - pyroscope_enabled=true - else - pyroscope_enabled=false - fi - - cat << EOF > config.toml - [Common] - chainlink_node_funding=0.5 - - [ChainlinkImage] - image="$CHAINLINK_IMAGE" - version="${{ github.sha }}" - - [Pyroscope] - enabled=$pyroscope_enabled - server_url="$PYROSCOPE_SERVER" - environment="$PYROSCOPE_ENVIRONMENT" - key_secret="$PYROSCOPE_KEY" - - [Logging] - run_id="$RUN_ID" - - [Logging.LogStream] - log_targets=$log_targets - - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_URL" - basic_auth_secret="$LOKI_BASIC_AUTH" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - - [Network] - selected_networks=["sepolia"] - - [Network.RpcHttpUrls] - sepolia = $(convert_to_toml_array "$SEPOLIA_HTTP_URLS") - - [Network.RpcWsUrls] - sepolia = $(convert_to_toml_array "$SEPOLIA_URLS") - - [Network.WalletKeys] - sepolia = $(convert_to_toml_array "$EVM_KEYS") - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Run E2E soak tests - run: | - cd integration-tests/load/functions - if [[ $SELECTED_TEST == mumbai_functions* ]]; then - go test -v -timeout 6h -run TestFunctionsLoad/$SELECTED_TEST - elif [[ $SELECTED_TEST == gateway* ]]; then - go test -v -timeout 6h -run TestGatewayLoad/$SELECTED_TEST - fi \ No newline at end of file diff --git a/.github/workflows/integration-tests-publish.yml b/.github/workflows/integration-tests-publish.yml deleted file mode 100644 index 
7a842fef6f..0000000000 --- a/.github/workflows/integration-tests-publish.yml +++ /dev/null @@ -1,98 +0,0 @@ -name: Test Image Publish -# Publish the compiled integration tests -on: - push: - tags: - - "v*" - branches: - - ccip-develop - workflow_dispatch: - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - -jobs: - publish-integration-test-image: - environment: integration - permissions: - id-token: write - contents: read - name: Publish Integration Test Image - runs-on: ubuntu22.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: publish-e2e-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Publish Integration Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.sha }} - - name: Setup Other Tags - id: tags - run: | - echo 'release_tag="${{ format('{0}.dkr.ecr.{1}.amazonaws.com/chainlink-ccip-tests:{2}', secrets.QA_AWS_ACCOUNT_NUMBER, secrets.QA_AWS_REGION, github.ref_name) }}"' >> $GITHUB_OUTPUT - - name: Build Image - uses: ./.github/actions/build-test-image - with: - other_tags: ${{steps.tags.outputs.release_tag}} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - name: Notify Slack - # Only run this notification for merge to develop failures - if: failure() && github.event_name != 'workflow_dispatch' - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - with: - channel-id: "#team-test-tooling-internal" - slack-message: ":x: :mild-panic-intensifies: Publish Integration Test Image failed: \n${{ format('https://github.com/{0}/actions/runs/{1}', github.repository, github.run_id) }}" - build-chainlink-image: - environment: integration - # Only run this build for workflow_dispatch - if: github.event_name == 'workflow_dispatch' - permissions: - id-token: write - contents: read - strategy: - matrix: - image: - - name: "" - dockerfile: core/chainlink.Dockerfile - tag-suffix: "" - # uncomment in the future if we end up needing to soak test the plugins image - # - name: (plugins) - # dockerfile: plugins/chainlink.Dockerfile - # tag-suffix: -plugins - name: Build Chainlink Image ${{ matrix.image.name }} - runs-on: ubuntu22.04-8cores-32GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: ${{ matrix.image.tag-suffix }} - dockerfile: ${{ matrix.image.dockerfile }} - git_commit_sha: 
${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml deleted file mode 100644 index c359887e0f..0000000000 --- a/.github/workflows/integration-tests.yml +++ /dev/null @@ -1,930 +0,0 @@ -name: Integration Tests -run-name: Integration Tests ${{ inputs.distinct_run_name && inputs.distinct_run_name || '' }} -on: - merge_group: - pull_request: - push: - tags: - - "*" - workflow_dispatch: - inputs: - cl_ref: - description: 'The ref to checkout, defaults to the calling branch' - required: false - type: string - evm-ref: - description: 'The sha of the chainlink-evm commit to use if wanted' - required: false - type: string - distinct_run_name: - description: 'A unique identifier for this run, only use from other repos' - required: false - type: string - -# Only run 1 of this workflow at a time per PR -concurrency: - group: ${{ github.ref }}-${{ github.repository }}-${{ github.event_name }}--e2e-tests-${{ inputs.distinct_run_name }} - cancel-in-progress: true - -env: - # for run-test variables and environment - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ inputs.evm-ref || github.sha }} - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - TEST_SUITE: smoke - TEST_ARGS: -test.timeout 12m - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - COLLECTION_ID: chainlink-e2e-tests - -jobs: - enforce-ctf-version: - name: Enforce CTF Version - runs-on: ubuntu-latest - # We don't directly merge dependabot PRs, so let's not waste the resources - if: github.actor != 'dependabot[bot]' - steps: - - run: echo "${{github.event_name}}" - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref }} - - name: Check Merge Group Condition - id: condition-check - run: | - echo "Checking event condition..." 
- SHOULD_ENFORCE="false" - if [[ "$GITHUB_EVENT_NAME" == "merge_group" ]]; then - echo "We are in a merge_group event, now check if we are on the develop branch" - target_branch=$(cat $GITHUB_EVENT_PATH | jq -r .merge_group.base_ref) - if [[ "$target_branch" == "refs/heads/develop" ]]; then - echo "We are on the develop branch, we should enforce ctf version" - SHOULD_ENFORCE="true" - fi - fi - echo "should we enforce ctf version = $SHOULD_ENFORCE" - echo "should-enforce=$SHOULD_ENFORCE" >> $GITHUB_OUTPUT - - name: Enforce CTF Version - if: steps.condition-check.outputs.should-enforce == 'true' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/mod-version@fc3e0df622521019f50d772726d6bf8dc919dd38 # v2.3.19 - with: - go-project-path: ./integration-tests - module-name: github.com/smartcontractkit/chainlink-testing-framework/lib - enforce-semantic-tag: "true" - - changes: - environment: integration - name: Check Paths That Require Tests To Run - runs-on: ubuntu-latest - # We don't directly merge dependabot PRs, so let's not waste the resources - if: github.actor != 'dependabot[bot]' - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref }} - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes - with: - filters: | - changes: - - '**/*.go' - - '**/*go.sum' - - '**/*go.mod' - - '.github/workflows/integration-tests.yml' - - '**/*Dockerfile' - - 'core/**/migrations/*.sql' - - 'core/**/config/**/*.toml' - - 'integration-tests/**/*.toml' - - name: Ignore Filter On Workflow Dispatch - if: ${{ github.event_name == 'workflow_dispatch' }} - id: ignore-filter - run: echo "changes=true" >> $GITHUB_OUTPUT - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-check-paths - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Check Paths That Require Tests To Run - continue-on-error: true - outputs: - src: ${{ steps.ignore-filter.outputs.changes || steps.changes.outputs.changes }} - - build-lint-integration-tests: - name: Build and Lint ${{ matrix.project.name }} - runs-on: ubuntu22.04-8cores-32GB - # We don't directly merge dependabot PRs, so let's not waste the resources - if: github.actor != 'dependabot[bot]' - strategy: - matrix: - project: - - name: integration-tests - id: e2e - path: ./integration-tests - cache-id: e2e - - name: load - id: load - path: ./integration-tests/load - cache-id: load - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-build-lint-${{ matrix.project.id }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build and Lint ${{ matrix.project.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref }} - - name: Setup Go - uses: 
smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-go@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_download_vendor_packages_command: cd ${{ matrix.project.path }} && go mod download - go_mod_path: ${{ matrix.project.path }}/go.mod - cache_key_id: core-${{ matrix.project.cache-id }}-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - - name: Build Go - run: | - cd ${{ matrix.project.path }} - go build ./... - go test -run=^# ./... - - name: Lint Go - uses: golangci/golangci-lint-action@3cfe3a4abbb849e10058ce4af15d205b6da42804 # v4.0.0 - with: - version: v1.59.1 - # We already cache these directories in setup-go - skip-pkg-cache: true - skip-build-cache: true - # only-new-issues is only applicable to PRs, otherwise it is always set to false - only-new-issues: false # disabled for PRs due to unreliability - args: --out-format colored-line-number,checkstyle:golangci-lint-report.xml - working-directory: ${{ matrix.project.path }} - - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - strategy: - matrix: - image: - - name: "" - dockerfile: core/chainlink.Dockerfile - tag-suffix: "" - - name: (plugins) - dockerfile: plugins/chainlink.Dockerfile - tag-suffix: -plugins - name: Build Chainlink Image ${{ matrix.image.name }} - runs-on: ubuntu22.04-8cores-32GB - needs: [changes, enforce-ctf-version] - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Setup Github Token - if: ${{ inputs.evm-ref }} - id: get-gh-token - uses: smartcontractkit/.github/actions/setup-github-token@ef78fa97bf3c77de6563db1175422703e9e6674f # setup-github-token@0.2.1 - with: - aws-role-arn: ${{ secrets.AWS_OIDC_GLOBAL_READ_ONLY_TOKEN_ISSUER_ROLE_ARN }} - aws-lambda-url: ${{ secrets.AWS_INFRA_RELENG_TOKEN_ISSUER_LAMBDA_URL }} - aws-region: ${{ secrets.AWS_REGION }} - set-git-config: "true" - - name: Build Chainlink Image - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: ${{ matrix.image.tag-suffix }} - dockerfile: ${{ matrix.image.dockerfile }} - git_commit_sha: ${{ inputs.evm-ref || github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - dep_evm_sha: ${{ inputs.evm-ref }} - - build-test-image: - if: startsWith(github.ref, 'refs/tags/') || github.event_name == 'schedule' || contains(join(github.event.pull_request.labels.*.name, ' '), 'build-test-image') - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu22.04-16cores-64GB - needs: [changes] - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 
'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Go Test Command - id: build-go-test-command - run: | - # if the matrix.product.run is set, use it for a different command - if [ "${{ matrix.product.run }}" != "" ]; then - echo "run_command=${{ matrix.product.run }} ./smoke/${{ matrix.product.file }}_test.go" >> "$GITHUB_OUTPUT" - else - echo "run_command=./smoke/${{ matrix.product.name }}_test.go" >> "$GITHUB_OUTPUT" - fi - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - id: setup-gap - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - eth-smoke-tests-matrix: - if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') }} - environment: integration - permissions: - actions: read - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, changes, build-lint-integration-tests] - env: - SELECTED_NETWORKS: SIMULATED - CHAINLINK_COMMIT_SHA: ${{ inputs.evm-ref || github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - matrix: - product: -# LM Smoke Test is disabled since project is paused -# - name: ccip-lm-smoke -# nodes: 1 -# os: ubuntu-latest -# file: lm -# dir: ccip-tests/smoke -# run: -run ^TestLmBasic$ - - name: ccip-smoke - nodes: 1 - os: ubuntu-latest - file: ccip - dir: ccip-tests/smoke - run: -run ^TestSmokeCCIPForBidirectionalLane$ - - name: ccip-smoke-1.4-pools - nodes: 1 - os: ubuntu-latest - file: ccip - dir: ccip-tests/smoke - run: -run ^TestSmokeCCIPForBidirectionalLane$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/contract-version1.4.toml - - name: ccip-smoke-usdc - nodes: 1 - os: ubuntu-latest - file: ccip - dir: ccip-tests/smoke - run: -run ^TestSmokeCCIPForBidirectionalLane$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/usdc_mock_deployment.toml - - name: ccip-smoke-db-compatibility - nodes: 1 - os: ubuntu-latest - file: ccip - dir: ccip-tests/smoke - run: -run ^TestSmokeCCIPForBidirectionalLane$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/db-compatibility.toml - - name: ccip-smoke-rate-limit - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPRateLimit$ - - name: ccip-smoke-rate-limit - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPTokenPoolRateLimits$ - - name: ccip-smoke-multicall - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPMulticall$ - - name: ccip-smoke-manual-exec - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: 
-run ^TestSmokeCCIPManuallyExecuteAfterExecutionFailingDueToInsufficientGas$ - - name: ccip-smoke-on-ramp-limits - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPOnRampLimits$ - - name: ccip-smoke-off-ramp-capacity - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPOffRampCapacityLimit$ - - name: ccip-smoke-off-ramp-agg-rate-limit - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPOffRampAggRateLimit$ - - name: ccip-smoke-leader-lane - nodes: 15 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPForBidirectionalLane$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/leader-lane.toml - - name: ccip-smoke-reorg-bidirectional - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPReorgBelowFinality$ -v - config_path: ./integration-tests/ccip-tests/testconfig/tomls/ccip-reorg.toml - - name: ccip-smoke-reorg-below-finality - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPReorgAboveFinalityAtDestination$ -v - config_path: ./integration-tests/ccip-tests/testconfig/tomls/ccip-reorg.toml - - name: ccip-smoke-reorg-above-finality - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu-latest - file: ccip - run: -run ^TestSmokeCCIPReorgAboveFinalityAtSource$ -v - config_path: ./integration-tests/ccip-tests/testconfig/tomls/ccip-reorg.toml - - name: runlog - id: runlog - nodes: 2 - os: ubuntu-latest - pyroscope_env: "ci-smoke-runlog-evm-simulated" - - name: cron - id: cron - nodes: 2 - os: ubuntu-latest - pyroscope_env: "ci-smoke-cron-evm-simulated" - - name: flux - id: flux - nodes: 1 - os: ubuntu-latest - pyroscope_env: "ci-smoke-flux-evm-simulated" - - name: ocr - id: ocr - nodes: 2 - os: ubuntu-latest - file: ocr - pyroscope_env: ci-smoke-ocr-evm-simulated - - name: reorg_above_finality - id: reorg_above_finality - nodes: 1 - os: ubuntu-latest - file: reorg_above_finality - pyroscope_env: ci-smoke-reorg-above-finality-evm-simulated - - name: ocr2 - id: ocr2 - nodes: 6 - os: ubuntu22.04-16cores-64GB - file: ocr2 - pyroscope_env: ci-smoke-ocr2-evm-simulated - - name: ocr2 - id: ocr2-plugins - nodes: 6 - os: ubuntu22.04-16cores-64GB - pyroscope_env: ci-smoke-ocr2-plugins-evm-simulated - tag_suffix: "-plugins" - - name: vrf - id: vrf - nodes: 2 - os: ubuntu-latest - pyroscope_env: ci-smoke-vrf-evm-simulated - - name: vrfv2 - id: vrfv2 - nodes: 6 - os: ubuntu-latest - pyroscope_env: ci-smoke-vrf2-evm-simulated - - name: vrfv2plus - id: vrfv2plus - nodes: 9 - os: ubuntu-latest - pyroscope_env: ci-smoke-vrf2plus-evm-simulated - - name: forwarder_ocr - id: forwarder_ocr - nodes: 2 - os: ubuntu-latest - pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated - - name: forwarders_ocr2 - id: forwarders_ocr2 - nodes: 2 - os: ubuntu-latest - pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated - runs-on: ${{ matrix.product.os }} - name: ETH Smoke Tests ${{ matrix.product.name }}${{ matrix.product.tag_suffix }} - steps: - # Handy for debugging resource usage - # - name: Collect Workflow Telemetry - # uses: catchpoint/workflow-telemetry-action@94c3c3d9567a0205de6da68a76c428ce4e769af1 # v2.0.0 - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID 
}}-matrix-${{ matrix.product.id }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: ETH Smoke Tests ${{ matrix.product.name }}${{ matrix.product.tag_suffix }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Go Test Command - id: build-go-test-command - run: | - # if dir is provided use it, otherwise use the smoke dir - if [ "${{ matrix.product.dir }}" != "" ]; then - dir=${{ matrix.product.dir }} - else - dir=smoke - fi - # if the matrix.product.run is set, use it for a different command - if [ "${{ matrix.product.run }}" != "" ]; then - echo "run_command=${{ matrix.product.run }} ./${dir}/${{ matrix.product.file }}_test.go" >> "$GITHUB_OUTPUT" - else - echo "run_command=./${dir}/${{ matrix.product.name }}_test.go" >> "$GITHUB_OUTPUT" - fi - - name: Check for "enable tracing" label - id: check-label - run: | - label=$(jq -r '.pull_request.labels[]?.name // empty' "$GITHUB_EVENT_PATH") - - if [[ -n "$label" ]]; then - if [[ "$label" == "enable tracing" ]]; then - echo "Enable tracing label found." - echo "trace=true" >> $GITHUB_OUTPUT - else - echo "Enable tracing label not found." - echo "trace=false" >> $GITHUB_OUTPUT - fi - else - echo "No labels present or labels are null." - echo "trace=false" >> $GITHUB_OUTPUT - fi - - - name: Setup Grafana and OpenTelemetry - id: docker-setup - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - # Create network - docker network create --driver bridge tracing - - # Make trace directory - cd integration-tests/smoke/ - mkdir ./traces - chmod -R 777 ./traces - - # Switch directory - cd ../../.github/tracing - - # Create a Docker volume for traces - # docker volume create otel-traces - - # Start OpenTelemetry Collector - # Note the user must be set to the same user as the runner for the trace data to be accessible - docker run -d --network=tracing --name=otel-collector \ - -v $PWD/otel-collector-ci.yaml:/etc/otel-collector.yaml \ - -v $PWD/../../integration-tests/smoke/traces:/tracing \ - --user "$(id -u):$(id -g)" \ - -p 4317:4317 otel/opentelemetry-collector:0.88.0 --config=/etc/otel-collector.yaml - - - name: Locate Docker Volume - id: locate-volume - if: false - run: | - echo "VOLUME_PATH=$(docker volume inspect --format '{{ .Mountpoint }}' otel-traces)" >> $GITHUB_OUTPUT - - - name: Show Otel-Collector Logs - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - docker logs otel-collector - - name: Set Override Config - id: set_override_config - run: | - # if the matrix.product.config_path is set, use it as the override config - if [ "${{ matrix.product.config_path }}" != "" ]; then - echo "base_64_override=$(base64 -w 0 -i ${{ matrix.product.config_path }})" >> "$GITHUB_OUTPUT" - fi - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - id: setup-gap - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ 
secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - # metrics inputs - metrics-job-name: "grafana" - gc-host: ${{ secrets.GRAFANA_INTERNAL_HOST }} - gc-basic-auth: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - gc-org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - - name: Prepare Base64 CCIP TOML secrets - uses: ./.github/actions/setup-create-base64-config-ccip - id: setup_create_base64_config_ccip - with: - runId: ${{ github.run_id }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ${{ matrix.product.pyroscope_env }} - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - selectedNetworks: SIMULATED_1,SIMULATED_2 - chainlinkVersion: ${{ inputs.evm-ref || github.sha }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - ## Run this step when changes that require tests to be run are made - - name: Run Tests - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@aa8eea635029ab8d95abd3c206f56dae1e22e623 # v2.3.28 - env: - BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.setup_create_base64_config_ccip.outputs.base64_config }},${{ steps.set_override_config.outputs.base_64_override }} - DEBUG_RESTY: false - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -test.parallel=${{ matrix.product.nodes }} ${{ steps.build-go-test-command.outputs.run_command }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_config_chainlink_version: ${{ inputs.evm-ref || github.sha }} - test_config_selected_networks: ${{ env.SELECTED_NETWORKS }} - test_config_logging_run_id: ${{ github.run_id }} - test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_config_test_log_collect: ${{ vars.TEST_LOG_COLLECT }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ inputs.evm-ref || github.sha }}${{ matrix.product.tag_suffix }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ matrix.product.name }}${{ matrix.product.tag_suffix }}-test-logs - artifacts_location: | - ./integration-tests/smoke/logs/ - ./integration-tests/ccip-tests/smoke/logs/* - /tmp/gotest.log - publish_check_name: ${{ matrix.product.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - should_tidy: "false" - go_coverage_src_dir: /var/tmp/go-coverage - go_coverage_dest_dir: ${{ github.workspace }}/.covdata - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ 
secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_SERVER_URL: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - DEFAULT_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ${{ matrix.product.pyroscope_env }} - DEFAULT_PYROSCOPE_ENABLED: ${{ matrix.product.pyroscope_env == '' || !startsWith(github.ref, 'refs/tags/') && 'false' || 'true' }} - - - name: Upload Coverage Data - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 - timeout-minutes: 2 - continue-on-error: true - with: - name: cl-node-coverage-data-${{ matrix.product.name }}-${{ matrix.product.tag_suffix }} - path: .covdata - retention-days: 1 - - # Run this step when changes that do not need the test to run are made - - name: Run Setup - if: needs.changes.outputs.src == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-run-tests-environment@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_download_vendor_packages_command: cd ./integration-tests && go mod download - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - should_tidy: "false" - - - name: Show Otel-Collector Logs - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - docker logs otel-collector - - - name: Permissions on traces - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - ls -l ./integration-tests/smoke/traces - - - name: Upload Trace Data - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - with: - name: trace-data - path: ./integration-tests/smoke/traces/trace-data.json - - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directories: ./integration-tests/smoke/,./integration-tests/ccip-tests/smoke/ - - ### Used to check the required checks box when the matrix completes - eth-smoke-tests: - if: always() - runs-on: ubuntu-latest - name: ETH Smoke Tests - needs: [eth-smoke-tests-matrix] - steps: - - name: Check smoke test matrix status - if: needs.eth-smoke-tests-matrix.result != 'success' - run: | - echo "${{ needs.eth-smoke-tests-matrix.result }}" - exit 1 - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-matrix-results - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: ETH Smoke Tests - 
matrix-aggregator-status: ${{ needs.eth-smoke-tests-matrix.result }} - continue-on-error: true - - cleanup: - name: Clean up integration environment deployments - if: always() - needs: [eth-smoke-tests] - runs-on: ubuntu-latest - steps: - - name: Checkout repo - if: ${{ github.event_name == 'pull_request' }} - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref }} - - - name: 🧼 Clean up Environment - if: ${{ github.event_name == 'pull_request' }} - uses: ./.github/actions/delete-deployments - with: - environment: integration - ref: ${{ github.head_ref }} # See https://github.com/github/docs/issues/15319#issuecomment-1476705663 - - - name: Collect Metrics - if: ${{ github.event_name == 'pull_request' }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-env-cleanup - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Clean up integration environment deployments - continue-on-error: true - - show-coverage: - name: Show Chainlink Node Go Coverage - if: always() - needs: [cleanup] - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Download All Artifacts - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 # v4.1.6 - with: - path: cl-node-coverage-data - pattern: cl-node-coverage-data-* - merge-multiple: true - - name: Show Coverage - run: go run ./integration-tests/scripts/show_coverage.go "${{ github.workspace }}/cl-node-coverage-data/*/merged" - - # Run the setup again once the matrix finishes, but this time save the cache if there was a cache miss - # this will also only run if both of the matrix jobs pass - eth-smoke-go-mod-cache: - environment: integration - needs: [eth-smoke-tests] - runs-on: ubuntu-latest - name: ETH Smoke Tests Go Mod Cache - continue-on-error: true - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Run Setup - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-go@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_download_vendor_packages_command: | - cd ./integration-tests - go mod download - # force download of test dependencies - go test -run=NonExistentTest ./smoke/...
|| echo "ignore expected test failure" - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "false" - - ### Migration tests - node-migration-tests: - name: Version Migration Tests - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [build-chainlink, changes] - # Only run migration tests on new tags - if: startsWith(github.ref, 'refs/tags/') - env: - SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2 - CHAINLINK_COMMIT_SHA: ${{ inputs.evm-ref || github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - CHAINLINK_IMAGE: public.ecr.aws/w0i8p0z9/chainlink-ccip - UPGRADE_VERSION: ${{ inputs.evm-ref || github.sha }} - UPGRADE_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - TEST_LOG_LEVEL: debug - TEST_SUITE: migration - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-migration-tests - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Version Migration Tests - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/ccip - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Get Latest Version - id: get_latest_version - run: | - untrimmed_ver=$(curl --header "Authorization: token ${{ secrets.GITHUB_TOKEN }}" --request GET https://api.github.com/repos/${{ github.repository }}/releases/latest | jq -r .name) - latest_version="${untrimmed_ver:1}" - # Check if latest_version is empty - if [ -z "$latest_version" ]; then - echo "Error: The latest_version is empty. The migration tests need a verison to run." 
- exit 1 - fi - # CCIP uses the `-release` suffix for their versions - echo "latest_version=${latest_version}-release" >> "$GITHUB_OUTPUT" - - name: Name Versions - run: | - echo "Running migration tests from version '${{ steps.get_latest_version.outputs.latest_version }}' to: '${{ inputs.evm-ref || github.sha }}'" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-upgrade-config - with: - selectedNetworks: ${{ env.SELECTED_NETWORKS }} - chainlinkVersion: ${{ steps.get_latest_version.outputs.latest_version }} - upgradeVersion: ${{ env.UPGRADE_VERSION }} - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - - name: Run Migration Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@aa8eea635029ab8d95abd3c206f56dae1e22e623 # v2.3.28 - with: - test_command_to_run: cd ./integration-tests && go test -timeout 20m -count=1 -json ./migration 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_config_override_base64: ${{ env.BASE64_CONFIG_OVERRIDE }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ steps.get_latest_version.outputs.latest_version }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: node-migration-test-logs - artifacts_location: | - ./integration-tests/migration/logs - /tmp/gotest.log - publish_check_name: Node Migration Test Results - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - go_coverage_src_dir: /var/tmp/go-coverage - go_coverage_dest_dir: ${{ github.workspace }}/.covdata - should_tidy: "false" - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_CHAINLINK_UPGRADE_IMAGE: ${{ env.UPGRADE_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - - - name: Upload Coverage Data - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 - timeout-minutes: 2 - continue-on-error: true - with: - name: cl-node-coverage-data-migration-tests - path: .covdata - retention-days: 1 - - name: Notify Slack - if: failure() && github.event_name != 'workflow_dispatch' - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - with: - channel-id: "C05QQ8Z342V" - slack-message: "CCIP Node Migration Tests Failed :x: \n${{ format('https://github.com/{0}/actions/runs/{1}', github.repository, github.run_id) }}" - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "CCIP Version Migration Test Results ${{ contains(join(needs.*.result, ','), 'failure') 
&& ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "${{ contains(join(needs.*.result, ','), 'failure') && 'Some tests failed! Notifying ' || 'All Good!' }}" - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } diff --git a/.github/workflows/lint-gh-workflows.yml b/.github/workflows/lint-gh-workflows.yml deleted file mode 100644 index c7727199e9..0000000000 --- a/.github/workflows/lint-gh-workflows.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Lint GH Workflows -on: - push: -jobs: - lint_workflows: - name: Validate Github Action Workflows - runs-on: ubuntu-latest - steps: - - name: Check out Code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Run actionlint - uses: reviewdog/action-actionlint@c6ee1eb0a5d47b2af53a203652b5dac0b6c4016e # v1.43.0 - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: lint-gh-workflows - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Validate Github Action Workflows - continue-on-error: true diff --git a/.github/workflows/live-testnet-tests.yml b/.github/workflows/live-testnet-tests.yml deleted file mode 100644 index a7eaa19f7f..0000000000 --- a/.github/workflows/live-testnet-tests.yml +++ /dev/null @@ -1,1119 +0,0 @@ -# *** -# This workflow is a monstrosity of copy-paste, and that's to increase legibility in reporting and running, so the code be damned. -# I suspect this can be cleaned up significantly with some clever trickery of the GitHub actions matrices, but I am not that clever. -# We want each chain to run in parallel, but each test within the chain needs to be able to run sequentially -# (we're trying to eliminate this as a requirement, should make it a lot easier). -# Each chain can have a variety of tests to run. -# We also want reporting to be clear in the start-slack-thread and post-test-results-to-slack jobs. 
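# A rough sketch of why the copy-paste is hard to avoid here (hypothetical YAML, not part of this
# workflow): strategy.max-parallel caps concurrency for a job's entire matrix, so one combined
# network-by-test matrix cannot run chains in parallel while keeping tests within a chain sequential:
#
#   testnet-smoke-tests:
#     runs-on: ubuntu-latest
#     strategy:
#       fail-fast: false
#       max-parallel: 1                              # serializes the WHOLE matrix, across networks too
#       matrix:
#         network: [sepolia, bsc_testnet]            # hypothetical axis
#         test: [TestOCRBasic, TestAutomationBasic]  # hypothetical axis
#     steps:
#       - run: ./tests -test.run ${{ matrix.test }}  # placeholder command
#
# Hence the per-network jobs below, each carrying its own max-parallel: 1 matrix of tests.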
-# Funding address: 0xC1107e57082945E28d3202A81B1520DEA3AE6AEC -# *** - -name: Live Testnet Tests -on: - # Disable regular runs for now until we can fix some test client flakiness and improve stability - # schedule: - # - cron: "0 5 * * *" # Run every night at midnight EST - # push: - # tags: - # - "*" - workflow_dispatch: - inputs: - slack_user_id: - description: "The Slack member ID to notify" - required: true - type: string - network: - description: "The network to run tests on" - required: true - type: choice - options: - - "All" - - "Sepolia" - - "Optimism Sepolia" - - "Arbitrum Sepolia" - - "Base Sepolia" - - "Polygon Mumbai" - - "Avalanche Fuji" - - "Fantom Testnet" - - "Celo Alfajores" - - "Linea Goerli" - - "BSC Testnet" - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - CHAINLINK_NODE_FUNDING: .5 - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - LOGSTREAM_LOG_TARGETS: loki - GRAFANA_URL: ${{ vars.GRAFANA_URL }} - RUN_ID: ${{ github.run_id }} - - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - -jobs: - - # Build Test Dependencies - - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: live-testnet-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - build-tests: - environment: integration - permissions: - id-token: write - contents: read - name: Build Tests Binary - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: live-testnet-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Tests Binary - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-tests@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_download_vendor_packages_command: cd ./integration-tests && go mod download - token: ${{
secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - go_tags: embed - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - binary_name: tests - - # End Build Test Dependencies - - # Reporting Jobs - - start-slack-thread: - name: Start Slack Thread - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [sepolia-smoke-tests, optimism-sepolia-smoke-tests, arbitrum-sepolia-smoke-tests, base-sepolia-smoke-tests, polygon-mumbai-smoke-tests, avalanche-fuji-smoke-tests, fantom-testnet-smoke-tests, celo-alfajores-smoke-tests, linea-goerli-smoke-tests, bsc-testnet-smoke-tests] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - id: slack - with: - channel-id: ${{ secrets.QA_SLACK_CHANNEL }} - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "Live Smoke Test Results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "Notifying <@${{ inputs.slack_user_id }}>" - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>\nThe funding address for all tests and networks is `0xC1107e57082945E28d3202A81B1520DEA3AE6AEC`" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - post-test-results-to-slack: - name: Post Test Results for ${{ matrix.network }} - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: start-slack-thread - strategy: - fail-fast: false - matrix: - network: [Sepolia, Optimism Sepolia, Arbitrum Sepolia, Base Sepolia, Polygon Mumbai, Avalanche Fuji, Fantom Testnet, Celo Alfajores, Linea Goerli, BSC Testnet] - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Post Test Results - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^${{ matrix.network }} (.*?) 
Tests$ - message_title: ${{ matrix.network }} - slack_channel_id: ${{ secrets.QA_SLACK_CHANNEL }} - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - - # End Reporting Jobs - - sepolia-smoke-tests: - environment: integration - if: ${{ (github.event.inputs.network == 'All' || github.event.inputs.network == 'Sepolia') }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "sepolia" - httpEndpoints: ${{ secrets.QA_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - 
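A note on the expression idioms used throughout these jobs: GitHub Actions expressions have no ternary operator, so the pyroscopeServer value above leans on the "condition && whenTrue || whenFalse" emulation, and the reporting jobs aggregate their dependencies' outcomes with an object filter plus join. A minimal sketch of both patterns (the BUILD_KIND and HAS_FAILURE names are hypothetical, for illustration only):

    env:
      # ternary emulation: 'tagged' on tag builds, 'untagged' otherwise
      BUILD_KIND: ${{ startsWith(github.ref, 'refs/tags/') && 'tagged' || 'untagged' }}
      # flatten every dependency result into one string, then search it
      HAS_FAILURE: ${{ contains(join(needs.*.result, ','), 'failure') }}

The usual caveat applies: if the whenTrue operand is itself falsy (an empty string, say), the whenFalse operand is returned instead, which is presumably why the Pyroscope expression above is ordered so that the secret is always the final operand.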
QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - bsc-testnet-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'BSC Testnet' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: BSC Testnet ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-bsc-testnet - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "bsc_testnet" - httpEndpoints: ${{ secrets.QA_BSC_TESTNET_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_BSC_TESTNET_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: 
core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - optimism-sepolia-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Optimism Sepolia' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Optimism Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-optimism-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "optimism_sepolia" - httpEndpoints: ${{ secrets.QA_OPTIMISM_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_OPTIMISM_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - 
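For context on the test_command_to_run values in these jobs: the downloaded "tests" artifact is a pre-compiled Go test binary, so the usual go test flags must be spelled with their -test. prefix. A rough local equivalent, assuming the binary is built from the smoke package with the same embed build tag (the paths and package here are assumptions):

    # compile the test binary once, then run a single test from it
    cd integration-tests
    go test -c -tags embed -o tests ./smoke
    ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run TestOCRBasic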
dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - arbitrum-sepolia-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Arbitrum Sepolia' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Arbitrum Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-arbitrum-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "arbitrum_sepolia" - httpEndpoints: ${{ secrets.QA_ARBITRUM_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_ARBITRUM_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - 
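Each network job shares the same strategy shape: an include-only matrix. Because no base axes are declared, every include entry becomes exactly one job rather than a cross-product, which keeps each product name bound to its test, and max-parallel: 1 runs the products one at a time (presumably so they do not compete for the same funding keys on a live chain). A stripped-down sketch of the pattern:

    strategy:
      max-parallel: 1
      fail-fast: false
      matrix:
        include: # one job per entry, no cross-product
          - product: OCR
            test: TestOCRBasic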
cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - base-sepolia-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Base Sepolia' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Base Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-base-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "base_sepolia" - httpEndpoints: ${{ secrets.QA_BASE_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_BASE_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ 
secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - polygon-mumbai-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Polygon Mumbai' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Polygon Mumbai ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-polygon-mumbai - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "polygon_mumbai" - httpEndpoints: ${{ secrets.QA_POLYGON_MUMBAI_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_POLYGON_MUMBAI_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ 
matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - avalanche-fuji-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Avalanche Fuji' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Avalanche Fuji ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-avalanche-fuji - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "avalanche_fuji" - httpEndpoints: ${{ secrets.QA_AVALANCHE_FUJI_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_AVALANCHE_FUJI_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: 
smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - fantom-testnet-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Fantom Testnet' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Fantom Testnet ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-fantom-testnet - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "fantom_testnet" - httpEndpoints: ${{ secrets.QA_FANTOM_TESTNET_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_FANTOM_TESTNET_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - 
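The RPC endpoint secrets in these jobs appear to follow a single naming convention: QA_<NETWORK>_HTTP_URLS for HTTP endpoints and QA_<NETWORK>_URLS for WebSocket endpoints. That convention is what the generic live VRF workflow later in this diff relies on to compute secret names at runtime and index the secrets context dynamically, roughly:

    env:
      HTTP_URLS_SECRET_NAME: QA_${{ matrix.network }}_HTTP_URLS
    # ...later, the secret is resolved by computed name rather than by literal reference
    httpEndpoints: ${{ secrets[env.HTTP_URLS_SECRET_NAME] }}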
uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - celo-alfajores-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Celo Alfajores' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Celo Alfajores ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-celo-alfajores - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "celo_alfajores" - httpEndpoints: ${{ secrets.QA_CELO_ALFAJORES_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_CELO_ALFAJORES_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: 
actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - scroll-sepolia-smoke-tests: - if: false - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Scroll Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-scroll-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "scroll_sepolia" - httpEndpoints: ${{ secrets.QA_SCROLL_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_SCROLL_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: 
smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" - - linea-goerli-smoke-tests: - environment: integration - if: ${{ github.event.inputs.network == 'All' || github.event.inputs.network == 'Linea Goerli' }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Linea Goerli ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-linea-goerli - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: "linea_goerli" - httpEndpoints: ${{ secrets.QA_LINEA_GOERLI_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_LINEA_GOERLI_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: 
smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" diff --git a/.github/workflows/live-vrf-tests.yml b/.github/workflows/live-vrf-tests.yml deleted file mode 100644 index 43b0fd8dc3..0000000000 --- a/.github/workflows/live-vrf-tests.yml +++ /dev/null @@ -1,191 +0,0 @@ -# Funding address: 0xC1107e57082945E28d3202A81B1520DEA3AE6AEC -name: Generic Live Smoke Tests -on: - workflow_dispatch: - inputs: - networks: - description: "Comma-separated list of networks to run on" - required: true - default: "SEPOLIA,OPTIMISM_SEPOLIA,ARBITRUM_SEPOLIA" - test_list: - description: "Comma-separated list of tests to run" - required: true - default: "TestVRFBasic,TestVRFv2Basic,TestVRFv2Plus" - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - CHAINLINK_NODE_FUNDING: .5 - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - LOGSTREAM_LOG_TARGETS: loki - GRAFANA_URL: ${{ vars.GRAFANA_URL }} - RUN_ID: ${{ github.run_id }} - - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - -jobs: - # Build Test Dependencies - - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: live-vrf-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - GRAFANA_CLOUD_BASIC_AUTH: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} - GRAFANA_CLOUD_HOST: ${{ secrets.GRAFANA_CLOUD_HOST }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: 
${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - build-tests: - environment: integration - permissions: - id-token: write - contents: read - name: Build Tests Binary - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.build-matrix.outputs.matrix }} - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: live-vrf-build-test-image - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Tests Binary - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Network Matrix - id: build-matrix - run: | - NETWORKS="[\"${{ github.event.inputs.networks }}\"]" - NETWORKS="${NETWORKS//,/\",\"}" - echo "matrix=${NETWORKS}" >> "$GITHUB_OUTPUT" - - name: Build Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-tests@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_download_vendor_packages_command: cd ./integration-tests && go mod download - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - go_tags: embed - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - binary_name: tests - - # End Build Test Dependencies - - live-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - fail-fast: false - matrix: - network: ${{fromJson(needs.build-tests.outputs.matrix)}} - name: Smoke Tests on ${{ matrix.network }} - runs-on: ubuntu-latest - steps: - - name: Build Secrets Names - id: build-secrets-names - run: | - echo "HTTP_URLS_SECRET_NAME=QA_${{ matrix.network }}_HTTP_URLS" >> $GITHUB_ENV - echo "URLS_SECRET_NAME=QA_${{ matrix.network }}_URLS" >> $GITHUB_ENV - - name: Split Test Names - id: split_list - run: | - IFS=',' read -ra ADDR <<< "${{ inputs.test_list }}" - echo "test_list=${ADDR[*]}" >> $GITHUB_ENV - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - with: - # aws inputs - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - # other inputs - duplicate-authorization-header: "true" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: "http://localhost:8080/primary" - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - grafanaBearerToken: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - network: ${{ matrix.network }} - httpEndpoints: ${{ secrets[env.HTTP_URLS_SECRET_NAME] }} - wsEndpoints: ${{ 
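The Build Network Matrix step above turns the comma-separated networks input into the JSON array that fromJson consumes in the live-smoke-tests matrix. A worked example of the two bash substitutions, with a hypothetical input value:

    networks='SEPOLIA,OPTIMISM_SEPOLIA'
    NETWORKS="[\"${networks}\"]"      # -> ["SEPOLIA,OPTIMISM_SEPOLIA"]
    NETWORKS="${NETWORKS//,/\",\"}"   # -> ["SEPOLIA","OPTIMISM_SEPOLIA"]
    echo "matrix=${NETWORKS}"         # written to $GITHUB_OUTPUT in the real step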
secrets[env.URLS_SECRET_NAME] }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_command_to_run: ./tests -test.v -test.timeout 4h -test.count=1 -test.parallel=1 -test.run ${{ env.test_list }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@75a9005952a9e905649cfb5a6971fd9429436acd # v2.3.25 - with: - test_directory: "./" diff --git a/.github/workflows/on-demand-keeper-smoke-tests.yml b/.github/workflows/on-demand-keeper-smoke-tests.yml deleted file mode 100644 index dbdeff1b2c..0000000000 --- a/.github/workflows/on-demand-keeper-smoke-tests.yml +++ /dev/null @@ -1,289 +0,0 @@ -name: On Demand Keeper Smoke Tests -run-name: On Demand Keeper Smoke Tests ${{ inputs.distinct_run_name && inputs.distinct_run_name || '' }} -on: - workflow_dispatch: - inputs: - distinct_run_name: - description: 'A unique identifier for this run; only set this when dispatching from other repos' - required: false - type: string - -# Only run 1 of this workflow at a time per PR -concurrency: - group: on-demand-keeper-smoke-tests-${{ github.ref }}-${{ inputs.distinct_run_name }} - cancel-in-progress: true - -env: - # for run-test variables and environment - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ inputs.evm-ref || github.sha }} - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - TEST_SUITE: smoke - TEST_ARGS: -test.timeout 12m - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - COLLECTION_ID: chainlink-e2e-tests - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - strategy: - matrix: - image: - - name: "" - dockerfile: core/chainlink.Dockerfile - tag-suffix: "" - name: Build Chainlink Image ${{ matrix.image.name }} - runs-on: ubuntu22.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-build-chainlink - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || 
github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: ${{ matrix.image.tag-suffix }} - dockerfile: ${{ matrix.image.dockerfile }} - git_commit_sha: ${{ inputs.evm-ref || github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - dep_evm_sha: ${{ inputs.evm-ref }} - - compare-tests: - runs-on: ubuntu-latest - name: Build Automation Test List - outputs: - automation-matrix: ${{ env.AUTOMATION_JOB_MATRIX_JSON }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref }} - - name: Compare Test Lists - run: | - cd ./integration-tests - ./scripts/compareTestList.sh ./smoke/keeper_test.go - - name: Build Test Matrix Lists - id: build-test-matrix-list - run: | - cd ./integration-tests - KEEPER_JOB_MATRIX_JSON=$(./scripts/buildTestMatrixList.sh ./smoke/keeper_test.go keeper ubuntu-latest 1) - echo "AUTOMATION_JOB_MATRIX_JSON=${KEEPER_JOB_MATRIX_JSON}" >> $GITHUB_ENV - - eth-smoke-tests-matrix-automation: - if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, compare-tests] - env: - SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2 - CHAINLINK_COMMIT_SHA: ${{ inputs.evm-ref || github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - matrix: - product: ${{fromJson(needs.compare-tests.outputs.automation-matrix)}} - runs-on: ${{ matrix.product.os }} - name: ETH Smoke Tests ${{ matrix.product.name }} - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-matrix-${{ matrix.product.name }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - this-job-name: ETH Smoke Tests ${{ matrix.product.name }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Go Test Command - id: build-go-test-command - run: | - # if the matrix.product.run is set, use it for a different command - if [ "${{ matrix.product.run }}" != "" ]; then - echo "run_command=${{ matrix.product.run }} ./smoke/${{ matrix.product.file }}_test.go" >> "$GITHUB_OUTPUT" - else - echo "run_command=./smoke/${{ matrix.product.name }}_test.go" >> "$GITHUB_OUTPUT" - fi - - ## Run this step when changes that require tests to be run are made - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@aa8eea635029ab8d95abd3c206f56dae1e22e623 # v2.3.28 - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -test.parallel=${{ matrix.product.nodes }} ${{ steps.build-go-test-command.outputs.run_command }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage 
-hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_config_chainlink_version: ${{ inputs.evm-ref || github.sha }} - test_config_selected_networks: ${{ env.SELECTED_NETWORKS }} - test_config_logging_run_id: ${{ github.run_id }} - test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_config_test_log_collect: ${{ vars.TEST_LOG_COLLECT }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ inputs.evm-ref || github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ matrix.product.name }}-test-logs - artifacts_location: | - ./integration-tests/smoke/logs/ - /tmp/gotest.log - publish_check_name: ${{ matrix.product.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - should_tidy: "false" - go_coverage_src_dir: /var/tmp/go-coverage - go_coverage_dest_dir: ${{ github.workspace }}/.covdata - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: ${{ vars.GRAFANA_URL }} - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_SERVER_URL: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - DEFAULT_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ${{ matrix.product.pyroscope_env }} - DEFAULT_PYROSCOPE_ENABLED: ${{ matrix.product.pyroscope_env == '' || !startsWith(github.ref, 'refs/tags/') && 'false' || 'true' }} - - - name: Upload Coverage Data - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 - with: - name: cl-node-coverage-data-${{ matrix.product.name }} - path: .covdata - retention-days: 1 - - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - - ### Used to check the required checks box when the matrix completes - eth-smoke-tests: - if: always() - runs-on: ubuntu-latest - name: ETH Smoke Tests - needs: [eth-smoke-tests-matrix-automation] - steps: - - name: Check smoke test matrix status - if: needs.eth-smoke-tests-matrix-automation.result != 'success' - run: | - echo "Automation: ${{ needs.eth-smoke-tests-matrix-automation.result }}" - exit 1 - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-matrix-results - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: ETH Smoke Tests - matrix-aggregator-status: ${{ needs.eth-smoke-tests-matrix-automation.result }} - continue-on-error: true - - cleanup: - name: Clean up 
integration environment deployments - if: always() - needs: [eth-smoke-tests] - runs-on: ubuntu-latest - steps: - - name: Checkout repo - if: ${{ github.event_name == 'pull_request' }} - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref }} - - - name: 🧼 Clean up Environment - if: ${{ github.event_name == 'pull_request' }} - uses: ./.github/actions/delete-deployments - with: - environment: integration - ref: ${{ github.head_ref }} # See https://github.com/github/docs/issues/15319#issuecomment-1476705663 - - - name: Collect Metrics - if: ${{ github.event_name == 'pull_request' }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ env.COLLECTION_ID }}-env-cleanup - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Clean up integration environment deployments - continue-on-error: true - - show-coverage: - name: Show Chainlink Node Go Coverage - if: always() - needs: [cleanup] - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Download All Artifacts - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 # v4.1.6 - with: - path: cl-node-coverage-data - pattern: cl-node-coverage-data-* - merge-multiple: true - - name: Show Coverage - run: go run ./integration-tests/scripts/show_coverage.go "${{ github.workspace }}/cl-node-coverage-data/*/merged" - - # Run the setup again once the matrix finishes, but this time save the cache if we had a cache miss - # this will also only run if the matrix jobs pass - eth-smoke-go-mod-cache: - - environment: integration - needs: [eth-smoke-tests] - runs-on: ubuntu-latest - name: ETH Smoke Tests Go Mod Cache - continue-on-error: true - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: smartcontractkit/chainlink - ref: ${{ inputs.cl_ref || github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Run Setup - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-go@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - with: - test_download_vendor_packages_command: | - cd ./integration-tests - go mod download - # force download of test dependencies - go test -run=NonExistentTest ./smoke/... 
|| echo "ignore expected test failure" - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "false" diff --git a/.github/workflows/on-demand-log-poller.yml b/.github/workflows/on-demand-log-poller.yml deleted file mode 100644 index 1685c7e455..0000000000 --- a/.github/workflows/on-demand-log-poller.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: On Demand Log Poller Consistency Test -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - -jobs: - test: - env: - REF_NAME: ${{ github.head_ref || github.ref_name }} - runs-on: ubuntu22.04-8cores-32GB - steps: - - name: Add masks and export base64 config - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Setup Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: "integration-tests/go.mod" - cache: true - - name: Run tests - run: | - cd integration-tests - go mod download - go test -v -timeout 5h -count=1 -run ^TestLogPollerFewFiltersFixedDepth$ ./smoke/log_poller_test.go diff --git a/.github/workflows/on-demand-ocr-soak-test.yml b/.github/workflows/on-demand-ocr-soak-test.yml deleted file mode 100644 index 924b43829e..0000000000 --- a/.github/workflows/on-demand-ocr-soak-test.yml +++ /dev/null @@ -1,116 +0,0 @@ -name: On Demand OCR Soak Test -on: - workflow_dispatch: - inputs: - testToRun: - description: Select a test to run - required: true - default: TestOCRv1Soak - type: choice - options: - - TestOCRv1Soak - - TestOCRv2Soak - - TestForwarderOCRv1Soak - - TestForwarderOCRv2Soak - - TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled - - TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled - - TestOCRSoak_GasSpike - - TestOCRSoak_ChangeBlockGasLimit - - TestOCRSoak_RPCDownForAllCLNodes - - TestOCRSoak_RPCDownForHalfCLNodes - base64Config: - description: base64-ed config - required: true - type: string - slackMemberID: - description: Slack Member ID (Not your @) - required: true - default: U01A2B2C3D4 - type: string - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string - -jobs: - ocr_soak_test: - name: OCR Soak Test - environment: integration - runs-on: ubuntu-latest - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - env: - CHAINLINK_ENV_USER: ${{ github.actor }} - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }} - TEST_LOG_LEVEL: debug - REF_NAME: ${{ github.head_ref || github.ref_name }} - ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: on-demand-ocr-soak-test - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: OCR Soak Test - continue-on-error: true - - name: Checkout the repo - uses:
actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ env.REF_NAME }} - - name: Get Slack config and mask base64 config - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV - - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Setup Push Tag - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Build Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - env: - DETACH_RUNNER: true - TEST_SUITE: soak - TEST_ARGS: -test.timeout 900h -test.memprofile memprofile.out -test.cpuprofile profile.out - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - # We can comment these out when we have a stable soak test and aren't worried about resource consumption - TEST_UPLOAD_CPU_PROFILE: true - TEST_UPLOAD_MEM_PROFILE: true - with: - test_command_to_run: cd ./integration-tests && go test -v -count=1 -run ^${{ github.event.inputs.testToRun }}$ ./soak - test_download_vendor_packages_command: make gomod - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} diff --git a/.github/workflows/on-demand-vrfv2-eth2-clients-test.yml b/.github/workflows/on-demand-vrfv2-eth2-clients-test.yml deleted file mode 100644 index 5f24fa81c3..0000000000 --- a/.github/workflows/on-demand-vrfv2-eth2-clients-test.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: On Demand VRFV2 Smoke Test (Ethereum clients) -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string - -jobs: - vrfv2_smoke_test: - name: VRFV2 Smoke Test with custom EL client - environment: integration - runs-on: ubuntu22.04-8cores-32GB - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - env: - TEST_LOG_LEVEL: debug - REF_NAME: ${{ github.head_ref || github.ref_name }} - steps: - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth:
0 - - name: Mask base64 config - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - echo "### Execution client used" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.ETH2_EL_CLIENT }}\`" >>$GITHUB_STEP_SUMMARY - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -run TestVRFv2Basic ./smoke/vrfv2_test.go 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: vrf-test-logs - artifacts_location: ./integration-tests/smoke/logs/ - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - should_cleanup: false - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/on-demand-vrfv2-performance-test.yml b/.github/workflows/on-demand-vrfv2-performance-test.yml deleted file mode 100644 index 3d55c38458..0000000000 --- a/.github/workflows/on-demand-vrfv2-performance-test.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: On Demand VRFV2 Performance Test -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - performanceTestType: - description: Performance Test Type of test to run - type: choice - options: - - "Smoke" - - "Soak" - - "Load" - - "Stress" - - "Spike" - test_list_regex: - description: "Regex for tests to run" - required: false - default: "(TestVRFV2Performance)" - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string - -jobs: - vrfv2_performance_test: - name: VRFV2 Performance Test - environment: integration - runs-on: ubuntu22.04-8cores-32GB - permissions: - checks: write - pull-requests: write - id-token: write - contents: 
read - env: - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} - LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - TEST_TYPE: ${{ inputs.performanceTestType }} - TEST_LOG_LEVEL: debug - REF_NAME: ${{ github.head_ref || github.ref_name }} - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_VRF_SLACK_CHANNEL }} - GRAFANA_URL: "http://localhost:8080/primary" - GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - WASP_LOG_LEVEL: info - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: on-demand-vrfv2-performance-test - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: VRFV2 Performance Test - continue-on-error: true - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Mask base64 config - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Merge and export base64 config - uses: ./.github/actions/setup-merge-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - with: - test_command_to_run: cd ./integration-tests/load && go test -v -count=1 -timeout 24h -run "${{ inputs.test_list_regex }}" ./vrfv2 - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: vrf-test-logs - artifacts_location: ./integration-tests/load/vrfv2/logs/ - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - should_cleanup: false - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} diff --git a/.github/workflows/on-demand-vrfv2plus-eth2-clients-test.yml b/.github/workflows/on-demand-vrfv2plus-eth2-clients-test.yml deleted file mode 100644 index 58ecd39763..0000000000 --- a/.github/workflows/on-demand-vrfv2plus-eth2-clients-test.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: On Demand VRFV2Plus Smoke Test (Ethereum clients) -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type:
string - -jobs: - vrfv2plus_smoke_test: - name: VRFV2Plus Smoke Test with custom EL client - environment: integration - runs-on: ubuntu22.04-8cores-32GB - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - env: - TEST_LOG_LEVEL: debug - REF_NAME: ${{ github.head_ref || github.ref_name }} - steps: - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Mask base64 config - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - echo "### Execution client used" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.ETH2_EL_CLIENT }}\`" >>$GITHUB_STEP_SUMMARY - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -run ^TestVRFv2Plus$/^Link_Billing$ ./smoke/vrfv2plus_test.go 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: vrfplus-test-logs - artifacts_location: ./integration-tests/smoke/logs/ - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - should_cleanup: false - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - DEFAULT_CHAINLINK_IMAGE: ${{ env.CHAINLINK_IMAGE }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/on-demand-vrfv2plus-performance-test.yml b/.github/workflows/on-demand-vrfv2plus-performance-test.yml deleted file mode 100644 index 658736ab03..0000000000 --- a/.github/workflows/on-demand-vrfv2plus-performance-test.yml +++ /dev/null @@ -1,94 +0,0 @@ -name: On Demand VRFV2 Plus Performance Test -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - performanceTestType: - description: Performance Test Type of test to run - type: string - required: true - 
test_list_regex: - description: "Regex for tests to run" - required: false - default: "(TestVRFV2PlusPerformance)" - test_secrets_override_key: - description: 'Key to run tests with custom test secrets' - required: false - type: string - -jobs: - vrfv2plus_performance_test: - name: VRFV2 Plus Performance Test - environment: integration - runs-on: ubuntu22.04-8cores-32GB - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - env: - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }} - LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - TEST_TYPE: ${{ inputs.performanceTestType }} - TEST_LOG_LEVEL: debug - REF_NAME: ${{ github.head_ref || github.ref_name }} - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_VRF_SLACK_CHANNEL }} - GRAFANA_URL: "http://localhost:8080/primary" - GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - WASP_LOG_LEVEL: info - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: on-demand-vrfv2-plus-performance-test - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: VRFV2 Plus Performance Test - continue-on-error: true - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - name: Mask base64 config - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Merge and export base64 config - uses: ./.github/actions/setup-merge-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@2967f2287bd3f3ddbac7b476e9568993df01796e # v2.3.27 - with: - test_command_to_run: cd ./integration-tests/load && go test -v -count=1 -timeout 24h -run "${{ inputs.test_list_regex }}" ./vrfv2plus - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_secrets_override_base64: ${{ secrets[inputs.test_secrets_override_key] }} - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: vrf-test-logs - artifacts_location: ./integration-tests/load/vrfv2plus/logs/ - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - should_cleanup: false - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} diff --git a/.github/workflows/operator-ui-ci.yml b/.github/workflows/operator-ui-ci.yml deleted file
mode 100644 index 442fc06613..0000000000 --- a/.github/workflows/operator-ui-ci.yml +++ /dev/null @@ -1,63 +0,0 @@ -#name: Operator UI CI -#on: -# pull_request: -# -#env: -# TARGET_BRANCH_NAME: ${{ github.event.pull_request.base.ref }} -# -#jobs: -# check-gql: -# permissions: -# id-token: write -# contents: read -# # To allow writing comments to the current PR -# pull-requests: write -# -# name: Breaking Changes GQL Check -# runs-on: ubuntu-latest -# steps: -# - name: Collect Metrics -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 -# with: -# basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} -# org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} -# this-job-name: Breaking Changes GQL Check -# continue-on-error: true -# -# - name: Assume role capable of dispatching action -# uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 -# with: -# role-to-assume: ${{ secrets.AWS_OIDC_CHAINLINK_CI_OPERATOR_UI_ACCESS_TOKEN_ISSUER_ROLE_ARN }} -# role-duration-seconds: 3600 -# role-session-name: operator-ui-ci.check-gql -# aws-region: ${{ secrets.AWS_REGION }} -# -# - name: Get Github Token -# id: get-gh-token -# uses: smartcontractkit/chainlink-github-actions/github-app-token-issuer@main -# with: -# url: ${{ secrets.AWS_INFRA_RELENG_TOKEN_ISSUER_LAMBDA_URL }} -# -# - name: Checkout repository -# uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 -# -# - name: Get operator-ui tag -# id: get-operator-ui-tag -# shell: bash -# run: | -# if [[ $TARGET_BRANCH_NAME == release/* ]]; then -# TAG=$(cat ./operator_ui/TAG) -# echo "TAG=$TAG" >> $GITHUB_OUTPUT -# else -# echo "TAG=main" >> $GITHUB_OUTPUT -# fi -# -# - uses: convictional/trigger-workflow-and-wait@f69fa9eedd3c62a599220f4d5745230e237904be #v1.6.5 -# with: -# owner: smartcontractkit -# repo: operator-ui -# github_token: ${{ steps.get-gh-token.outputs.access-token }} -# workflow_file_name: chainlink-ci.yml -# client_payload: '{"ref": "${{ github.event.pull_request.head.sha }}", "tag": "${{ steps.get-operator-ui-tag.outputs.TAG }}"}' diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml deleted file mode 100644 index 130796a3c9..0000000000 --- a/.github/workflows/publish.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: publish - -on: - workflow_dispatch: - push: - tags: - - "v*" - branches: - - ccip-develop - - "release/**" - -jobs: - build-and-publish: - # Do not trigger from versioned tags. - if: ${{ ! 
startsWith(github.ref, 'refs/tags/v') }} - environment: publish - permissions: - id-token: write - contents: read - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-build-and-publish - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: build-and-publish - continue-on-error: true - - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build and publish chainlink image - uses: ./.github/actions/build-sign-publish-chainlink - with: - publish: true - aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_PROD_PUBLISH_ARN }} - aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} - aws-region: ${{ secrets.AWS_REGION }} - ecr-hostname: ${{ secrets.AWS_ECR_REPO_URL }} - ecr-image-name: chainlink-ccip - sign-images: false - dockerfile: ./core/chainlink.Dockerfile - - build-and-publish-release: - # Trigger only from versioned tags. - if: ${{ startsWith(github.ref, 'refs/tags/v') }} - environment: publish - env: - # Public ECR is only available in us-east-1; not a secret. - AWS_REGION: us-east-1 - AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws - permissions: - id-token: write - contents: read - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-build-and-publish-release - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: build-and-publish-release - continue-on-error: true - - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build and publish chainlink image - uses: ./.github/actions/build-sign-publish-chainlink - with: - publish: true - aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_PROD_PUBLISH_ARN }} - aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} - aws-region: ${{ env.AWS_REGION }} - ecr-hostname: ${{ env.AWS_ECR_REPO_PUBLIC_REGISTRY }} - ecr-image-name: w0i8p0z9/chainlink-ccip - sign-images: false - dockerfile: ./core/chainlink.Dockerfile - diff --git a/.github/workflows/run-automation-ondemand-e2e-tests.yml b/.github/workflows/run-automation-ondemand-e2e-tests.yml deleted file mode 100644 index 7bf4691ecc..0000000000 --- a/.github/workflows/run-automation-ondemand-e2e-tests.yml +++ /dev/null @@ -1,163 +0,0 @@ -name: Run Automation On Demand Tests (TEST WORKFLOW) - -on: - workflow_dispatch: - inputs: - chainlinkVersionUpdate: - description: Chainlink image version to upgrade to (Leave empty to build from head/ref) - required: false - type: string - chainlinkImageUpdate: - description: Chainlink image repo to upgrade to - options: - - QA_ECR - - public.ecr.aws/chainlink/chainlink - type: choice - chainlinkVersion: - description: Chainlink image version to use initially for upgrade test - default: latest - required: true - type: string - chainlinkImage: - description: Chainlink image repo to use initially for upgrade test - required: true - options: - - public.ecr.aws/chainlink/chainlink - - QA_ECR - type: choice - enableChaos: - description: Check to enable chaos tests - type: boolean - default: false - 
required: true - enableReorg: - description: Check to enable reorg tests - type: boolean - default: false - required: true - with_existing_remote_runner_version: - description: 'Tag of the existing remote runner version to use (Leave empty to build from head/ref)' - required: false - type: string - -jobs: - # Set tests to run based on the workflow inputs - set-tests-to-run: - name: Set tests to run - runs-on: ubuntu-latest - outputs: - test_list: ${{ steps.set-tests.outputs.test_list }} - require_chainlink_image_versions_in_qa_ecr: ${{ steps.determine-chainlink-image-check.outputs.require_chainlink_image_versions_in_qa_ecr }} - steps: - - name: Determine build to use - id: determine-build - shell: bash - run: | - if [[ "${{ inputs.chainlinkImage }}" == "QA_ECR" ]]; then - echo "image=${{ env.QA_CHAINLINK_IMAGE }}" >>$GITHUB_OUTPUT - else - echo "image=${{ inputs.chainlinkImage }}" >>$GITHUB_OUTPUT - fi - if [[ "${{ inputs.chainlinkImageUpdate }}" == "QA_ECR" ]]; then - echo "upgrade_image=${{ env.QA_CHAINLINK_IMAGE }}" >>$GITHUB_OUTPUT - else - echo "upgrade_image=${{ inputs.chainlinkImageUpdate }}" >>$GITHUB_OUTPUT - fi - if [[ -z "${{ inputs.chainlinkVersion }}" ]] && [[ "${{ inputs.chainlinkImage }}" == "QA_ECR" ]]; then - echo "version=${{ github.sha }}" >>$GITHUB_OUTPUT - else - echo "version=${{ inputs.chainlinkVersion }}" >>$GITHUB_OUTPUT - fi - if [[ -z "${{ inputs.chainlinkVersionUpdate }}" ]] && [[ "${{ inputs.chainlinkImageUpdate }}" == "QA_ECR" ]]; then - echo "upgrade_version=${{ github.sha }}" >>$GITHUB_OUTPUT - else - echo "upgrade_version=${{ inputs.chainlinkVersionUpdate }}" >>$GITHUB_OUTPUT - fi - - name: Check if chainlink image check required - id: determine-chainlink-image-check - shell: bash - run: | - chainlink_image_versions="" - if [ "${{ github.event.inputs.chainlinkImage }}" = "QA_ECR" ]; then - chainlink_image_versions+="${{ steps.determine-build.outputs.version }}," - fi - if [ "${{ github.event.inputs.chainlinkImageUpdate }}" = "QA_ECR" ]; then - chainlink_image_versions+="${{ steps.determine-build.outputs.upgrade_version }}" - fi - echo "require_chainlink_image_versions_in_qa_ecr=$chainlink_image_versions" >> $GITHUB_OUTPUT - - name: Set tests to run - id: set-tests - run: | - - # Always run upgrade tests - cat > test_list.yaml <<EOF - # (upgrade test entries elided) - EOF - - # Append reorg tests if enabled - if [[ "${{ github.event.inputs.enableReorg }}" = 'true' ]]; then - cat >> test_list.yaml <<EOF - # (reorg test entries elided) - EOF - fi - - # Append chaos tests if enabled - if [[ "${{ github.event.inputs.enableChaos }}" = 'true' ]]; then - cat >> test_list.yaml <<EOF - # (chaos test entries elided) - EOF - fi - - echo "test_list=$(cat test_list.yaml | base64 -w 0)" >> $GITHUB_OUTPUT - - call-run-e2e-tests-workflow: - name: Run E2E Tests - needs: set-tests-to-run - uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml - with: - test_list: ${{ needs.set-tests-to-run.outputs.test_list }} - require_chainlink_image_versions_in_qa_ecr: ${{ needs.set-tests-to-run.outputs.require_chainlink_image_versions_in_qa_ecr }} - with_existing_remote_runner_version: ${{ github.event.inputs.with_existing_remote_runner_version }} - test_log_upload_on_failure: true - test_log_upload_retention_days: 7 - secrets: - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} - GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} -
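# For reference, a minimal sketch of what one decoded `test_list` entry might look
# like once the base64 input is unpacked by the reusable workflow. Field names are
# inferred from how its matrices are consumed below (id, runs_on, test_cmd, and the
# docker / k8s-remote-runner env-type filter); the concrete values are illustrative
# only, not taken from this diff:
#
#   - id: smoke/example_test.go:^TestExample$
#     test_env_type: docker
#     runs_on: ubuntu-latest
#     test_cmd: cd integration-tests/smoke && go test -count=1 -run ^TestExample$ ./...
#
# The list is then encoded for the input with, e.g.: base64 -w 0 test_list.yaml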
AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - diff --git a/.github/workflows/run-e2e-tests-reusable-workflow.yml b/.github/workflows/run-e2e-tests-reusable-workflow.yml deleted file mode 100644 index 2d3f31aa3b..0000000000 --- a/.github/workflows/run-e2e-tests-reusable-workflow.yml +++ /dev/null @@ -1,739 +0,0 @@ -# This is a reusable workflow that runs E2E tests for Chainlink. -# It is not meant to be run on its own. -name: Run E2E Tests -on: - workflow_call: - inputs: - chainlink_version: - description: 'Enter Chainlink version to use for the tests. Example: "v2.10.0" or sha' - required: false - type: string - test_ids: - description: 'Run tests by test ids separated by commas. Example: "run_all_in_ocr_tests_go,run_TestOCRv2Request_in_ocr2_test_go". Check all test IDs in .github/e2e-tests.yml' - required: false - type: string - test_list: - description: 'Base64 encoded list of tests (YML objects) to run. Example in run-automation-ondemand-e2e-tests.yml' - required: false - type: string - test_workflow: - description: 'Run tests by workflow name. Example: "Run Nightly E2E Tests"' - required: false - type: string - # TODO: Uncomment once Test Config does not have any secrets. Related ticket https://smartcontract-it.atlassian.net/browse/TT-1392 - # test_config_override_base64: - # required: false - # description: The base64-encoded test config override - # type: string - enable_check_test_configurations: - description: 'Set to "true" to enable check-test-configurations job' - required: false - type: boolean - default: false - with_existing_remote_runner_version: - description: 'Use the existing remote runner version for k8s tests. Example: "d3bf5044af33e08be788a2df31c4a745cf69d787"' - required: false - type: string - require_chainlink_image_versions_in_qa_ecr: - description: 'Check Chainlink image versions to be present in QA ECR. If not, build and push the image to QA ECR. Takes comma separated list of Chainlink image versions. Example: "5733cdcda9a9fc6da6343798b119b2ae136146cd,0b7d2c497a508efa5a827714780d908b7b8eda19"' - required: false - type: string - require_chainlink_plugin_versions_in_qa_ecr: - description: 'Check Chainlink plugins versions to be present in QA ECR. If not, build and push the image to QA ECR. Takes comma separated list of Chainlink image versions. Example: "5733cdcda9a9fc6da6343798b119b2ae136146cd,0b7d2c497a508efa5a827714780d908b7b8eda19"' - required: false - type: string - slack_notification_after_tests: - description: 'Set to "true" to send a slack notification after the tests' - required: false - type: boolean - default: false - slack_notification_after_tests_channel_id: - description: 'Slack channel ID to send the notification to' - required: false - type: string - slack_notification_after_tests_name: - description: 'Name of the slack notification' - required: false - type: string - test_log_upload_on_failure: - description: 'Set to "true" to upload the test log on failure as Github artifact' - required: false - type: boolean - default: false - test_log_upload_retention_days: - description: 'Number of days to retain the test log. 
Default is 3 days' - required: false - type: number - default: 3 - secrets: - TEST_SECRETS_OVERRIDE_BASE64: - required: false - QA_AWS_REGION: - required: true - QA_AWS_ROLE_TO_ASSUME: - required: true - QA_AWS_ACCOUNT_NUMBER: - required: true - QA_PYROSCOPE_INSTANCE: - required: true - QA_PYROSCOPE_KEY: - required: true - QA_KUBECONFIG: - required: true - GRAFANA_INTERNAL_TENANT_ID: - required: true - GRAFANA_INTERNAL_BASIC_AUTH: - required: true - GRAFANA_INTERNAL_HOST: - required: true - GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: - required: true - GH_TOKEN: - required: true - AWS_REGION: - required: true - AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: - required: true - AWS_API_GW_HOST_GRAFANA: - required: true - SLACK_BOT_TOKEN: - required: false - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - QA_CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - GITHUB_SHA_PLUGINS: ${{ github.sha }}-plugins - CHAINLINK_ENV_USER: ${{ github.actor }} - CHAINLINK_COMMIT_SHA: ${{ inputs.evm-ref || github.sha }} - SELECTED_NETWORKS: SIMULATED - MOD_CACHE_VERSION: 1 - TEST_LOG_LEVEL: debug - -jobs: - validate-inputs: - name: Validate workflow inputs - runs-on: ubuntu-latest - outputs: - require_chainlink_image_versions_in_qa_ecr_matrix: ${{ steps.set-required-chainlink-image-versions-matrix.outputs.versions }} - require_chainlink_plugin_versions_in_qa_ecr_matrix: ${{ steps.set-required-chainlink-plugin-versions-matrix.outputs.versions }} - steps: - - name: Check input conditions - run: | - if [[ "${{ inputs.test_ids }}" != "" && "${{ inputs.test_workflow }}" != "" ]]; then - echo "::error::Error: Both 'test_ids' and 'test_workflow' are provided. Please specify only one." - exit 1 - fi - if [[ "${{ secrets.TEST_SECRETS_OVERRIDE_BASE64 }}" != "" ]]; then - echo "Will run tests with custom test secrets" - fi - - name: Install jq - run: sudo apt-get install jq - - name: Create matrix for required Chainlink image versions - id: set-required-chainlink-image-versions-matrix - run: | - if [[ "${{ inputs.require_chainlink_image_versions_in_qa_ecr }}" != '' ]]; then - image_versions=$(echo "${{ inputs.require_chainlink_image_versions_in_qa_ecr }}" | jq -Rc 'split(",") | if . == [""] then [] else . end') - echo "versions=$image_versions" >> $GITHUB_OUTPUT - fi - - name: Create matrix for required Chainlink plugin versions - id: set-required-chainlink-plugin-versions-matrix - run: | - if [[ "${{ inputs.require_chainlink_plugin_versions_in_qa_ecr }}" != '' ]]; then - image_versions=$(echo "${{ inputs.require_chainlink_plugin_versions_in_qa_ecr }}" | jq -Rc 'split(",") | if . == [""] then [] else . end') - echo "versions=$image_versions" >> $GITHUB_OUTPUT - fi - - check-test-configurations: - name: Check test configurations - if: ${{ inputs.enable_check_test_configurations }} - needs: validate-inputs - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup Go - uses: ./.github/actions/setup-go - - name: Run Check Tests Command - run: | - cd integration-tests/ - if ! go run citool/main.go check-tests . ../.github/e2e-tests.yml; then - echo "::error::Some E2E test configurations have to be added to .github/e2e-tests.yml. This file defines Github CI configuration for each E2E test or set of E2E tests." 
&& exit 1 - fi - - get_latest_chainlink_release_version: - name: Get latest Chainlink release version - runs-on: ubuntu-latest - environment: integration - outputs: - latest_chainlink_release_version: ${{ steps.get_latest_version.outputs.latest_version }} - steps: - - name: Get Latest Version - id: get_latest_version - run: | - untrimmed_ver=$(curl --header "Authorization: token ${{ secrets.GH_TOKEN }}" --request GET https://api.github.com/repos/${{ github.repository }}/releases/latest | jq -r .name) - latest_version="${untrimmed_ver:1}" - echo "Latest Chainlink release version: $latest_version" - echo "latest_version=${latest_version}" >> "$GITHUB_OUTPUT" - # Check if latest_version is empty - if [ -z "$latest_version" ]; then - echo "Error: The latest_version is empty. The migration tests need a version to run." - exit 1 - fi - - load-test-configurations: - name: Load test configurations - needs: [validate-inputs] - runs-on: ubuntu-latest - outputs: - run-docker-tests: ${{ steps.check-matrices.outputs.run-docker-tests }} - run-k8s-tests: ${{ steps.check-matrices.outputs.run-k8s-tests }} - docker-matrix: ${{ steps.set-docker-matrix.outputs.matrix }} - k8s-runner-matrix: ${{ steps.set-k8s-runner-matrix.outputs.matrix }} - steps: - - name: Checkout code - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup Go - uses: ./.github/actions/setup-go - - name: Install jq - run: sudo apt-get install jq - - name: Generate Docker Tests Matrix - id: set-docker-matrix - run: | - cd integration-tests/citool - MATRIX_JSON=$(go run main.go filter --file ${{ github.workspace }}/.github/e2e-tests.yml --test-env-type 'docker' --test-list '${{ inputs.test_list }}' --test-ids '${{ inputs.test_ids }}' --workflow '${{ inputs.test_workflow }}') - echo "Docker tests:" - echo "$MATRIX_JSON" | jq - echo "matrix=$MATRIX_JSON" >> $GITHUB_OUTPUT - - name: Generate K8s Tests Matrix - id: set-k8s-runner-matrix - run: | - cd integration-tests/citool - MATRIX_JSON=$(go run main.go filter --file ${{ github.workspace }}/.github/e2e-tests.yml --test-env-type 'k8s-remote-runner' --test-list '${{ inputs.test_list }}' --test-ids '${{ inputs.test_ids }}' --workflow '${{ inputs.test_workflow }}') - echo "K8s tests:" - echo "$MATRIX_JSON" | jq - echo "matrix=$MATRIX_JSON" >> $GITHUB_OUTPUT - - name: Check Test Matrices - id: check-matrices - run: | - DOCKER_MATRIX_EMPTY=$(echo '${{ steps.set-docker-matrix.outputs.matrix }}' | jq '.tests == null or .tests == []') - K8S_MATRIX_EMPTY=$(echo '${{ steps.set-k8s-runner-matrix.outputs.matrix }}' | jq '.tests == null or .tests == []') - - # Check if jq commands succeeded - if [ $? -ne 0 ]; then - echo "JSON parse error occurred." - exit 1 - fi - - if [[ "$DOCKER_MATRIX_EMPTY" == "true" ]]; then - echo "run-docker-tests=false" >> $GITHUB_OUTPUT - else - echo "run-docker-tests=true" >> $GITHUB_OUTPUT - fi - if [[ "$K8S_MATRIX_EMPTY" == "true" ]]; then - echo "run-k8s-tests=false" >> $GITHUB_OUTPUT - else - echo "run-k8s-tests=true" >> $GITHUB_OUTPUT - fi - - # Check if both matrices are empty - if [[ "$DOCKER_MATRIX_EMPTY" == "true" ]] && [[ "$K8S_MATRIX_EMPTY" == "true" ]]; then - echo "No tests found for inputs: '${{ toJson(inputs) }}'.
Both Docker and Kubernetes tests matrices are empty" - exit 1 - fi - shell: bash - - - name: Check if test config override is required for any test - shell: bash - run: | - # Check if the test config override is provided and skip the checks if it is non-empty - # TODO: Uncomment once Test Config does not have any secrets. Related ticket https://smartcontract-it.atlassian.net/browse/TT-1392 - # if [ -n "${{ inputs.test_config_override_base64 }}" ]; then - # echo "Test config override provided. Skipping checks for tests requiring config override." - # exit 0 - # fi - - # Parse the JSON to check for test_config_override_required in Docker matrix - DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE=$(echo '${{ steps.set-docker-matrix.outputs.matrix }}' | jq 'if .tests then .tests[] | select(has("test_config_override_required") and .test_config_override_required) | .id else empty end' -r) - # Parse the JSON to check for test_config_override_required in Kubernetes matrix - K8S_TESTS_REQUIRING_CONFIG_OVERRIDE=$(echo '${{ steps.set-k8s-runner-matrix.outputs.matrix }}' | jq 'if .tests then .tests[] | select(has("test_config_override_required") and .test_config_override_required) | .id else empty end' -r) - - # Determine if any tests require a configuration override - if [ ! -z "$DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE" ] || [ ! -z "$K8S_TESTS_REQUIRING_CONFIG_OVERRIDE" ]; then - echo "Tests in .github/e2e-tests.yml requiring test config override:" - if [ ! -z "$DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE" ]; then - echo $DOCKER_TESTS_REQUIRING_CONFIG_OVERRIDE - fi - if [ ! -z "$K8S_TESTS_REQUIRING_CONFIG_OVERRIDE" ]; then - echo $K8S_TESTS_REQUIRING_CONFIG_OVERRIDE - fi - echo "::error::Error: Some of the tests require a test config override. Please see workflow logs and set 'test_config_override_base64' to run these tests." - exit 1 - else - echo "No tests require a configuration override. Proceeding without overrides." - fi - - - name: Check if test secrets are required for any test - shell: bash - run: | - # Check if the test secret key is provided and skip the checks if it is non-empty - if [ -n "${{ secrets.TEST_SECRETS_OVERRIDE_BASE64 }}" ]; then - echo "Test secret key provided. Skipping checks for tests requiring secrets." - exit 0 - fi - - # Parse the JSON to check for test_secrets_required in Docker matrix - DOCKER_TESTS_REQUIRING_SECRETS=$(echo '${{ steps.set-docker-matrix.outputs.matrix }}' | jq 'if .tests then .tests[] | select(has("test_secrets_required") and .test_secrets_required) | .id else empty end' -r) - # Parse the JSON to check for test_secrets_required in Kubernetes matrix - K8S_TESTS_REQUIRING_SECRETS=$(echo '${{ steps.set-k8s-runner-matrix.outputs.matrix }}' | jq 'if .tests then .tests[] | select(has("test_secrets_required") and .test_secrets_required) | .id else empty end' -r) - - # Determine if any tests require secrets - if [ ! -z "$DOCKER_TESTS_REQUIRING_SECRETS" ] || [ ! -z "$K8S_TESTS_REQUIRING_SECRETS" ]; then - echo "Tests in .github/e2e-tests.yml requiring custom test secrets:" - if [ ! -z "$DOCKER_TESTS_REQUIRING_SECRETS" ]; then - echo $DOCKER_TESTS_REQUIRING_SECRETS - fi - if [ ! -z "$K8S_TESTS_REQUIRING_SECRETS" ]; then - echo $K8S_TESTS_REQUIRING_SECRETS - fi - echo "::error::Error: Some of the tests require custom test secrets to run. Please see workflow logs and set 'test_secrets_override_key' to run these tests." - exit 1 - else - echo "No tests require secrets. Proceeding without additional secret setup." 
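          # The jq filters above can be sanity-checked locally. Assuming a
          # hypothetical matrix JSON of
          #   {"tests":[{"id":"t1","test_secrets_required":true},{"id":"t2"}]}
          # the select(...) pipeline prints only t1:
          #
          #   echo '{"tests":[{"id":"t1","test_secrets_required":true},{"id":"t2"}]}' \
          #     | jq -r 'if .tests then .tests[] | select(has("test_secrets_required") and .test_secrets_required) | .id else empty end'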
- fi - - # Build Chainlink images required for the tests - require-chainlink-image-versions-in-qa-ecr: - name: Build Chainlink image - needs: [validate-inputs, load-test-configurations] - if: ${{ needs.validate-inputs.outputs.require_chainlink_image_versions_in_qa_ecr_matrix != '' }} - runs-on: ubuntu-latest - environment: integration - permissions: - id-token: write - contents: read - env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - strategy: - matrix: - version: ${{ fromJson(needs.validate-inputs.outputs.require_chainlink_image_versions_in_qa_ecr_matrix) }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Build Chainlink image for ${{ matrix.version }} and push it to QA ECR - uses: ./.github/actions/build-chainlink-image - with: - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ matrix.version }} - tag_suffix: '' - check_image_exists: 'true' - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - # Build Chainlink plugins required for the tests - require-chainlink-plugin-versions-in-qa-ecr: - name: Build Chainlink plugins - needs: [validate-inputs, load-test-configurations] - if: ${{ needs.validate-inputs.outputs.require_chainlink_plugin_versions_in_qa_ecr_matrix != '' }} - runs-on: ubuntu-latest - environment: integration - permissions: - id-token: write - contents: read - env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - strategy: - matrix: - version: ${{ fromJson(needs.validate-inputs.outputs.require_chainlink_plugin_versions_in_qa_ecr_matrix) }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Build Chainlink plugins image for ${{ matrix.version }} - uses: ./.github/actions/build-chainlink-image - with: - dockerfile: plugins/chainlink.Dockerfile - git_commit_sha: ${{ matrix.version }} - tag_suffix: '-plugins' - check_image_exists: 'true' - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - # Run Docker tests - run-docker-tests: - name: Run ${{ matrix.tests.id }} - needs: [load-test-configurations, require-chainlink-image-versions-in-qa-ecr, require-chainlink-plugin-versions-in-qa-ecr, get_latest_chainlink_release_version] - # Run when none of the needed jobs fail or are cancelled (skipped or successful jobs are ok) - if: ${{ needs.load-test-configurations.outputs.run-docker-tests == 'true' && always() && !failure() && !cancelled() }} - runs-on: ${{ matrix.tests.runs_on }} - strategy: - fail-fast: false - matrix: ${{fromJson(needs.load-test-configurations.outputs.docker-matrix)}} - environment: integration - permissions: - actions: read - checks: write - pull-requests: write - id-token: write - contents: read - env: - LATEST_CHAINLINK_RELEASE_VERSION: ${{ needs.get_latest_chainlink_release_version.outputs.latest_chainlink_release_version }} - steps: - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: e2e_tests_${{ matrix.tests.id_sanitized }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Run E2E Tests / Run ${{ matrix.tests.id 
}} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Install jq - run: sudo apt-get install -y jq - - name: Show test configuration - run: echo '${{ toJson(matrix.tests) }}' | jq . - - name: Setup Go - uses: ./.github/actions/setup-go - - name: Setup GAP for Grafana - uses: smartcontractkit/.github/actions/setup-gap@d316f66b2990ea4daa479daa3de6fc92b00f863e # setup-gap@0.3.2 - id: setup-gap - with: - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - duplicate-authorization-header: "true" - - - name: Run tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@aa8eea635029ab8d95abd3c206f56dae1e22e623 # v2.3.28 - env: - DETACH_RUNNER: true - with: - test_command_to_run: ${{ matrix.tests.test_cmd }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: cd ./integration-tests && go mod download - test_secrets_override_base64: ${{ secrets.TEST_SECRETS_OVERRIDE_BASE64 }} - # TODO: Uncomment once Test Config does not have any secrets. Related ticket https://smartcontract-it.atlassian.net/browse/TT-1392 - # test_config_override_base64: ${{ inputs.test_config_override_base64 }} - test_config_chainlink_version: ${{ matrix.tests.test_inputs.chainlink_version || inputs.chainlink_version || github.sha }} - test_config_chainlink_upgrade_version: ${{ matrix.tests.test_inputs.chainlink_upgrade_version }} - test_config_chainlink_postgres_version: ${{ matrix.tests.test_inputs.chainlink_postgres_version }} - test_config_selected_networks: ${{ matrix.tests.test_inputs.selected_networks || env.SELECTED_NETWORKS}} - test_config_logging_run_id: ${{ github.run_id }} - test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_type: ${{ matrix.tests.test_inputs.test_type }} - test_suite: ${{ matrix.tests.test_inputs.test_suite }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ matrix.tests.id_sanitized }}-test-logs - artifacts_location: | - ./integration-tests/smoke/logs/ - /tmp/gotest.log - publish_check_name: ${{ matrix.tests.id_sanitized }} - token: ${{ secrets.GH_TOKEN }} - no_cache: true # Do not restore cache since go was already configured in the previous step - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - should_tidy: "false" - go_coverage_src_dir: /var/tmp/go-coverage - go_coverage_dest_dir: ${{ github.workspace }}/.covdata - DEFAULT_CHAINLINK_IMAGE: ${{ matrix.tests.test_inputs.chainlink_image || env.CHAINLINK_IMAGE }} - DEFAULT_CHAINLINK_UPGRADE_IMAGE: ${{ matrix.tests.test_inputs.chainlink_upgrade_image }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }} - 
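# The DEFAULT_PYROSCOPE_* values around this line lean on the usual GitHub
# expressions ternary emulation: `cond && a || b` evaluates to `a` when cond is
# true (and `a` is truthy), otherwise `b`. A hypothetical key shows the shape:
#
#   PYROSCOPE_ON: ${{ matrix.tests.pyroscope_env != '' && 'true' || '' }}
#
# Falling back to an empty string rather than 'false' leaves the variable
# effectively unset when no pyroscope_env is configured, so no blank value is
# exported to the test runner.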
DEFAULT_PYROSCOPE_SERVER_URL: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_INSTANCE || '' }} - DEFAULT_PYROSCOPE_KEY: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_KEY || '' }} - DEFAULT_PYROSCOPE_ENABLED: ${{ matrix.tests.pyroscope_env != '' && 'true' || '' }} - - - name: Upload test log as Github artifact - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - if: inputs.test_log_upload_on_failure && failure() - with: - name: test_log_${{ matrix.tests.id_sanitized }} - path: /tmp/gotest.log - retention-days: ${{ inputs.test_log_upload_retention_days }} - continue-on-error: true - - # Run K8s tests using old remote runner - - prepare-remote-runner-test-image: - needs: [load-test-configurations, require-chainlink-image-versions-in-qa-ecr, require-chainlink-plugin-versions-in-qa-ecr] - if: ${{ needs.load-test-configurations.outputs.run-k8s-tests == 'true' && always() && !failure() && !cancelled() }} - name: Prepare remote runner test image - runs-on: ubuntu-latest - environment: integration - permissions: - actions: read - checks: write - pull-requests: write - id-token: write - contents: read - outputs: - remote-runner-version: ${{ steps.set-remote-runner-version.outputs.remote-runner-version }} - env: - ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests - steps: - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Build Test Runner Image - uses: ./.github/actions/build-test-image - if: ${{ inputs.with_existing_remote_runner_version == '' }} - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - name: Set Remote Runner Version - id: set-remote-runner-version - run: | - if [[ -z "${{ inputs.with_existing_remote_runner_version }}" ]]; then - echo "remote-runner-version=${{ github.sha }}" >> $GITHUB_OUTPUT - else - echo "remote-runner-version=${{ inputs.with_existing_remote_runner_version }}" >> $GITHUB_OUTPUT - fi - - run-k8s-runner-tests: - needs: [load-test-configurations, prepare-remote-runner-test-image, require-chainlink-image-versions-in-qa-ecr, require-chainlink-plugin-versions-in-qa-ecr, get_latest_chainlink_release_version] - if: ${{ needs.load-test-configurations.outputs.run-k8s-tests == 'true' && always() && !failure() && !cancelled() }} - name: Run ${{ matrix.tests.id }} - runs-on: ${{ matrix.tests.runs_on }} - strategy: - fail-fast: false - matrix: ${{fromJson(needs.load-test-configurations.outputs.k8s-runner-matrix)}} - environment: integration - permissions: - actions: read - checks: write - pull-requests: write - id-token: write - contents: read - env: - LATEST_CHAINLINK_RELEASE_VERSION: ${{ needs.get_latest_chainlink_release_version.outputs.latest_chainlink_release_version }} - steps: - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: e2e_tests_${{ matrix.tests.id_sanitized }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Run E2E Tests / Run ${{ matrix.tests.id }} - continue-on-error: true - - - name: Checkout repository - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: 
Install jq - run: sudo apt-get install -y jq - - name: Show Test Configuration - run: echo '${{ toJson(matrix.tests) }}' | jq . - - name: Show Remote Runner Version - run: | - echo "Remote Runner Version: ${{ needs.prepare-remote-runner-test-image.outputs.remote-runner-version }}" - - - name: Run tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@aa8eea635029ab8d95abd3c206f56dae1e22e623 # v2.3.28 - env: - DETACH_RUNNER: true - RR_MEM: ${{ matrix.tests.remote_runner_memory }} - TEST_ARGS: -test.timeout 900h -test.memprofile memprofile.out -test.cpuprofile profile.out - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ needs.prepare-remote-runner-test-image.outputs.remote-runner-version }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - # We can comment these out when we have a stable soak test and aren't worried about resource consumption - TEST_UPLOAD_CPU_PROFILE: true - TEST_UPLOAD_MEM_PROFILE: true - TEST_LOG_LEVEL: debug - REF_NAME: ${{ github.head_ref || github.ref_name }} - with: - test_command_to_run: ${{ matrix.tests.test_cmd }} 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false -hidepassinglogs - test_download_vendor_packages_command: make gomod - test_secrets_override_base64: ${{ secrets.TEST_SECRETS_OVERRIDE_BASE64 }} - # TODO: Uncomment once Test Config does not have any secrets. Related ticket https://smartcontract-it.atlassian.net/browse/TT-1392 - # test_config_override_base64: ${{ inputs.test_config_override_base64 }} - test_config_chainlink_version: ${{ matrix.tests.test_inputs.chainlink_version || inputs.chainlink_version || github.sha }} - test_config_chainlink_upgrade_version: ${{ matrix.tests.test_inputs.chainlink_upgrade_version }} - test_config_chainlink_postgres_version: ${{ matrix.tests.test_inputs.chainlink_postgres_version }} - test_config_selected_networks: ${{ matrix.tests.test_inputs.selected_networks || env.SELECTED_NETWORKS}} - test_config_logging_run_id: ${{ github.run_id }} - test_config_logstream_log_targets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - test_type: ${{ matrix.tests.test_inputs.test_type }} - test_suite: ${{ matrix.tests.test_inputs.test_suite }} - token: ${{ secrets.GH_TOKEN }} - should_cleanup: false - no_cache: true # Do not restore cache since go was already configured in the previous step - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - DEFAULT_CHAINLINK_IMAGE: ${{ matrix.tests.test_inputs.chainlink_image || env.CHAINLINK_IMAGE }} - DEFAULT_CHAINLINK_UPGRADE_IMAGE: ${{ matrix.tests.test_inputs.chainlink_upgrade_image }} - DEFAULT_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - DEFAULT_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push - DEFAULT_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - DEFAULT_GRAFANA_BASE_URL: "http://localhost:8080/primary" - DEFAULT_GRAFANA_DASHBOARD_URL: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - DEFAULT_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - DEFAULT_PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }} - DEFAULT_PYROSCOPE_SERVER_URL: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_INSTANCE || '' }} - DEFAULT_PYROSCOPE_KEY: ${{ 
matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_KEY || '' }} - DEFAULT_PYROSCOPE_ENABLED: ${{ matrix.tests.pyroscope_env != '' && 'true' || '' }} - - - name: Upload test log as Github artifact - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 - if: inputs.test_log_upload_on_failure && failure() - with: - name: test_log_${{ matrix.tests.id_sanitized }} - path: /tmp/gotest.log - retention-days: ${{ inputs.test_log_upload_retention_days }} - continue-on-error: true - - after_tests: - needs: [run-docker-tests, run-k8s-runner-tests] - if: always() - name: After tests notifications - runs-on: ubuntu-latest - steps: - - name: Determine combined test results - id: combine_results - run: | - docker_result="${{ needs.run-docker-tests.result }}" - k8s_result="${{ needs.run-k8s-runner-tests.result }}" - - function map_outcome { - case "$1" in - success|skipped) - echo "success" - ;; - cancelled) - echo "cancelled" - ;; - *) - echo "failure" - ;; - esac - } - - combined_docker_result=$(map_outcome $docker_result) - combined_k8s_result=$(map_outcome $k8s_result) - - if [[ $combined_docker_result == "failure" || $combined_k8s_result == "failure" ]]; then - echo "result=failure" >> $GITHUB_OUTPUT - elif [[ $combined_docker_result == "cancelled" || $combined_k8s_result == "cancelled" ]]; then - echo "result=cancelled" >> $GITHUB_OUTPUT - else - echo "result=success" >> $GITHUB_OUTPUT - fi - - - name: Send Slack notification - uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 - if: ${{ inputs.slack_notification_after_tests }} - id: slack - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - with: - channel-id: ${{ inputs.slack_notification_after_tests_channel_id }} - payload: | - { - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "${{ inputs.slack_notification_after_tests_name }} - ${{ steps.combine_results.outputs.result == 'failure' && 'Failed :x:' || steps.combine_results.outputs.result == 'cancelled' && 'Cancelled :warning:' || 'Passed :white_check_mark:' }}" - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Build Details>" - } - } - ] - } - - # Run K8s tests using new remote runner - # remote-runner-k8s-tests: - # runs-on: ubuntu-latest - # container: - # image: golang:1.18 - # steps: - # - name: Checkout repository - # uses: actions/checkout@v2 - - # - name: Set up Go - # uses: actions/setup-go@v2 - # with: - # go-version: '1.18' - - # - name: Load Runner Config - # run: echo "$RUNNER_CONFIG" > runner.toml - # env: - # RUNNER_CONFIG: | - # # Runner configuration - # detached_mode = true - # debug = false - - # [[test_runs]] - # namespace = "dev-env" - # rbac_role_name = "dev-role" - # rbac_service_account_name = "dev-service-account" - # sync_value = "unique-sync-value-1" - # ttl_seconds_after_finished = 300 - # image_registry_url = "https://myregistry.dev/" - # image_name = "dev-image" - # image_tag = "v1.0.0" - # test_name = "TestMercuryLoad/all_endpoints" - # test_config_base64_env_name = "CONFIG_ENV_DEV" - # test_config_file_path = "/configs/dev/test-config.toml" - # test_config_base64 = "dGVzdCBjb25maWcgdmFsdWUgZGV2" - # test_timeout = "30m" - # resources_requests_cpu = "500m" - # resources_requests_memory = "1Gi" - # resources_limits_cpu = "1000m" - # resources_limits_memory = "2Gi" - # job_count = 2 - # chart_path = "/charts/dev" - # [envs] - # WASP_LOG_LEVEL = "info" 
- # TEST_LOG_LEVEL = "info" - # MERCURY_TEST_LOG_LEVEL = "info" - - # [[test_runs]] - # namespace = "prod-env" - # rbac_role_name = "prod-role" - # rbac_service_account_name = "prod-service-account" - # sync_value = "unique-sync-value-2" - # ttl_seconds_after_finished = 600 - # image_registry_url = "https://myregistry.prod/" - # image_name = "prod-image" - # image_tag = "v1.0.1" - # test_name = "TestMercuryLoad/all_endpoints" - # test_config_base64_env_name = "CONFIG_ENV_PROD" - # test_config_file_path = "/configs/prod/test-config.toml" - # test_config_base64 = "dGVzdCBjb25maWcgdmFsdWUgcHJvZA==" - # test_timeout = "45m" - # resources_requests_cpu = "800m" - # resources_requests_memory = "2Gi" - # resources_limits_cpu = "1500m" - # resources_limits_memory = "4Gi" - # job_count = 3 - # chart_path = "/charts/prod" - # [envs] - # WASP_LOG_LEVEL = "info" - # TEST_LOG_LEVEL = "info" - # MERCURY_TEST_LOG_LEVEL = "info" - - # # Schedule the tests in K8s in remote runner - # - name: Run Kubernetes Tests - # run: go run ./cmd/main.go run -c runner.toml \ No newline at end of file diff --git a/.github/workflows/run-nightly-e2e-tests.yml b/.github/workflows/run-nightly-e2e-tests.yml deleted file mode 100644 index cb8550fb70..0000000000 --- a/.github/workflows/run-nightly-e2e-tests.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Run Nightly E2E Tests - -on: - # Disable the workflow as the notification already sent on core workflow - # schedule: - # Run every night at midnight UTC (0:00 AM) - # - cron: '0 0 * * *' - workflow_dispatch: - -jobs: - call-run-e2e-tests-workflow: - name: Run E2E Tests - uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml - with: - chainlink_version: develop - test_workflow: Run Nightly E2E Tests - slack_notification_after_tests: true - slack_notification_after_tests_channel_id: "#team-test-tooling-internal" - slack_notification_after_tests_name: Nightly E2E Tests - secrets: - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} - GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} diff --git a/.github/workflows/run-selected-e2e-tests.yml b/.github/workflows/run-selected-e2e-tests.yml deleted file mode 100644 index ab064531fd..0000000000 --- a/.github/workflows/run-selected-e2e-tests.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Run Selected E2E Tests - -on: - workflow_dispatch: - inputs: - chainlink_version: - description: 'Enter Chainlink version to use for the tests. Example: "v2.10.0" or sha' - required: false - type: string - test_ids: - description: 'Run all tests "*" by default. Or, enter test IDs to run separated by commas. Example: "run_all_in_ocr_tests_go,run_TestOCRv2Request_in_ocr2_test_go". 
Check all test IDs in .github/e2e-tests.yml' - default: "*" - required: true - type: string - test_secrets_override_key: - description: 'Enter the secret key to override test secrets' - required: false - type: string - # TODO: Uncomment once Test Config does not have any secrets. Related ticket https://smartcontract-it.atlassian.net/browse/TT-1392 - # test_config_override_base64: - # required: false - # description: The base64-encoded test config override - # type: string - enable_check_test_configurations: - description: 'Set to "true" to enable check-test-configurations job' - required: false - type: boolean - default: false - with_existing_remote_runner_version: - description: 'Use the existing remote runner version for k8s tests. Example: "d3bf5044af33e08be788a2df31c4a745cf69d787"' - required: false - type: string - workflow_run_name: - description: 'Enter the name of the workflow run' - default: 'Run E2E Tests' - required: false - type: string - -run-name: ${{ inputs.workflow_run_name }} - -jobs: - call-run-e2e-tests-workflow: - name: Run E2E Tests - uses: ./.github/workflows/run-e2e-tests-reusable-workflow.yml - with: - chainlink_version: ${{ github.event.inputs.chainlink_version }} - test_ids: ${{ github.event.inputs.test_ids }} - # TODO: Uncomment once Test Config does not have any secrets. Related ticket https://smartcontract-it.atlassian.net/browse/TT-1392 - # test_config_override_base64: ${{ github.event.inputs.test_config_override_base64 }} - with_existing_remote_runner_version: ${{ github.event.inputs.with_existing_remote_runner_version }} - # Use fromJSON to convert string to boolean. More info: https://github.com/actions/runner/issues/2206#issuecomment-1532246677 - enable_check_test_configurations: ${{ fromJSON(github.event.inputs.enable_check_test_configurations) }} - secrets: - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - QA_PYROSCOPE_INSTANCE: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - QA_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - GRAFANA_INTERNAL_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - GRAFANA_INTERNAL_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - GRAFANA_INTERNAL_HOST: ${{ secrets.GRAFANA_INTERNAL_HOST }} - GRAFANA_INTERNAL_URL_SHORTENER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }} - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN: ${{ secrets.AWS_OIDC_IAM_ROLE_VALIDATION_PROD_ARN }} - AWS_API_GW_HOST_GRAFANA: ${{ secrets.AWS_API_GW_HOST_GRAFANA }} - TEST_SECRETS_OVERRIDE_BASE64: ${{ secrets[inputs.test_secrets_override_key] }} - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - diff --git a/.github/workflows/sigscanner.yml b/.github/workflows/sigscanner.yml deleted file mode 100644 index 5d22f79ab5..0000000000 --- a/.github/workflows/sigscanner.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: 'SigScanner Check' - -on: - merge_group: - push: - -jobs: - sigscanner-check: - runs-on: ubuntu-latest - steps: - - name: "SigScanner checking ${{ github.sha }} by ${{ github.actor }}" - env: - API_TOKEN: ${{ secrets.SIGSCANNER_API_TOKEN }} - API_URL: ${{ secrets.SIGSCANNER_API_URL }} - run: | - echo "🔎 Checking commit ${{ github.sha }} by ${{ github.actor }} in ${{ github.repository }} - ${{ github.event_name }}" - CODE=`curl --write-out '%{http_code}' -X POST -H "Content-Type: application/json" -H 
"Authorization: $API_TOKEN" --silent --output /dev/null --url "$API_URL" --data '{"commit":"${{ github.sha }}","repository":"${{ github.repository }}","author":"${{ github.actor }}"}'` - echo "Received $CODE" - if [[ "$CODE" == "200" ]]; then - echo "✅ Commit is verified" - exit 0 - else - echo "❌ Commit is NOT verified" - exit 1 - fi - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: sigscanner - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: sigscanner-check - continue-on-error: true diff --git a/.github/workflows/solidity-foundry-artifacts.yml b/.github/workflows/solidity-foundry-artifacts.yml deleted file mode 100644 index 50a77e2846..0000000000 --- a/.github/workflows/solidity-foundry-artifacts.yml +++ /dev/null @@ -1,372 +0,0 @@ -name: Solidity Foundry Artifact Generation -on: - workflow_dispatch: - inputs: - product: - type: choice - description: 'product for which to generate artifacts; should be the same as Foundry profile' - required: true - options: - - "automation" - - "ccip" - - "functions" - - "keystone" - - "l2ep" - - "liquiditymanager" - - "llo-feeds" - - "operatorforwarder" - - "shared" - - "transmission" - - "vrf" - base_ref: - description: 'commit or tag to be used as base reference, when looking for modified Solidity files' - required: true - -env: - FOUNDRY_PROFILE: ci - -jobs: - changes: - name: Detect changes - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.changes.outputs.sol }} - product_changes: ${{ steps.changes-transform.outputs.product_changes }} - product_files: ${{ steps.changes-transform.outputs.product_files }} - changeset_changes: ${{ steps.changes-dorny.outputs.changeset }} - changeset_files: ${{ steps.changes-dorny.outputs.changeset_files }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Find modified contracts - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes - with: - list-files: 'csv' - base: ${{ inputs.base_ref }} - predicate-quantifier: every - filters: | - ignored: &ignored - - '!contracts/src/v0.8/**/test/**' - - '!contracts/src/v0.8/**/tests/**' - - '!contracts/src/v0.8/**/mock/**' - - '!contracts/src/v0.8/**/mocks/**' - - '!contracts/src/v0.8/**/*.t.sol' - - '!contracts/src/v0.8/*.t.sol' - - '!contracts/src/v0.8/**/testhelpers/**' - - '!contracts/src/v0.8/testhelpers/**' - - '!contracts/src/v0.8/vendor/**' - other_shared: - - modified|added: 'contracts/src/v0.8/(interfaces/**/*.sol|*.sol)' - - *ignored - sol: - - modified|added: 'contracts/src/v0.8/**/*.sol' - - *ignored - product: &product - - modified|added: 'contracts/src/v0.8/${{ inputs.product }}/**/*.sol' - - *ignored - changeset: - - modified|added: 'contracts/.changeset/!(README)*.md' - - # Manual transformation needed, because shared contracts have a different folder structure - - name: Transform modified files - id: changes-transform - shell: bash - run: | - if [ "${{ inputs.product }}" = "shared" ]; then - echo "::debug:: Product is shared, transforming changes" - if [[ "${{ steps.changes.outputs.product }}" == "true" && "${{ steps.changes.outputs.other_shared }}" == "true" ]]; then - echo "::debug:: Changes were found in 'shared' folder and in 'interfaces' and root folders" - echo "product_changes=true" >> 
$GITHUB_OUTPUT - echo "product_files=${{ steps.changes.outputs.product_files }},${{ steps.changes.outputs.other_shared_files }}" >> $GITHUB_OUTPUT - elif [[ "${{ steps.changes.outputs.product }}" == "false" && "${{ steps.changes.outputs.other_shared }}" == "true" ]]; then - echo "::debug:: Only contracts in 'interfaces' and root folders were modified" - echo "product_changes=true" >> $GITHUB_OUTPUT - echo "product_files=${{ steps.changes.outputs.other_shared_files }}" >> $GITHUB_OUTPUT - elif [[ "${{ steps.changes.outputs.product }}" == "true" && "${{ steps.changes.outputs.other_shared }}" == "false" ]]; then - echo "::debug:: Only contracts in 'shared' folder were modified" - echo "product_changes=true" >> $GITHUB_OUTPUT - echo "product_files=${{ steps.changes.outputs.product_files }}" >> $GITHUB_OUTPUT - else - echo "::debug:: No contracts were modified" - echo "product_changes=false" >> $GITHUB_OUTPUT - echo "product_files=" >> $GITHUB_OUTPUT - fi - else - echo "product_changes=${{ steps.changes.outputs.product }}" >> $GITHUB_OUTPUT - echo "product_files=${{ steps.changes.outputs.product_files }}" >> $GITHUB_OUTPUT - fi - - - name: Check for changes outside of artifact scope - uses: ./.github/actions/validate-artifact-scope - if: ${{ steps.changes.outputs.sol == 'true' }} - with: - sol_files: ${{ steps.changes.outputs.sol_files }} - product: ${{ inputs.product }} - - gather-basic-info: - name: Gather basic info - if: ${{ needs.changes.outputs.product_changes == 'true' }} - runs-on: ubuntu-22.04 - needs: [ changes ] - outputs: - foundry_version: ${{ steps.extract-foundry-version.outputs.foundry-version }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - - name: Extract Foundry version - id: extract-foundry-version - uses: ./.github/actions/detect-solidity-foundry-version - with: - working-directory: contracts - - - name: Copy modified changesets - if: ${{ needs.changes.outputs.changeset_changes == 'true' }} - run: | - mkdir -p contracts/changesets - files="${{ needs.changes.outputs.changeset_files }}" - IFS=',' - for changeset in $files; do - echo "::debug:: Copying $changeset" - cp "$changeset" contracts/changesets - done - - - name: Generate basic info and modified contracts list - shell: bash - run: | - echo "Commit SHA used to generate artifacts: ${{ github.sha }}" > contracts/commit_sha_base_ref.txt - echo "Base reference SHA used to find modified contracts: ${{ inputs.base_ref }}" >> contracts/commit_sha_base_ref.txt - - IFS=',' read -r -a modified_files <<< "${{ needs.changes.outputs.product_files }}" - echo "# Modified contracts:" > contracts/modified_contracts.md - for file in "${modified_files[@]}"; do - echo " - [$file](${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/$file)" >> contracts/modified_contracts.md - echo "$file" >> contracts/modified_contracts.txt - done - - - name: Upload basic info and modified contracts list - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 2 - continue-on-error: true - with: - name: tmp-basic-info - path: | - contracts/modified_contracts.md - contracts/modified_contracts.txt - contracts/commit_sha_base_ref.txt - contracts/changesets - retention-days: 7 - - # some of the artifacts can only be generated at the product level, and we cannot scope them to single contracts - # some product-level modifications might also require shared contracts changes, so if these happened we need to
generate artifacts for shared contracts as well - coverage-and-book: - if: ${{ needs.changes.outputs.product_changes == 'true' }} - name: Generate Docs and Code Coverage reports - runs-on: ubuntu-22.04 - needs: [changes, gather-basic-info] - steps: - - name: Prepare exclusion list - id: prepare-exclusion-list - run: | - cat <<EOF > coverage_exclusions.json - ["automation", "functions", "vrf"] - EOF - coverage_exclusions=$(cat coverage_exclusions.json | jq -c .) - echo "coverage_exclusions=$coverage_exclusions" >> $GITHUB_OUTPUT - - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Create directories - shell: bash - run: | - mkdir -p contracts/code-coverage - - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.gather-basic-info.outputs.foundry_version }} - - # required for code coverage report generation - - name: Setup LCOV - uses: hrishikesh-kadam/setup-lcov@f5da1b26b0dcf5d893077a3c4f29cf78079c841d # v1.0.0 - - - name: Run Forge build for product contracts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - run: | - forge --version - forge build - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ inputs.product }} - - - name: Run coverage for product contracts - if: ${{ !contains(fromJson(steps.prepare-exclusion-list.outputs.coverage_exclusions), inputs.product) && needs.changes.outputs.product_changes == 'true' }} - working-directory: contracts - run: forge coverage --report lcov --report-file code-coverage/lcov.info - env: - FOUNDRY_PROFILE: ${{ inputs.product }} - - - name: Generate Code Coverage HTML report for product contracts - if: ${{ !contains(fromJson(steps.prepare-exclusion-list.outputs.coverage_exclusions), inputs.product) && needs.changes.outputs.product_changes == 'true' }} - shell: bash - working-directory: contracts - run: genhtml code-coverage/lcov.info --branch-coverage --output-directory code-coverage - - - name: Run Forge doc for product contracts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - run: forge doc --build -o docs - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ inputs.product }} - - - name: Upload Artifacts for product contracts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 2 - continue-on-error: true - with: - name: tmp-${{ inputs.product }}-artifacts - path: | - contracts/docs - contracts/code-coverage/lcov.info - contracts/code-coverage - retention-days: 7 - - # Generates UML diagrams and Slither reports for modified contracts - uml-static-analysis: - if: ${{ needs.changes.outputs.product_changes == 'true' }} - name: Generate UML and Slither reports for modified contracts - runs-on: ubuntu-22.04 - needs: [changes, gather-basic-info] - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - fetch-depth: 0 - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.gather-basic-info.outputs.foundry_version }} - - - name: Install Sol2uml - run: | - npm link sol2uml --only=production - - - name: Set up Python - uses:
actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f #v5.1.1 - with: - python-version: '3.8' - - - name: Install solc-select and solc - uses: ./.github/actions/setup-solc-select - with: - to_install: '0.8.19' - to_use: '0.8.19' - - - name: Install Slither - uses: ./.github/actions/setup-slither - - - name: Generate UML - shell: bash - run: | - contract_list="${{ needs.changes.outputs.product_files }}" - - # modify remappings so that solc can find dependencies - ./contracts/scripts/ci/modify_remappings.sh contracts contracts/remappings.txt - mv remappings_modified.txt remappings.txt - - ./contracts/scripts/ci/generate_uml.sh "./" "contracts/uml-diagrams" "$contract_list" - - - name: Generate Slither Markdown reports - run: | - contract_list="${{ needs.changes.outputs.product_files }}" - - echo "::debug::Processing contracts: $contract_list" - ./contracts/scripts/ci/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/" contracts/configs/slither/.slither.config-artifacts.json "." "$contract_list" "contracts/slither-reports" "--solc-remaps @=contracts/node_modules/@" - - - name: Upload UMLs and Slither reports - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 10 - continue-on-error: true - with: - name: tmp-contracts-artifacts - path: | - contracts/uml-diagrams - contracts/slither-reports - retention-days: 7 - - - name: Validate if all Slither run for all contracts - uses: ./.github/actions/validate-solidity-artifacts - with: - validate_slither_reports: 'true' - validate_uml_diagrams: 'true' - slither_reports_path: 'contracts/slither-reports' - uml_diagrams_path: 'contracts/uml-diagrams' - sol_files: ${{ needs.changes.outputs.product_files }} - - gather-all-artifacts: - name: Gather all artifacts - if: ${{ needs.changes.outputs.product_changes == 'true' }} - runs-on: ubuntu-latest - needs: [coverage-and-book, uml-static-analysis, gather-basic-info, changes] - steps: - - name: Download all artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - with: - path: review_artifacts - merge-multiple: true - - - name: Upload all artifacts as single package - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - with: - name: review-artifacts-${{ inputs.product }}-${{ github.sha }} - path: review_artifacts - retention-days: 60 - - - name: Remove temporary artifacts - uses: geekyeggo/delete-artifact@24928e75e6e6590170563b8ddae9fac674508aa1 # v5.0 - with: - name: tmp-* - - - name: Print Artifact URL in job summary - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - ARTIFACTS=$(gh api -X GET repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts) - ARTIFACT_ID=$(echo "$ARTIFACTS" | jq '.artifacts[] | select(.name=="review-artifacts-${{ inputs.product }}-${{ github.sha }}") | .id') - echo "Artifact ID: $ARTIFACT_ID" - - echo "# Solidity Review Artifact Generated" >> $GITHUB_STEP_SUMMARY - echo "Base Ref used: **${{ inputs.base_ref }}**" >> $GITHUB_STEP_SUMMARY - echo "Commit SHA used: **${{ github.sha }}**" >> $GITHUB_STEP_SUMMARY - echo "[Artifact URL](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts/$ARTIFACT_ID)" >> $GITHUB_STEP_SUMMARY - - notify-no-changes: - if: ${{ needs.changes.outputs.product_changes == 'false' }} - needs: [changes] - runs-on: ubuntu-latest - steps: - - name: Print warning in job summary - shell: bash - run: | - echo "# Solidity Review Artifact 
NOT Generated" >> $GITHUB_STEP_SUMMARY - echo "Base Ref used: **${{ inputs.base_ref }}**" >> $GITHUB_STEP_SUMMARY - echo "Commit SHA used: **${{ github.sha }}**" >> $GITHUB_STEP_SUMMARY - echo "## Reason: No modified Solidity files found for ${{ inputs.product }}" >> $GITHUB_STEP_SUMMARY - echo "* no modified Solidity files found between ${{ inputs.base_ref }} and ${{ github.sha }} commits" >> $GITHUB_STEP_SUMMARY - echo "* or they are located outside of the ./contracts/src/v0.8 folder" >> $GITHUB_STEP_SUMMARY - echo "* or they were limited to test files" >> $GITHUB_STEP_SUMMARY - exit 1 diff --git a/.github/workflows/solidity-foundry.yml b/.github/workflows/solidity-foundry.yml deleted file mode 100644 index 7ba1d9bba2..0000000000 --- a/.github/workflows/solidity-foundry.yml +++ /dev/null @@ -1,650 +0,0 @@ -name: Solidity Foundry -on: [pull_request] - -env: - FOUNDRY_PROFILE: ci - -# Making changes: -# * use the top-level matrix to decide which checks should run for each product. -# * when enabling code coverage, remember to adjust the minimum code coverage as it's set to 98.5% by default. - -# This pipeline will run product tests only if product-specific contracts were modified or if broad-impact changes were made (e.g. changes to this pipeline, Foundry configuration, etc.) -# For modified contracts, we use an LLM to extract new issues introduced by the changes. For new contracts, a full report is delivered. -# Slither has a default configuration, but also supports per-product configuration. If a product-specific configuration is not found, the default one is used. -# Changes to test files do not trigger static analysis or formatting checks. - -jobs: - define-matrix: - name: Define test matrix - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.define-matrix.outputs.matrix }} - foundry-version: ${{ steps.extract-foundry-version.outputs.foundry-version }} - steps: - - name: Define test matrix - id: define-matrix - shell: bash - run: | - cat <<EOF > matrix.json - [ - { "name": "automation", "setup": { "run-coverage": false, "min-coverage": 98.5, "run-gas-snapshot": false, "run-forge-fmt": false }}, - { "name": "ccip", "setup": { "run-coverage": true, "min-coverage": 98.5, "run-gas-snapshot": true, "run-forge-fmt": true }}, - { "name": "functions", "setup": { "run-coverage": false, "min-coverage": 98.5, "run-gas-snapshot": true, "run-forge-fmt": false }}, - { "name": "keystone", "setup": { "run-coverage": true, "min-coverage": 74.1, "run-gas-snapshot": false, "run-forge-fmt": false }}, - { "name": "l2ep", "setup": { "run-coverage": true, "min-coverage": 65.6, "run-gas-snapshot": true, "run-forge-fmt": false }}, - { "name": "liquiditymanager", "setup": { "run-coverage": true, "min-coverage": 46.3, "run-gas-snapshot": true, "run-forge-fmt": false }}, - { "name": "llo-feeds", "setup": { "run-coverage": true, "min-coverage": 49.3, "run-gas-snapshot": true, "run-forge-fmt": false }}, - { "name": "operatorforwarder", "setup": { "run-coverage": true, "min-coverage": 55.7, "run-gas-snapshot": true, "run-forge-fmt": false }}, - { "name": "shared", "setup": { "run-coverage": true, "extra-coverage-params": "--no-match-path='*CallWithExactGas*'", "min-coverage": 32.6, "run-gas-snapshot": true, "run-forge-fmt": false }}, - { "name": "transmission", "setup": { "run-coverage": true, "min-coverage": 65.6, "run-gas-snapshot": true, "run-forge-fmt": false }}, - { "name": "vrf", "setup": { "run-coverage": false, "min-coverage": 98.5, "run-gas-snapshot": false, "run-forge-fmt": false }} - ] - EOF - - matrix=$(cat
matrix.json | jq -c .) - echo "matrix=$matrix" >> $GITHUB_OUTPUT - - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Extract Foundry version - id: extract-foundry-version - uses: ./.github/actions/detect-solidity-foundry-version - with: - working-directory: contracts - - changes: - name: Detect changes - runs-on: ubuntu-latest - outputs: - non_src_changes: ${{ steps.changes.outputs.non_src }} - sol_modified_added: ${{ steps.changes.outputs.sol }} - sol_modified_added_files: ${{ steps.changes.outputs.sol_files }} - sol_mod_only: ${{ steps.changes.outputs.sol_mod_only }} - sol_mod_only_files: ${{ steps.changes.outputs.sol_mod_only_files }} - not_test_sol_modified: ${{ steps.changes-non-test.outputs.not_test_sol }} - not_test_sol_modified_files: ${{ steps.changes-non-test.outputs.not_test_sol_files }} - all_changes: ${{ steps.changes.outputs.changes }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Detect changes - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes - with: - list-files: 'shell' - filters: | - non_src: - - '.github/workflows/solidity-foundry.yml' - - 'contracts/foundry.toml' - - 'contracts/gas-snapshots/*.gas-snapshot' - - 'contracts/package.json' - sol: - - modified|added: 'contracts/src/v0.8/**/*.sol' - sol_mod_only: - - modified: 'contracts/src/v0.8/**/!(tests|mocks)/!(*.t).sol' - not_test_sol: - - modified|added: 'contracts/src/v0.8/**/!(tests|mocks)/!(*.t).sol' - automation: - - 'contracts/src/v0.8/automation/**/*.sol' - ccip: - - 'contracts/src/v0.8/ccip/**/*.sol' - functions: - - 'contracts/src/v0.8/functions/**/*.sol' - keystone: - - 'contracts/src/v0.8/keystone/**/*.sol' - l2ep: - - 'contracts/src/v0.8/l2ep/**/*.sol' - liquiditymanager: - - 'contracts/src/v0.8/liquiditymanager/**/*.sol' - llo-feeds: - - 'contracts/src/v0.8/llo-feeds/**/*.sol' - operatorforwarder: - - 'contracts/src/v0.8/operatorforwarder/**/*.sol' - vrf: - - 'contracts/src/v0.8/vrf/**/*.sol' - shared: - - 'contracts/src/v0.8/shared/**/*.sol' - - 'contracts/src/v0.8/*.sol' - - 'contracts/src/v0.8/mocks/**/*.sol' - - 'contracts/src/v0.8/tests/**/*.sol' - - 'contracts/src/v0.8/vendor/**/*.sol' - transmission: - - 'contracts/src/v0.8/transmission/**/*.sol' - - - name: Detect non-test changes - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes-non-test - with: - list-files: 'shell' - # This is a valid input, see https://github.com/dorny/paths-filter/pull/226 - predicate-quantifier: every - filters: | - not_test_sol: - - modified|added: 'contracts/src/v0.8/**/!(*.t).sol' - - '!contracts/src/v0.8/**/test/**' - - '!contracts/src/v0.8/**/tests/**' - - '!contracts/src/v0.8/**/mock/**' - - '!contracts/src/v0.8/**/mocks/**' - - '!contracts/src/v0.8/**/*.t.sol' - - '!contracts/src/v0.8/*.t.sol' - - '!contracts/src/v0.8/**/testhelpers/**' - - '!contracts/src/v0.8/testhelpers/**' - - '!contracts/src/v0.8/vendor/**' - - tests: - strategy: - fail-fast: false - matrix: - product: ${{fromJson(needs.define-matrix.outputs.matrix)}} - needs: [define-matrix, changes] - name: Foundry Tests ${{ matrix.product.name }} - runs-on: ubuntu-22.04 - - # The if statements on the steps after the repo checkout are a workaround for - # passing the required check on PRs that don't have filtered changes.
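To make that workaround concrete: a job registered as a required status check must always run so that it reports a status, which is why the gate sits on every step's "if:" rather than on the job itself, exactly as the steps below do. A minimal, hedged sketch of the pattern, with hypothetical job and step names:

# Sketch only: per-step gating keeps a required check green on PRs with no relevant changes.
jobs:
  required-check:
    needs: [changes]
    runs-on: ubuntu-latest    # the job itself always runs, so the required check always reports
    steps:
      - name: Checkout the repo
        if: ${{ needs.changes.outputs.changes == 'true' }}    # each step is gated instead of the job
        uses: actions/checkout@v4
      - name: Run tests
        if: ${{ needs.changes.outputs.changes == 'true' }}
        run: make test    # when nothing relevant changed, every step skips and the job still succeeds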
- steps: - - name: Checkout the repo - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true' }} - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - submodules: recursive - - # Only needed because we use the NPM versions of packages - # and not native Foundry. This is to make sure the dependencies - # stay in sync. - - name: Setup NodeJS - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true' }} - uses: ./.github/actions/setup-nodejs - - - name: Install Foundry - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true' }} - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.define-matrix.outputs.foundry-version }} - - - name: Run Forge build - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true' }} - run: | - forge --version - forge build - id: build - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ matrix.product.name }} - - - name: Run Forge tests - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true' }} - run: | - forge test -vvv - id: test - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ matrix.product.name }} - - - name: Run Forge snapshot - if: ${{ (contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true') - && matrix.product.setup.run-gas-snapshot }} - run: | - forge snapshot --nmt "test_?Fuzz_\w{1,}?" 
--check gas-snapshots/${{ matrix.product.name }}.gas-snapshot - id: snapshot - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ matrix.product.name }} - - # required for code coverage report generation - - name: Setup LCOV - if: ${{ (contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true') - && matrix.product.setup.run-coverage }} - uses: hrishikesh-kadam/setup-lcov@f5da1b26b0dcf5d893077a3c4f29cf78079c841d # v1.0.0 - - - name: Run coverage for ${{ matrix.product.name }} - if: ${{ (contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true') - && matrix.product.setup.run-coverage }} - working-directory: contracts - shell: bash - run: | - if [[ -n "${{ matrix.product.setup.extra-coverage-params }}" ]]; then - forge coverage --report lcov ${{ matrix.product.setup.extra-coverage-params }} - else - forge coverage --report lcov - fi - env: - FOUNDRY_PROFILE: ${{ matrix.product.name }} - - - name: Prune lcov report - if: ${{ (contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true') - && matrix.product.setup.run-coverage }} - run: | - ./contracts/scripts/lcov_prune ${{ matrix.product.name }} ./contracts/lcov.info ./contracts/lcov.info.pruned - - - name: Report code coverage for ${{ matrix.product.name }} - if: ${{ (contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true') - && matrix.product.setup.run-coverage }} - uses: zgosalvez/github-actions-report-lcov@a546f89a65a0cdcd82a92ae8d65e74d450ff3fbc # v4.1.4 - with: - update-comment: false - coverage-files: ./contracts/lcov.info.pruned - minimum-coverage: ${{ matrix.product.setup.min-coverage }} - artifact-name: code-coverage-report-${{ matrix.product.name }} - working-directory: ./contracts - - - name: Collect Metrics - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) - || contains(fromJson(needs.changes.outputs.all_changes), 'shared') - || needs.changes.outputs.non_src_changes == 'true' }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ${{ matrix.product.name }}-solidity-foundry - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Foundry Tests ${{ matrix.product.name }} - continue-on-error: true - - # runs only if non-test contracts were modified; scoped only to modified or added contracts - analyze: - needs: [ changes, define-matrix ] - name: Run static analysis - if: needs.changes.outputs.not_test_sol_modified == 'true' && false - runs-on: ubuntu-22.04 - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - submodules: recursive - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Install Foundry - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.define-matrix.outputs.foundry-version }} - - - name:
Set up Python - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f #v5.1.1 - with: - python-version: '3.8' - - - name: Install solc-select and solc - uses: ./.github/actions/setup-solc-select - with: - to_install: '0.8.19' - to_use: '0.8.19' - - - name: Install Slither - uses: ./.github/actions/setup-slither - - - name: Run Slither - shell: bash - run: | - # modify remappings so that solc can find dependencies - ./contracts/scripts/ci/modify_remappings.sh contracts contracts/remappings.txt - mv remappings_modified.txt remappings.txt - - # without it Slither sometimes fails to use remappings correctly - cp contracts/foundry.toml foundry.toml - - FILES="${{ needs.changes.outputs.not_test_sol_modified_files }}" - - for FILE in $FILES; do - PRODUCT=$(echo "$FILE" | awk -F'src/[^/]*/' '{print $2}' | cut -d'/' -f1) - echo "::debug::Running Slither for $FILE in $PRODUCT" - SLITHER_CONFIG="contracts/configs/slither/.slither.config-$PRODUCT-pr.json" - if [[ ! -f $SLITHER_CONFIG ]]; then - echo "::debug::No Slither config found for $PRODUCT, using default" - SLITHER_CONFIG="contracts/configs/slither/.slither.config-default-pr.json" - fi - ./contracts/scripts/ci/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/" "$SLITHER_CONFIG" "." "$FILE" "contracts/slither-reports-current" "--solc-remaps @=contracts/node_modules/@" - done - - # all the actions below, up to printing results, run only if any existing contracts were modified - # in that case we extract new issues introduced by the changes by using an LLM model - - name: Upload Slither results for current branch - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 2 - continue-on-error: true - with: - name: slither-reports-current-${{ github.sha }} - path: contracts/slither-reports-current - retention-days: 7 - - # we need to upload scripts and configuration in case base_ref doesn't have the scripts, or they are in a different version - - name: Upload Slither scripts - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 2 - continue-on-error: true - with: - name: tmp-slither-scripts-${{ github.sha }} - path: contracts/scripts/ci - retention-days: 7 - - - name: Upload configs - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 2 - continue-on-error: true - with: - name: tmp-configs-${{ github.sha }} - path: contracts/configs - retention-days: 7 - - - name: Checkout the repo - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: ${{ github.base_ref }} - - - name: Download Slither scripts - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - with: - name: tmp-slither-scripts-${{ github.sha }} - path: contracts/scripts/ci - - - name: Download configs - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - with: - name: tmp-configs-${{ github.sha }} - path: contracts/configs - - # since we have just checked out the repository again, we lose the NPM dependencies installed previously, so we need to install them again to compile the contracts - - name: Setup NodeJS -
if: needs.changes.outputs.sol_mod_only == 'true' - uses: ./.github/actions/setup-nodejs - - - name: Run Slither for base reference - if: needs.changes.outputs.sol_mod_only == 'true' - shell: bash - run: | - # we need to set file permissions again since they are lost during download - for file in contracts/scripts/ci/*.sh; do - chmod +x "$file" - done - - # modify remappings so that solc can find dependencies - ./contracts/scripts/ci/modify_remappings.sh contracts contracts/remappings.txt - mv remappings_modified.txt remappings.txt - - # without it Slither sometimes fails to use remappings correctly - cp contracts/foundry.toml foundry.toml - - FILES="${{ needs.changes.outputs.sol_mod_only_files }}" - - for FILE in $FILES; do - PRODUCT=$(echo "$FILE" | awk -F'src/[^/]*/' '{print $2}' | cut -d'/' -f1) - echo "::debug::Running Slither for $FILE in $PRODUCT" - SLITHER_CONFIG="contracts/configs/slither/.slither.config-$PRODUCT-pr.json" - if [[ ! -f $SLITHER_CONFIG ]]; then - echo "::debug::No Slither config found for $PRODUCT, using default" - SLITHER_CONFIG="contracts/configs/slither/.slither.config-default-pr.json" - fi - ./contracts/scripts/ci/generate_slither_report.sh "${{ github.server_url }}/${{ github.repository }}/blob/${{ github.sha }}/" "$SLITHER_CONFIG" "." "$FILE" "contracts/slither-reports-base-ref" "--solc-remaps @=contracts/node_modules/@" - done - - - name: Upload Slither report - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 10 - continue-on-error: true - with: - name: slither-reports-base-${{ github.sha }} - path: | - contracts/slither-reports-base-ref - retention-days: 7 - - - name: Download Slither results for current branch - if: needs.changes.outputs.sol_mod_only == 'true' - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - with: - name: slither-reports-current-${{ github.sha }} - path: contracts/slither-reports-current - - - name: Generate diff of Slither reports for modified files - if: needs.changes.outputs.sol_mod_only == 'true' - env: - OPEN_API_KEY: ${{ secrets.OPEN_AI_SLITHER_API_KEY }} - shell: bash - run: | - set -euo pipefail - for base_report in contracts/slither-reports-base-ref/*.md; do - filename=$(basename "$base_report") - current_report="contracts/slither-reports-current/$filename" - new_issues_report="contracts/slither-reports-current/${filename%.md}_new_issues.md" - if [ -f "$current_report" ]; then - if ./contracts/scripts/ci/find_slither_report_diff.sh "$base_report" "$current_report" "$new_issues_report" "contracts/scripts/ci/prompt-difference.md" "contracts/scripts/ci/prompt-validation.md"; then - if [[ -s $new_issues_report ]]; then - awk 'NR==2{print "*This new issues report has been automatically generated by an LLM model using two Slither reports. One based on `${{ github.base_ref }}` and another on `${{ github.sha }}` commits.*"}1' $new_issues_report > tmp.md && mv tmp.md $new_issues_report - echo "Replacing full Slither report with diff for $current_report" - rm $current_report && mv $new_issues_report $current_report - else - echo "No difference detected between $base_report and $current_report reports. Won't include any of them." - rm $current_report - fi - else - echo "::warning::Failed to generate a diff report with new issues for $base_report using an LLM model, will use the full report."
- fi - - else - echo "::warning::Failed to find current commit's equivalent of $base_report (file $current_report doesn't exist, but should have been generated). Please check Slither logs." - fi - done - - # actions that execute only if any existing contracts were modified end here - - name: Print Slither summary - shell: bash - run: | - echo "# Static analysis results " >> $GITHUB_STEP_SUMMARY - for file in "contracts/slither-reports-current"/*.md; do - if [ -e "$file" ]; then - cat "$file" >> $GITHUB_STEP_SUMMARY - fi - done - - - name: Validate if all Slither run for all contracts - uses: ./.github/actions/validate-solidity-artifacts - with: - validate_slither_reports: 'true' - slither_reports_path: 'contracts/slither-reports-current' - sol_files: ${{ needs.changes.outputs.not_test_sol_modified_files }} - - - name: Upload Slither reports - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 - timeout-minutes: 10 - continue-on-error: true - with: - name: slither-reports-${{ github.sha }} - path: | - contracts/slither-reports-current - retention-days: 7 - - - name: Find Slither comment in the PR - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.0.0 - id: find-comment - with: - issue-number: ${{ github.event.pull_request.number }} - comment-author: 'github-actions[bot]' - body-includes: 'Static analysis results' - - - name: Extract job summary URL - id: job-summary-url - uses: pl-strflt/job-summary-url-action@df2d22c5351f73e0a187d20879854b8d98e6e001 # v1.0.0 - with: - job: 'Run static analysis' - - - name: Build Slither reports artifacts URL - id: build-slither-artifact-url - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - ARTIFACTS=$(gh api -X GET repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts) - ARTIFACT_ID=$(echo "$ARTIFACTS" | jq '.artifacts[] | select(.name=="slither-reports-${{ github.sha }}") | .id') - echo "Artifact ID: $ARTIFACT_ID" - - slither_artifact_url="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts/$ARTIFACT_ID" - echo "slither_artifact_url=$slither_artifact_url" >> $GITHUB_OUTPUT - - - name: Create or update Slither comment in the PR - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - body: | - ## Static analysis results are available - Hey @${{ github.event.push && github.event.push.pusher && github.event.push.pusher.username || github.actor }}, you can view Slither reports in the job summary [here](${{ steps.job-summary-url.outputs.job_summary_url }}) or download them as artifact [here](${{ steps.build-slither-artifact-url.outputs.slither_artifact_url }}). - - Please check them before merging and make sure you have addressed all issues. 
- edit-mode: replace - - - name: Remove temp artifacts - uses: geekyeggo/delete-artifact@24928e75e6e6590170563b8ddae9fac674508aa1 # v5.0 - with: - name: tmp-* - - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: solidity-foundry-slither - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Run static analysis - continue-on-error: true - - solidity-forge-fmt: - name: Forge fmt ${{ matrix.product.name }} - if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.not_test_sol_modified == 'true' }} - needs: [define-matrix, changes] - strategy: - fail-fast: false - matrix: - product: ${{fromJson(needs.define-matrix.outputs.matrix)}} - runs-on: ubuntu-22.04 - steps: - - name: Checkout the repo - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) && matrix.product.setup.run-forge-fmt }} - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - submodules: recursive - - - name: Setup NodeJS - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) && matrix.product.setup.run-forge-fmt }} - uses: ./.github/actions/setup-nodejs - - - name: Install Foundry - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) && matrix.product.setup.run-forge-fmt }} - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.define-matrix.outputs.foundry-version }} - - - name: Run Forge fmt - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) && matrix.product.setup.run-forge-fmt }} - run: forge fmt --check - id: fmt - working-directory: contracts - env: - FOUNDRY_PROFILE: ${{ matrix.product.name }} - - - name: Collect Metrics - if: ${{ contains(fromJson(needs.changes.outputs.all_changes), matrix.product.name) && matrix.product.setup.run-forge-fmt }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: solidity-forge-fmt-${{ matrix.product.name }} - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Forge fmt ${{ matrix.product.name }} - continue-on-error: true - - coverage: - needs: [define-matrix, changes] - name: Coverage - runs-on: ubuntu-latest - env: - FOUNDRY_PROFILE: ccip - - steps: - - name: Collect Metrics - if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.sol_modified_added == 'true' }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 - with: - id: ccip-solidity-foundry-coverage - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Coverage - continue-on-error: true - - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - submodules: recursive - - # Only needed because we use the NPM versions of packages - # and not native Foundry. This is to make sure the dependencies - # stay in sync. 
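For context, a hedged sketch of the NPM-plus-Foundry flow the comment above describes; the remapping entry is hypothetical (the real ones live in contracts/remappings.txt), and pnpm is assumed to be available on the runner. The steps that follow in this job set up NodeJS for exactly this reason before building and measuring coverage.

# Sketch only: contract dependencies come from NPM and forge resolves them via remappings.
# Hypothetical remappings.txt entry:
#   @openzeppelin/=node_modules/@openzeppelin/
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: pnpm install --frozen-lockfile    # package.json / pnpm-lock.yaml pin the versions
        working-directory: contracts
      - run: forge build                       # imports resolve through node_modules
        working-directory: contracts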
- - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Install Foundry - if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.sol_modified_added == 'true' }} - uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 - with: - version: ${{ needs.define-matrix.outputs.foundry-version }} - - - name: Run Forge build - if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.sol_modified_added == 'true' }} - working-directory: contracts - run: | - forge --version - forge build - id: build - - - name: Run coverage - if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.sol_modified_added == 'true' }} - working-directory: contracts - run: forge coverage --report lcov - - - name: Prune report - if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.sol_modified_added == 'true' }} - run: | - sudo apt-get install lcov - ./tools/ci/ccip_lcov_prune ./contracts/lcov.info ./lcov.info.pruned - - - name: Report code coverage - if: ${{ needs.changes.outputs.non_src_changes == 'true' || needs.changes.outputs.sol_modified_added == 'true' }} - uses: zgosalvez/github-actions-report-lcov@a546f89a65a0cdcd82a92ae8d65e74d450ff3fbc # v4.1.4 - with: - update-comment: true - coverage-files: lcov.info.pruned - minimum-coverage: 97.6 - artifact-name: code-coverage-report - working-directory: ./contracts - github-token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/solidity-hardhat.yml b/.github/workflows/solidity-hardhat.yml deleted file mode 100644 index 4fb43ee962..0000000000 --- a/.github/workflows/solidity-hardhat.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: Solidity-Hardhat - -on: - merge_group: - push: - -env: - NODE_OPTIONS: --max_old_space_size=8192 - -defaults: - run: - shell: bash - -jobs: - changes: - name: Detect changes - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.changes.outputs.src }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: changes - with: - filters: | - src: - - 'contracts/src/!(v0.8/(ccip|functions|keystone|l2ep|liquiditymanager|llo-feeds|transmission|vrf)/**)/**/*' - - 'contracts/test/**/*' - - 'contracts/package.json' - - 'contracts/pnpm-lock.yaml' - - 'contracts/hardhat.config.ts' - - '.github/workflows/solidity-hardhat.yml' - -# hardhat-test: -# needs: [changes] -# if: needs.changes.outputs.changes == 'true' -# name: Solidity ${{ fromJSON('["(skipped)", ""]')[needs.changes.outputs.changes == 'true'] }} -# runs-on: ubuntu-latest -# steps: -# - name: Checkout the repo -# uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 -# - name: Setup NodeJS -# uses: ./.github/actions/setup-nodejs -# - name: Setup Hardhat -# uses: ./.github/actions/setup-hardhat -# with: -# namespace: coverage -# - name: Run tests -# working-directory: contracts -# run: pnpm test -# - name: Collect Metrics -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 -# with: -# id: hardhat-test -# org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} -# basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} -# continue-on-error: true -# -# solidity: -# needs: [changes, hardhat-test] -# name: Solidity -# runs-on: ubuntu-latest -# if: always() -# 
steps: -# - run: echo 'Solidity tests finished!' -# - name: Check test results -# run: | -# if [[ "${{ needs.changes.result }}" = "failure" || "${{ needs.solidity-splits.result }}" = "failure" ]]; then -# echo "One or more changes / solidity-splits jobs failed" -# exit 1 -# else -# echo "All test jobs passed successfully" -# fi -# - name: Collect Metrics -# if: always() -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 -# with: -# id: solidity-tests -# org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} -# basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} -# this-job-name: Solidity -# continue-on-error: true diff --git a/.github/workflows/solidity-wrappers.yml b/.github/workflows/solidity-wrappers.yml deleted file mode 100644 index 40737774d5..0000000000 --- a/.github/workflows/solidity-wrappers.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: Solidity Wrappers -# This is its own workflow file rather than being merged into "solidity.yml" to avoid over complicating the conditionals -# used for job execution. The jobs in "solidity.yml" are configured around push events, whereas -# we only want to generate gethwrappers during pull requests. -on: - pull_request: - types: - - opened - - synchronize - - reopened - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - changes: - # We don't directly merge dependabot PRs, so let's not waste the resources - if: ${{ github.actor != 'dependabot[bot]' }} - name: Detect changes - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.ch.outputs.changes }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Detect changes - id: ch - uses: ./.github/actions/detect-solidity-file-changes - - # On a pull request event, make updates to gethwrappers if there are changes. 
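The update-wrappers job below implements the regenerate-and-commit-back pattern for generated code. A generic, hedged sketch of that shape (the plain git commit step here is illustrative; the real job instead issues a token and commits through planetscale/ghcommit-action):

# Sketch only: detect changes, regenerate artifacts, push them back to the PR branch.
jobs:
  regenerate:
    needs: [changes]
    if: needs.changes.outputs.changes == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.head_ref }}    # commit to the PR branch, not the merge ref
      - run: make wrappers-all           # regenerate the committed artifacts
        working-directory: ./contracts
      - run: |
          # push only when regeneration actually changed something
          if ! git diff --quiet; then
            git config user.name "github-actions[bot]"
            git config user.email "github-actions[bot]@users.noreply.github.com"
            git add -A
            git commit -m "Update gethwrappers"
            git push
          fi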
- update-wrappers: - needs: [changes] - if: needs.changes.outputs.changes == 'true' - name: Update Wrappers - permissions: - actions: read - id-token: write - contents: read - runs-on: ubuntu22.04-8cores-32GB - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Setup Go - uses: ./.github/actions/setup-go - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - with: - prod: "true" - - - name: Run native compile and generate wrappers - run: make wrappers-all - working-directory: ./contracts - - - name: Assume role capable of dispatching action - uses: smartcontractkit/.github/actions/setup-github-token@ef78fa97bf3c77de6563db1175422703e9e6674f # setup-github-token@0.2.1 - id: get-gh-token - with: - aws-role-arn: ${{ secrets.AWS_OIDC_CCIP_CI_AUTO_PR_TOKEN_ISSUER_ROLE_ARN }} - aws-lambda-url: ${{ secrets.AWS_INFRA_RELENG_TOKEN_ISSUER_LAMBDA_URL }} - aws-region: ${{ secrets.AWS_REGION }} - - - name: Commit any wrapper changes - uses: planetscale/ghcommit-action@21a8cda29f55e5cc2cdae0cdbdd08e38dd148c25 # v0.1.37 - with: - commit_message: "Update gethwrappers" - repo: ${{ github.repository }} - branch: ${{ github.head_ref }} - file_pattern: "core/gethwrappers/**/generated/*.go core/gethwrappers/**/generated-wrapper-dependency-versions-do-not-edit.txt" - env: - GITHUB_TOKEN: ${{ steps.get-gh-token.outputs.access-token }} - - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: solidity-update-wrappers - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Update Wrappers - continue-on-error: true diff --git a/.github/workflows/solidity.yml b/.github/workflows/solidity.yml deleted file mode 100644 index 31833ab2eb..0000000000 --- a/.github/workflows/solidity.yml +++ /dev/null @@ -1,273 +0,0 @@ -name: Solidity - -on: - merge_group: - push: - -defaults: - run: - shell: bash - -jobs: - initialize: - name: Initialize - runs-on: ubuntu-latest - outputs: - is-release: ${{ steps.release-tag-check.outputs.is-release }} - is-pre-release: ${{ steps.release-tag-check.outputs.is-pre-release }} - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check release tag - id: release-tag-check - uses: smartcontractkit/chainlink-github-actions/release/release-tag-check@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - env: - # Match semver git tags with a "contracts-ccip/" prefix. 
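To illustrate how the two patterns (defined verbatim a few lines below) classify tags, a small hedged example with hypothetical tag names:

# Sketch only: release vs. pre-release classification with the regexes defined below.
- name: Demo tag classification
  shell: bash
  run: |
    RELEASE='^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+$'
    PRE_RELEASE='^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+-(.+)$'
    [[ "contracts-ccip/v1.2.3" =~ $RELEASE ]] && echo "contracts-ccip/v1.2.3 is a release"
    [[ "contracts-ccip/v1.2.3-beta.0" =~ $PRE_RELEASE ]] && echo "contracts-ccip/v1.2.3-beta.0 is a pre-release"
    [[ "v1.2.3" =~ $RELEASE ]] || echo "v1.2.3 lacks the prefix and is ignored"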
- RELEASE_REGEX: '^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+$' - PRE_RELEASE_REGEX: '^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+-(.+)$' - - readonly_changes: - name: Detect readonly solidity file changes - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.ch.outputs.changes }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Detect readonly solidity file changes - id: ch - uses: ./.github/actions/detect-solidity-readonly-file-changes - - changes: - name: Detect changes - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.ch.outputs.changes }} - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Detect changes - id: ch - uses: ./.github/actions/detect-solidity-file-changes - - tag-check: - needs: [changes] - name: Tag Check - runs-on: ubuntu-latest - outputs: - is-release: ${{ steps.release-tag-check.outputs.is-release }} - is-pre-release: ${{ steps.release-tag-check.outputs.is-pre-release }} - release-version: ${{ steps.release-tag-check.outputs.release-version }} - pre-release-version: ${{ steps.release-tag-check.outputs.pre-release-version }} - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Check release tag - id: release-tag-check - uses: smartcontractkit/chainlink-github-actions/release/release-tag-check@5dd916d08c03cb5f9a97304f4f174820421bb946 # v2.3.11 - env: - # Match semver git tags with a "contracts-ccip/" prefix. - RELEASE_REGEX: '^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+$' - PRE_RELEASE_REGEX: '^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+-(.+)$' - # Get the version by stripping the "contracts-ccip/v" prefix. - VERSION_PREFIX: 'contracts-ccip/v' - -# prepublish-test: -# needs: [changes, tag-check] -# if: needs.changes.outputs.changes == 'true' || needs.tag-check.outputs.is-pre-release == 'true' -# name: Prepublish Test -# runs-on: ubuntu-latest -# steps: -# - name: Checkout the repo -# uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 -# - name: Setup NodeJS -# uses: ./.github/actions/setup-nodejs -# - name: Run Prepublish test -# working-directory: contracts -# run: pnpm prepublishOnly -# - name: Collect Metrics -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@dea9b546553cb4ca936607c2267a09c004e4ab3f # v3.0.0 -# with: -# id: solidity-prepublish-test -# org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} -# basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} -# this-job-name: Prepublish Test -# continue-on-error: true - - native-compile: - needs: [changes, tag-check] - if: needs.changes.outputs.changes == 'true' || needs.tag-check.outputs.is-release == 'true' || needs.tag-check.outputs.is-pre-release == 'true' - name: Native Compilation - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Checkout diff-so-fancy - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - repository: so-fancy/diff-so-fancy - ref: a673cb4d2707f64d92b86498a2f5f71c8e2643d5 # v1.4.3 - path: diff-so-fancy - - name: Install diff-so-fancy - run: echo "$GITHUB_WORKSPACE/diff-so-fancy" >> $GITHUB_PATH - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - with: - prod: "true" - - name: Setup Go - uses: ./.github/actions/setup-go - - name: Run native compile and generate wrappers - run: make 
wrappers-all - working-directory: ./contracts - - name: Verify local solc binaries - run: ./tools/ci/check_solc_hashes - - name: Display git diff - if: ${{ needs.changes.outputs.changes == 'true' }} - run: git diff --minimal --color --exit-code | diff-so-fancy - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: solidity-native-compile - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Native Compilation - continue-on-error: true - - # The if statements for steps after the repo checkout are a workaround for - # passing the required check for PRs that don't have filtered changes. - lint: - defaults: - run: - working-directory: contracts - needs: [changes, tag-check] - if: needs.changes.outputs.changes == 'true' || needs.tag-check.outputs.is-release == 'true' || needs.tag-check.outputs.is-pre-release == 'true' - name: Solidity Lint - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - name: Run pnpm lint - run: pnpm lint - - name: Run solhint - run: pnpm solhint - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: solidity-lint - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Solidity Lint - continue-on-error: true - - prettier: - defaults: - run: - working-directory: contracts - needs: [changes, tag-check] - if: needs.changes.outputs.changes == 'true' || needs.tag-check.outputs.is-release == 'true' || needs.tag-check.outputs.is-pre-release == 'true' - name: Prettier Formatting - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - name: Run prettier check - run: pnpm prettier:check - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: solidity-prettier - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Prettier Formatting - continue-on-error: true - - publish-beta: - name: Publish Beta NPM - environment: publish-contracts - needs: [tag-check, changes, lint, prettier, native-compile] - if: needs.tag-check.outputs.is-pre-release == 'true' - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Version package.json - working-directory: contracts - run: | - echo "Bumping version to ${{ needs.tag-check.outputs.pre-release-version }}" - pnpm version ${{ needs.tag-check.outputs.pre-release-version }} --no-git-tag-version --no-commit-hooks --no-git-checks - - - name: Publish to NPM (beta) - uses: smartcontractkit/.github/actions/ci-publish-npm@4b0ab756abcb1760cb82e1e87b94ff431905bffc # ci-publish-npm@0.4.0 - with: - npm-token: ${{ secrets.NPM_TOKEN }} -
create-github-release: false - publish-command: "pnpm publish-beta --no-git-checks" - package-json-directory: contracts - - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ccip-solidity-publish-beta - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Publish Beta NPM - continue-on-error: true - - publish-prod: - name: Publish Prod NPM - environment: publish-contracts - needs: [tag-check, changes, lint, prettier, native-compile] - if: needs.tag-check.outputs.is-release == 'true' - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - name: Checkout the repo - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - - name: Setup NodeJS - uses: ./.github/actions/setup-nodejs - - - name: Validate version - working-directory: contracts - run: | - PACKAGE_JSON_VERSION="$(cat package.json | jq -r '.version')" - if [ "$PACKAGE_JSON_VERSION" != "${{ needs.tag-check.outputs.release-version }}" ]; then - echo "::error::version mismatch: package.json version ($PACKAGE_JSON_VERSION) does not match version computed from tag ${{ needs.tag-check.outputs.release-version }}" - exit 1 - fi - - - name: Publish to NPM (latest) - uses: smartcontractkit/.github/actions/ci-publish-npm@4b0ab756abcb1760cb82e1e87b94ff431905bffc # ci-publish-npm@0.4.0 - with: - npm-token: ${{ secrets.NPM_TOKEN }} - create-github-release: false - publish-command: "pnpm publish-prod --no-git-checks" - package-json-directory: contracts - - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: ccip-solidity-publish-prod - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Publish Prod NPM - continue-on-error: true diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml deleted file mode 100644 index de7ef871b3..0000000000 --- a/.github/workflows/stale.yml +++ /dev/null @@ -1,25 +0,0 @@ -# Workflow to manage automatically closing stale pull requests and issues. -# See the `with` block below for the configuration. -name: Manage stale Issues and PRs - -on: - schedule: - - cron: "0 0 * * *" # Will be triggered every day at midnight UTC - -jobs: - stale: - - runs-on: ubuntu-latest - permissions: - issues: write - pull-requests: write - - steps: - - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - days-before-issue-stale: -1 # disables marking issues as stale automatically. Issues can still be marked as stale manually, in which case the closure policy applies. - stale-pr-message: 'This PR is stale because it has been open 45 days with no activity. Remove stale label or comment or this will be closed in 10 days.' - close-pr-message: 'This PR was closed because it has been stalled for 10 days with no activity.'
- days-before-pr-stale: 45 - days-before-pr-close: 10 diff --git a/.github/workflows/sync-develop-from-smartcontractkit-chainlink.yml b/.github/workflows/sync-develop-from-smartcontractkit-chainlink.yml deleted file mode 100644 index 05b7365eba..0000000000 --- a/.github/workflows/sync-develop-from-smartcontractkit-chainlink.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Sync develop from smartcontractkit/chainlink - -on: - schedule: - # * is a special character in YAML so you have to quote this string - - cron: '*/30 * * * *' - -jobs: - sync: - name: Sync - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - with: - ref: develop - if: env.GITHUB_REPOSITORY != 'smartcontractkit/chainlink' - - name: Sync - run: | - git remote add upstream "https://github.com/smartcontractkit/chainlink.git" - COMMIT_HASH_UPSTREAM=$(git ls-remote upstream develop | grep -P '^[0-9a-f]{40}\trefs/heads/develop$' | cut -f 1) - COMMIT_HASH_ORIGIN=$(git ls-remote origin develop | grep -P '^[0-9a-f]{40}\trefs/heads/develop$' | cut -f 1) - if [ "$COMMIT_HASH_UPSTREAM" = "$COMMIT_HASH_ORIGIN" ]; then - echo "Both remotes have develop at $COMMIT_HASH_UPSTREAM. No need to sync." - else - echo "upstream has develop at $COMMIT_HASH_UPSTREAM. origin has develop at $COMMIT_HASH_ORIGIN. Syncing..." - git fetch upstream - git push origin upstream/develop:develop - fi - if: env.GITHUB_REPOSITORY != 'smartcontractkit/chainlink' - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@d9da21a2747016b3e13de58c7d4115a3d5c97935 # v3.0.1 - with: - id: sync-develop - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Sync - continue-on-error: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000000..5f535d2ef2 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,62 @@ +name: solidity + +on: push + +env: + FOUNDRY_PROFILE: ci + +jobs: + tests: + strategy: + fail-fast: false + matrix: + product: [ccip] + name: Foundry Tests ${{ matrix.product }} + # See https://github.com/foundry-rs/foundry/issues/3827 + runs-on: ubuntu-22.04 + + # The if statements for steps after the repo checkout are a workaround for + # passing the required check for PRs that don't have filtered changes. + steps: + - name: Checkout the repo + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + submodules: recursive + + # Only needed because we use the NPM versions of packages + # and not native Foundry. This is to make sure the dependencies + # stay in sync. + - name: Setup NodeJS + uses: ./.github/actions/setup-nodejs + + - name: Install Foundry + uses: foundry-rs/foundry-toolchain@v1 + with: + # Has to match the `make foundry` version. + version: nightly-2cb875799419c907cc3709e586ece2559e6b340e + + - name: Run Forge build + run: | + forge --version + forge build + id: build + working-directory: contracts + env: + FOUNDRY_PROFILE: ${{ matrix.product }} + + - name: Run Forge tests + run: | + forge test -vvv + id: test + working-directory: contracts + env: + FOUNDRY_PROFILE: ${{ matrix.product }} + + - name: Run Forge snapshot + # This workflow defines no `changes` job, so the condition only needs to + # exclude the products whose snapshots are not checked here. + if: ${{ !contains(fromJson('["vrf"]'), matrix.product) && !contains(fromJson('["automation"]'), matrix.product) }} + run: | + forge snapshot --nmt "testFuzz_\w{1,}?" --check gas-snapshots/${{ matrix.product }}.gas-snapshot + id: snapshot + working-directory: contracts + env: + FOUNDRY_PROFILE: ${{ matrix.product }}
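One detail of the snapshot step above worth spelling out: `--nmt` ("no match test") excludes every test whose name matches the regex, and `testFuzz_\w{1,}?` targets Foundry's fuzz-test naming convention, since fuzzed inputs make gas numbers unstable across runs. A minimal sketch of the convention the filter keys on (the counter contract is purely illustrative, not part of this PR):

```solidity
// Illustrative only: shows which test names the snapshot filter includes/excludes.
pragma solidity ^0.8.0;

import {Test} from "forge-std/Test.sol";

contract SnapshotNamingTest is Test {
  uint256 internal s_total;

  // Deterministic gas: `forge snapshot` records this test in
  // gas-snapshots/<product>.gas-snapshot and `--check` compares it in CI.
  function test_AddFixed() public {
    s_total += 42;
  }

  // Matches the `testFuzz_\w{1,}?` regex, so `--nmt` drops it from the
  // snapshot: fuzzed inputs would make the gas figure differ on every run.
  function testFuzz_AddFuzzed(uint96 amount) public {
    s_total += amount;
  }
}
```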
diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..744ebfdfe5 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,9 @@ +[submodule "contracts/foundry-lib/solidity-utils"] + path = contracts/foundry-lib/solidity-utils + url = https://github.com/bgd-labs/solidity-utils +[submodule "contracts/foundry-lib/gho-core"] + path = contracts/foundry-lib/gho-core + url = https://github.com/aave/gho-core +[submodule "contracts/foundry-lib/forge-std"] + path = contracts/foundry-lib/forge-std + url = https://github.com/foundry-rs/forge-std diff --git a/certora/Makefile b/certora/Makefile new file mode 100644 index 0000000000..0e33459cab --- /dev/null +++ b/certora/Makefile @@ -0,0 +1,24 @@ +default: help + +PATCH = applyHarness.patch +CONTRACTS_DIR = ../contracts +MUNGED_DIR = munged + +help: + @echo "usage:" + @echo " make clean: remove all generated files (those ignored by git)" + @echo " make $(MUNGED_DIR): create $(MUNGED_DIR) directory by applying the patch file to $(CONTRACTS_DIR)" + @echo " make record: record a new patch file capturing the differences between $(CONTRACTS_DIR) and $(MUNGED_DIR)" + +munged: $(wildcard $(CONTRACTS_DIR)/*.sol) $(PATCH) + rm -rf $@ + cp -r $(CONTRACTS_DIR) $@ + patch -p0 -d $@ < $(PATCH) + +record: + diff -ruN $(CONTRACTS_DIR) $(MUNGED_DIR) | sed 's+\.\./contracts/++g' | sed 's+munged/++g' > $(PATCH) + +clean: + git clean -fdX + touch $(PATCH) + diff --git a/certora/confs/ccip.conf b/certora/confs/ccip.conf new file mode 100644 index 0000000000..68f75b2636 --- /dev/null +++ b/certora/confs/ccip.conf @@ -0,0 +1,21 @@ +{ + "files": [ + "contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol", + "certora/harness/SimpleERC20.sol" + ], + "packages": [ + "solidity-utils/=contracts/foundry-lib/solidity-utils/src/" + ], + "link": [ + "UpgradeableLockReleaseTokenPool:i_token=SimpleERC20" + ], + "optimistic_loop": true, + "process": "emv", + "prover_args": ["-depth 10","-mediumTimeout 700"], + "smt_timeout": "600", + "solc": "solc8.20", + "verify": "UpgradeableLockReleaseTokenPool:certora/specs/ccip.spec", + "rule_sanity": "basic", + "msg": "CCIP" +} + diff --git a/certora/harness/SimpleERC20.sol b/certora/harness/SimpleERC20.sol new file mode 100644 index 0000000000..f9d14a7ff6 --- /dev/null +++ b/certora/harness/SimpleERC20.sol @@ -0,0 +1,58 @@ +// SPDX-License-Identifier: agpl-3.0 +pragma solidity ^0.8.0; + +import {IERC20} from "../../contracts/src/v0.8/vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; + +/** +A simple ERC20 implementation used as the underlying asset for the verification process.
+ */ +contract SimpleERC20 is IERC20 { + uint256 t; + mapping(address => uint256) b; + mapping(address => mapping(address => uint256)) a; + + // Overflow-checked addition; parameters are named x/y so they do not shadow the state variables above. + function add(uint256 x, uint256 y) internal pure returns (uint256) { + uint256 z = x + y; + require(z >= x); + return z; + } + + // Underflow-checked subtraction. + function sub(uint256 x, uint256 y) internal pure returns (uint256) { + require(x >= y); + return x - y; + } + + function totalSupply() external view override returns (uint256) { + return t; + } + + function balanceOf(address account) external view override returns (uint256) { + return b[account]; + } + + function transfer(address recipient, uint256 amount) external override returns (bool) { + b[msg.sender] = sub(b[msg.sender], amount); + b[recipient] = add(b[recipient], amount); + return true; + } + + function allowance(address owner, address spender) external view override returns (uint256) { + return a[owner][spender]; + } + + function approve(address spender, uint256 amount) external override returns (bool) { + a[msg.sender][spender] = amount; + return true; + } + + function transferFrom( + address sender, + address recipient, + uint256 amount + ) external override returns (bool) { + b[sender] = sub(b[sender], amount); + b[recipient] = add(b[recipient], amount); + a[sender][msg.sender] = sub(a[sender][msg.sender], amount); + return true; + } +} diff --git a/certora/munged/.gitignore b/certora/munged/.gitignore new file mode 100644 index 0000000000..d6b7ef32c8 --- /dev/null +++ b/certora/munged/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/certora/specs/ccip.spec b/certora/specs/ccip.spec new file mode 100644 index 0000000000..4d7200c7f5 --- /dev/null +++ b/certora/specs/ccip.spec @@ -0,0 +1,119 @@ +/* + This is a Specification File for Smart Contract Verification with the Certora Prover. + Contract name: UpgradeableLockReleaseTokenPool +*/ + +using SimpleERC20 as erc20; + +methods { + function getCurrentBridgedAmount() external returns (uint256) envfree; + function getBridgeLimit() external returns (uint256) envfree; + function owner() external returns (address) envfree; +} + + +rule sanity { + env e; + calldataarg arg; + method f; + f(e, arg); + satisfy true; +} + + + +/* ============================================================================== + invariant: currentBridge_LEQ_bridgeLimit. + Description: The value of s_currentBridged is less than or equal to (LEQ) the value of s_bridgeLimit. + Note: this may be violated if one calls setBridgeLimit(newBridgeLimit) with + newBridgeLimit < s_currentBridged. + ============================================================================*/ +invariant currentBridge_LEQ_bridgeLimit() + getCurrentBridgedAmount() <= getBridgeLimit() + filtered { f -> + !f.isView && + f.selector != sig:setBridgeLimit(uint256).selector} + { + preserved initialize(address owner, address[] allowlist, address router, uint256 bridgeLimit) with (env e2) { + require getCurrentBridgedAmount()==0; + } + }
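The invariant above is the heart of the spec: nothing except `setBridgeLimit` may ever leave `s_currentBridged` above `s_bridgeLimit`. The same property can be sanity-checked outside the prover with a plain Foundry test; here is a sketch against a stripped-down model of the bookkeeping (the model is NOT the real pool, which would need a full onRamp fixture to drive `lockOrBurn`):

```solidity
// Sketch: a minimal model of the pool's bridge-limit accounting, used only to
// illustrate the Certora invariant and its documented exception.
pragma solidity ^0.8.0;

import {Test} from "forge-std/Test.sol";

contract BridgeLimitModel {
  uint256 public bridgeLimit;
  uint256 public currentBridged;

  constructor(uint256 limit) {
    bridgeLimit = limit;
  }

  // Mirrors the accounting in lockOrBurn: accumulate, then bound-check.
  function lock(uint256 amount) external {
    if ((currentBridged += amount) > bridgeLimit) revert("BridgeLimitExceeded");
  }

  // Unguarded, exactly like the real setter: it may drop below currentBridged.
  function setBridgeLimit(uint256 newLimit) external {
    bridgeLimit = newLimit;
  }
}

contract BridgeLimitInvariantTest is Test {
  function test_InvariantHoldsAfterLock() public {
    BridgeLimitModel m = new BridgeLimitModel(100);
    m.lock(60);
    assertLe(m.currentBridged(), m.bridgeLimit());
  }

  function test_SetBridgeLimitCanViolateInvariant() public {
    // The documented exception: lowering the limit below the bridged amount.
    BridgeLimitModel m = new BridgeLimitModel(100);
    m.lock(60);
    m.setBridgeLimit(10);
    assertGt(m.currentBridged(), m.bridgeLimit());
  }
}
```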
+ ============================================================================*/ +rule withdrawLiquidity_correctness(env e) { + uint256 amount; + + require e.msg.sender != currentContract; + uint256 bal_before = erc20.balanceOf(e, currentContract); + withdrawLiquidity(e, amount); + uint256 bal_after = erc20.balanceOf(e, currentContract); + + assert (to_mathint(bal_after) == bal_before - amount); +} + + +/* ============================================================================== + rule: provideLiquidity_correctness + description: The rule checks that the balance of the contract is as expected. + ============================================================================*/ +rule provideLiquidity_correctness(env e) { + uint256 amount; + + require e.msg.sender != currentContract; + uint256 bal_before = erc20.balanceOf(e, currentContract); + provideLiquidity(e, amount); + uint256 bal_after = erc20.balanceOf(e, currentContract); + + assert (to_mathint(bal_after) == bal_before + amount); +} + + +/* ============================================================================== + rule: only_lockOrBurn_can_increase_currentBridged + ============================================================================*/ +rule only_lockOrBurn_can_increase_currentBridged(env e) { + method f; + calldataarg args; + + uint256 curr_bridge_before = getCurrentBridgedAmount(); + f (e,args); + uint256 curr_bridge_after = getCurrentBridgedAmount(); + + assert + curr_bridge_after > curr_bridge_before => + f.selector==sig:lockOrBurn(Pool.LockOrBurnInV1).selector; +} + + +/* ============================================================================== + rule: only_releaseOrMint_can_deccrease_currentBridged + ============================================================================*/ +rule only_releaseOrMint_can_decrease_currentBridged(env e) { + method f; + calldataarg args; + + uint256 curr_bridge_before = getCurrentBridgedAmount(); + f (e,args); + uint256 curr_bridge_after = getCurrentBridgedAmount(); + + assert + curr_bridge_after < curr_bridge_before => + f.selector==sig:releaseOrMint(Pool.ReleaseOrMintInV1).selector; +} + + +/* ============================================================================== + rule: only_bridgeLimitAdmin_or_owner_can_call_setBridgeLimit + ============================================================================*/ +rule only_bridgeLimitAdmin_or_owner_can_call_setBridgeLimit(env e) { + uint256 newBridgeLimit; + + setBridgeLimit(e, newBridgeLimit); + + assert e.msg.sender==getBridgeLimitAdmin(e) || e.msg.sender==owner(); +} + diff --git a/contracts/foundry-lib/forge-std b/contracts/foundry-lib/forge-std new file mode 160000 index 0000000000..8f24d6b04c --- /dev/null +++ b/contracts/foundry-lib/forge-std @@ -0,0 +1 @@ +Subproject commit 8f24d6b04c92975e0795b5868aa0d783251cdeaa diff --git a/contracts/foundry-lib/gho-core b/contracts/foundry-lib/gho-core new file mode 160000 index 0000000000..0a6fbd4d41 --- /dev/null +++ b/contracts/foundry-lib/gho-core @@ -0,0 +1 @@ +Subproject commit 0a6fbd4d4167f6e0e777aec0ae12bf4f18b4b0a8 diff --git a/contracts/foundry-lib/solidity-utils b/contracts/foundry-lib/solidity-utils new file mode 160000 index 0000000000..9d4d041562 --- /dev/null +++ b/contracts/foundry-lib/solidity-utils @@ -0,0 +1 @@ +Subproject commit 9d4d041562f7ac2918e216e2e7c74172afe3d2af diff --git a/contracts/foundry.toml b/contracts/foundry.toml index 501390eb80..3194c0a6e2 100644 --- a/contracts/foundry.toml +++ b/contracts/foundry.toml @@ -6,8 +6,8 @@ optimizer_runs = 
1_000_000 src = 'src/v0.8' test = 'test/v0.8' out = 'foundry-artifacts' -cache_path = 'foundry-cache' -libs = ['node_modules'] +cache_path = 'foundry-cache' +libs = ["node_modules", "foundry-lib"] bytecode_hash = "none" ffi = false @@ -26,14 +26,14 @@ single_line_statement_blocks = "preserve" solc_version = '0.8.24' src = 'src/v0.8/ccip' test = 'src/v0.8/ccip/test' -optimizer_runs = 800 +optimizer_runs = 800 evm_version = 'paris' [profile.functions] solc_version = '0.8.19' src = 'src/v0.8/functions/dev/v1_X' test = 'src/v0.8/functions/tests/v1_X' -gas_price = 3_000_000_000 # 3 gwei +gas_price = 3_000_000_000 # 3 gwei [profile.vrf] optimizer_runs = 1_000 diff --git a/contracts/remappings.txt b/contracts/remappings.txt index ec64b1b211..20db4bb047 100644 --- a/contracts/remappings.txt +++ b/contracts/remappings.txt @@ -1,7 +1,24 @@ -forge-std/=src/v0.8/vendor/forge-std/src/ - -@openzeppelin/=node_modules/@openzeppelin/ +@aave/=foundry-lib/gho-core/lib/aave-token/ +@aave-gho-core/=foundry-lib/gho-core/src/contracts/ +@aave/core-v3/=foundry-lib/gho-core/lib/aave-v3-core/ +@aave/periphery-v3/=foundry-lib/gho-core/lib/aave-v3-periphery/ @arbitrum/=node_modules/@arbitrum/ -hardhat/=node_modules/hardhat/ @eth-optimism/=node_modules/@eth-optimism/ +@openzeppelin/=node_modules/@openzeppelin/ @scroll-tech/=node_modules/@scroll-tech/ +aave-address-book/=foundry-lib/gho-core/lib/aave-address-book/src/ +aave-helpers/=foundry-lib/gho-core/lib/aave-stk-v1-5/lib/aave-helpers/ +aave-stk-v1-5/=foundry-lib/gho-core/lib/aave-stk-v1-5/ +aave-token/=foundry-lib/gho-core/lib/aave-token/contracts/ +aave-v3-core/=foundry-lib/gho-core/lib/aave-v3-core/ +aave-v3-periphery/=foundry-lib/gho-core/lib/aave-v3-periphery/contracts/ +ds-test/=foundry-lib/solidity-utils/lib/forge-std/lib/ds-test/src/ +erc4626-tests/=foundry-lib/gho-core/lib/aave-stk-v1-5/lib/openzeppelin-contracts/lib/erc4626-tests/ +eth-gas-reporter/=foundry-lib/gho-core/node_modules/eth-gas-reporter/ +forge-std/=foundry-lib/forge-std/src/ +gho-core/=foundry-lib/gho-core/ +hardhat-deploy/=foundry-lib/gho-core/node_modules/hardhat-deploy/ +hardhat/=node_modules/hardhat/ +openzeppelin-contracts/=foundry-lib/gho-core/lib/openzeppelin-contracts/ +safety-module/=foundry-lib/gho-core/lib/safety-module/contracts/ +solidity-utils/=foundry-lib/solidity-utils/src/ diff --git a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol new file mode 100644 index 0000000000..287cb13e9c --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol @@ -0,0 +1,57 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; +import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; +import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; + +import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +import {IRouter} from "../../interfaces/IRouter.sol"; + +/// @title UpgradeableBurnMintTokenPool +/// @author Aave Labs +/// @notice Upgradeable version of Chainlink's CCIP BurnMintTokenPool +/// @dev Contract adaptations: +/// - Implementation of Initializable to allow upgrades +/// - Move of allowlist and router definition to initialization stage +/// - Inclusion of rate limit admin who may configure rate limits in addition to 
owner +contract UpgradeableBurnMintTokenPool is Initializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { + string public constant override typeAndVersion = "BurnMintTokenPool 1.5.0"; + + /// @dev Constructor + /// @param token The bridgeable token that is managed by this pool. + /// @param rmnProxy The address of the rmn proxy + /// @param allowlistEnabled True if pool is set to access-controlled mode, false otherwise + constructor( + address token, + address rmnProxy, + bool allowlistEnabled + ) UpgradeableTokenPool(IBurnMintERC20(token), rmnProxy, allowlistEnabled) { + _disableInitializers(); + } + + /// @dev Initializer + /// @dev The address passed as `owner_` must accept ownership after initialization. + /// @dev The `allowlist` is only effective if pool is set to access-controlled mode + /// @param owner_ The address of the owner + /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders + /// @param router The address of the router + function initialize(address owner_, address[] memory allowlist, address router) public virtual initializer { + if (owner_ == address(0) || router == address(0)) revert ZeroAddressNotAllowed(); + _transferOwnership(owner_); + + s_router = IRouter(router); + + // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. + if (i_allowlistEnabled) { + _applyAllowListUpdates(new address[](0), allowlist); + } + } + + /// @inheritdoc UpgradeableBurnMintTokenPoolAbstract + function _burn(uint256 amount) internal virtual override { + IBurnMintERC20(address(i_token)).burn(amount); + } +} diff --git a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol new file mode 100644 index 0000000000..0e73ccc1d6 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol @@ -0,0 +1,43 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; + +import {Pool} from "../../libraries/Pool.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +abstract contract UpgradeableBurnMintTokenPoolAbstract is UpgradeableTokenPool { + /// @notice Contains the specific burn call for a pool. + /// @dev overriding this method allows us to create pools with different burn signatures + /// without duplicating the underlying logic. 
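The `_burn` hook declared just below is what lets concrete pools swap burn signatures without touching the lock/release logic. As a hypothetical sketch of the pattern, not part of this PR: a variant that burns via `burnFrom(address,uint256)` instead of `burn(uint256)`, assuming the token exposes that entry point (Chainlink's `IBurnMintERC20` does, and the non-upgradeable codebase ships a `BurnFromMintTokenPool` along these lines):

```solidity
// Hypothetical variant for illustration; import paths assume it sits in pools/GHO/.
pragma solidity ^0.8.0;

import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol";
import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol";

abstract contract UpgradeableBurnFromMintTokenPoolSketch is UpgradeableBurnMintTokenPoolAbstract {
  /// @inheritdoc UpgradeableBurnMintTokenPoolAbstract
  /// @dev Burns via burnFrom(address,uint256) instead of burn(uint256); validation,
  /// events, and accounting are all inherited unchanged from the abstract parent.
  function _burn(uint256 amount) internal virtual override {
    IBurnMintERC20(address(i_token)).burnFrom(address(this), amount);
  }
}
```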
+ function _burn(uint256 amount) internal virtual; + + /// @notice Burn the token in the pool + /// @dev The _validateLockOrBurn check is an essential security check + function lockOrBurn( + Pool.LockOrBurnInV1 calldata lockOrBurnIn + ) external virtual override returns (Pool.LockOrBurnOutV1 memory) { + _validateLockOrBurn(lockOrBurnIn); + + _burn(lockOrBurnIn.amount); + + emit Burned(msg.sender, lockOrBurnIn.amount); + + return Pool.LockOrBurnOutV1({destTokenAddress: getRemoteToken(lockOrBurnIn.remoteChainSelector), destPoolData: ""}); + } + + /// @notice Mint tokens from the pool to the recipient + /// @dev The _validateReleaseOrMint check is an essential security check + function releaseOrMint( + Pool.ReleaseOrMintInV1 calldata releaseOrMintIn + ) external virtual override returns (Pool.ReleaseOrMintOutV1 memory) { + _validateReleaseOrMint(releaseOrMintIn); + + // Mint to the receiver + IBurnMintERC20(address(i_token)).mint(releaseOrMintIn.receiver, releaseOrMintIn.amount); + + emit Minted(msg.sender, releaseOrMintIn.receiver, releaseOrMintIn.amount); + + return Pool.ReleaseOrMintOutV1({destinationAmount: releaseOrMintIn.amount}); + } +} diff --git a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol new file mode 100644 index 0000000000..03e929ceb5 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol @@ -0,0 +1,220 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; + +import {ILiquidityContainer} from "../../../liquiditymanager/interfaces/ILiquidityContainer.sol"; +import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; + +import {Pool} from "../../libraries/Pool.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {SafeERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; +import {IRouter} from "../../interfaces/IRouter.sol"; + +/// @title UpgradeableLockReleaseTokenPool +/// @author Aave Labs +/// @notice Upgradeable version of Chainlink's CCIP LockReleaseTokenPool +/// @dev Contract adaptations: +/// - Implementation of Initializable to allow upgrades +/// - Move of allowlist and router definition to initialization stage +/// - Addition of a bridge limit to regulate the maximum amount of tokens that can be transferred out (burned/locked) +contract UpgradeableLockReleaseTokenPool is Initializable, UpgradeableTokenPool, ILiquidityContainer, ITypeAndVersion { + using SafeERC20 for IERC20; + + error InsufficientLiquidity(); + error LiquidityNotAccepted(); + error BridgeLimitExceeded(uint256 bridgeLimit); + error NotEnoughBridgedAmount(); + + event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); + event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); + + event LiquidityTransferred(address indexed from, uint256 amount); + + string public constant override typeAndVersion = "LockReleaseTokenPool 1.5.0"; + + /// @dev Whether or not the pool accepts liquidity. 
+ /// External liquidity is not required when there is one canonical token deployed to a chain, + /// and CCIP is facilitating mint/burn on all the other chains, in which case the invariant + /// balanceOf(pool) on home chain >= sum(totalSupply(mint/burn "wrapped" token) on all remote chains) should always hold + bool internal immutable i_acceptLiquidity; + /// @notice The address of the rebalancer. + address internal s_rebalancer; + + /// @notice Maximum amount of tokens that can be bridged to other chains + uint256 private s_bridgeLimit; + /// @notice Amount of tokens bridged (transferred out) + /// @dev Must always be equal to or below the bridge limit + uint256 private s_currentBridged; + /// @notice The address of the bridge limit admin. + /// @dev Can be address(0) if none is configured. + address internal s_bridgeLimitAdmin; + + /// @dev Constructor + /// @param token The bridgeable token that is managed by this pool. + /// @param rmnProxy The address of the rmn proxy + /// @param allowListEnabled True if pool is set to access-controlled mode, false otherwise + /// @param acceptLiquidity True if the pool accepts liquidity, false otherwise + constructor( + address token, + address rmnProxy, + bool allowListEnabled, + bool acceptLiquidity + ) UpgradeableTokenPool(IERC20(token), rmnProxy, allowListEnabled) { + i_acceptLiquidity = acceptLiquidity; + } + + /// @dev Initializer + /// @dev The address passed as `owner_` must accept ownership after initialization. + /// @dev The `allowlist` is only effective if pool is set to access-controlled mode + /// @param owner_ The address of the owner + /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders + /// @param router The address of the router + /// @param bridgeLimit The maximum amount of tokens that can be bridged to other chains + function initialize( + address owner_, + address[] memory allowlist, + address router, + uint256 bridgeLimit + ) public initializer { + if (router == address(0) || owner_ == address(0)) revert ZeroAddressNotAllowed(); + + _transferOwnership(owner_); + s_router = IRouter(router); + if (i_allowlistEnabled) _applyAllowListUpdates(new address[](0), allowlist); + s_bridgeLimit = bridgeLimit; + } + + /// @notice Locks the token in the pool + /// @dev The _validateLockOrBurn check is an essential security check + function lockOrBurn( + Pool.LockOrBurnInV1 calldata lockOrBurnIn + ) external virtual override returns (Pool.LockOrBurnOutV1 memory) { + // Increase bridged amount because tokens are leaving the source chain + if ((s_currentBridged += lockOrBurnIn.amount) > s_bridgeLimit) revert BridgeLimitExceeded(s_bridgeLimit); + _validateLockOrBurn(lockOrBurnIn); + + emit Locked(msg.sender, lockOrBurnIn.amount); + + return Pool.LockOrBurnOutV1({destTokenAddress: getRemoteToken(lockOrBurnIn.remoteChainSelector), destPoolData: ""}); + }
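A small idiom in `lockOrBurn` above deserves a note: Solidity assignments are expressions, so `(s_currentBridged += lockOrBurnIn.amount)` both writes the new running total to storage and yields that value for the comparison, folding the accounting and the limit check into a single statement. A standalone illustration (contract and names are illustrative only):

```solidity
// Minimal illustration of the assignment-as-expression idiom used in lockOrBurn.
pragma solidity ^0.8.0;

contract AccumulateAndCheck {
  error LimitExceeded(uint256 limit);

  uint256 public total;
  uint256 public constant LIMIT = 100;

  function addChecked(uint256 amount) external {
    // `total += amount` evaluates to the updated value of `total`, so the
    // running sum is updated and bounds-checked in one expression. Overflow
    // in the addition itself reverts under Solidity >=0.8 checked arithmetic.
    if ((total += amount) > LIMIT) revert LimitExceeded(LIMIT);
  }
}
```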
+ /// @notice Release tokens from the pool to the recipient + /// @dev The _validateReleaseOrMint check is an essential security check + function releaseOrMint( + Pool.ReleaseOrMintInV1 calldata releaseOrMintIn + ) external virtual override returns (Pool.ReleaseOrMintOutV1 memory) { + _validateReleaseOrMint(releaseOrMintIn); + + // This should never occur: the amount should never exceed the current bridged amount + if (releaseOrMintIn.amount > s_currentBridged) revert NotEnoughBridgedAmount(); + // Reduce bridged amount because tokens are back to source chain + s_currentBridged -= releaseOrMintIn.amount; + + // Release to the recipient + getToken().safeTransfer(releaseOrMintIn.receiver, releaseOrMintIn.amount); + + emit Released(msg.sender, releaseOrMintIn.receiver, releaseOrMintIn.amount); + + return Pool.ReleaseOrMintOutV1({destinationAmount: releaseOrMintIn.amount}); + } + + /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out + /// @dev Only callable by the owner or the bridge limit admin. + /// @dev Bridge limit changes should be carefully managed, especially when reducing below the current bridged amount + /// @param newBridgeLimit The new bridge limit + function setBridgeLimit(uint256 newBridgeLimit) external { + if (msg.sender != s_bridgeLimitAdmin && msg.sender != owner()) revert Unauthorized(msg.sender); + uint256 oldBridgeLimit = s_bridgeLimit; + s_bridgeLimit = newBridgeLimit; + emit BridgeLimitUpdated(oldBridgeLimit, newBridgeLimit); + } + + /// @notice Sets the bridge limit admin address. + /// @dev Only callable by the owner. + /// @param bridgeLimitAdmin The new bridge limit admin address. + function setBridgeLimitAdmin(address bridgeLimitAdmin) external onlyOwner { + address oldAdmin = s_bridgeLimitAdmin; + s_bridgeLimitAdmin = bridgeLimitAdmin; + emit BridgeLimitAdminUpdated(oldAdmin, bridgeLimitAdmin); + } + + /// @notice Gets the bridge limit + /// @return The maximum amount of tokens that can be transferred out to other chains + function getBridgeLimit() external view virtual returns (uint256) { + return s_bridgeLimit; + } + + /// @notice Gets the current bridged amount to other chains + /// @return The amount of tokens transferred out to other chains + function getCurrentBridgedAmount() external view virtual returns (uint256) { + return s_currentBridged; + } + + /// @inheritdoc IERC165 + function supportsInterface(bytes4 interfaceId) public pure virtual override returns (bool) { + return interfaceId == type(ILiquidityContainer).interfaceId || super.supportsInterface(interfaceId); + } + + /// @notice Gets LiquidityManager, can be address(0) if none is configured. + /// @return The current liquidity manager. + function getRebalancer() external view returns (address) { + return s_rebalancer; + } + + /// @notice Gets the bridge limit admin address. + function getBridgeLimitAdmin() external view returns (address) { + return s_bridgeLimitAdmin; + } + + /// @notice Sets the LiquidityManager address. + /// @dev Only callable by the owner. + function setRebalancer(address rebalancer) external onlyOwner { + s_rebalancer = rebalancer; + } + + /// @notice Checks if the pool can accept liquidity. + /// @return true if the pool can accept liquidity, false otherwise. + function canAcceptLiquidity() external view returns (bool) { + return i_acceptLiquidity; + } + + /// @notice Adds liquidity to the pool. The tokens should be approved first. + /// @param amount The amount of liquidity to provide. + function provideLiquidity(uint256 amount) external { + if (!i_acceptLiquidity) revert LiquidityNotAccepted(); + if (s_rebalancer != msg.sender) revert Unauthorized(msg.sender); + + i_token.safeTransferFrom(msg.sender, address(this), amount); + emit LiquidityAdded(msg.sender, amount); + }
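Because `provideLiquidity` above pulls funds with `safeTransferFrom`, the rebalancer has to approve the pool first; its counterpart `withdrawLiquidity` appears just below. A usage sketch under stated assumptions: the contract below has already been set as the pool's rebalancer via `setRebalancer`, the pool was deployed with `acceptLiquidity = true`, and the pool/token handles are hypothetical:

```solidity
// Usage sketch only; assumes this contract is the configured rebalancer.
pragma solidity ^0.8.0;

import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol";
import {UpgradeableLockReleaseTokenPool} from "./UpgradeableLockReleaseTokenPool.sol";

contract RebalancerSketch {
  UpgradeableLockReleaseTokenPool internal immutable i_pool;
  IERC20 internal immutable i_token;

  constructor(UpgradeableLockReleaseTokenPool pool, IERC20 token) {
    i_pool = pool;
    i_token = token;
  }

  // Approve first, then provide: provideLiquidity pulls via safeTransferFrom.
  function topUp(uint256 amount) external {
    i_token.approve(address(i_pool), amount);
    i_pool.provideLiquidity(amount);
  }

  // Reverts with InsufficientLiquidity if the pool holds less than `amount`;
  // the tokens are sent back to this contract (the pool call's msg.sender).
  function drain(uint256 amount) external {
    i_pool.withdrawLiquidity(amount);
  }
}
```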
+ /// @notice Removes liquidity from the pool. The tokens are sent to msg.sender. + /// @param amount The amount of liquidity to remove. + function withdrawLiquidity(uint256 amount) external { + if (s_rebalancer != msg.sender) revert Unauthorized(msg.sender); + + if (i_token.balanceOf(address(this)) < amount) revert InsufficientLiquidity(); + i_token.safeTransfer(msg.sender, amount); + emit LiquidityRemoved(msg.sender, amount); + } + + /// @notice This function can be used to transfer liquidity from an older version of the pool to this pool. To do so + /// this pool will have to be set as the rebalancer in the older version of the pool. This allows it to transfer the + /// funds in the old pool to the new pool. + /// @dev When upgrading a LockRelease pool, this function can be called at the same time as the pool is changed in the + /// TokenAdminRegistry. This allows for a smooth transition of both liquidity and transactions to the new pool. + /// Alternatively, when no multicall is available, a portion of the funds can be transferred to the new pool before + /// changing which pool CCIP uses, to ensure both pools can operate. Then the pool should be changed in the + /// TokenAdminRegistry, which will activate the new pool. All new transactions will use the new pool and its + /// liquidity. Finally, the remaining liquidity can be transferred to the new pool using this function one more time. + /// @param from The address of the old pool. + /// @param amount The amount of liquidity to transfer. + function transferLiquidity(address from, uint256 amount) external onlyOwner { + UpgradeableLockReleaseTokenPool(from).withdrawLiquidity(amount); + + emit LiquidityTransferred(from, amount); + } +} diff --git a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol new file mode 100644 index 0000000000..58331aa41f --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol @@ -0,0 +1,436 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {IPoolV1} from "../../interfaces/IPool.sol"; +import {IRMN} from "../../interfaces/IRMN.sol"; +import {IRouter} from "../../interfaces/IRouter.sol"; + +import {OwnerIsCreator} from "../../../shared/access/OwnerIsCreator.sol"; +import {Pool} from "../../libraries/Pool.sol"; +import {RateLimiter} from "../../libraries/RateLimiter.sol"; + +import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {IERC165} from "../../../vendor/openzeppelin-solidity/v5.0.2/contracts/utils/introspection/IERC165.sol"; +import {EnumerableSet} from "../../../vendor/openzeppelin-solidity/v5.0.2/contracts/utils/structs/EnumerableSet.sol"; + +/// @notice Base abstract class with common functions for all token pools. +/// A token pool serves as an isolated place for holding tokens and token-specific logic +/// that may execute as tokens move across the bridge.
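The `transferLiquidity` hand-off documented above is easiest to read as a sequence. A hypothetical owner-side sketch, assuming one owner contract controls both the old and the new pool (in practice these would be separate governance transactions, ideally batched with the TokenAdminRegistry pool switch as the NatSpec explains):

```solidity
// Migration sketch, not part of this PR. Assumes this contract owns both pools.
pragma solidity ^0.8.0;

import {UpgradeableLockReleaseTokenPool} from "./UpgradeableLockReleaseTokenPool.sol";

contract PoolMigrationSketch {
  function migrate(
    UpgradeableLockReleaseTokenPool oldPool,
    UpgradeableLockReleaseTokenPool newPool,
    uint256 amount
  ) external {
    // Step 1: authorize the new pool to pull from the old one. withdrawLiquidity
    // is rebalancer-gated, so the new pool must be the old pool's rebalancer.
    oldPool.setRebalancer(address(newPool)); // onlyOwner on the old pool

    // Step 2: transferLiquidity calls oldPool.withdrawLiquidity(amount) from the
    // new pool, so the old pool transfers `amount` tokens directly to the new pool.
    newPool.transferLiquidity(address(oldPool), amount); // onlyOwner on the new pool
  }
}
```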
+abstract contract UpgradeableTokenPool is IPoolV1, OwnerIsCreator { + using EnumerableSet for EnumerableSet.AddressSet; + using EnumerableSet for EnumerableSet.UintSet; + using RateLimiter for RateLimiter.TokenBucket; + + error CallerIsNotARampOnRouter(address caller); + error ZeroAddressNotAllowed(); + error SenderNotAllowed(address sender); + error AllowListNotEnabled(); + error NonExistentChain(uint64 remoteChainSelector); + error ChainNotAllowed(uint64 remoteChainSelector); + error CursedByRMN(); + error ChainAlreadyExists(uint64 chainSelector); + error InvalidSourcePoolAddress(bytes sourcePoolAddress); + error InvalidToken(address token); + error Unauthorized(address caller); + + event Locked(address indexed sender, uint256 amount); + event Burned(address indexed sender, uint256 amount); + event Released(address indexed sender, address indexed recipient, uint256 amount); + event Minted(address indexed sender, address indexed recipient, uint256 amount); + event ChainAdded( + uint64 remoteChainSelector, + bytes remoteToken, + RateLimiter.Config outboundRateLimiterConfig, + RateLimiter.Config inboundRateLimiterConfig + ); + event ChainConfigured( + uint64 remoteChainSelector, + RateLimiter.Config outboundRateLimiterConfig, + RateLimiter.Config inboundRateLimiterConfig + ); + event ChainRemoved(uint64 remoteChainSelector); + event RemotePoolSet(uint64 indexed remoteChainSelector, bytes previousPoolAddress, bytes remotePoolAddress); + event AllowListAdd(address sender); + event AllowListRemove(address sender); + event RouterUpdated(address oldRouter, address newRouter); + + struct ChainUpdate { + uint64 remoteChainSelector; // ──╮ Remote chain selector + bool allowed; // ────────────────╯ Whether the chain should be enabled + bytes remotePoolAddress; // Address of the remote pool, ABI encoded in the case of a remote EVM chain. + bytes remoteTokenAddress; // Address of the remote token, ABI encoded in the case of a remote EVM chain. + RateLimiter.Config outboundRateLimiterConfig; // Outbound rate limited config, meaning the rate limits for all of the onRamps for the given chain + RateLimiter.Config inboundRateLimiterConfig; // Inbound rate limited config, meaning the rate limits for all of the offRamps for the given chain + } + + struct RemoteChainConfig { + RateLimiter.TokenBucket outboundRateLimiterConfig; // Outbound rate limited config, meaning the rate limits for all of the onRamps for the given chain + RateLimiter.TokenBucket inboundRateLimiterConfig; // Inbound rate limited config, meaning the rate limits for all of the offRamps for the given chain + bytes remotePoolAddress; // Address of the remote pool, ABI encoded in the case of a remote EVM chain. + bytes remoteTokenAddress; // Address of the remote token, ABI encoded in the case of a remote EVM chain. + } + + /// @dev The bridgeable token that is managed by this pool. + IERC20 internal immutable i_token; + /// @dev The address of the RMN proxy + address internal immutable i_rmnProxy; + /// @dev The immutable flag that indicates if the pool is access-controlled. + bool internal immutable i_allowlistEnabled; + /// @dev A set of addresses allowed to trigger lockOrBurn as original senders. + /// Only takes effect if i_allowlistEnabled is true. + /// This can be used to ensure only token-issuer specified addresses can + /// move tokens. + EnumerableSet.AddressSet internal s_allowList; + /// @dev The address of the router + IRouter internal s_router; + /// @dev A set of allowed chain selectors. 
We want the allowlist to be enumerable to + /// be able to quickly determine (without parsing logs) who can access the pool. + /// @dev The chain selectors are in uint256 format because of the EnumerableSet implementation. + EnumerableSet.UintSet internal s_remoteChainSelectors; + mapping(uint64 remoteChainSelector => RemoteChainConfig) internal s_remoteChainConfigs; + /// @notice The address of the rate limiter admin. + /// @dev Can be address(0) if none is configured. + address internal s_rateLimitAdmin; + + constructor(IERC20 token, address rmnProxy, bool allowListEnabled) { + if (address(token) == address(0) || rmnProxy == address(0)) revert ZeroAddressNotAllowed(); + i_token = token; + i_rmnProxy = rmnProxy; + + // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. + i_allowlistEnabled = allowListEnabled; + } + + /// @notice Get RMN proxy address + /// @return rmnProxy Address of RMN proxy + function getRmnProxy() public view returns (address rmnProxy) { + return i_rmnProxy; + } + + /// @inheritdoc IPoolV1 + function isSupportedToken(address token) public view virtual returns (bool) { + return token == address(i_token); + } + + /// @notice Gets the IERC20 token that this pool can lock or burn. + /// @return token The IERC20 token representation. + function getToken() public view returns (IERC20 token) { + return i_token; + } + + /// @notice Gets the pool's Router + /// @return router The pool's Router + function getRouter() public view returns (address router) { + return address(s_router); + } + + /// @notice Sets the pool's Router + /// @param newRouter The new Router + function setRouter(address newRouter) public onlyOwner { + if (newRouter == address(0)) revert ZeroAddressNotAllowed(); + address oldRouter = address(s_router); + s_router = IRouter(newRouter); + + emit RouterUpdated(oldRouter, newRouter); + } + + /// @notice Signals which version of the pool interface is supported + function supportsInterface(bytes4 interfaceId) public pure virtual override returns (bool) { + return + interfaceId == Pool.CCIP_POOL_V1 || + interfaceId == type(IPoolV1).interfaceId || + interfaceId == type(IERC165).interfaceId; + } + + // ================================================================ + // │ Validation │ + // ================================================================ + + /// @notice Validates the lock or burn input for correctness on + /// - token to be locked or burned + /// - RMN curse status + /// - allowlist status + /// - if the sender is a valid onRamp + /// - rate limit status + /// @param lockOrBurnIn The input to validate. + /// @dev This function should always be called before executing a lock or burn. Not doing so would allow + /// for various exploits. + function _validateLockOrBurn(Pool.LockOrBurnInV1 memory lockOrBurnIn) internal { + if (!isSupportedToken(lockOrBurnIn.localToken)) revert InvalidToken(lockOrBurnIn.localToken); + if (IRMN(i_rmnProxy).isCursed(bytes16(uint128(lockOrBurnIn.remoteChainSelector)))) revert CursedByRMN(); + _checkAllowList(lockOrBurnIn.originalSender); + + _onlyOnRamp(lockOrBurnIn.remoteChainSelector); + _consumeOutboundRateLimit(lockOrBurnIn.remoteChainSelector, lockOrBurnIn.amount); + } + + /// @notice Validates the release or mint input for correctness on + /// - token to be released or minted + /// - RMN curse status + /// - if the sender is a valid offRamp + /// - if the source pool is valid + /// - rate limit status + /// @param releaseOrMintIn The input to validate. 
+ /// @dev This function should always be called before executing a release or mint. Not doing so would allow + /// for various exploits. + function _validateReleaseOrMint(Pool.ReleaseOrMintInV1 memory releaseOrMintIn) internal { + if (!isSupportedToken(releaseOrMintIn.localToken)) revert InvalidToken(releaseOrMintIn.localToken); + if (IRMN(i_rmnProxy).isCursed(bytes16(uint128(releaseOrMintIn.remoteChainSelector)))) revert CursedByRMN(); + _onlyOffRamp(releaseOrMintIn.remoteChainSelector); + + // Validates that the source pool address is configured on this pool. + bytes memory configuredRemotePool = getRemotePool(releaseOrMintIn.remoteChainSelector); + if ( + configuredRemotePool.length == 0 || + keccak256(releaseOrMintIn.sourcePoolAddress) != keccak256(configuredRemotePool) + ) { + revert InvalidSourcePoolAddress(releaseOrMintIn.sourcePoolAddress); + } + _consumeInboundRateLimit(releaseOrMintIn.remoteChainSelector, releaseOrMintIn.amount); + } + + // ================================================================ + // │ Chain permissions │ + // ================================================================ + + /// @notice Gets the pool address on the remote chain. + /// @param remoteChainSelector Remote chain selector. + /// @dev To support non-evm chains, this value is encoded into bytes + function getRemotePool(uint64 remoteChainSelector) public view returns (bytes memory) { + return s_remoteChainConfigs[remoteChainSelector].remotePoolAddress; + } + + /// @notice Gets the token address on the remote chain. + /// @param remoteChainSelector Remote chain selector. + /// @dev To support non-evm chains, this value is encoded into bytes + function getRemoteToken(uint64 remoteChainSelector) public view returns (bytes memory) { + return s_remoteChainConfigs[remoteChainSelector].remoteTokenAddress; + } + + /// @notice Sets the remote pool address for a given chain selector. + /// @param remoteChainSelector The remote chain selector for which the remote pool address is being set. + /// @param remotePoolAddress The address of the remote pool. + function setRemotePool(uint64 remoteChainSelector, bytes calldata remotePoolAddress) external onlyOwner { + if (!isSupportedChain(remoteChainSelector)) revert NonExistentChain(remoteChainSelector); + + bytes memory prevAddress = s_remoteChainConfigs[remoteChainSelector].remotePoolAddress; + s_remoteChainConfigs[remoteChainSelector].remotePoolAddress = remotePoolAddress; + + emit RemotePoolSet(remoteChainSelector, prevAddress, remotePoolAddress); + } + + /// @inheritdoc IPoolV1 + function isSupportedChain(uint64 remoteChainSelector) public view returns (bool) { + return s_remoteChainSelectors.contains(remoteChainSelector); + } + + /// @notice Get list of allowed chains + /// @return list of chains. + function getSupportedChains() public view returns (uint64[] memory) { + uint256[] memory uint256ChainSelectors = s_remoteChainSelectors.values(); + uint64[] memory chainSelectors = new uint64[](uint256ChainSelectors.length); + for (uint256 i = 0; i < uint256ChainSelectors.length; ++i) { + chainSelectors[i] = uint64(uint256ChainSelectors[i]); + } + + return chainSelectors; + } + + /// @notice Sets the permissions for a list of chains selectors. Actual senders for these chains + /// need to be allowed on the Router to interact with this pool. + /// @dev Only callable by the owner + /// @param chains A list of chains and their new permission status & rate limits. Rate limits + /// are only used when the chain is being added through `allowed` being true. 
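The NatSpec above compresses a lot: each `ChainUpdate` carries the remote pool and token addresses (ABI-encoded for EVM chains) plus both rate-limiter configs, and the configs only take effect on the `allowed == true` path. A construction sketch for enabling a single remote chain; the selector, addresses, and limits are placeholder values, and the file is assumed to sit next to the pool sources so the relative imports resolve:

```solidity
// Sketch: building a ChainUpdate to enable one remote EVM chain. Values are made up.
pragma solidity ^0.8.0;

import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol";
import {RateLimiter} from "../../libraries/RateLimiter.sol";

contract ChainUpdateSketch {
  function enableChain(UpgradeableTokenPool pool) external {
    UpgradeableTokenPool.ChainUpdate[] memory updates = new UpgradeableTokenPool.ChainUpdate[](1);
    updates[0] = UpgradeableTokenPool.ChainUpdate({
      remoteChainSelector: 1234, // placeholder selector
      allowed: true,
      // Remote pool/token addresses are ABI-encoded for EVM chains.
      remotePoolAddress: abi.encode(address(0xBEEF)),
      remoteTokenAddress: abi.encode(address(0xCAFE)),
      outboundRateLimiterConfig: RateLimiter.Config({isEnabled: true, capacity: 100e18, rate: 1e18}),
      inboundRateLimiterConfig: RateLimiter.Config({isEnabled: true, capacity: 100e18, rate: 1e18})
    });
    // onlyOwner: this sketch contract would have to be the pool's owner.
    pool.applyChainUpdates(updates);
  }
}
```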
+ function applyChainUpdates(ChainUpdate[] calldata chains) external virtual onlyOwner { + for (uint256 i = 0; i < chains.length; ++i) { + ChainUpdate memory update = chains[i]; + RateLimiter._validateTokenBucketConfig(update.outboundRateLimiterConfig, !update.allowed); + RateLimiter._validateTokenBucketConfig(update.inboundRateLimiterConfig, !update.allowed); + + if (update.allowed) { + // If the chain already exists, revert + if (!s_remoteChainSelectors.add(update.remoteChainSelector)) { + revert ChainAlreadyExists(update.remoteChainSelector); + } + + if (update.remotePoolAddress.length == 0 || update.remoteTokenAddress.length == 0) { + revert ZeroAddressNotAllowed(); + } + + s_remoteChainConfigs[update.remoteChainSelector] = RemoteChainConfig({ + outboundRateLimiterConfig: RateLimiter.TokenBucket({ + rate: update.outboundRateLimiterConfig.rate, + capacity: update.outboundRateLimiterConfig.capacity, + tokens: update.outboundRateLimiterConfig.capacity, + lastUpdated: uint32(block.timestamp), + isEnabled: update.outboundRateLimiterConfig.isEnabled + }), + inboundRateLimiterConfig: RateLimiter.TokenBucket({ + rate: update.inboundRateLimiterConfig.rate, + capacity: update.inboundRateLimiterConfig.capacity, + tokens: update.inboundRateLimiterConfig.capacity, + lastUpdated: uint32(block.timestamp), + isEnabled: update.inboundRateLimiterConfig.isEnabled + }), + remotePoolAddress: update.remotePoolAddress, + remoteTokenAddress: update.remoteTokenAddress + }); + + emit ChainAdded( + update.remoteChainSelector, + update.remoteTokenAddress, + update.outboundRateLimiterConfig, + update.inboundRateLimiterConfig + ); + } else { + // If the chain doesn't exist, revert + if (!s_remoteChainSelectors.remove(update.remoteChainSelector)) { + revert NonExistentChain(update.remoteChainSelector); + } + + delete s_remoteChainConfigs[update.remoteChainSelector]; + + emit ChainRemoved(update.remoteChainSelector); + } + } + } + + // ================================================================ + // │ Rate limiting │ + // ================================================================ + + /// @notice Sets the rate limiter admin address. + /// @dev Only callable by the owner. + /// @param rateLimitAdmin The new rate limiter admin address. + function setRateLimitAdmin(address rateLimitAdmin) external onlyOwner { + s_rateLimitAdmin = rateLimitAdmin; + } + + /// @notice Gets the rate limiter admin address. + function getRateLimitAdmin() external view returns (address) { + return s_rateLimitAdmin; + } + + /// @notice Consumes outbound rate limiting capacity in this pool + function _consumeOutboundRateLimit(uint64 remoteChainSelector, uint256 amount) internal { + s_remoteChainConfigs[remoteChainSelector].outboundRateLimiterConfig._consume(amount, address(i_token)); + } + + /// @notice Consumes inbound rate limiting capacity in this pool + function _consumeInboundRateLimit(uint64 remoteChainSelector, uint256 amount) internal { + s_remoteChainConfigs[remoteChainSelector].inboundRateLimiterConfig._consume(amount, address(i_token)); + } + + /// @notice Gets the token bucket with its values for the block it was requested at. + /// @return The token bucket. + function getCurrentOutboundRateLimiterState( + uint64 remoteChainSelector + ) external view returns (RateLimiter.TokenBucket memory) { + return s_remoteChainConfigs[remoteChainSelector].outboundRateLimiterConfig._currentTokenBucketState(); + } + + /// @notice Gets the token bucket with its values for the block it was requested at. + /// @return The token bucket. 
+ function getCurrentInboundRateLimiterState( + uint64 remoteChainSelector + ) external view returns (RateLimiter.TokenBucket memory) { + return s_remoteChainConfigs[remoteChainSelector].inboundRateLimiterConfig._currentTokenBucketState(); + } + + /// @notice Sets the chain rate limiter config. + /// @param remoteChainSelector The remote chain selector for which the rate limits apply. + /// @param outboundConfig The new outbound rate limiter config, meaning the onRamp rate limits for the given chain. + /// @param inboundConfig The new inbound rate limiter config, meaning the offRamp rate limits for the given chain. + function setChainRateLimiterConfig( + uint64 remoteChainSelector, + RateLimiter.Config memory outboundConfig, + RateLimiter.Config memory inboundConfig + ) external { + if (msg.sender != s_rateLimitAdmin && msg.sender != owner()) revert Unauthorized(msg.sender); + + _setRateLimitConfig(remoteChainSelector, outboundConfig, inboundConfig); + } + + function _setRateLimitConfig( + uint64 remoteChainSelector, + RateLimiter.Config memory outboundConfig, + RateLimiter.Config memory inboundConfig + ) internal { + if (!isSupportedChain(remoteChainSelector)) revert NonExistentChain(remoteChainSelector); + RateLimiter._validateTokenBucketConfig(outboundConfig, false); + s_remoteChainConfigs[remoteChainSelector].outboundRateLimiterConfig._setTokenBucketConfig(outboundConfig); + RateLimiter._validateTokenBucketConfig(inboundConfig, false); + s_remoteChainConfigs[remoteChainSelector].inboundRateLimiterConfig._setTokenBucketConfig(inboundConfig); + emit ChainConfigured(remoteChainSelector, outboundConfig, inboundConfig); + } + + // ================================================================ + // │ Access │ + // ================================================================ + + /// @notice Checks whether remote chain selector is configured on this contract, and if the msg.sender + /// is a permissioned onRamp for the given chain on the Router. + function _onlyOnRamp(uint64 remoteChainSelector) internal view { + if (!isSupportedChain(remoteChainSelector)) revert ChainNotAllowed(remoteChainSelector); + if (!(msg.sender == s_router.getOnRamp(remoteChainSelector))) revert CallerIsNotARampOnRouter(msg.sender); + } + + /// @notice Checks whether remote chain selector is configured on this contract, and if the msg.sender + /// is a permissioned offRamp for the given chain on the Router. + function _onlyOffRamp(uint64 remoteChainSelector) internal view { + if (!isSupportedChain(remoteChainSelector)) revert ChainNotAllowed(remoteChainSelector); + if (!s_router.isOffRamp(remoteChainSelector, msg.sender)) revert CallerIsNotARampOnRouter(msg.sender); + } + + // ================================================================ + // │ Allowlist │ + // ================================================================ + + function _checkAllowList(address sender) internal view { + if (i_allowlistEnabled) { + if (!s_allowList.contains(sender)) { + revert SenderNotAllowed(sender); + } + } + } + + /// @notice Gets whether the allowList functionality is enabled. + /// @return true if enabled, false if not. + function getAllowListEnabled() external view returns (bool) { + return i_allowlistEnabled; + } + + /// @notice Gets the allowed addresses. + /// @return The allowed addresses. + function getAllowList() external view returns (address[] memory) { + return s_allowList.values(); + }
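For the allowlist update surface that follows, two semantics are worth noting: removals are processed before additions, and zero-address entries in `adds` are skipped rather than reverting. A small owner-side sketch with placeholder addresses:

```solidity
// Sketch: rotating an allowed sender on an allowlist-enabled pool.
pragma solidity ^0.8.0;

import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol";

contract AllowListSketch {
  function rotateSender(UpgradeableTokenPool pool, address oldSender, address newSender) external {
    address[] memory removes = new address[](1);
    removes[0] = oldSender;
    address[] memory adds = new address[](1);
    adds[0] = newSender; // address(0) entries would be silently skipped

    // onlyOwner: this sketch contract would have to own the pool. Reverts with
    // AllowListNotEnabled if the pool was deployed in permissionless mode.
    pool.applyAllowListUpdates(removes, adds);
  }
}
```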
+ /// @param adds The addresses to be added. + function applyAllowListUpdates(address[] calldata removes, address[] calldata adds) external onlyOwner { + _applyAllowListUpdates(removes, adds); + } + + /// @notice Internal version of applyAllowListUpdates to allow for reuse in the constructor. + function _applyAllowListUpdates(address[] memory removes, address[] memory adds) internal { + if (!i_allowlistEnabled) revert AllowListNotEnabled(); + + for (uint256 i = 0; i < removes.length; ++i) { + address toRemove = removes[i]; + if (s_allowList.remove(toRemove)) { + emit AllowListRemove(toRemove); + } + } + for (uint256 i = 0; i < adds.length; ++i) { + address toAdd = adds[i]; + if (toAdd == address(0)) { + continue; + } + if (s_allowList.add(toAdd)) { + emit AllowListAdd(toAdd); + } + } + } +} diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPoolAbstract_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPoolAbstract_diff.md new file mode 100644 index 0000000000..d29e8514b6 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPoolAbstract_diff.md @@ -0,0 +1,24 @@ +```diff +diff --git a/src/v0.8/ccip/pools/BurnMintTokenPoolAbstract.sol b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol +index 99908c91d0..0e73ccc1d6 100644 +--- a/src/v0.8/ccip/pools/BurnMintTokenPoolAbstract.sol ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol +@@ -1,12 +1,12 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.24; ++pragma solidity ^0.8.0; + +-import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; ++import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; + +-import {Pool} from "../libraries/Pool.sol"; +-import {TokenPool} from "./TokenPool.sol"; ++import {Pool} from "../../libraries/Pool.sol"; ++import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +-abstract contract BurnMintTokenPoolAbstract is TokenPool { ++abstract contract UpgradeableBurnMintTokenPoolAbstract is UpgradeableTokenPool { + /// @notice Contains the specific burn call for a pool. + /// @dev overriding this method allows us to create pools with different burn signatures + /// without duplicating the underlying logic. 
+``` diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPool_diff.md new file mode 100644 index 0000000000..2363dbc90d --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPool_diff.md @@ -0,0 +1,80 @@ +```diff +diff --git a/src/v0.8/ccip/pools/BurnMintTokenPool.sol b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol +index c48c8e51fb..287cb13e9c 100644 +--- a/src/v0.8/ccip/pools/BurnMintTokenPool.sol ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol +@@ -1,29 +1,56 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.24; ++pragma solidity ^0.8.0; + +-import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; +-import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; ++import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; ++import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; ++import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; + +-import {BurnMintTokenPoolAbstract} from "./BurnMintTokenPoolAbstract.sol"; +-import {TokenPool} from "./TokenPool.sol"; ++import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; ++import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +-/// @notice This pool mints and burns a 3rd-party token. +-/// @dev Pool whitelisting mode is set in the constructor and cannot be modified later. +-/// It either accepts any address as originalSender, or only accepts whitelisted originalSender. +-/// The only way to change whitelisting mode is to deploy a new pool. +-/// If that is expected, please make sure the token's burner/minter roles are adjustable. +-/// @dev This contract is a variant of BurnMintTokenPool that uses `burn(amount)`. +-contract BurnMintTokenPool is BurnMintTokenPoolAbstract, ITypeAndVersion { ++import {IRouter} from "../../interfaces/IRouter.sol"; ++ ++/// @title UpgradeableBurnMintTokenPool ++/// @author Aave Labs ++/// @notice Upgradeable version of Chainlink's CCIP BurnMintTokenPool ++/// @dev Contract adaptations: ++/// - Implementation of Initializable to allow upgrades ++/// - Move of allowlist and router definition to initialization stage ++/// - Inclusion of rate limit admin who may configure rate limits in addition to owner ++contract UpgradeableBurnMintTokenPool is Initializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { + string public constant override typeAndVersion = "BurnMintTokenPool 1.5.0"; + ++ /// @dev Constructor ++ /// @param token The bridgeable token that is managed by this pool. ++ /// @param rmnProxy The address of the rmn proxy ++ /// @param allowlistEnabled True if pool is set to access-controlled mode, false otherwise + constructor( +- IBurnMintERC20 token, +- address[] memory allowlist, ++ address token, + address rmnProxy, +- address router +- ) TokenPool(token, allowlist, rmnProxy, router) {} ++ bool allowlistEnabled ++ ) UpgradeableTokenPool(IBurnMintERC20(token), rmnProxy, allowlistEnabled) { ++ _disableInitializers(); ++ } + +- /// @inheritdoc BurnMintTokenPoolAbstract ++ /// @dev Initializer ++ /// @dev The address passed as `owner_` must accept ownership after initialization. 
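++ /// @dev Deployment sketch (illustrative only; `gho`, `proxyAdmin` and the exact encoding ++ /// are assumptions, not part of this contract): ++ /// impl = new UpgradeableBurnMintTokenPool(gho, rmnProxy, false); ++ /// proxy = new TransparentUpgradeableProxy( ++ /// address(impl), ++ /// proxyAdmin, ++ /// abi.encodeWithSignature("initialize(address,address[],address)", owner, new address[](0), router) ++ /// );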
++ /// @dev The `allowlist` is only effective if pool is set to access-controlled mode ++ /// @param owner_ The address of the owner ++ /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders ++ /// @param router The address of the router ++ function initialize(address owner_, address[] memory allowlist, address router) public virtual initializer { ++ if (owner_ == address(0) || router == address(0)) revert ZeroAddressNotAllowed(); ++ _transferOwnership(owner_); ++ ++ s_router = IRouter(router); ++ ++ // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. ++ if (i_allowlistEnabled) { ++ _applyAllowListUpdates(new address[](0), allowlist); ++ } ++ } ++ ++ /// @inheritdoc UpgradeableBurnMintTokenPoolAbstract + function _burn(uint256 amount) internal virtual override { + IBurnMintERC20(address(i_token)).burn(amount); + } +``` diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableLockReleaseTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableLockReleaseTokenPool_diff.md new file mode 100644 index 0000000000..9b12d90e6f --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableLockReleaseTokenPool_diff.md @@ -0,0 +1,189 @@ +```diff +diff --git a/src/v0.8/ccip/pools/LockReleaseTokenPool.sol b/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol +index 3a4a4aef6d..b714fc2342 100644 +--- a/src/v0.8/ccip/pools/LockReleaseTokenPool.sol ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol +@@ -1,24 +1,35 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.24; ++pragma solidity ^0.8.0; + +-import {ILiquidityContainer} from "../../liquiditymanager/interfaces/ILiquidityContainer.sol"; +-import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; ++import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; + +-import {Pool} from "../libraries/Pool.sol"; +-import {TokenPool} from "./TokenPool.sol"; ++import {ILiquidityContainer} from "../../../liquiditymanager/interfaces/ILiquidityContainer.sol"; ++import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; + +-import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +-import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; ++import {Pool} from "../../libraries/Pool.sol"; ++import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +-/// @notice Token pool used for tokens on their native chain. This uses a lock and release mechanism. +-/// Because of lock/unlock requiring liquidity, this pool contract also has function to add and remove +-/// liquidity. This allows for proper bookkeeping for both user and liquidity provider balances. +-/// @dev One token per LockReleaseTokenPool. 
+-contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion { ++import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; ++import {SafeERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; ++import {IRouter} from "../../interfaces/IRouter.sol"; ++ ++/// @title UpgradeableLockReleaseTokenPool ++/// @author Aave Labs ++/// @notice Upgradeable version of Chainlink's CCIP LockReleaseTokenPool ++/// @dev Contract adaptations: ++/// - Implementation of Initializable to allow upgrades ++/// - Move of allowlist and router definition to initialization stage ++/// - Addition of a bridge limit to regulate the maximum amount of tokens that can be transferred out (burned/locked) ++contract UpgradeableLockReleaseTokenPool is Initializable, UpgradeableTokenPool, ILiquidityContainer, ITypeAndVersion { + using SafeERC20 for IERC20; + + error InsufficientLiquidity(); + error LiquidityNotAccepted(); ++ error BridgeLimitExceeded(uint256 bridgeLimit); ++ error NotEnoughBridgedAmount(); ++ ++ event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); ++ event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); + + event LiquidityTransferred(address indexed from, uint256 amount); + +@@ -32,21 +43,57 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion + /// @notice The address of the rebalancer. + address internal s_rebalancer; + ++ /// @notice Maximum amount of tokens that can be bridged to other chains ++ uint256 private s_bridgeLimit; ++ /// @notice Amount of tokens bridged (transferred out) ++ /// @dev Must always be equal to or below the bridge limit ++ uint256 private s_currentBridged; ++ /// @notice The address of the bridge limit admin. ++ /// @dev Can be address(0) if none is configured. ++ address internal s_bridgeLimitAdmin; ++ ++ /// @dev Constructor ++ /// @param token The bridgeable token that is managed by this pool. ++ /// @param rmnProxy The address of the rmn proxy ++ /// @param allowlistEnabled True if pool is set to access-controlled mode, false otherwise ++ /// @param acceptLiquidity True if the pool accepts liquidity, false otherwise + constructor( +- IERC20 token, +- address[] memory allowlist, ++ address token, + address rmnProxy, +- bool acceptLiquidity, +- address router +- ) TokenPool(token, allowlist, rmnProxy, router) { ++ bool allowListEnabled, ++ bool acceptLiquidity ++ ) UpgradeableTokenPool(IERC20(token), rmnProxy, allowListEnabled) { + i_acceptLiquidity = acceptLiquidity; + } + ++ /// @dev Initializer ++ /// @dev The address passed as `owner_` must accept ownership after initialization. 
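++ /// @dev Initialization sketch (illustrative; the values are assumptions): calling ++ /// `initialize(owner, new address[](0), router, 100e6 * 1e18)` behind a proxy caps ++ /// net outflows (locked minus released) at 100M tokens from the start.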
++ /// @dev The `allowlist` is only effective if pool is set to access-controlled mode ++ /// @param owner_ The address of the owner ++ /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders ++ /// @param router The address of the router ++ /// @param bridgeLimit The maximum amount of tokens that can be bridged to other chains ++ function initialize( ++ address owner_, ++ address[] memory allowlist, ++ address router, ++ uint256 bridgeLimit ++ ) public initializer { ++ if (router == address(0) || owner_ == address(0)) revert ZeroAddressNotAllowed(); ++ ++ _transferOwnership(owner_); ++ s_router = IRouter(router); ++ if (i_allowlistEnabled) _applyAllowListUpdates(new address[](0), allowlist); ++ s_bridgeLimit = bridgeLimit; ++ } ++ + /// @notice Locks the token in the pool + /// @dev The _validateLockOrBurn check is an essential security check + function lockOrBurn( + Pool.LockOrBurnInV1 calldata lockOrBurnIn + ) external virtual override returns (Pool.LockOrBurnOutV1 memory) { ++ // Increase bridged amount because tokens are leaving the source chain ++ if ((s_currentBridged += lockOrBurnIn.amount) > s_bridgeLimit) revert BridgeLimitExceeded(s_bridgeLimit); + _validateLockOrBurn(lockOrBurnIn); + + emit Locked(msg.sender, lockOrBurnIn.amount); +@@ -61,6 +108,11 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion + ) external virtual override returns (Pool.ReleaseOrMintOutV1 memory) { + _validateReleaseOrMint(releaseOrMintIn); + ++ // This should never occur. Amount should never exceed the current bridged amount ++ if (releaseOrMintIn.amount > s_currentBridged) revert NotEnoughBridgedAmount(); ++ // Reduce bridged amount because tokens are back to source chain ++ s_currentBridged -= releaseOrMintIn.amount; ++ + // Release to the recipient + getToken().safeTransfer(releaseOrMintIn.receiver, releaseOrMintIn.amount); + +@@ -69,6 +121,38 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion + return Pool.ReleaseOrMintOutV1({destinationAmount: releaseOrMintIn.amount}); + } + ++ /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out ++ /// @dev Only callable by the owner or the bridge limit admin. ++ /// @dev Bridge limit changes should be carefully managed, especially when reducing below the current bridged amount ++ /// @param newBridgeLimit The new bridge limit ++ function setBridgeLimit(uint256 newBridgeLimit) external { ++ if (msg.sender != s_bridgeLimitAdmin && msg.sender != owner()) revert Unauthorized(msg.sender); ++ uint256 oldBridgeLimit = s_bridgeLimit; ++ s_bridgeLimit = newBridgeLimit; ++ emit BridgeLimitUpdated(oldBridgeLimit, newBridgeLimit); ++ } ++ ++ /// @notice Sets the bridge limit admin address. ++ /// @dev Only callable by the owner. ++ /// @param bridgeLimitAdmin The new bridge limit admin address.
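++ /// @dev Accounting sketch: `s_currentBridged` grows in lockOrBurn and shrinks in ++ /// releaseOrMint, so setting the limit below the current bridged amount does not ++ /// revert; it only pauses further outflows until releases bring the bridged amount ++ /// back under the limit.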
++ function setBridgeLimitAdmin(address bridgeLimitAdmin) external onlyOwner { ++ address oldAdmin = s_bridgeLimitAdmin; ++ s_bridgeLimitAdmin = bridgeLimitAdmin; ++ emit BridgeLimitAdminUpdated(oldAdmin, bridgeLimitAdmin); ++ } ++ ++ /// @notice Gets the bridge limit ++ /// @return The maximum amount of tokens that can be transferred out to other chains ++ function getBridgeLimit() external view virtual returns (uint256) { ++ return s_bridgeLimit; ++ } ++ ++ /// @notice Gets the current bridged amount to other chains ++ /// @return The amount of tokens transferred out to other chains ++ function getCurrentBridgedAmount() external view virtual returns (uint256) { ++ return s_currentBridged; ++ } ++ + // @inheritdoc IERC165 + function supportsInterface(bytes4 interfaceId) public pure virtual override returns (bool) { + return interfaceId == type(ILiquidityContainer).interfaceId || super.supportsInterface(interfaceId); +@@ -80,6 +164,11 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion + return s_rebalancer; + } + ++ /// @notice Gets the bridge limit admin address. ++ function getBridgeLimitAdmin() external view returns (address) { ++ return s_bridgeLimitAdmin; ++ } ++ + /// @notice Sets the LiquidityManager address. + /// @dev Only callable by the owner. + function setRebalancer(address rebalancer) external onlyOwner { +@@ -124,7 +213,7 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion + /// @param from The address of the old pool. + /// @param amount The amount of liquidity to transfer. + function transferLiquidity(address from, uint256 amount) external onlyOwner { +- LockReleaseTokenPool(from).withdrawLiquidity(amount); ++ UpgradeableLockReleaseTokenPool(from).withdrawLiquidity(amount); + + emit LiquidityTransferred(from, amount); + } +``` diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableTokenPool_diff.md new file mode 100644 index 0000000000..b1c6dfee6d --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableTokenPool_diff.md @@ -0,0 +1,85 @@ +```diff +diff --git a/src/v0.8/ccip/pools/TokenPool.sol b/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol +index ebd613134a..58331aa41f 100644 +--- a/src/v0.8/ccip/pools/TokenPool.sol ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol +@@ -1,22 +1,22 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.24; ++pragma solidity ^0.8.0; + +-import {IPoolV1} from "../interfaces/IPool.sol"; +-import {IRMN} from "../interfaces/IRMN.sol"; +-import {IRouter} from "../interfaces/IRouter.sol"; ++import {IPoolV1} from "../../interfaces/IPool.sol"; ++import {IRMN} from "../../interfaces/IRMN.sol"; ++import {IRouter} from "../../interfaces/IRouter.sol"; + +-import {OwnerIsCreator} from "../../shared/access/OwnerIsCreator.sol"; +-import {Pool} from "../libraries/Pool.sol"; +-import {RateLimiter} from "../libraries/RateLimiter.sol"; ++import {OwnerIsCreator} from "../../../shared/access/OwnerIsCreator.sol"; ++import {Pool} from "../../libraries/Pool.sol"; ++import {RateLimiter} from "../../libraries/RateLimiter.sol"; + +-import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +-import {IERC165} from "../../vendor/openzeppelin-solidity/v5.0.2/contracts/utils/introspection/IERC165.sol"; +-import {EnumerableSet} from "../../vendor/openzeppelin-solidity/v5.0.2/contracts/utils/structs/EnumerableSet.sol"; ++import {IERC20} from
"../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; ++import {IERC165} from "../../../vendor/openzeppelin-solidity/v5.0.2/contracts/utils/introspection/IERC165.sol"; ++import {EnumerableSet} from "../../../vendor/openzeppelin-solidity/v5.0.2/contracts/utils/structs/EnumerableSet.sol"; + + /// @notice Base abstract class with common functions for all token pools. + /// A token pool serves as isolated place for holding tokens and token specific logic + /// that may execute as tokens move across the bridge. +-abstract contract TokenPool is IPoolV1, OwnerIsCreator { ++abstract contract UpgradeableTokenPool is IPoolV1, OwnerIsCreator { + using EnumerableSet for EnumerableSet.AddressSet; + using EnumerableSet for EnumerableSet.UintSet; + using RateLimiter for RateLimiter.TokenBucket; +@@ -92,17 +92,13 @@ abstract contract TokenPool is IPoolV1, OwnerIsCreator { + /// @dev Can be address(0) if none is configured. + address internal s_rateLimitAdmin; + +- constructor(IERC20 token, address[] memory allowlist, address rmnProxy, address router) { +- if (address(token) == address(0) || router == address(0) || rmnProxy == address(0)) revert ZeroAddressNotAllowed(); ++ constructor(IERC20 token, address rmnProxy, bool allowListEnabled) { ++ if (address(token) == address(0) || rmnProxy == address(0)) revert ZeroAddressNotAllowed(); + i_token = token; + i_rmnProxy = rmnProxy; +- s_router = IRouter(router); + + // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. +- i_allowlistEnabled = allowlist.length > 0; +- if (i_allowlistEnabled) { +- _applyAllowListUpdates(new address[](0), allowlist); +- } ++ i_allowlistEnabled = allowListEnabled; + } + + /// @notice Get RMN proxy address +@@ -140,8 +136,10 @@ abstract contract TokenPool is IPoolV1, OwnerIsCreator { + + /// @notice Signals which version of the pool interface is supported + function supportsInterface(bytes4 interfaceId) public pure virtual override returns (bool) { +- return interfaceId == Pool.CCIP_POOL_V1 || interfaceId == type(IPoolV1).interfaceId +- || interfaceId == type(IERC165).interfaceId; ++ return ++ interfaceId == Pool.CCIP_POOL_V1 || ++ interfaceId == type(IPoolV1).interfaceId || ++ interfaceId == type(IERC165).interfaceId; + } + + // ================================================================ +@@ -183,8 +181,8 @@ abstract contract TokenPool is IPoolV1, OwnerIsCreator { + // Validates that the source pool address is configured on this pool. + bytes memory configuredRemotePool = getRemotePool(releaseOrMintIn.remoteChainSelector); + if ( +- configuredRemotePool.length == 0 +- || keccak256(releaseOrMintIn.sourcePoolAddress) != keccak256(configuredRemotePool) ++ configuredRemotePool.length == 0 || ++ keccak256(releaseOrMintIn.sourcePoolAddress) != keccak256(configuredRemotePool) + ) { + revert InvalidSourcePoolAddress(releaseOrMintIn.sourcePoolAddress); + } +``` diff --git a/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol new file mode 100644 index 0000000000..45eeb5c5d7 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol @@ -0,0 +1,23 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; + +/** + * @dev Mock contract to test upgrades, not to be used in production. 
+ */ +contract MockUpgradeable is Initializable { + /** + * @dev Constructor + */ + constructor() { + // Intentionally left blank + } + + /** + * @dev Initializer + */ + function initialize() public reinitializer(2) { + // Intentionally left blank + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/End2End.t.sol b/contracts/src/v0.8/ccip/test/pools/End2End.t.sol new file mode 100644 index 0000000000..f4f4b00a5e --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/End2End.t.sol @@ -0,0 +1,18 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import "../commitStore/CommitStore.t.sol"; +import "../onRamp/EVM2EVMOnRampSetup.t.sol"; +import "../offRamp/EVM2EVMOffRampSetup.t.sol"; + +contract E2E is EVM2EVMOnRampSetup, CommitStoreSetup, EVM2EVMOffRampSetup { + using Internal for Internal.EVM2EVMMessage; + + function setUp() public virtual override(EVM2EVMOnRampSetup, CommitStoreSetup, EVM2EVMOffRampSetup) { + EVM2EVMOnRampSetup.setUp(); + CommitStoreSetup.setUp(); + EVM2EVMOffRampSetup.setUp(); + + deployOffRamp(s_commitStore, s_destRouter, address(0)); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol new file mode 100644 index 0000000000..6d76e4d837 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol @@ -0,0 +1,307 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; + +import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; +import {IPoolV1} from "../../../interfaces/IPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {Pool} from "../../../libraries/Pool.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; + +abstract contract GhoBaseTest is BaseTest { + error Unauthorized(address caller); + + address internal RMN_PROXY = makeAddr("RMN_PROXY"); + address internal ROUTER = makeAddr("ROUTER"); + address internal RAMP = makeAddr("RAMP"); + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + address internal USER = makeAddr("USER"); + + uint256 public immutable INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; + + struct UtilsStorage { + uint256[] chainsList; + mapping(uint256 => address) pools; // chainId => bridgeTokenPool + mapping(uint256 => address) tokens; // chainId => ghoToken + mapping(uint256 => uint256) bucketCapacities; // chainId => bucketCapacities + mapping(uint256 => uint256) bucketLevels; // chainId => bucketLevels + mapping(uint256 => uint256) liquidity; // chainId => liquidity + uint256 remoteLiquidity; + uint256 bridged; + bool capacityBelowLevelUpdate; + } + + function _deployUpgradeableBurnMintTokenPool( + address ghoToken, + address arm, + address router, + address owner, + address proxyAdmin + ) internal returns (address) { + // Deploy BurnMintTokenPool for GHO token on source chain + UpgradeableBurnMintTokenPool tokenPoolImpl = new UpgradeableBurnMintTokenPool(ghoToken, arm, false); + // proxy deploy and init + address[] memory emptyArray = new address[](0); + bytes
memory tokenPoolInitParams = abi.encodeWithSignature( + "initialize(address,address[],address)", + owner, + emptyArray, + router + ); + TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy( + address(tokenPoolImpl), + proxyAdmin, + tokenPoolInitParams + ); + // Manage ownership + vm.stopPrank(); + vm.prank(owner); + UpgradeableBurnMintTokenPool(address(tokenPoolProxy)).acceptOwnership(); + vm.startPrank(OWNER); + + return address(tokenPoolProxy); + } + + function _deployUpgradeableLockReleaseTokenPool( + address ghoToken, + address arm, + address router, + address owner, + uint256 bridgeLimit, + address proxyAdmin + ) internal returns (address) { + UpgradeableLockReleaseTokenPool tokenPoolImpl = new UpgradeableLockReleaseTokenPool(ghoToken, arm, false, true); + // proxy deploy and init + address[] memory emptyArray = new address[](0); + bytes memory tokenPoolInitParams = abi.encodeWithSignature( + "initialize(address,address[],address,uint256)", + owner, + emptyArray, + router, + bridgeLimit + ); + TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy( + address(tokenPoolImpl), + proxyAdmin, + tokenPoolInitParams + ); + + // Manage ownership + vm.stopPrank(); + vm.prank(owner); + UpgradeableLockReleaseTokenPool(address(tokenPoolProxy)).acceptOwnership(); + vm.startPrank(OWNER); + + return address(tokenPoolProxy); + } + + function _inflateFacilitatorLevel(address tokenPool, address ghoToken, uint256 amount) internal { + vm.stopPrank(); + vm.prank(tokenPool); + IBurnMintERC20(ghoToken).mint(address(0), amount); + } + + function _getProxyAdminAddress(address proxy) internal view returns (address) { + bytes32 ERC1967_ADMIN_SLOT = 0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103; + bytes32 adminSlot = vm.load(proxy, ERC1967_ADMIN_SLOT); + return address(uint160(uint256(adminSlot))); + } + + function _getProxyImplementationAddress(address proxy) internal view returns (address) { + bytes32 ERC1967_IMPLEMENTATION_SLOT = 0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc; + bytes32 implSlot = vm.load(proxy, ERC1967_IMPLEMENTATION_SLOT); + return address(uint160(uint256(implSlot))); + } + + function _getUpgradeableVersion(address proxy) internal view returns (uint8) { + // version is 1st slot + return uint8(uint256(vm.load(proxy, bytes32(uint256(0))))); + } + + function _enableLane(UtilsStorage storage s, uint256 fromId, uint256 toId) internal { + // from + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + RateLimiter.Config memory emptyRateConfig = RateLimiter.Config(false, 0, 0); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: uint64(toId), + remotePoolAddress: abi.encode(s.pools[toId]), + remoteTokenAddress: abi.encode(s.tokens[toId]), + allowed: true, + outboundRateLimiterConfig: emptyRateConfig, + inboundRateLimiterConfig: emptyRateConfig + }); + + vm.startPrank(OWNER); + UpgradeableTokenPool(s.pools[fromId]).applyChainUpdates(chainUpdate); + + chainUpdate[0].remoteChainSelector = uint64(fromId); + chainUpdate[0].remotePoolAddress = abi.encode(s.pools[fromId]); + chainUpdate[0].remoteTokenAddress = abi.encode(s.tokens[fromId]); + UpgradeableTokenPool(s.pools[toId]).applyChainUpdates(chainUpdate); + vm.stopPrank(); + } + + function _addBridge(UtilsStorage storage s, uint256 chainId, uint256 bucketCapacity) internal { + require(s.tokens[chainId] == address(0), "BRIDGE_ALREADY_EXISTS"); + + s.chainsList.push(chainId); + + // GHO Token + 
GhoToken ghoToken = new GhoToken(AAVE_DAO); + s.tokens[chainId] = address(ghoToken); + + // UpgradeableTokenPool + address bridgeTokenPool = _deployUpgradeableBurnMintTokenPool( + address(ghoToken), + RMN_PROXY, + ROUTER, + OWNER, + PROXY_ADMIN + ); + s.pools[chainId] = bridgeTokenPool; + + // Facilitator + s.bucketCapacities[chainId] = bucketCapacity; + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + ghoToken.grantRole(ghoToken.FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + ghoToken.addFacilitator(bridgeTokenPool, "UpgradeableTokenPool", uint128(bucketCapacity)); + vm.stopPrank(); + } + + function _updateBridgeLimit(UtilsStorage storage s, uint256 newBridgeLimit) internal { + vm.stopPrank(); + vm.startPrank(OWNER); + UpgradeableLockReleaseTokenPool(s.pools[0]).setBridgeLimit(newBridgeLimit); + vm.stopPrank(); + } + + function _updateBucketCapacity(UtilsStorage storage s, uint256 chainId, uint256 newBucketCapacity) internal { + s.bucketCapacities[chainId] = newBucketCapacity; + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(s.tokens[chainId]).grantRole(GhoToken(s.tokens[chainId]).BUCKET_MANAGER_ROLE(), AAVE_DAO); + GhoToken(s.tokens[chainId]).setFacilitatorBucketCapacity(s.pools[chainId], uint128(newBucketCapacity)); + vm.stopPrank(); + } + + function _getCapacity(UtilsStorage storage s, uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (uint256 capacity, ) = GhoToken(s.tokens[chain]).getFacilitatorBucket(s.pools[chain]); + return capacity; + } + + function _getLevel(UtilsStorage storage s, uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (, uint256 level) = GhoToken(s.tokens[chain]).getFacilitatorBucket(s.pools[chain]); + return level; + } + + function _getMaxToBridgeOut(UtilsStorage storage s, uint256 fromChain) internal view returns (uint256) { + if (_isEthereumChain(fromChain)) { + UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(s.pools[0]); + uint256 bridgeLimit = ethTokenPool.getBridgeLimit(); + uint256 currentBridged = ethTokenPool.getCurrentBridgedAmount(); + return currentBridged >= bridgeLimit ? 0 : bridgeLimit - currentBridged; + } else { + (, uint256 level) = GhoToken(s.tokens[fromChain]).getFacilitatorBucket(s.pools[fromChain]); + return level; + } + } + + function _getMaxToBridgeIn(UtilsStorage storage s, uint256 toChain) internal view returns (uint256) { + if (_isEthereumChain(toChain)) { + UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(s.pools[0]); + return ethTokenPool.getCurrentBridgedAmount(); + } else { + (uint256 capacity, uint256 level) = GhoToken(s.tokens[toChain]).getFacilitatorBucket(s.pools[toChain]); + return level >= capacity ? 
0 : capacity - level; + } + } + + function _bridgeGho( + UtilsStorage storage s, + uint256 fromChain, + uint256 toChain, + address user, + uint256 amount + ) internal { + _moveGhoOrigin(s, fromChain, toChain, user, amount); + _moveGhoDestination(s, fromChain, toChain, user, amount); + } + + function _moveGhoOrigin( + UtilsStorage storage s, + uint256 fromChain, + uint256 toChain, + address user, + uint256 amount + ) internal { + // Simulate CCIP pull of funds + vm.startPrank(user); + GhoToken(s.tokens[fromChain]).transfer(s.pools[fromChain], amount); + + vm.startPrank(RAMP); + IPoolV1(s.pools[fromChain]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: abi.encode(user), + remoteChainSelector: uint64(toChain), + originalSender: user, + amount: amount, + localToken: s.tokens[fromChain] + }) + ); + + if (_isEthereumChain(fromChain)) { + // Lock + s.bridged += amount; + } else { + // Burn + s.bucketLevels[fromChain] -= amount; + s.liquidity[fromChain] -= amount; + s.remoteLiquidity -= amount; + } + } + + function _moveGhoDestination( + UtilsStorage storage s, + uint256 fromChain, + uint256 toChain, + address user, + uint256 amount + ) internal { + vm.startPrank(RAMP); + IPoolV1(s.pools[toChain]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: abi.encode(user), + remoteChainSelector: uint64(fromChain), + receiver: user, + amount: amount, + localToken: s.tokens[toChain], + sourcePoolAddress: abi.encode(s.pools[fromChain]), + sourcePoolData: abi.encode(s.pools[fromChain]), + offchainTokenData: bytes("") + }) + ); + + if (_isEthereumChain(toChain)) { + // Release + s.bridged -= amount; + } else { + // Mint + s.bucketLevels[toChain] += amount; + s.liquidity[toChain] += amount; + s.remoteLiquidity += amount; + } + } + + function _isEthereumChain(uint256 chainId) internal pure returns (bool) { + return chainId == 0; + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol new file mode 100644 index 0000000000..149a90e0ec --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol @@ -0,0 +1,897 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; +import {stdError} from "forge-std/Test.sol"; +import {IERC165} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; +import {ILiquidityContainer} from "../../../../liquiditymanager/interfaces/ILiquidityContainer.sol"; +import {IPoolV1} from "../../../interfaces/IPool.sol"; +import {Pool} from "../../../libraries/Pool.sol"; +import {LockReleaseTokenPool} from "../../../pools/LockReleaseTokenPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {MockUpgradeable} from "../../mocks/MockUpgradeable.sol"; +import {GhoTokenPoolEthereumSetup} from "./GhoTokenPoolEthereumSetup.t.sol"; + +contract GhoTokenPoolEthereum_setRebalancer is GhoTokenPoolEthereumSetup { + function testSetRebalancerSuccess() public { + assertEq(address(s_ghoTokenPool.getRebalancer()), OWNER); + changePrank(AAVE_DAO); + s_ghoTokenPool.setRebalancer(STRANGER); + 
assertEq(address(s_ghoTokenPool.getRebalancer()), STRANGER); + } + + function testSetRebalancerReverts() public { + vm.startPrank(STRANGER); + + vm.expectRevert("Only callable by owner"); + s_ghoTokenPool.setRebalancer(STRANGER); + } +} + +contract GhoTokenPoolEthereum_lockOrBurn is GhoTokenPoolEthereumSetup { + error SenderNotAllowed(address sender); + + event Locked(address indexed sender, uint256 amount); + event TokensConsumed(uint256 tokens); + + function testFuzz_LockOrBurnNoAllowListSuccess(uint256 amount, uint256 bridgedAmount) public { + uint256 maxAmount = _getOutboundRateLimiterConfig().capacity < INITIAL_BRIDGE_LIMIT + ? _getOutboundRateLimiterConfig().capacity + : INITIAL_BRIDGE_LIMIT; + amount = bound(amount, 1, maxAmount); + bridgedAmount = bound(bridgedAmount, 0, INITIAL_BRIDGE_LIMIT - amount); + + changePrank(s_allowedOnRamp); + if (bridgedAmount > 0) { + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: bridgedAmount, + localToken: address(s_token) + }) + ); + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), bridgedAmount); + } + + vm.expectEmit(); + emit TokensConsumed(amount); + vm.expectEmit(); + emit Locked(s_allowedOnRamp, amount); + + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), bridgedAmount + amount); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = _getOutboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + // increase bridge limit to hit the rate limit error + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(amount); + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_token)) + ); + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + } + + function testTokenBridgeLimitExceededReverts() public { + uint256 bridgeLimit = s_ghoTokenPool.getBridgeLimit(); + uint256 amount = bridgeLimit + 1; + + vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, bridgeLimit)); + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + } +} + +contract GhoTokenPoolEthereum_releaseOrMint is GhoTokenPoolEthereumSetup { + event TokensConsumed(uint256 tokens); + event Released(address indexed sender, address indexed recipient, uint256 amount); + + function setUp() public virtual override { + GhoTokenPoolEthereumSetup.setUp(); + + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: true, + remotePoolAddress: abi.encode(s_sourcePool), + remoteTokenAddress: abi.encode(s_sourceToken), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + 
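+ // Register the source lane; without it, releaseOrMint below would revert with + // ChainNotAllowed for SOURCE_CHAIN_SELECTOR (see testChainNotAllowedReverts).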
+ changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdate); + } + + function test_ReleaseOrMintSuccess() public { + uint256 amount = 100; + deal(address(s_token), address(s_ghoTokenPool), amount); + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + + vm.expectEmit(); + emit TokensConsumed(amount); + vm.expectEmit(); + emit Released(s_allowedOffRamp, OWNER, amount); + + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: amount, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: OWNER, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), 0); + } + + function testFuzz_ReleaseOrMintSuccess(address recipient, uint256 amount, uint256 bridgedAmount) public { + // Since the owner already has tokens this would break the checks + vm.assume(recipient != OWNER); + vm.assume(recipient != address(0)); + vm.assume(recipient != address(s_token)); + + amount = uint128(bound(amount, 2, type(uint128).max)); + bridgedAmount = uint128(bound(bridgedAmount, amount, type(uint128).max)); + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(bridgedAmount); + s_ghoTokenPool.setChainRateLimiterConfig( + DEST_CHAIN_SELECTOR, + RateLimiter.Config({isEnabled: true, capacity: type(uint128).max, rate: 1e15}), + RateLimiter.Config({isEnabled: true, capacity: type(uint128).max, rate: 1e15}) + ); + vm.warp(block.timestamp + 1e50); // wait to refill capacity + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: bridgedAmount, + localToken: address(s_token) + }) + ); + + // Makes sure the pool always has enough funds + deal(address(s_token), address(s_ghoTokenPool), amount); + vm.startPrank(s_allowedOffRamp); + + uint256 capacity = _getInboundRateLimiterConfig().capacity; + uint256 bridgedAmountAfter = bridgedAmount; + // Determine if we hit the rate limit or the txs should succeed. 
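+ // (bucket sketch: the inbound bucket never holds more than `capacity` tokens, so a + // single release above `capacity` reverts regardless of how much time has passed)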
+ if (amount > capacity) { + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_token)) + ); + } else { + // Only rate limit if the amount is >0 + if (amount > 0) { + vm.expectEmit(); + emit TokensConsumed(amount); + } + + vm.expectEmit(); + emit Released(s_allowedOffRamp, recipient, amount); + + bridgedAmountAfter -= amount; + } + + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: amount, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: recipient, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), bridgedAmountAfter); + } + + function testChainNotAllowedReverts() public { + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: false, + remotePoolAddress: abi.encode(s_sourcePool), + remoteTokenAddress: abi.encode(s_sourceToken), + outboundRateLimiterConfig: RateLimiter.Config({isEnabled: false, capacity: 0, rate: 0}), + inboundRateLimiterConfig: RateLimiter.Config({isEnabled: false, capacity: 0, rate: 0}) + }); + + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdate); + vm.stopPrank(); + + vm.startPrank(s_allowedOffRamp); + + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.ChainNotAllowed.selector, SOURCE_CHAIN_SELECTOR)); + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: 1e5, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: OWNER, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + function testPoolMintNotHealthyReverts() public { + // Should not mint tokens if cursed. 
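+ // (the pool consults the RMN proxy during _validateReleaseOrMint, so a global curse + // makes the call revert before any tokens move)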
+ s_mockRMN.setGlobalCursed(true); + uint256 before = s_token.balanceOf(OWNER); + vm.startPrank(s_allowedOffRamp); + vm.expectRevert(EVM2EVMOffRamp.CursedByRMN.selector); + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: 1e5, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: OWNER, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + assertEq(s_token.balanceOf(OWNER), before); + } + + function testReleaseNoFundsReverts() public { + uint256 amount = 1; + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + + vm.expectRevert(stdError.arithmeticError); + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: amount, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: STRANGER, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = _getInboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(amount); + s_ghoTokenPool.setChainRateLimiterConfig( + DEST_CHAIN_SELECTOR, + RateLimiter.Config({isEnabled: true, capacity: type(uint128).max, rate: 1e15}), + _getInboundRateLimiterConfig() + ); + vm.warp(block.timestamp + 1e50); // wait to refill capacity + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_token)) + ); + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: amount, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: STRANGER, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + function testBridgedAmountNotEnoughReverts() public { + uint256 amount = 10; + vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.NotEnoughBridgedAmount.selector)); + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: amount, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: STRANGER, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } +} + +contract GhoTokenPoolEthereum_canAcceptLiquidity is GhoTokenPoolEthereumSetup { + function test_CanAcceptLiquiditySuccess() public { + assertEq(true, s_ghoTokenPool.canAcceptLiquidity()); + + s_ghoTokenPool = new UpgradeableLockReleaseTokenPool(address(s_token), address(s_mockRMN), 
false, false); + + assertEq(false, s_ghoTokenPool.canAcceptLiquidity()); + } +} + +contract GhoTokenPoolEthereum_provideLiquidity is GhoTokenPoolEthereumSetup { + function testFuzz_ProvideLiquiditySuccess(uint256 amount) public { + vm.assume(amount < type(uint128).max); + + uint256 balancePre = s_token.balanceOf(OWNER); + s_token.approve(address(s_ghoTokenPool), amount); + + s_ghoTokenPool.provideLiquidity(amount); + + assertEq(s_token.balanceOf(OWNER), balancePre - amount); + assertEq(s_token.balanceOf(address(s_ghoTokenPool)), amount); + } + + // Reverts + + function test_UnauthorizedReverts() public { + vm.startPrank(STRANGER); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, STRANGER)); + + s_ghoTokenPool.provideLiquidity(1); + } + + function testFuzz_ExceedsAllowance(uint256 amount) public { + vm.assume(amount > 0); + vm.expectRevert(stdError.arithmeticError); + s_ghoTokenPool.provideLiquidity(amount); + } + + function testLiquidityNotAcceptedReverts() public { + s_ghoTokenPool = new UpgradeableLockReleaseTokenPool(address(s_token), address(s_mockRMN), false, false); + + vm.expectRevert(LockReleaseTokenPool.LiquidityNotAccepted.selector); + s_ghoTokenPool.provideLiquidity(1); + } +} + +contract GhoTokenPoolEthereum_withdrawalLiquidity is GhoTokenPoolEthereumSetup { + function testFuzz_WithdrawalLiquiditySuccess(uint256 amount) public { + vm.assume(amount < type(uint128).max); + + uint256 balancePre = s_token.balanceOf(OWNER); + s_token.approve(address(s_ghoTokenPool), amount); + s_ghoTokenPool.provideLiquidity(amount); + + s_ghoTokenPool.withdrawLiquidity(amount); + + assertEq(s_token.balanceOf(OWNER), balancePre); + } + + // Reverts + + function test_UnauthorizedReverts() public { + vm.startPrank(STRANGER); + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, STRANGER)); + + s_ghoTokenPool.withdrawLiquidity(1); + } + + function testInsufficientLiquidityReverts() public { + uint256 maxUint128 = 2 ** 128 - 1; + s_token.approve(address(s_ghoTokenPool), maxUint128); + s_ghoTokenPool.provideLiquidity(maxUint128); + + changePrank(address(s_ghoTokenPool)); + s_token.transfer(OWNER, maxUint128); + changePrank(OWNER); + + vm.expectRevert(LockReleaseTokenPool.InsufficientLiquidity.selector); + s_ghoTokenPool.withdrawLiquidity(1); + } +} + +contract GhoTokenPoolEthereum_supportsInterface is GhoTokenPoolEthereumSetup { + function testSupportsInterfaceSuccess() public view { + assertTrue(s_ghoTokenPool.supportsInterface(type(ILiquidityContainer).interfaceId)); + assertTrue(s_ghoTokenPool.supportsInterface(type(IPoolV1).interfaceId)); + assertTrue(s_ghoTokenPool.supportsInterface(type(IERC165).interfaceId)); + } +} + +contract GhoTokenPoolEthereum_setChainRateLimiterConfig is GhoTokenPoolEthereumSetup { + event ConfigChanged(RateLimiter.Config); + event ChainConfigured( + uint64 chainSelector, + RateLimiter.Config outboundRateLimiterConfig, + RateLimiter.Config inboundRateLimiterConfig + ); + + uint64 internal s_remoteChainSelector; + + function setUp() public virtual override { + GhoTokenPoolEthereumSetup.setUp(); + UpgradeableTokenPool.ChainUpdate[] memory chainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + s_remoteChainSelector = 123124; + chainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: s_remoteChainSelector, + allowed: true, + remotePoolAddress: abi.encode(s_sourcePool), + remoteTokenAddress: abi.encode(s_sourceToken), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: 
_getInboundRateLimiterConfig() + }); + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdates); + changePrank(OWNER); + } + + function testFuzz_SetChainRateLimiterConfigSuccess(uint128 capacity, uint128 rate, uint32 newTime) public { + // Cap the lower bound to 4 so 4/2 is still >= 2 + vm.assume(capacity >= 4); + // Cap the lower bound to 2 so 2/2 is still >= 1 + rate = uint128(bound(rate, 2, capacity - 2)); + // Bucket updates only work on increasing time + newTime = uint32(bound(newTime, block.timestamp + 1, type(uint32).max)); + vm.warp(newTime); + + uint256 oldOutboundTokens = s_ghoTokenPool.getCurrentOutboundRateLimiterState(s_remoteChainSelector).tokens; + uint256 oldInboundTokens = s_ghoTokenPool.getCurrentInboundRateLimiterState(s_remoteChainSelector).tokens; + + RateLimiter.Config memory newOutboundConfig = RateLimiter.Config({isEnabled: true, capacity: capacity, rate: rate}); + RateLimiter.Config memory newInboundConfig = RateLimiter.Config({ + isEnabled: true, + capacity: capacity / 2, + rate: rate / 2 + }); + + vm.expectEmit(); + emit ConfigChanged(newOutboundConfig); + vm.expectEmit(); + emit ConfigChanged(newInboundConfig); + vm.expectEmit(); + emit ChainConfigured(s_remoteChainSelector, newOutboundConfig, newInboundConfig); + + changePrank(AAVE_DAO); + s_ghoTokenPool.setChainRateLimiterConfig(s_remoteChainSelector, newOutboundConfig, newInboundConfig); + + uint256 expectedTokens = RateLimiter._min(newOutboundConfig.capacity, oldOutboundTokens); + + RateLimiter.TokenBucket memory bucket = s_ghoTokenPool.getCurrentOutboundRateLimiterState(s_remoteChainSelector); + assertEq(bucket.capacity, newOutboundConfig.capacity); + assertEq(bucket.rate, newOutboundConfig.rate); + assertEq(bucket.tokens, expectedTokens); + assertEq(bucket.lastUpdated, newTime); + + expectedTokens = RateLimiter._min(newInboundConfig.capacity, oldInboundTokens); + + bucket = s_ghoTokenPool.getCurrentInboundRateLimiterState(s_remoteChainSelector); + assertEq(bucket.capacity, newInboundConfig.capacity); + assertEq(bucket.rate, newInboundConfig.rate); + assertEq(bucket.tokens, expectedTokens); + assertEq(bucket.lastUpdated, newTime); + } + + function testOnlyOwnerOrRateLimitAdminSuccess() public { + address rateLimiterAdmin = address(28973509103597907); + + changePrank(AAVE_DAO); + s_ghoTokenPool.setRateLimitAdmin(rateLimiterAdmin); + + changePrank(rateLimiterAdmin); + + s_ghoTokenPool.setChainRateLimiterConfig( + s_remoteChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + + changePrank(AAVE_DAO); + + s_ghoTokenPool.setChainRateLimiterConfig( + s_remoteChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + } + + // Reverts + + function testOnlyOwnerReverts() public { + changePrank(STRANGER); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, STRANGER)); + s_ghoTokenPool.setChainRateLimiterConfig( + s_remoteChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + } + + function testNonExistentChainReverts() public { + uint64 wrongChainSelector = 9084102894; + + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.NonExistentChain.selector, wrongChainSelector)); + changePrank(AAVE_DAO); + s_ghoTokenPool.setChainRateLimiterConfig( + wrongChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + } +} + +contract GhoTokenPoolEthereum_setRateLimitAdmin is GhoTokenPoolEthereumSetup { + function testSetRateLimitAdminSuccess() public { + 
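+ // The rate limit admin defaults to address(0) until the owner sets it.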
assertEq(address(0), s_ghoTokenPool.getRateLimitAdmin()); + changePrank(AAVE_DAO); + s_ghoTokenPool.setRateLimitAdmin(OWNER); + assertEq(OWNER, s_ghoTokenPool.getRateLimitAdmin()); + } + + // Reverts + + function testSetRateLimitAdminReverts() public { + vm.startPrank(STRANGER); + + vm.expectRevert("Only callable by owner"); + s_ghoTokenPool.setRateLimitAdmin(STRANGER); + } +} + +contract GhoTokenPoolEthereum_setBridgeLimit is GhoTokenPoolEthereumSetup { + event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); + event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); + + function testSetBridgeLimitAdminSuccess() public { + assertEq(INITIAL_BRIDGE_LIMIT, s_ghoTokenPool.getBridgeLimit()); + + uint256 newBridgeLimit = INITIAL_BRIDGE_LIMIT * 2; + + vm.expectEmit(); + emit BridgeLimitUpdated(INITIAL_BRIDGE_LIMIT, newBridgeLimit); + + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(newBridgeLimit); + + assertEq(newBridgeLimit, s_ghoTokenPool.getBridgeLimit()); + + // Bridge Limit Admin + address bridgeLimitAdmin = address(28973509103597907); + + vm.expectEmit(); + emit BridgeLimitAdminUpdated(address(0), bridgeLimitAdmin); + + s_ghoTokenPool.setBridgeLimitAdmin(bridgeLimitAdmin); + + vm.startPrank(bridgeLimitAdmin); + newBridgeLimit += 1; + + s_ghoTokenPool.setBridgeLimit(newBridgeLimit); + + assertEq(newBridgeLimit, s_ghoTokenPool.getBridgeLimit()); + } + + function testZeroBridgeLimitReverts() public { + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(0); + + uint256 amount = 1; + + vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, 0)); + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + } + + function testBridgeLimitBelowCurrent() public { + // Increase current bridged amount to 10 + uint256 amount = 10e18; + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: amount, + localToken: address(s_token) + }) + ); + + // Reduce bridge limit below current bridged amount + vm.startPrank(AAVE_DAO); + uint256 newBridgeLimit = amount - 1; + s_ghoTokenPool.setBridgeLimit(newBridgeLimit); + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), amount); + assertEq(s_ghoTokenPool.getBridgeLimit(), newBridgeLimit); + assertGt(s_ghoTokenPool.getCurrentBridgedAmount(), s_ghoTokenPool.getBridgeLimit()); + + // Lock reverts due to maxed out bridge limit + vm.expectRevert( + abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, newBridgeLimit) + ); + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: 1, + localToken: address(s_token) + }) + ); + + // Increase bridge limit by 1 + vm.startPrank(AAVE_DAO); + newBridgeLimit = amount + 1; + s_ghoTokenPool.setBridgeLimit(newBridgeLimit); + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), amount); + assertEq(s_ghoTokenPool.getBridgeLimit(), newBridgeLimit); + assertGt(s_ghoTokenPool.getBridgeLimit(), s_ghoTokenPool.getCurrentBridgedAmount()); + + // Bridge limit maxed out again + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + 
Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: 1, + localToken: address(s_token) + }) + ); + assertEq(s_ghoTokenPool.getBridgeLimit(), s_ghoTokenPool.getCurrentBridgedAmount()); + } + + function testCurrentBridgedAmountRecover() public { + // Reach maximum + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: INITIAL_BRIDGE_LIMIT, + localToken: address(s_token) + }) + ); + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT); + assertEq(s_ghoTokenPool.getBridgeLimit(), s_ghoTokenPool.getCurrentBridgedAmount()); + + // Lock reverts due to maxed out bridge limit + vm.expectRevert( + abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, INITIAL_BRIDGE_LIMIT) + ); + s_ghoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: DEST_CHAIN_SELECTOR, + originalSender: STRANGER, + amount: 1, + localToken: address(s_token) + }) + ); + + // Amount available to bridge recovers thanks to liquidity coming back + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: true, + remotePoolAddress: abi.encode(s_sourcePool), + remoteTokenAddress: abi.encode(s_sourceToken), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdate); + + uint256 amount = 10; + deal(address(s_token), address(s_ghoTokenPool), amount); + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: amount, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + receiver: OWNER, + localToken: address(s_token), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT - amount); + } + + // Reverts + + function testSetBridgeLimitAdminReverts() public { + vm.startPrank(STRANGER); + + vm.expectRevert(abi.encodeWithSelector(Unauthorized.selector, STRANGER)); + s_ghoTokenPool.setBridgeLimit(0); + } +} + +contract GhoTokenPoolEthereum_setBridgeLimitAdmin is GhoTokenPoolEthereumSetup { + event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); + + function testSetBridgeLimitAdminSuccess() public { + assertEq(address(0), s_ghoTokenPool.getBridgeLimitAdmin()); + + address bridgeLimitAdmin = address(28973509103597907); + changePrank(AAVE_DAO); + + vm.expectEmit(); + emit BridgeLimitAdminUpdated(address(0), bridgeLimitAdmin); + + s_ghoTokenPool.setBridgeLimitAdmin(bridgeLimitAdmin); + + assertEq(bridgeLimitAdmin, s_ghoTokenPool.getBridgeLimitAdmin()); + } + + // Reverts + + function testSetBridgeLimitAdminReverts() public { + vm.startPrank(STRANGER); + + vm.expectRevert("Only callable by owner"); + s_ghoTokenPool.setBridgeLimitAdmin(STRANGER); + } +} + +contract GhoTokenPoolEthereum_upgradeability is GhoTokenPoolEthereumSetup { + function testInitialization() public { + // Upgradeability + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 1); + vm.startPrank(PROXY_ADMIN); + (bool ok, bytes memory result) = 
address(s_ghoTokenPool).staticcall( + abi.encodeWithSelector(TransparentUpgradeableProxy.admin.selector) + ); + assertTrue(ok, "proxy admin fetch failed"); + address decodedProxyAdmin = abi.decode(result, (address)); + assertEq(decodedProxyAdmin, PROXY_ADMIN, "proxy admin is wrong"); + assertEq(decodedProxyAdmin, _getProxyAdminAddress(address(s_ghoTokenPool)), "proxy admin is wrong"); + + // TokenPool + vm.startPrank(OWNER); + assertEq(s_ghoTokenPool.getAllowList().length, 0); + assertEq(s_ghoTokenPool.getAllowListEnabled(), false); + assertEq(s_ghoTokenPool.getRmnProxy(), address(s_mockRMN)); + assertEq(s_ghoTokenPool.getRouter(), address(s_sourceRouter)); + assertEq(address(s_ghoTokenPool.getToken()), address(s_token)); + assertEq(s_ghoTokenPool.owner(), AAVE_DAO, "owner is wrong"); + } + + function testUpgrade() public { + MockUpgradeable newImpl = new MockUpgradeable(); + bytes memory mockImplParams = abi.encodeWithSignature("initialize()"); + vm.startPrank(PROXY_ADMIN); + TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeToAndCall(address(newImpl), mockImplParams); + + vm.startPrank(OWNER); + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 2); + } + + function testUpgradeAdminReverts() public { + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeToAndCall(address(0), bytes("")); + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 1); + + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeTo(address(0)); + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 1); + } + + function testChangeAdmin() public { + assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), PROXY_ADMIN); + + address newAdmin = makeAddr("newAdmin"); + vm.startPrank(PROXY_ADMIN); + TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).changeAdmin(newAdmin); + + assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), newAdmin, "Admin change failed"); + } + + function testChangeAdminAdminReverts() public { + assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), PROXY_ADMIN); + + address newAdmin = makeAddr("newAdmin"); + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).changeAdmin(newAdmin); + + assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), PROXY_ADMIN, "Unauthorized admin change"); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumBridgeLimit.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumBridgeLimit.t.sol new file mode 100644 index 0000000000..32ff72b63e --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumBridgeLimit.t.sol @@ -0,0 +1,1041 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; + +import {IPoolV1} from "../../../interfaces/IPool.sol"; +import {Pool} from "../../../libraries/Pool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; + +contract GhoTokenPoolEthereumBridgeLimitSetup is GhoBaseTest { + UtilsStorage public s; + + function setUp() public virtual override { + // Ethereum with id 0 + s.chainsList.push(0); + s.tokens[0] = address(new GhoToken(AAVE_DAO)); + s.pools[0] = _deployUpgradeableLockReleaseTokenPool( + s.tokens[0], + RMN_PROXY, + ROUTER, + OWNER, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ); + + // Mock calls for bridging + vm.mockCall(ROUTER, 
abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), abi.encode(RAMP)); + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("isOffRamp(uint64,address)"))), abi.encode(true)); + vm.mockCall(RMN_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed()"))), abi.encode(false)); + vm.mockCall(RMN_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed(bytes16)"))), abi.encode(false)); + } + + function _assertInvariant() internal view { + // Check bridged + assertEq(UpgradeableLockReleaseTokenPool(s.pools[0]).getCurrentBridgedAmount(), s.bridged); + + // Check levels and buckets + uint256 sumLevels; + uint256 chainId; + uint256 capacity; + uint256 level; + for (uint i = 1; i < s.chainsList.length; i++) { + // not counting Ethereum (chain id 0) + chainId = s.chainsList[i]; + (capacity, level) = GhoToken(s.tokens[chainId]).getFacilitatorBucket(s.pools[chainId]); + + // Aggregate levels + sumLevels += level; + + assertEq(capacity, s.bucketCapacities[chainId], "wrong bucket capacity"); + assertEq(level, s.bucketLevels[chainId], "wrong bucket level"); + + assertEq( + capacity, + UpgradeableLockReleaseTokenPool(s.pools[0]).getBridgeLimit(), + "capacity must be equal to bridgeLimit" + ); + assertLe( + level, + UpgradeableLockReleaseTokenPool(s.pools[0]).getBridgeLimit(), + "level cannot be higher than bridgeLimit" + ); + } + // Check bridged is equal to sum of levels + assertEq(UpgradeableLockReleaseTokenPool(s.pools[0]).getCurrentBridgedAmount(), sumLevels, "wrong bridged"); + assertEq(s.remoteLiquidity, sumLevels, "wrong remote liquidity"); + } +} + +contract GhoTokenPoolEthereumBridgeLimitSimpleScenario is GhoTokenPoolEthereumBridgeLimitSetup { + function setUp() public virtual override { + super.setUp(); + + // Arbitrum + _addBridge(s, 1, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 1); + } + + function testFuzz_Bridge(uint256 amount) public { + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + amount = bound(amount, 1, maxAmount); + + _assertInvariant(); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + deal(s.tokens[0], USER, amount); + _moveGhoOrigin(s, 0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + _moveGhoDestination(s, 0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketLevels[1]); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); + + _assertInvariant(); + } + + function testBridgeAll() public { + _assertInvariant(); + + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + deal(s.tokens[0], USER, maxAmount); + _moveGhoOrigin(s, 0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + _moveGhoDestination(s, 0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + 
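+ // Worked example of the invariant _assertInvariant() re-checks below (illustrative numbers only, assuming a bridge limit of 100e18 and a single remote chain): + // getCurrentBridgedAmount() == 100e18 // locked on Ethereum + // sum of remote facilitator levels == 100e18 // minted on chain 1 + // Since the locked amount equals the sum of remote levels and the limit is fully used, even a 1 wei lockOrBurn must revert until liquidity flows back. 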
_assertInvariant(); + } + + /// @dev Bridge out two times + function testFuzz_BridgeTwoSteps(uint256 amount1, uint256 amount2) public { + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + amount1 = bound(amount1, 1, maxAmount); + amount2 = bound(amount2, 1, maxAmount); + + _assertInvariant(); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + deal(s.tokens[0], USER, amount1); + _moveGhoOrigin(s, 0, 1, USER, amount1); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount1); + assertEq(_getMaxToBridgeIn(s, 0), amount1); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + _moveGhoDestination(s, 0, 1, USER, amount1); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount1); + assertEq(_getMaxToBridgeIn(s, 0), amount1); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketLevels[1]); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); + + _assertInvariant(); + + // Bridge up to bridge limit amount + if (amount1 + amount2 > maxAmount) { + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[0]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: uint64(1), + originalSender: USER, + amount: amount2, + localToken: s.tokens[0] + }) + ); + + amount2 = maxAmount - amount1; + } + + if (amount2 > 0) { + _assertInvariant(); + + uint256 acc = amount1 + amount2; + deal(s.tokens[0], USER, amount2); + _moveGhoOrigin(s, 0, 1, USER, amount2); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(s, 0), acc); + assertEq(_getMaxToBridgeOut(s, 1), amount1); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount1); + + _moveGhoDestination(s, 0, 1, USER, amount2); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(s, 0), acc); + assertEq(_getMaxToBridgeOut(s, 1), acc); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - acc); + + _assertInvariant(); + } + } + + /// @dev Bridge some tokens out and later, bridge them back in + function testFuzz_BridgeBackAndForth(uint256 amountOut, uint256 amountIn) public { + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + amountOut = bound(amountOut, 1, maxAmount); + amountIn = bound(amountIn, 1, _getCapacity(s, 1)); + + _assertInvariant(); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + deal(s.tokens[0], USER, amountOut); + _moveGhoOrigin(s, 0, 1, USER, amountOut); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(s, 0), amountOut); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + + _moveGhoDestination(s, 0, 1, USER, amountOut); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(s, 0), amountOut); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketLevels[1]); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); + + _assertInvariant(); + + // Bridge up to current bridged amount + if (amountIn > amountOut) { + // Simulate revert on destination + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[0]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + remoteChainSelector: uint64(1), + receiver: USER, + amount: amountIn, + 
localToken: s.tokens[0], + sourcePoolAddress: abi.encode(address(s.pools[1])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + + amountIn = amountOut; + } + + if (amountIn > 0) { + _assertInvariant(); + + uint256 acc = amountOut - amountIn; + deal(s.tokens[1], USER, amountIn); + _moveGhoOrigin(s, 1, 0, USER, amountIn); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(s, 0), amountOut); + assertEq(_getMaxToBridgeOut(s, 1), acc); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - acc); + + _moveGhoDestination(s, 1, 0, USER, amountIn); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(s, 0), acc); + assertEq(_getMaxToBridgeOut(s, 1), acc); + assertEq(_getMaxToBridgeIn(s, 1), maxAmount - acc); + + _assertInvariant(); + } + } + + /// @dev Bridge from Ethereum to Arbitrum reverts if amount is higher than bridge limit + function testFuzz_BridgeBridgeLimitExceededSourceReverts(uint256 amount, uint256 bridgeAmount) public { + vm.assume(amount < type(uint128).max); + vm.assume(bridgeAmount < INITIAL_BRIDGE_LIMIT); + + // Inflate bridgeAmount + if (bridgeAmount > 0) { + deal(s.tokens[0], USER, bridgeAmount); + _bridgeGho(s, 0, 1, USER, bridgeAmount); + } + + deal(s.tokens[0], USER, amount); + // Simulate CCIP pull of funds + vm.startPrank(USER); + GhoToken(s.tokens[0]).transfer(s.pools[0], amount); + + if (bridgeAmount + amount > INITIAL_BRIDGE_LIMIT) { + vm.expectRevert(); + } + vm.startPrank(RAMP); + IPoolV1(s.pools[0]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: uint64(1), + originalSender: USER, + amount: amount, + localToken: s.tokens[0] + }) + ); + } + + /// @dev Bridge from Ethereum to Arbitrum reverts if amount is higher than capacity available + function testFuzz_BridgeCapacityExceededDestinationReverts(uint256 amount, uint256 level) public { + (uint256 capacity, ) = GhoToken(s.tokens[1]).getFacilitatorBucket(s.pools[1]); + vm.assume(level < capacity); + amount = bound(amount, 1, type(uint128).max); + + // Inflate level + if (level > 0) { + _inflateFacilitatorLevel(s.pools[1], s.tokens[1], level); + } + + // Skip origin move + + // Destination execution + if (amount > capacity - level) { + vm.expectRevert(); + } + vm.prank(RAMP); + IPoolV1(s.pools[1]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + remoteChainSelector: uint64(0), + receiver: USER, + amount: amount, + localToken: s.tokens[1], + sourcePoolAddress: abi.encode(address(s.pools[0])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + /// @dev Bridge from Arbitrum to Ethereum reverts if Arbitrum level is lower than amount + function testFuzz_BridgeBackZeroLevelSourceReverts(uint256 amount, uint256 level) public { + (uint256 capacity, ) = GhoToken(s.tokens[1]).getFacilitatorBucket(s.pools[1]); + vm.assume(level < capacity); + amount = bound(amount, 1, capacity - level); + + // Inflate level + if (level > 0) { + _inflateFacilitatorLevel(s.pools[1], s.tokens[1], level); + } + + deal(s.tokens[1], USER, amount); + // Simulate CCIP pull of funds + vm.prank(USER); + GhoToken(s.tokens[1]).transfer(s.pools[1], amount); + + if (amount > level) { + vm.expectRevert(); + } + vm.prank(RAMP); + IPoolV1(s.pools[1]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: uint64(0), + originalSender: USER, + amount: amount, + localToken: s.tokens[1] + }) + ); + } + + /// @dev Bridge from Arbitrum to Ethereum reverts if 
Ethereum current bridged amount is lower than amount + function testFuzz_BridgeBackZeroBridgeLimitDestinationReverts(uint256 amount, uint256 bridgeAmount) public { + (uint256 capacity, ) = GhoToken(s.tokens[1]).getFacilitatorBucket(s.pools[1]); + amount = bound(amount, 1, capacity); + bridgeAmount = bound(bridgeAmount, 0, capacity - amount); + + // Inflate bridgeAmount + if (bridgeAmount > 0) { + deal(s.tokens[0], USER, bridgeAmount); + _bridgeGho(s, 0, 1, USER, bridgeAmount); + } + + // Inflate level on Arbitrum + _inflateFacilitatorLevel(s.pools[1], s.tokens[1], amount); + + // Skip origin move + + // Destination execution + if (amount > bridgeAmount) { + vm.expectRevert(); + } + vm.prank(RAMP); + IPoolV1(s.pools[0]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + remoteChainSelector: uint64(1), + receiver: USER, + amount: amount, + localToken: s.tokens[0], + sourcePoolAddress: abi.encode(address(s.pools[1])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + /// @dev Bucket capacity reduction. Caution: bridge limit reduction must happen first + function testReduceBucketCapacity() public { + // Max out capacity + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; + // 1. Reduce bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + // 2. Reduce bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + // Maximum to bridge in is all minted on Arbitrum + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), maxAmount); + + _bridgeGho(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); + + _assertInvariant(); + } + + /// @dev Bucket capacity reduction, performed following wrong order procedure + function testReduceBucketCapacityIncorrectProcedure() public { + // Bridge a third of the capacity + uint256 amount = _getMaxToBridgeOut(s, 0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(s, 0) - amount; + + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); + + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount); + assertEq(_getLevel(s, 1), amount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; + /// @dev INCORRECT ORDER PROCEDURE!! bridge limit reduction should happen first + // 1. 
Reduce bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), availableToBridge); // this is the UX issue + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge - 10); + + // User can come and try to max bridge on Arbitrum + // Transaction will succeed on Ethereum, but revert on Arbitrum + deal(s.tokens[0], USER, availableToBridge); + _moveGhoOrigin(s, 0, 1, USER, availableToBridge); + assertEq(_getMaxToBridgeOut(s, 0), 0); + + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[1]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + remoteChainSelector: uint64(0), + receiver: USER, + amount: availableToBridge, + localToken: s.tokens[1], + sourcePoolAddress: abi.encode(address(s.pools[0])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + + // User can only bridge up to new bucket capacity (10 units less) + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge - 10); + vm.prank(RAMP); + IPoolV1(s.pools[1]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + remoteChainSelector: uint64(0), + receiver: USER, + amount: availableToBridge - 10, + localToken: s.tokens[1], + sourcePoolAddress: abi.encode(address(s.pools[0])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + // 2. Reduce bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); + } + + /// @dev Bucket capacity reduction, with a bridge out in between + function testReduceBucketCapacityWithBridgeOutInBetween() public { + // Bridge a third of the capacity + uint256 amount = _getMaxToBridgeOut(s, 0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(s, 0) - amount; + + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); + + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount); + assertEq(_getLevel(s, 1), amount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; + // 1. Reduce bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), availableToBridge - 10); + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge); + + // User initiates bridge out action + uint256 amount2 = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, amount2); + _moveGhoOrigin(s, 0, 1, USER, amount2); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + + // 2. 
Reduce bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + // Destination execution can happen, no more bridge out actions can be initiated + assertEq(_getMaxToBridgeOut(s, 1), amount); + assertEq(_getMaxToBridgeIn(s, 1), amount2); + + // Finalize bridge out action + _moveGhoDestination(s, 0, 1, USER, amount2); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + _assertInvariant(); + } + + /// @dev Bucket capacity reduction, with a bridge in in between + function testReduceBucketCapacityWithBridgeInInBetween() public { + // Bridge max amount + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; + // 1. Reduce bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + // User initiates bridge in action + _moveGhoOrigin(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), maxAmount); + + // 2. Reduce bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + + // Finalize bridge in action + _moveGhoDestination(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); + + _assertInvariant(); + } + + /// @dev Bucket capacity increase. Caution: bridge limit increase must happen afterwards + function testIncreaseBucketCapacity() public { + // Max out capacity + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; + // 1. Increase bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 10); + + // Reverts if a user tries to bridge out 10 + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[0]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: uint64(1), + originalSender: USER, + amount: 10, + localToken: s.tokens[0] + }) + ); + + // 2. 
Increase bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 10); + + _assertInvariant(); + + // Now it is possible to bridge some again + _bridgeGho(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); + + _assertInvariant(); + } + + /// @dev Bucket capacity increase, performed following wrong order procedure + function testIncreaseBucketCapacityIncorrectProcedure() public { + // Max out capacity + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; + + /// @dev INCORRECT ORDER PROCEDURE!! bucket capacity increase should happen first + // 1. Increase bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 0); // this is the UX issue + + // User can come and try to max bridge on Arbitrum + // Transaction will succeed on Ethereum, but revert on Arbitrum + deal(s.tokens[0], USER, 10); + _moveGhoOrigin(s, 0, 1, USER, 10); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + + // Execution on destination will revert until bucket capacity gets increased + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[1]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + remoteChainSelector: uint64(0), + receiver: USER, + amount: 10, + localToken: s.tokens[1], + sourcePoolAddress: abi.encode(address(s.pools[0])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + + // 2. Increase bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 10); + + // Now it is possible to execute on destination + _moveGhoDestination(s, 0, 1, USER, 10); + + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + _assertInvariant(); + } + + /// @dev Bucket capacity increase, with a bridge out in between + function testIncreaseBucketCapacityWithBridgeOutInBetween() public { + // Bridge a third of the capacity + uint256 amount = _getMaxToBridgeOut(s, 0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(s, 0) - amount; + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); + + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount); + assertEq(_getLevel(s, 1), amount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; + // 1. 
Increase bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), availableToBridge); + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge + 10); + + // Reverts if a user tries to bridge out all up to new bucket capacity + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[0]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: uint64(1), + originalSender: USER, + amount: availableToBridge + 10, + localToken: s.tokens[0] + }) + ); + + // User initiates bridge out action + deal(s.tokens[0], USER, availableToBridge); + _bridgeGho(s, 0, 1, USER, availableToBridge); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 10); + + // 2. Increase bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 10); + + _assertInvariant(); + + // Now it is possible to bridge some again + deal(s.tokens[0], USER, 10); + _bridgeGho(s, 0, 1, USER, 10); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + _assertInvariant(); + } + + /// @dev Bucket capacity increase, with a bridge in in between + function testIncreaseBucketCapacityWithBridgeInInBetween() public { + // Max out capacity + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; + // 1. Increase bucket capacity + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 10); + + // User initiates bridge in action + _moveGhoOrigin(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); + + // 2. 
Increase bridge limit + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + + // User finalizes bridge in action + _moveGhoDestination(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); + + _assertInvariant(); + + // Now it is possible to bridge new bucket capacity + deal(s.tokens[0], USER, newBucketCapacity); + _bridgeGho(s, 0, 1, USER, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); + + _assertInvariant(); + } +} + +contract GhoTokenPoolEthereumBridgeLimitTripleScenario is GhoTokenPoolEthereumBridgeLimitSetup { + function setUp() public virtual override { + super.setUp(); + + // Arbitrum + _addBridge(s, 1, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 1); + + // Avalanche + _addBridge(s, 2, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 1, 2); + _enableLane(s, 0, 2); + } + + /// @dev Bridge out some tokens to third chain via second chain (Ethereum to Arbitrum, Arbitrum to Avalanche) + function testFuzz_BridgeToTwoToThree(uint256 amount) public { + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + amount = bound(amount, 1, maxAmount); + + _assertInvariant(); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); + + deal(s.tokens[0], USER, amount); + _moveGhoOrigin(s, 0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); + + _moveGhoDestination(s, 0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), amount); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); + + _assertInvariant(); + + _moveGhoOrigin(s, 1, 2, USER, amount); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); + + _moveGhoDestination(s, 1, 2, USER, amount); + + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), amount); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2] - amount); + + _assertInvariant(); + } + + /// @dev Bridge out some tokens to second and third chain randomly + function testFuzz_BridgeRandomlyToTwoAndThree(uint64[] memory amounts) public { + vm.assume(amounts.length < 30); + + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + uint256 sourceAcc; + uint256 amount; + uint256 dest; + bool lastTime; + for (uint256 i = 0; i < 
amounts.length && !lastTime; i++) { + amount = amounts[i]; + + if (amount == 0) amount += 1; + if (sourceAcc + amount > maxAmount) { + amount = maxAmount - sourceAcc; + lastTime = true; + } + + dest = (amount % 2) + 1; + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, dest, USER, amount); + + sourceAcc += amount; + } + assertEq(sourceAcc, s.bridged); + + // Bridge all to Avalanche + uint256 toBridge = _getMaxToBridgeOut(s, 1); + if (toBridge > 0) { + _bridgeGho(s, 1, 2, USER, toBridge); + assertEq(sourceAcc, s.bridged); + assertEq(_getLevel(s, 2), s.bridged); + assertEq(_getLevel(s, 1), 0); + } + } + + /// @dev All remote liquidity is on one chain or the other + function testLiquidityUnbalanced() public { + // Bridge all out to Arbitrum + uint256 amount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); + + // No more liquidity can go remotely + assertEq(_getMaxToBridgeOut(s, 0), 0); + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[0]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: uint64(1), + originalSender: USER, + amount: 1, + localToken: s.tokens[0] + }) + ); + vm.prank(RAMP); + vm.expectRevert(); + IPoolV1(s.pools[0]).lockOrBurn( + Pool.LockOrBurnInV1({ + receiver: bytes(""), + remoteChainSelector: uint64(2), + originalSender: USER, + amount: 1, + localToken: s.tokens[0] + }) + ); + + // All liquidity on Arbitrum, 0 on Avalanche + assertEq(_getLevel(s, 1), s.bridged); + assertEq(_getLevel(s, 1), _getCapacity(s, 1)); + assertEq(_getLevel(s, 2), 0); + + // Move all liquidity to Avalanche + _bridgeGho(s, 1, 2, USER, amount); + assertEq(_getLevel(s, 1), 0); + assertEq(_getLevel(s, 2), s.bridged); + assertEq(_getLevel(s, 2), _getCapacity(s, 2)); + + // Move all liquidity back to Ethereum + _bridgeGho(s, 2, 0, USER, amount); + assertEq(_getLevel(s, 1), 0); + assertEq(_getLevel(s, 2), 0); + assertEq(s.bridged, 0); + assertEq(_getMaxToBridgeOut(s, 0), amount); + } + + /// @dev Test showcasing incorrect bridge limit and bucket capacity configuration + function testIncorrectBridgeLimitBucketConfig() public { + // BridgeLimit 10, Arbitrum 9, Avalanche Bucket 10 + _updateBridgeLimit(s, 10); + _updateBucketCapacity(s, 1, 9); + _updateBucketCapacity(s, 2, 10); + + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 9); // this is the issue + assertEq(_getMaxToBridgeIn(s, 2), 10); + + // Possible to bridge 10 out to 2 + deal(s.tokens[0], USER, 10); + _bridgeGho(s, 0, 2, USER, 10); + + // Liquidity comes back + _bridgeGho(s, 2, 0, USER, 10); + + // Not possible to bridge 10 out to 1 + _moveGhoOrigin(s, 0, 1, USER, 10); + // Reverts on destination + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[1]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: 10, + remoteChainSelector: uint64(0), + receiver: USER, + localToken: s.tokens[1], + sourcePoolAddress: abi.encode(address(s.pools[0])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + + // Only if bucket capacity gets increased, execution can succeed + _updateBucketCapacity(s, 1, 10); + _moveGhoDestination(s, 0, 1, USER, 10); + } + + /// @dev Test showcasing a user locked due to a bridge limit reduction below current bridged amount + function testUserLockedBridgeLimitReductionBelowLevel() public { + // Bridge all out to Arbitrum + uint256 amount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); + + // Reduce bridge limit below 
current bridged amount + uint256 newBridgeLimit = amount / 2; + _updateBridgeLimit(s, newBridgeLimit); + _updateBucketCapacity(s, 1, newBridgeLimit); + + // Moving to Avalanche is not a problem because bucket capacity is higher than bridge limit + assertGt(_getMaxToBridgeIn(s, 2), newBridgeLimit); + _bridgeGho(s, 1, 2, USER, amount); + + // Moving back to Arbitrum reverts on destination + assertEq(_getMaxToBridgeIn(s, 1), newBridgeLimit); + _moveGhoOrigin(s, 2, 1, USER, amount); + vm.expectRevert(); + vm.prank(RAMP); + IPoolV1(s.pools[1]).releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + amount: amount, + remoteChainSelector: uint64(2), + receiver: USER, + localToken: s.tokens[1], + sourcePoolAddress: abi.encode(address(s.pools[2])), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumE2E.t.sol new file mode 100644 index 0000000000..2e828c9bc2 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumE2E.t.sol @@ -0,0 +1,422 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; + +import {IERC20} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {CommitStore} from "../../../CommitStore.sol"; +import {EVM2EVMOnRamp} from "../../../onRamp/EVM2EVMOnRamp.sol"; +import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; +import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {IPriceRegistry} from "../../../interfaces/IPriceRegistry.sol"; +import {IRMN} from "../../../interfaces/IRMN.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {Client} from "../../../libraries/Client.sol"; +import {Internal} from "../../../libraries/Internal.sol"; +import {MerkleHelper} from "../../helpers/MerkleHelper.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; +import {E2E} from "../End2End.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; + +contract GhoTokenPoolEthereumE2E is E2E, GhoBaseTest { + using Internal for Internal.EVM2EVMMessage; + + IBurnMintERC20 internal srcGhoToken; + IBurnMintERC20 internal dstGhoToken; + UpgradeableLockReleaseTokenPool internal srcGhoTokenPool; + UpgradeableBurnMintTokenPool internal dstGhoTokenPool; + + function setUp() public virtual override(E2E, BaseTest) { + E2E.setUp(); + + // Deploy GHO Token on source chain + srcGhoToken = IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(srcGhoToken), OWNER, type(uint128).max); + // Add GHO token to source token list + s_sourceTokens.push(address(srcGhoToken)); + + // Deploy GHO Token on destination chain + dstGhoToken = IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(dstGhoToken), OWNER, type(uint128).max); + // Add GHO token to destination token list + s_destTokens.push(address(dstGhoToken)); + + // Deploy LockReleaseTokenPool for GHO token on source chain + srcGhoTokenPool = UpgradeableLockReleaseTokenPool( + _deployUpgradeableLockReleaseTokenPool( + address(srcGhoToken), + address(s_mockRMN), 
address(s_sourceRouter), + AAVE_DAO, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to source token pool list + s_sourcePoolByToken[address(srcGhoToken)] = address(srcGhoTokenPool); + s_destTokenBySourceToken[address(srcGhoToken)] = address(dstGhoToken); + + // Deploy BurnMintTokenPool for GHO token on destination chain + dstGhoTokenPool = UpgradeableBurnMintTokenPool( + _deployUpgradeableBurnMintTokenPool( + address(dstGhoToken), + address(s_mockRMN), + address(s_destRouter), + AAVE_DAO, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to destination token pool list + s_sourcePoolByToken[address(dstGhoToken)] = address(dstGhoTokenPool); + s_destTokenBySourceToken[address(dstGhoToken)] = address(srcGhoToken); + + // Give mint and burn privileges to destination UpgradeableTokenPool (GHO-specific related) + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(address(dstGhoToken)).grantRole(GhoToken(address(dstGhoToken)).FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + GhoToken(address(dstGhoToken)).addFacilitator(address(dstGhoTokenPool), "UpgradeableTokenPool", type(uint128).max); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Add config for source and destination chains + UpgradeableTokenPool.ChainUpdate[] memory srcChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + srcChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + remotePoolAddress: abi.encode(address(dstGhoTokenPool)), + remoteTokenAddress: abi.encode(address(dstGhoToken)), + allowed: true, + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + UpgradeableTokenPool.ChainUpdate[] memory dstChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + dstChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + remotePoolAddress: abi.encode(address(srcGhoTokenPool)), + remoteTokenAddress: abi.encode(address(srcGhoToken)), + allowed: true, + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.applyChainUpdates(srcChainUpdates); + dstGhoTokenPool.applyChainUpdates(dstChainUpdates); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Update GHO Token price on source PriceRegistry + EVM2EVMOnRamp.DynamicConfig memory onRampDynamicConfig = s_onRamp.getDynamicConfig(); + IPriceRegistry onRampPriceRegistry = IPriceRegistry(onRampDynamicConfig.priceRegistry); + onRampPriceRegistry.updatePrices(_getSingleTokenPriceUpdateStruct(address(srcGhoToken), 1e18)); + + // Update GHO Token price on destination PriceRegistry + EVM2EVMOffRamp.DynamicConfig memory offRampDynamicConfig = s_offRamp.getDynamicConfig(); + IPriceRegistry offRampPriceRegistry = IPriceRegistry(offRampDynamicConfig.priceRegistry); + offRampPriceRegistry.updatePrices(_getSingleTokenPriceUpdateStruct(address(dstGhoToken), 1e18)); + + s_tokenAdminRegistry.proposeAdministrator(address(srcGhoToken), AAVE_DAO); + s_tokenAdminRegistry.proposeAdministrator(address(dstGhoToken), AAVE_DAO); + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + s_tokenAdminRegistry.acceptAdminRole(address(srcGhoToken)); + s_tokenAdminRegistry.setPool(address(srcGhoToken), address(srcGhoTokenPool)); + s_tokenAdminRegistry.acceptAdminRole(address(dstGhoToken)); + s_tokenAdminRegistry.setPool(address(dstGhoToken), address(dstGhoTokenPool)); + vm.stopPrank(); + 
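+ // The registry handshake above is deliberately two-step: OWNER proposes AAVE_DAO as token administrator, then AAVE_DAO accepts the role and wires each token to its pool. A minimal sketch of the same flow for a hypothetical extra token (newToken and newPool are illustrative, not part of this setup): + // s_tokenAdminRegistry.proposeAdministrator(address(newToken), AAVE_DAO); + // vm.startPrank(AAVE_DAO); + // s_tokenAdminRegistry.acceptAdminRole(address(newToken)); + // s_tokenAdminRegistry.setPool(address(newToken), address(newPool)); 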
vm.startPrank(OWNER); + } + + function testE2E_MessagesSuccess_gas() public { + vm.pauseGasMetering(); + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + uint256 preBridgedAmount = srcGhoTokenPool.getCurrentBridgedAmount(); + uint256 preBridgeLimit = srcGhoTokenPool.getBridgeLimit(); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](1); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. + assertEq(preGhoTokenBalanceOwner - 1000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool + 1000 * 1e18, srcGhoToken.balanceOf(address(srcGhoTokenPool))); + assertGt(expectedFee, 0); + + assertEq(preBridgedAmount + 1000 * 1e18, srcGhoTokenPool.getCurrentBridgedAmount()); + assertEq(preBridgeLimit, srcGhoTokenPool.getBridgeLimit()); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](1); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: _getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[0].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + vm.mockCall( + s_commitStore.getStaticConfig().rmnProxy, + abi.encodeWithSelector(IRMN.isBlessed.selector, IRMN.TaggedRoot(address(s_commitStore), merkleRoots[0])), + abi.encode(true) + ); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so when execute would e.g. use the current + // block time instead of the committed block time the value would be + // incorrect in the checks below. 
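+ // Illustrative note (an assumption about intent, not asserted by the test): after the warp below, block.timestamp is BLOCK_TIME + 2000 while the committed timestamp stays BLOCK_TIME, so any code path that wrongly read the current block time would diverge and fail the balance and bucket checks that follow. 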
+ vm.warp(BLOCK_TIME + 2000); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[0].sequenceNumber, + messages[0].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages); + + uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + + s_offRamp.execute(execReport, new EVM2EVMOffRamp.GasLimitOverride[](0)); + + assertEq(preGhoTokenBalanceUser + 1000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination"); + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel + 1000 * 1e18, postLevel, "wrong facilitator bucket level"); + } + + function testE2E_3MessagesSuccess_gas() public { + vm.pauseGasMetering(); + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + uint256 preBridgedAmount = srcGhoTokenPool.getCurrentBridgedAmount(); + uint256 preBridgeLimit = srcGhoTokenPool.getBridgeLimit(); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](3); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + messages[1] = sendRequestGho(2, 2000 * 1e18, false, false); + messages[2] = sendRequestGho(3, 3000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. + assertEq(preGhoTokenBalanceOwner - 6000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool + 6000 * 1e18, srcGhoToken.balanceOf(address(srcGhoTokenPool))); + assertGt(expectedFee, 0); + + assertEq(preBridgedAmount + 6000 * 1e18, srcGhoTokenPool.getCurrentBridgedAmount()); + assertEq(preBridgeLimit, srcGhoTokenPool.getBridgeLimit()); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](3); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + hashedMessages[1] = messages[1]._hash(metaDataHash); + messages[1].messageId = hashedMessages[1]; + hashedMessages[2] = messages[2]._hash(metaDataHash); + messages[2].messageId = hashedMessages[2]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: _getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[2].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + vm.mockCall( + s_commitStore.getStaticConfig().rmnProxy, + abi.encodeWithSelector(IRMN.isBlessed.selector, IRMN.TaggedRoot(address(s_commitStore), merkleRoots[0])), + abi.encode(true) + ); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so when execute would e.g. 
use the current + // block time instead of the committed block time the value would be + // incorrect in the checks below. + vm.warp(BLOCK_TIME + 2000); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[0].sequenceNumber, + messages[0].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[1].sequenceNumber, + messages[1].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[2].sequenceNumber, + messages[2].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages); + + uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + + s_offRamp.execute(execReport, new EVM2EVMOffRamp.GasLimitOverride[](0)); + + assertEq(preGhoTokenBalanceUser + 6000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination"); + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel + 6000 * 1e18, postLevel, "wrong facilitator bucket level"); + } + + function testRevertRateLimitReached() public { + // increase bridge limit to hit the rate limit error + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.setBridgeLimit(type(uint256).max); + vm.startPrank(OWNER); + + RateLimiter.Config memory rateLimiterConfig = _getOutboundRateLimiterConfig(); + + // will revert due to rate limit of tokenPool + sendRequestGho(1, rateLimiterConfig.capacity + 1, true, false); + + // max capacity, won't revert + sendRequestGho(1, rateLimiterConfig.capacity, false, false); + + // revert due to capacity exceeded + sendRequestGho(2, 100, true, false); + + // increase block time to refill capacity + vm.warp(BLOCK_TIME + 1); + + // won't revert due to refill + sendRequestGho(2, 100, false, false); + } + + function testRevertOnLessTokenToCoverFee() public { + sendRequestGho(1, 1000, false, true); + } + + function testRevertBridgeLimitReached() public { + // increase ccip rate limit to hit the bridge limit error + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.setChainRateLimiterConfig( + DEST_CHAIN_SELECTOR, + RateLimiter.Config({isEnabled: true, capacity: uint128(INITIAL_BRIDGE_LIMIT * 2), rate: 1e15}), + _getInboundRateLimiterConfig() + ); + vm.warp(block.timestamp + 100); // wait to refill capacity + vm.startPrank(OWNER); + + // will revert due to bridge limit + sendRequestGho(1, uint128(INITIAL_BRIDGE_LIMIT + 1), true, false); + + // max bridge limit, won't revert + sendRequestGho(1, uint128(INITIAL_BRIDGE_LIMIT), false, false); + assertEq(srcGhoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT); + + // revert due to bridge limit exceeded + sendRequestGho(2, 1, true, false); + + // increase bridge limit + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.setBridgeLimit(INITIAL_BRIDGE_LIMIT + 1); + vm.startPrank(OWNER); + + // won't revert due to increased bridge limit + sendRequestGho(2, 1, false, false); + assertEq(srcGhoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT + 1); + } + + function sendRequestGho( + uint64 expectedSeqNum, + uint256 amount, + bool expectRevert, + bool sendLessFee + ) public returns (Internal.EVM2EVMMessage memory) { + 
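+ // Helper overview: builds a single-GHO-token CCIP message, approves the router for fee and token amount, fires ccipSend, and returns the EVM2EVMMessage event expected from the onRamp. When sendLessFee is true the fee approval is short by 1 wei, so the send is always expected to revert. 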
Client.EVM2AnyMessage memory message = _generateSingleTokenMessage(address(srcGhoToken), amount); + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, message); + + // error management + uint256 feeToSend = sendLessFee ? expectedFee - 1 : expectedFee; + expectRevert = sendLessFee ? true : expectRevert; + + IERC20(s_sourceTokens[0]).approve(address(s_sourceRouter), feeToSend); // fee + IERC20(srcGhoToken).approve(address(s_sourceRouter), amount); // amount + + message.receiver = abi.encode(USER); + Internal.EVM2EVMMessage memory geEvent = _messageToEvent( + message, + expectedSeqNum, + expectedSeqNum, + expectedFee, + OWNER + ); + + if (!expectRevert) { + vm.expectEmit(); + emit EVM2EVMOnRamp.CCIPSendRequested(geEvent); + } else { + vm.expectRevert(); + } + vm.resumeGasMetering(); + s_sourceRouter.ccipSend(DEST_CHAIN_SELECTOR, message); + vm.pauseGasMetering(); + + return geEvent; + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumSetup.t.sol new file mode 100644 index 0000000000..c6bfc90858 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumSetup.t.sol @@ -0,0 +1,66 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; + +import {BaseTest} from "../../BaseTest.t.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {Router} from "../../../Router.sol"; +import {IERC20} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {RouterSetup} from "../../router/RouterSetup.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; + +contract GhoTokenPoolEthereumSetup is RouterSetup, GhoBaseTest { + IERC20 internal s_token; + UpgradeableLockReleaseTokenPool internal s_ghoTokenPool; + + address internal s_allowedOnRamp = address(123); + address internal s_allowedOffRamp = address(234); + + address internal s_sourcePool = makeAddr("source_pool"); + address internal s_sourceToken = makeAddr("source_token"); + + function setUp() public virtual override(RouterSetup, BaseTest) { + RouterSetup.setUp(); + + // GHO deployment + GhoToken ghoToken = new GhoToken(AAVE_DAO); + s_token = IERC20(address(ghoToken)); + deal(address(s_token), OWNER, type(uint128).max); + + // Set up UpgradeableLockReleaseTokenPool + s_ghoTokenPool = UpgradeableLockReleaseTokenPool( + _deployUpgradeableLockReleaseTokenPool( + address(s_token), + address(s_mockRMN), + address(s_sourceRouter), + AAVE_DAO, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ) + ); + + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + allowed: true, + remotePoolAddress: abi.encode(s_sourcePool), + remoteTokenAddress: abi.encode(s_sourceToken), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdate); + s_ghoTokenPool.setRebalancer(OWNER); + changePrank(OWNER); + + Router.OnRamp[] memory onRampUpdates = new Router.OnRamp[](1); + Router.OffRamp[] memory offRampUpdates = new Router.OffRamp[](1); + onRampUpdates[0] = 
Router.OnRamp({destChainSelector: DEST_CHAIN_SELECTOR, onRamp: s_allowedOnRamp}); + offRampUpdates[0] = Router.OffRamp({sourceChainSelector: SOURCE_CHAIN_SELECTOR, offRamp: s_allowedOffRamp}); + s_sourceRouter.applyRampUpdates(onRampUpdates, new Router.OffRamp[](0), offRampUpdates); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol new file mode 100644 index 0000000000..1b52b6d1bc --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol @@ -0,0 +1,500 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; + +import {stdError} from "forge-std/Test.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {EVM2EVMOnRamp} from "../../../onRamp/EVM2EVMOnRamp.sol"; +import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; +import {BurnMintTokenPool} from "../../../pools/BurnMintTokenPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {Pool} from "../../../libraries/Pool.sol"; +import {MockUpgradeable} from "../../mocks/MockUpgradeable.sol"; + +import {GhoTokenPoolRemoteSetup} from "./GhoTokenPoolRemoteSetup.t.sol"; + +contract GhoTokenPoolRemote_lockOrBurn is GhoTokenPoolRemoteSetup { + function testSetupSuccess() public view { + assertEq(address(s_burnMintERC677), address(s_pool.getToken())); + assertEq(address(s_mockRMN), s_pool.getRmnProxy()); + assertEq(false, s_pool.getAllowListEnabled()); + assertEq("BurnMintTokenPool 1.5.0", s_pool.typeAndVersion()); + } + + function testPoolBurnSuccess() public { + uint256 burnAmount = 20_000e18; + // inflate facilitator level + _inflateFacilitatorLevel(address(s_pool), address(s_burnMintERC677), burnAmount); + + deal(address(s_burnMintERC677), address(s_pool), burnAmount); + assertEq(s_burnMintERC677.balanceOf(address(s_pool)), burnAmount); + + vm.startPrank(s_burnMintOnRamp); + + vm.expectEmit(); + emit TokensConsumed(burnAmount); + + vm.expectEmit(); + emit Transfer(address(s_pool), address(0), burnAmount); + + vm.expectEmit(); + emit Burned(address(s_burnMintOnRamp), burnAmount); + + bytes4 expectedSignature = bytes4(keccak256("burn(uint256)")); + vm.expectCall(address(s_burnMintERC677), abi.encodeWithSelector(expectedSignature, burnAmount)); + + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket(address(s_pool)); + + s_pool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: OWNER, + receiver: bytes(""), + amount: burnAmount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677) + }) + // OWNER, bytes(""), burnAmount, DEST_CHAIN_SELECTOR, bytes("") + ); + + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket( + address(s_pool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel - burnAmount, postLevel, "wrong facilitator bucket level"); + + assertEq(s_burnMintERC677.balanceOf(address(s_pool)), 0); + } + + // Should not burn tokens if cursed. 
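+ // (Context, assumed from the mock used below: the Risk Management Network can flag a chain as cursed; the pool consults the RMN proxy before burning, which s_mockRMN.setGlobalCursed(true) simulates.) 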
+ function testPoolBurnNotHealthyReverts() public { + s_mockRMN.setGlobalCursed(true); + uint256 before = s_burnMintERC677.balanceOf(address(s_pool)); + vm.startPrank(s_burnMintOnRamp); + + vm.expectRevert(EVM2EVMOnRamp.CursedByRMN.selector); + s_pool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: OWNER, + receiver: bytes(""), + amount: 1e5, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677) + }) + ); + + assertEq(s_burnMintERC677.balanceOf(address(s_pool)), before); + } + + function testChainNotAllowedReverts() public { + uint64 wrongChainSelector = 8838833; + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.ChainNotAllowed.selector, wrongChainSelector)); + s_pool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: OWNER, + receiver: bytes(""), + amount: 1, + remoteChainSelector: wrongChainSelector, + localToken: address(s_burnMintERC677) + }) + ); + } + + function testPoolBurnNoPrivilegesReverts() public { + // Remove privileges + vm.startPrank(AAVE_DAO); + GhoToken(address(s_burnMintERC677)).removeFacilitator(address(s_pool)); + vm.stopPrank(); + + uint256 amount = 1; + vm.startPrank(s_burnMintOnRamp); + vm.expectRevert(stdError.arithmeticError); + s_pool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: STRANGER, + receiver: bytes(""), + amount: amount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677) + }) + ); + } + + function testBucketLevelNotEnoughReverts() public { + (, uint256 bucketLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket(address(s_pool)); + assertEq(bucketLevel, 0); + + uint256 amount = 1; + vm.expectCall(address(s_burnMintERC677), abi.encodeWithSelector(GhoToken.burn.selector, amount)); + vm.expectRevert(stdError.arithmeticError); + vm.startPrank(s_burnMintOnRamp); + s_pool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: STRANGER, + receiver: bytes(""), + amount: amount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677) + }) + ); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = _getOutboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_burnMintERC677)) + ); + vm.startPrank(s_burnMintOnRamp); + s_pool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: STRANGER, + receiver: bytes(""), + amount: amount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677) + }) + ); + } +} + +contract GhoTokenPoolRemote_releaseOrMint is GhoTokenPoolRemoteSetup { + function testPoolMintSuccess() public { + uint256 amount = 1e19; + vm.startPrank(s_burnMintOffRamp); + vm.expectEmit(); + emit Transfer(address(0), OWNER, amount); + s_pool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + receiver: OWNER, + amount: amount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + assertEq(s_burnMintERC677.balanceOf(OWNER), amount); + } + + function testPoolMintNotHealthyReverts() public { + // Should not mint tokens if cursed.
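+ // Mirrors the burn-side curse test above: with the global curse active, + // releaseOrMint must revert and the receiver's balance must remain unchanged.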
+ s_mockRMN.setGlobalCursed(true); + uint256 before = s_burnMintERC677.balanceOf(OWNER); + vm.startPrank(s_burnMintOffRamp); + vm.expectRevert(EVM2EVMOffRamp.CursedByRMN.selector); + s_pool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + receiver: OWNER, + amount: 1e5, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + assertEq(s_burnMintERC677.balanceOf(OWNER), before); + } + + function testChainNotAllowedReverts() public { + uint64 wrongChainSelector = 8838833; + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.ChainNotAllowed.selector, wrongChainSelector)); + s_pool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + receiver: STRANGER, + amount: 1, + remoteChainSelector: wrongChainSelector, + localToken: address(s_burnMintERC677), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + function testPoolMintNoPrivilegesReverts() public { + // Remove privileges + vm.startPrank(AAVE_DAO); + GhoToken(address(s_burnMintERC677)).removeFacilitator(address(s_pool)); + vm.stopPrank(); + + uint256 amount = 1; + vm.startPrank(s_burnMintOffRamp); + vm.expectRevert("FACILITATOR_BUCKET_CAPACITY_EXCEEDED"); + s_pool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + receiver: STRANGER, + amount: amount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + function testBucketCapacityExceededReverts() public { + // Mint all the bucket capacity + (uint256 bucketCapacity, ) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket(address(s_pool)); + _inflateFacilitatorLevel(address(s_pool), address(s_burnMintERC677), bucketCapacity); + (uint256 currCapacity, uint256 currLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket( + address(s_pool) + ); + assertEq(currCapacity, currLevel); + + uint256 amount = 1; + vm.expectCall(address(s_burnMintERC677), abi.encodeWithSelector(GhoToken.mint.selector, STRANGER, amount)); + vm.expectRevert("FACILITATOR_BUCKET_CAPACITY_EXCEEDED"); + vm.startPrank(s_burnMintOffRamp); + s_pool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + receiver: STRANGER, + amount: amount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = _getInboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_burnMintERC677)) + ); + vm.startPrank(s_burnMintOffRamp); + s_pool.releaseOrMint( + Pool.ReleaseOrMintInV1({ + originalSender: bytes(""), + receiver: STRANGER, + amount: amount, + remoteChainSelector: DEST_CHAIN_SELECTOR, + localToken: address(s_burnMintERC677), + sourcePoolAddress: abi.encode(s_sourcePool), + sourcePoolData: bytes(""), + offchainTokenData: bytes("") + }) + ); + } +} + +contract GhoTokenPoolRemote_upgradeability is GhoTokenPoolRemoteSetup { + function
testInitialization() public { + // Upgradeability + assertEq(_getUpgradeableVersion(address(s_pool)), 1); + vm.startPrank(PROXY_ADMIN); + (bool ok, bytes memory result) = address(s_pool).staticcall( + abi.encodeWithSelector(TransparentUpgradeableProxy.admin.selector) + ); + assertTrue(ok, "proxy admin fetch failed"); + address decodedProxyAdmin = abi.decode(result, (address)); + assertEq(decodedProxyAdmin, PROXY_ADMIN, "proxy admin is wrong"); + assertEq(decodedProxyAdmin, _getProxyAdminAddress(address(s_pool)), "proxy admin is wrong"); + + // TokenPool + vm.startPrank(OWNER); + assertEq(s_pool.getAllowList().length, 0); + assertEq(s_pool.getAllowListEnabled(), false); + assertEq(s_pool.getRmnProxy(), address(s_mockRMN)); + assertEq(s_pool.getRouter(), address(s_sourceRouter)); + assertEq(address(s_pool.getToken()), address(s_burnMintERC677)); + assertEq(s_pool.owner(), AAVE_DAO, "owner is wrong"); + } + + function testUpgrade() public { + MockUpgradeable newImpl = new MockUpgradeable(); + bytes memory mockImplParams = abi.encodeWithSignature("initialize()"); + vm.startPrank(PROXY_ADMIN); + TransparentUpgradeableProxy(payable(address(s_pool))).upgradeToAndCall(address(newImpl), mockImplParams); + + vm.startPrank(OWNER); + assertEq(_getUpgradeableVersion(address(s_pool)), 2); + } + + function testUpgradeAdminReverts() public { + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_pool))).upgradeToAndCall(address(0), bytes("")); + assertEq(_getUpgradeableVersion(address(s_pool)), 1); + + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_pool))).upgradeTo(address(0)); + assertEq(_getUpgradeableVersion(address(s_pool)), 1); + } + + function testChangeAdmin() public { + assertEq(_getProxyAdminAddress(address(s_pool)), PROXY_ADMIN); + + address newAdmin = makeAddr("newAdmin"); + vm.startPrank(PROXY_ADMIN); + TransparentUpgradeableProxy(payable(address(s_pool))).changeAdmin(newAdmin); + + assertEq(_getProxyAdminAddress(address(s_pool)), newAdmin, "Admin change failed"); + } + + function testChangeAdminAdminReverts() public { + assertEq(_getProxyAdminAddress(address(s_pool)), PROXY_ADMIN); + + address newAdmin = makeAddr("newAdmin"); + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_pool))).changeAdmin(newAdmin); + + assertEq(_getProxyAdminAddress(address(s_pool)), PROXY_ADMIN, "Unauthorized admin change"); + } +} + +contract GhoTokenPoolRemote_setChainRateLimiterConfig is GhoTokenPoolRemoteSetup { + event ConfigChanged(RateLimiter.Config); + event ChainConfigured( + uint64 chainSelector, + RateLimiter.Config outboundRateLimiterConfig, + RateLimiter.Config inboundRateLimiterConfig + ); + + uint64 internal s_remoteChainSelector; + + function setUp() public virtual override { + GhoTokenPoolRemoteSetup.setUp(); + UpgradeableTokenPool.ChainUpdate[] memory chainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + s_remoteChainSelector = 123124; + chainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: s_remoteChainSelector, + allowed: true, + remotePoolAddress: abi.encode(s_sourcePool), + remoteTokenAddress: abi.encode(s_sourceToken), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + changePrank(AAVE_DAO); + s_pool.applyChainUpdates(chainUpdates); + changePrank(OWNER); + } + + function testFuzz_SetChainRateLimiterConfigSuccess(uint128 capacity, uint128 rate, uint32 newTime) public { + // Cap the lower bound to 4 so 4/2 is still >= 2 + 
vm.assume(capacity >= 4); + // Cap the lower bound to 2 so 2/2 is still >= 1 + rate = uint128(bound(rate, 2, capacity - 2)); + // Bucket updates only work on increasing time + newTime = uint32(bound(newTime, block.timestamp + 1, type(uint32).max)); + vm.warp(newTime); + + uint256 oldOutboundTokens = s_pool.getCurrentOutboundRateLimiterState(s_remoteChainSelector).tokens; + uint256 oldInboundTokens = s_pool.getCurrentInboundRateLimiterState(s_remoteChainSelector).tokens; + + RateLimiter.Config memory newOutboundConfig = RateLimiter.Config({isEnabled: true, capacity: capacity, rate: rate}); + RateLimiter.Config memory newInboundConfig = RateLimiter.Config({ + isEnabled: true, + capacity: capacity / 2, + rate: rate / 2 + }); + + vm.expectEmit(); + emit ConfigChanged(newOutboundConfig); + vm.expectEmit(); + emit ConfigChanged(newInboundConfig); + vm.expectEmit(); + emit ChainConfigured(s_remoteChainSelector, newOutboundConfig, newInboundConfig); + + changePrank(AAVE_DAO); + s_pool.setChainRateLimiterConfig(s_remoteChainSelector, newOutboundConfig, newInboundConfig); + + uint256 expectedTokens = RateLimiter._min(newOutboundConfig.capacity, oldOutboundTokens); + + RateLimiter.TokenBucket memory bucket = s_pool.getCurrentOutboundRateLimiterState(s_remoteChainSelector); + assertEq(bucket.capacity, newOutboundConfig.capacity); + assertEq(bucket.rate, newOutboundConfig.rate); + assertEq(bucket.tokens, expectedTokens); + assertEq(bucket.lastUpdated, newTime); + + expectedTokens = RateLimiter._min(newInboundConfig.capacity, oldInboundTokens); + + bucket = s_pool.getCurrentInboundRateLimiterState(s_remoteChainSelector); + assertEq(bucket.capacity, newInboundConfig.capacity); + assertEq(bucket.rate, newInboundConfig.rate); + assertEq(bucket.tokens, expectedTokens); + assertEq(bucket.lastUpdated, newTime); + } + + function testOnlyOwnerOrRateLimitAdminSuccess() public { + address rateLimiterAdmin = address(28973509103597907); + + changePrank(AAVE_DAO); + s_pool.setRateLimitAdmin(rateLimiterAdmin); + + changePrank(rateLimiterAdmin); + + s_pool.setChainRateLimiterConfig( + s_remoteChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + + changePrank(AAVE_DAO); + + s_pool.setChainRateLimiterConfig( + s_remoteChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + } + + // Reverts + + function testOnlyOwnerReverts() public { + changePrank(STRANGER); + + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.Unauthorized.selector, STRANGER)); + s_pool.setChainRateLimiterConfig( + s_remoteChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + } + + function testNonExistentChainReverts() public { + uint64 wrongChainSelector = 9084102894; + + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.NonExistentChain.selector, wrongChainSelector)); + changePrank(AAVE_DAO); + s_pool.setChainRateLimiterConfig( + wrongChainSelector, + _getOutboundRateLimiterConfig(), + _getInboundRateLimiterConfig() + ); + } +} + +contract GhoTokenPoolRemote_setRateLimitAdmin is GhoTokenPoolRemoteSetup { + function testSetRateLimitAdminSuccess() public { + assertEq(address(0), s_pool.getRateLimitAdmin()); + changePrank(AAVE_DAO); + s_pool.setRateLimitAdmin(OWNER); + assertEq(OWNER, s_pool.getRateLimitAdmin()); + } + + // Reverts + + function testSetRateLimitAdminReverts() public { + vm.startPrank(STRANGER); + + vm.expectRevert("Only callable by owner"); + s_pool.setRateLimitAdmin(STRANGER); + } +} diff --git 
a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteE2E.t.sol new file mode 100644 index 0000000000..9096116472 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteE2E.t.sol @@ -0,0 +1,445 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; + +import {IERC20} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {IRMN} from "../../../interfaces/IRMN.sol"; +import {CommitStore} from "../../../CommitStore.sol"; +import {EVM2EVMOnRamp} from "../../../onRamp/EVM2EVMOnRamp.sol"; +import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; +import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {IPriceRegistry} from "../../../interfaces/IPriceRegistry.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {Pool} from "../../../libraries/Pool.sol"; +import {Internal} from "../../../libraries/Internal.sol"; +import {Client} from "../../../libraries/Client.sol"; +import {MerkleHelper} from "../../helpers/MerkleHelper.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; +import {E2E} from "../End2End.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; + +contract GhoTokenPoolRemoteE2E is E2E, GhoBaseTest { + using Internal for Internal.EVM2EVMMessage; + + IBurnMintERC20 internal srcGhoToken; + IBurnMintERC20 internal dstGhoToken; + UpgradeableBurnMintTokenPool internal srcGhoTokenPool; + UpgradeableLockReleaseTokenPool internal dstGhoTokenPool; + + function setUp() public virtual override(E2E, BaseTest) { + E2E.setUp(); + + // Deploy GHO Token on source chain + srcGhoToken = IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(srcGhoToken), OWNER, type(uint128).max); + // Add GHO token to source token list + s_sourceTokens.push(address(srcGhoToken)); + + // Deploy GHO Token on destination chain + dstGhoToken = IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(dstGhoToken), OWNER, type(uint128).max); + // Add GHO token to destination token list + s_destTokens.push(address(dstGhoToken)); + + // Deploy BurnMintTokenPool for GHO token on source chain + srcGhoTokenPool = UpgradeableBurnMintTokenPool( + _deployUpgradeableBurnMintTokenPool( + address(srcGhoToken), + address(s_mockRMN), + address(s_sourceRouter), + AAVE_DAO, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to source token pool list + s_sourcePoolByToken[address(srcGhoToken)] = address(srcGhoTokenPool); + s_destTokenBySourceToken[address(srcGhoToken)] = address(dstGhoToken); + + // Deploy LockReleaseTokenPool for GHO token on destination chain + dstGhoTokenPool = UpgradeableLockReleaseTokenPool( + _deployUpgradeableLockReleaseTokenPool( + address(dstGhoToken), + address(s_mockRMN), + address(s_destRouter), + AAVE_DAO, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to destination token pool list + s_sourcePoolByToken[address(dstGhoToken)] = address(dstGhoTokenPool); + s_destTokenBySourceToken[address(dstGhoToken)] = address(srcGhoToken); + + // Give mint and burn privileges to source UpgradeableTokenPool 
(GHO-specific) + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(address(srcGhoToken)).grantRole(GhoToken(address(srcGhoToken)).FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + GhoToken(address(srcGhoToken)).addFacilitator(address(srcGhoTokenPool), "UpgradeableTokenPool", type(uint128).max); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Add config for source and destination chains + UpgradeableTokenPool.ChainUpdate[] memory srcChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + srcChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + allowed: true, + remotePoolAddress: abi.encode(address(dstGhoTokenPool)), + remoteTokenAddress: abi.encode(address(dstGhoToken)), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + UpgradeableTokenPool.ChainUpdate[] memory dstChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + dstChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: true, + remotePoolAddress: abi.encode(address(srcGhoTokenPool)), + remoteTokenAddress: abi.encode(address(srcGhoToken)), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.applyChainUpdates(srcChainUpdates); + dstGhoTokenPool.applyChainUpdates(dstChainUpdates); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Update GHO Token price on source PriceRegistry + EVM2EVMOnRamp.DynamicConfig memory onRampDynamicConfig = s_onRamp.getDynamicConfig(); + IPriceRegistry onRampPriceRegistry = IPriceRegistry(onRampDynamicConfig.priceRegistry); + onRampPriceRegistry.updatePrices(_getSingleTokenPriceUpdateStruct(address(srcGhoToken), 1e18)); + + // Update GHO Token price on destination PriceRegistry + EVM2EVMOffRamp.DynamicConfig memory offRampDynamicConfig = s_offRamp.getDynamicConfig(); + IPriceRegistry offRampPriceRegistry = IPriceRegistry(offRampDynamicConfig.priceRegistry); + offRampPriceRegistry.updatePrices(_getSingleTokenPriceUpdateStruct(address(dstGhoToken), 1e18)); + + s_tokenAdminRegistry.proposeAdministrator(address(srcGhoToken), AAVE_DAO); + s_tokenAdminRegistry.proposeAdministrator(address(dstGhoToken), AAVE_DAO); + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + s_tokenAdminRegistry.acceptAdminRole(address(srcGhoToken)); + s_tokenAdminRegistry.setPool(address(srcGhoToken), address(srcGhoTokenPool)); + s_tokenAdminRegistry.acceptAdminRole(address(dstGhoToken)); + s_tokenAdminRegistry.setPool(address(dstGhoToken), address(dstGhoTokenPool)); + vm.stopPrank(); + vm.startPrank(OWNER); + } + + function testE2E_MessagesSuccess_gas() public { + vm.pauseGasMetering(); + + // Mint some GHO to inflate UpgradeableBurnMintTokenPool facilitator level + _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), 1000 * 1e18); + vm.startPrank(OWNER); + + // Lock some GHO on destination so it can be released later on + dstGhoToken.transfer(address(dstGhoTokenPool), 1000 * 1e18); + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.stopPrank(); + vm.startPrank(address(s_onRamp)); + vm.mockCall( + address(s_destRouter), + abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), + abi.encode(s_onRamp) + ); + dstGhoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: STRANGER, + receiver: bytes(""), + amount: 1000 * 1e18, + remoteChainSelector:
SOURCE_CHAIN_SELECTOR, + localToken: address(dstGhoToken) + }) + ); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 1000 * 1e18); + vm.startPrank(address(OWNER)); + + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](1); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. + assertEq(preGhoTokenBalanceOwner - 1000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool, srcGhoToken.balanceOf(address(srcGhoTokenPool))); // GHO gets burned + assertGt(expectedFee, 0); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 1000 * 1e18); + + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel - 1000 * 1e18, postLevel, "wrong facilitator bucket level"); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](1); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: _getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[0].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + vm.mockCall( + s_commitStore.getStaticConfig().rmnProxy, + abi.encodeWithSelector(IRMN.isBlessed.selector, IRMN.TaggedRoot(address(s_commitStore), merkleRoots[0])), + abi.encode(true) + ); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so that, if execution used the current block time + // instead of the committed block time, the checks below would be incorrect.
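+ // (BLOCK_TIME is the timestamp the commit was verified at, as asserted just above; + // warping well past it makes any accidental use of block.timestamp stand out.)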
+ vm.warp(BLOCK_TIME + 2000); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[0].sequenceNumber, + messages[0].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages); + + uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER); + + vm.resumeGasMetering(); + s_offRamp.execute(execReport, new EVM2EVMOffRamp.GasLimitOverride[](0)); + vm.pauseGasMetering(); + + assertEq(preGhoTokenBalanceUser + 1000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination"); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 0); + } + + function testE2E_3MessagesSuccess_gas() public { + vm.pauseGasMetering(); + + // Mint some GHO to inflate UpgradeableTokenPool facilitator level + _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), 6000 * 1e18); + vm.startPrank(OWNER); + + // Lock some GHO on destination so it can be released later on + dstGhoToken.transfer(address(dstGhoTokenPool), 6000 * 1e18); + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.stopPrank(); + vm.startPrank(address(s_onRamp)); + vm.mockCall( + address(s_destRouter), + abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), + abi.encode(s_onRamp) + ); + dstGhoTokenPool.lockOrBurn( + Pool.LockOrBurnInV1({ + originalSender: STRANGER, + receiver: bytes(""), + amount: 6000 * 1e18, + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + localToken: address(dstGhoToken) + }) + ); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 6000 * 1e18); + vm.startPrank(address(OWNER)); + + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](3); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + messages[1] = sendRequestGho(2, 2000 * 1e18, false, false); + messages[2] = sendRequestGho(3, 3000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. 
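+ // For the burn-and-mint source pool this means: the owner's GHO balance drops by + // the full bridged amount, the pool's own balance stays flat because the tokens + // are burned rather than held, and the destination pool still reports the amount + // as bridged until execution releases it.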
+ assertEq(preGhoTokenBalanceOwner - 6000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool, srcGhoToken.balanceOf(address(srcGhoTokenPool))); // GHO gets burned + assertGt(expectedFee, 0); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 6000 * 1e18); + + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel - 6000 * 1e18, postLevel, "wrong facilitator bucket level"); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](3); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + hashedMessages[1] = messages[1]._hash(metaDataHash); + messages[1].messageId = hashedMessages[1]; + hashedMessages[2] = messages[2]._hash(metaDataHash); + messages[2].messageId = hashedMessages[2]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: _getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[2].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + vm.mockCall( + s_commitStore.getStaticConfig().rmnProxy, + abi.encodeWithSelector(IRMN.isBlessed.selector, IRMN.TaggedRoot(address(s_commitStore), merkleRoots[0])), + abi.encode(true) + ); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so that, if execution used the current block time + // instead of the committed block time, the checks below would be incorrect.
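+ // After warping past the committed timestamp, one ExecutionStateChanged event is + // expected per message, in sequence-number order, since all three messages were + // committed under a single merkle root.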
+ vm.warp(BLOCK_TIME + 2000); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[0].sequenceNumber, + messages[0].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[1].sequenceNumber, + messages[1].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + vm.expectEmit(); + emit EVM2EVMOffRamp.ExecutionStateChanged( + messages[2].sequenceNumber, + messages[2].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages); + + uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER); + + vm.resumeGasMetering(); + s_offRamp.execute(execReport, new EVM2EVMOffRamp.GasLimitOverride[](0)); + vm.pauseGasMetering(); + + assertEq(preGhoTokenBalanceUser + 6000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination"); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 0); + } + + function testRevertRateLimitReached() public { + RateLimiter.Config memory rateLimiterConfig = _getOutboundRateLimiterConfig(); + + // Reverts: amount exceeds the token pool's outbound rate limit capacity + sendRequestGho(1, rateLimiterConfig.capacity + 1, true, false); + + // Exactly max capacity does not revert + + // Mint some GHO to inflate UpgradeableTokenPool facilitator level + _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), rateLimiterConfig.capacity); + vm.startPrank(OWNER); + sendRequestGho(1, rateLimiterConfig.capacity, false, false); + + // Reverts: the rate limit bucket was drained by the previous send + sendRequestGho(2, 100, true, false); + + // Advance block time so the bucket refills + vm.warp(BLOCK_TIME + 1); + + // Succeeds after the refill + _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), 100); + vm.startPrank(OWNER); + sendRequestGho(2, 100, false, false); + } + + function testRevertOnLessTokenToCoverFee() public { + sendRequestGho(1, 1000, false, true); + } + + function sendRequestGho( + uint64 expectedSeqNum, + uint256 amount, + bool expectRevert, + bool sendLessFee + ) public returns (Internal.EVM2EVMMessage memory) { + Client.EVM2AnyMessage memory message = _generateSingleTokenMessage(address(srcGhoToken), amount); + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, message); + + // Fee handling: optionally underpay the fee to force a revert + uint256 feeToSend = sendLessFee ? expectedFee - 1 : expectedFee; + expectRevert = sendLessFee ?
true : expectRevert; + + IERC20(s_sourceTokens[0]).approve(address(s_sourceRouter), feeToSend); // fee + IERC20(srcGhoToken).approve(address(s_sourceRouter), amount); // amount + + message.receiver = abi.encode(USER); + Internal.EVM2EVMMessage memory geEvent = _messageToEvent( + message, + expectedSeqNum, + expectedSeqNum, + expectedFee, + OWNER + ); + + if (!expectRevert) { + vm.expectEmit(); + emit EVM2EVMOnRamp.CCIPSendRequested(geEvent); + } else { + vm.expectRevert(); + } + vm.resumeGasMetering(); + s_sourceRouter.ccipSend(DEST_CHAIN_SELECTOR, message); + vm.pauseGasMetering(); + + return geEvent; + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteSetup.t.sol new file mode 100644 index 0000000000..bb4b682f76 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteSetup.t.sol @@ -0,0 +1,79 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {Router} from "../../../Router.sol"; +import {BurnMintERC677} from "../../../../shared/token/ERC677/BurnMintERC677.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {RouterSetup} from "../../router/RouterSetup.t.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; + +contract GhoTokenPoolRemoteSetup is RouterSetup, GhoBaseTest { + event Transfer(address indexed from, address indexed to, uint256 value); + event TokensConsumed(uint256 tokens); + event Burned(address indexed sender, uint256 amount); + + BurnMintERC677 internal s_burnMintERC677; + address internal s_burnMintOffRamp = makeAddr("burn_mint_offRamp"); + address internal s_burnMintOnRamp = makeAddr("burn_mint_onRamp"); + + address internal s_sourcePool = makeAddr("source_pool"); + address internal s_sourceToken = makeAddr("source_token"); + + UpgradeableBurnMintTokenPool internal s_pool; + + function setUp() public virtual override(RouterSetup, BaseTest) { + RouterSetup.setUp(); + + // GHO deployment + GhoToken ghoToken = new GhoToken(AAVE_DAO); + s_burnMintERC677 = BurnMintERC677(address(ghoToken)); + + s_pool = UpgradeableBurnMintTokenPool( + _deployUpgradeableBurnMintTokenPool( + address(s_burnMintERC677), + address(s_mockRMN), + address(s_sourceRouter), + AAVE_DAO, + PROXY_ADMIN + ) + ); + + // Give mint and burn privileges to source UpgradeableTokenPool (GHO-specific) + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(address(s_burnMintERC677)).grantRole( + GhoToken(address(s_burnMintERC677)).FACILITATOR_MANAGER_ROLE(), + AAVE_DAO + ); + GhoToken(address(s_burnMintERC677)).addFacilitator(address(s_pool), "UpgradeableTokenPool", type(uint128).max); + vm.stopPrank(); + + _applyChainUpdates(address(s_pool)); + } + + function _applyChainUpdates(address pool) internal { + UpgradeableTokenPool.ChainUpdate[] memory chains = new UpgradeableTokenPool.ChainUpdate[](1); + chains[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + allowed: true, + remotePoolAddress: abi.encode(s_sourcePool), + remoteTokenAddress: abi.encode(s_sourceToken), + outboundRateLimiterConfig: _getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: _getInboundRateLimiterConfig() + }); + + vm.startPrank(AAVE_DAO); + UpgradeableBurnMintTokenPool(pool).applyChainUpdates(chains); + 
vm.stopPrank(); + vm.startPrank(OWNER); + + Router.OnRamp[] memory onRampUpdates = new Router.OnRamp[](1); + onRampUpdates[0] = Router.OnRamp({destChainSelector: DEST_CHAIN_SELECTOR, onRamp: s_burnMintOnRamp}); + Router.OffRamp[] memory offRampUpdates = new Router.OffRamp[](1); + offRampUpdates[0] = Router.OffRamp({sourceChainSelector: DEST_CHAIN_SELECTOR, offRamp: s_burnMintOffRamp}); + s_sourceRouter.applyRampUpdates(onRampUpdates, new Router.OffRamp[](0), offRampUpdates); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol new file mode 100644 index 0000000000..f3af59e445 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol @@ -0,0 +1,69 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; + +import {UpgradeableLockReleaseTokenPool} from "../../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {BaseTest} from "../../../BaseTest.t.sol"; +import {GhoTokenPoolHandler} from "./GhoTokenPoolHandler.t.sol"; + +contract GhoTokenPoolEthereumBridgeLimitInvariant is BaseTest { + GhoTokenPoolHandler handler; + + function setUp() public override { + super.setUp(); + + handler = new GhoTokenPoolHandler(); + deal(handler.tokens(0), address(handler), handler.INITIAL_BRIDGE_LIMIT()); + + targetContract(address(handler)); + bytes4[] memory selectors = new bytes4[](2); + selectors[0] = GhoTokenPoolHandler.bridgeGho.selector; + selectors[1] = GhoTokenPoolHandler.updateBucketCapacity.selector; + targetSelector(FuzzSelector({addr: address(handler), selectors: selectors})); + } + + /// forge-config: ccip.invariant.fail-on-revert = true + /// forge-config: ccip.invariant.runs = 2000 + /// forge-config: ccip.invariant.depth = 50 + function invariant_bridgeLimit() public view { + // Check bridged + assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), handler.bridged()); + + // Check levels and buckets + uint256 sumLevels; + uint256 chainId; + uint256 capacity; + uint256 level; + uint256[] memory chainsListLocal = handler.getChainsList(); + for (uint256 i = 1; i < chainsListLocal.length; i++) { + // skip Ethereum (index 0) + chainId = chainsListLocal[i]; + (capacity, level) = GhoToken(handler.tokens(chainId)).getFacilitatorBucket(handler.pools(chainId)); + + // Aggregate levels + sumLevels += level; + + assertEq(capacity, handler.bucketCapacities(chainId), "wrong bucket capacity"); + assertEq(level, handler.bucketLevels(chainId), "wrong bucket level"); + + assertGe( + capacity, + UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(), + "capacity must be greater than or equal to bridgeLimit" + ); + + // This invariant only holds if there were no bridge limit reductions below the current bridged amount + if (!handler.capacityBelowLevelUpdate()) { + assertLe( + level, + UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(), + "level cannot be higher than bridgeLimit" + ); + } + } + // Check bridged is equal to sum of levels + assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), sumLevels, "wrong bridged"); + assertEq(handler.remoteLiquidity(), sumLevels, "wrong remote liquidity"); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol
b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol new file mode 100644 index 0000000000..0855685400 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol @@ -0,0 +1,122 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {GhoToken} from "@aave-gho-core/gho/GhoToken.sol"; + +import {GhoBaseTest} from "../GhoBaseTest.t.sol"; + +contract GhoTokenPoolHandler is GhoBaseTest { + UtilsStorage public s; + + constructor() { + // Ethereum with id 0 + s.chainsList.push(0); + s.tokens[0] = address(new GhoToken(AAVE_DAO)); + s.pools[0] = _deployUpgradeableLockReleaseTokenPool( + s.tokens[0], + RMN_PROXY, + ROUTER, + OWNER, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ); + + // Mock calls for bridging + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), abi.encode(RAMP)); + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("isOffRamp(uint64,address)"))), abi.encode(true)); + vm.mockCall(RMN_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed()"))), abi.encode(false)); + vm.mockCall(RMN_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed(bytes16)"))), abi.encode(false)); + + // Arbitrum + _addBridge(s, 1, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 1); + + // Avalanche + _addBridge(s, 2, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 2); + _enableLane(s, 1, 2); + } + + /// forge-config: ccip.fuzz.runs = 500 + function bridgeGho(uint256 fromChain, uint256 toChain, uint256 amount) public { + fromChain = bound(fromChain, 0, 2); + toChain = bound(toChain, 0, 2); + if (fromChain != toChain) { + uint256 maxBalance = GhoToken(s.tokens[fromChain]).balanceOf(address(this)); + uint256 maxToBridge = _getMaxToBridgeOut(s, fromChain); + uint256 maxAmount = maxBalance > maxToBridge ? 
maxToBridge : maxBalance; + amount = bound(amount, 0, maxAmount); + + if (amount > 0) { + _bridgeGho(s, fromChain, toChain, address(this), amount); + } + } + } + + /// forge-config: ccip.fuzz.runs = 500 + function updateBucketCapacity(uint256 chain, uint128 newCapacity) public { + chain = bound(chain, 1, 2); + uint256 otherChain = (chain % 2) + 1; + newCapacity = uint128(bound(newCapacity, s.bridged, type(uint128).max)); + + uint256 oldCapacity = s.bucketCapacities[chain]; + + if (newCapacity < s.bucketLevels[chain]) { + s.capacityBelowLevelUpdate = true; + } else { + s.capacityBelowLevelUpdate = false; + } + + if (newCapacity > oldCapacity) { + // Increase + _updateBucketCapacity(s, chain, newCapacity); + // keep bridge limit as the minimum bucket capacity + if (newCapacity < s.bucketCapacities[otherChain]) { + _updateBridgeLimit(s, newCapacity); + } + } else { + // Reduction + // keep bridge limit as the minimum bucket capacity + if (newCapacity < s.bucketCapacities[otherChain]) { + _updateBridgeLimit(s, newCapacity); + } + _updateBucketCapacity(s, chain, newCapacity); + } + } + + function getChainsList() public view returns (uint256[] memory) { + return s.chainsList; + } + + function pools(uint256 i) public view returns (address) { + return s.pools[i]; + } + + function tokens(uint256 i) public view returns (address) { + return s.tokens[i]; + } + + function bucketCapacities(uint256 i) public view returns (uint256) { + return s.bucketCapacities[i]; + } + + function bucketLevels(uint256 i) public view returns (uint256) { + return s.bucketLevels[i]; + } + + function liquidity(uint256 i) public view returns (uint256) { + return s.liquidity[i]; + } + + function remoteLiquidity() public view returns (uint256) { + return s.remoteLiquidity; + } + + function bridged() public view returns (uint256) { + return s.bridged; + } + + function capacityBelowLevelUpdate() public view returns (bool) { + return s.capacityBelowLevelUpdate; + } +} diff --git a/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol b/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol index 2a6dd94e10..3b691ad7ca 100644 --- a/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol +++ b/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol @@ -45,10 +45,7 @@ contract ConfirmedOwnerWithProposal is IOwnable { } /// @notice validate, transfer ownership, and emit relevant events - function _transferOwnership(address to) private { - // solhint-disable-next-line gas-custom-errors - require(to != msg.sender, "Cannot transfer to self"); - + function _transferOwnership(address to) internal { s_pendingOwner = to; emit OwnershipTransferRequested(s_owner, to);