From 8a05713e968444a2afd836b2ece8f5af8c0c8492 Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Wed, 8 May 2024 17:34:01 +0200 Subject: [PATCH 01/18] feat: Add custom upgradeable TokenPool contracts --- .gitmodules | 5 +- contracts/foundry-lib/forge-std | 2 +- contracts/foundry-lib/solidity-utils | 1 + contracts/remappings.txt | 2 + .../pools/UpgradeableBurnMintTokenPool.sol | 66 ++++ .../UpgradeableBurnMintTokenPoolAbstract.sol | 55 +++ .../pools/UpgradeableLockReleaseTokenPool.sol | 259 ++++++++++++++ .../v0.8/ccip/pools/UpgradeableTokenPool.sol | 320 ++++++++++++++++++ .../ccip/pools/VersionedInitializable.sol | 77 +++++ .../access/ConfirmedOwnerWithProposal.sol | 5 +- 10 files changed, 786 insertions(+), 6 deletions(-) create mode 160000 contracts/foundry-lib/solidity-utils create mode 100644 contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol create mode 100644 contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol create mode 100644 contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol create mode 100644 contracts/src/v0.8/ccip/pools/UpgradeableTokenPool.sol create mode 100644 contracts/src/v0.8/ccip/pools/VersionedInitializable.sol diff --git a/.gitmodules b/.gitmodules index e3d87871cb..21fec76324 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,6 @@ [submodule "contracts/foundry-lib/forge-std"] path = contracts/foundry-lib/forge-std - url = https://github.com/foundry-rs/forge-std \ No newline at end of file + url = https://github.com/foundry-rs/forge-std +[submodule "contracts/foundry-lib/solidity-utils"] + path = contracts/foundry-lib/solidity-utils + url = https://github.com/bgd-labs/solidity-utils diff --git a/contracts/foundry-lib/forge-std b/contracts/foundry-lib/forge-std index f73c73d201..4513bc2063 160000 --- a/contracts/foundry-lib/forge-std +++ b/contracts/foundry-lib/forge-std @@ -1 +1 @@ -Subproject commit f73c73d2018eb6a111f35e4dae7b4f27401e9421 +Subproject commit 4513bc2063f23c57bee6558799584b518d387a39 diff --git a/contracts/foundry-lib/solidity-utils b/contracts/foundry-lib/solidity-utils new file mode 160000 index 0000000000..9d4d041562 --- /dev/null +++ b/contracts/foundry-lib/solidity-utils @@ -0,0 +1 @@ +Subproject commit 9d4d041562f7ac2918e216e2e7c74172afe3d2af diff --git a/contracts/remappings.txt b/contracts/remappings.txt index a314323752..8fbfa33413 100644 --- a/contracts/remappings.txt +++ b/contracts/remappings.txt @@ -6,3 +6,5 @@ forge-std/=foundry-lib/forge-std/src/ hardhat/=node_modules/hardhat/ @eth-optimism/=node_modules/@eth-optimism/ @scroll-tech/=node_modules/@scroll-tech/ +@aave/gho-core/=node_modules/@aave/gho/src/contracts/ +solidity-utils/=foundry-lib/solidity-utils/src/ diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol b/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol new file mode 100644 index 0000000000..f07f8c3a28 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol @@ -0,0 +1,66 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; +import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; + +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; +import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; + +import {IRouter} from "../interfaces/IRouter.sol"; +import {VersionedInitializable} from "./VersionedInitializable.sol"; + +/// @title UpgradeableBurnMintTokenPool 
+/// @author Aave Labs +/// @notice Upgradeable version of Chainlink's CCIP BurnMintTokenPool +/// @dev Contract adaptations: +/// - Implementation of VersionedInitializable to allow upgrades +/// - Move of allowlist and router definition to initialization stage +contract UpgradeableBurnMintTokenPool is VersionedInitializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { + string public constant override typeAndVersion = "BurnMintTokenPool 1.4.0"; + + /// @dev Constructor + /// @param token The bridgeable token that is managed by this pool. + /// @param armProxy The address of the arm proxy + /// @param allowlistEnabled True if pool is set to access-controlled mode, false otherwise + constructor( + address token, + address armProxy, + bool allowlistEnabled + ) UpgradeableTokenPool(IBurnMintERC20(token), armProxy, allowlistEnabled) {} + + /// @dev Initializer + /// @dev The address passed as `owner` must accept ownership after initialization. + /// @dev The `allowlist` is only effective if pool is set to access-controlled mode + /// @param owner The address of the owner + /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders + /// @param router The address of the router + function initialize(address owner, address[] memory allowlist, address router) public virtual initializer { + if (owner == address(0)) revert ZeroAddressNotAllowed(); + if (router == address(0)) revert ZeroAddressNotAllowed(); + _transferOwnership(owner); + + s_router = IRouter(router); + + // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. + if (i_allowlistEnabled) { + _applyAllowListUpdates(new address[](0), allowlist); + } + } + + /// @inheritdoc UpgradeableBurnMintTokenPoolAbstract + function _burn(uint256 amount) internal virtual override { + IBurnMintERC20(address(i_token)).burn(amount); + } + + /// @notice Returns the revision number + /// @return The revision number + function REVISION() public pure virtual returns (uint256) { + return 1; + } + + /// @inheritdoc VersionedInitializable + function getRevision() internal pure virtual override returns (uint256) { + return REVISION(); + } +} diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol b/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol new file mode 100644 index 0000000000..651965e40b --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol @@ -0,0 +1,55 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; + +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +abstract contract UpgradeableBurnMintTokenPoolAbstract is UpgradeableTokenPool { + /// @notice Contains the specific burn call for a pool. + /// @dev overriding this method allows us to create pools with different burn signatures + /// without duplicating the underlying logic. + function _burn(uint256 amount) internal virtual; + + /// @notice Burn the token in the pool + /// @param amount Amount to burn + /// @dev The whenHealthy check is important to ensure that even if a ramp is compromised + /// we're able to stop token movement via ARM. 
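+  /// @dev Illustrative call sequence (a sketch; the onRamp caller and amount are
+  /// hypothetical, and the empty bytes arguments mirror the unused parameters):
+  ///   // called by the registered onRamp for `remoteChainSelector`
+  ///   pool.lockOrBurn(originalSender, "", 1e18, remoteChainSelector, "");
+  ///   // -> consumes outbound rate limit, burns 1e18 of the pool's token, emits Burned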
+ function lockOrBurn( + address originalSender, + bytes calldata, + uint256 amount, + uint64 remoteChainSelector, + bytes calldata + ) + external + virtual + override + onlyOnRamp(remoteChainSelector) + checkAllowList(originalSender) + whenHealthy + returns (bytes memory) + { + _consumeOutboundRateLimit(remoteChainSelector, amount); + _burn(amount); + emit Burned(msg.sender, amount); + return ""; + } + + /// @notice Mint tokens from the pool to the recipient + /// @param receiver Recipient address + /// @param amount Amount to mint + /// @dev The whenHealthy check is important to ensure that even if a ramp is compromised + /// we're able to stop token movement via ARM. + function releaseOrMint( + bytes memory, + address receiver, + uint256 amount, + uint64 remoteChainSelector, + bytes memory + ) external virtual override whenHealthy onlyOffRamp(remoteChainSelector) { + _consumeInboundRateLimit(remoteChainSelector, amount); + IBurnMintERC20(address(i_token)).mint(receiver, amount); + emit Minted(msg.sender, receiver, amount); + } +} diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol new file mode 100644 index 0000000000..f1abee7c86 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol @@ -0,0 +1,259 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity ^0.8.0; + +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; +import {ILiquidityContainer} from "../../rebalancer/interfaces/ILiquidityContainer.sol"; + +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; +import {RateLimiter} from "../libraries/RateLimiter.sol"; + +import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; + +import {IRouter} from "../interfaces/IRouter.sol"; +import {VersionedInitializable} from "./VersionedInitializable.sol"; + +/// @title UpgradeableLockReleaseTokenPool +/// @author Aave Labs +/// @notice Upgradeable version of Chainlink's CCIP LockReleaseTokenPool +/// @dev Contract adaptations: +/// - Implementation of VersionedInitializable to allow upgrades +/// - Move of allowlist and router definition to initialization stage +/// - Addition of a bridge limit to regulate the maximum amount of tokens that can be transferred out (burned/locked) +contract UpgradeableLockReleaseTokenPool is + VersionedInitializable, + UpgradeableTokenPool, + ILiquidityContainer, + ITypeAndVersion +{ + using SafeERC20 for IERC20; + + error InsufficientLiquidity(); + error LiquidityNotAccepted(); + error Unauthorized(address caller); + + error BridgeLimitExceeded(uint256 bridgeLimit); + error InvalidAmountToBurn(); + event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); + + string public constant override typeAndVersion = "LockReleaseTokenPool 1.4.0"; + + /// @dev The unique lock release pool flag to signal through EIP 165. + bytes4 private constant LOCK_RELEASE_INTERFACE_ID = bytes4(keccak256("LockReleaseTokenPool")); + + /// @dev Whether or not the pool accepts liquidity. 
+ /// External liquidity is not required when there is one canonical token deployed to a chain, + /// and CCIP is facilitating mint/burn on all the other chains, in which case the invariant + /// balanceOf(pool) on home chain == sum(totalSupply(mint/burn "wrapped" token) on all remote chains) should always hold + bool internal immutable i_acceptLiquidity; + /// @notice The address of the rebalancer. + address internal s_rebalancer; + /// @notice The address of the rate limiter admin. + /// @dev Can be address(0) if none is configured. + address internal s_rateLimitAdmin; + + /// @notice Maximum amount of tokens that can be bridged to other chains + uint256 private s_bridgeLimit; + /// @notice Amount of tokens bridged (transferred out) + /// @dev Must always be equal to or below the bridge limit + uint256 private s_currentBridged; + + /// @dev Constructor + /// @param token The bridgeable token that is managed by this pool. + /// @param armProxy The address of the arm proxy + /// @param allowlistEnabled True if pool is set to access-controlled mode, false otherwise + /// @param acceptLiquidity True if the pool accepts liquidity, false otherwise + constructor( + address token, + address armProxy, + bool allowlistEnabled, + bool acceptLiquidity + ) UpgradeableTokenPool(IERC20(token), armProxy, allowlistEnabled) { + i_acceptLiquidity = acceptLiquidity; + } + + /// @dev Initializer + /// @dev The address passed as `owner` must accept ownership after initialization. + /// @dev The `allowlist` is only effective if pool is set to access-controlled mode + /// @param owner The address of the owner + /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders + /// @param router The address of the router + /// @param bridgeLimit The maximum amount of tokens that can be bridged to other chains + function initialize( + address owner, + address[] memory allowlist, + address router, + uint256 bridgeLimit + ) public virtual initializer { + if (owner == address(0)) revert ZeroAddressNotAllowed(); + if (router == address(0)) revert ZeroAddressNotAllowed(); + _transferOwnership(owner); + + s_router = IRouter(router); + + // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. + if (i_allowlistEnabled) { + _applyAllowListUpdates(new address[](0), allowlist); + } + s_bridgeLimit = bridgeLimit; + } + + /// @notice Locks the token in the pool + /// @param amount Amount to lock + /// @dev The whenHealthy check is important to ensure that even if a ramp is compromised + /// we're able to stop token movement via ARM. + function lockOrBurn( + address originalSender, + bytes calldata, + uint256 amount, + uint64 remoteChainSelector, + bytes calldata + ) + external + virtual + override + onlyOnRamp(remoteChainSelector) + checkAllowList(originalSender) + whenHealthy + returns (bytes memory) + { + // Increase bridged amount because tokens are leaving the source chain + if ((s_currentBridged += amount) > s_bridgeLimit) revert BridgeLimitExceeded(s_bridgeLimit); + + _consumeOutboundRateLimit(remoteChainSelector, amount); + emit Locked(msg.sender, amount); + return ""; + } + + /// @notice Release tokens from the pool to the recipient + /// @param receiver Recipient address + /// @param amount Amount to release + /// @dev The whenHealthy check is important to ensure that even if a ramp is compromised + /// we're able to stop token movement via ARM. 
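+  /// @dev Accounting sketch with illustrative numbers: if s_currentBridged == 100e18,
+  /// releasing 40e18 leaves s_currentBridged == 60e18 and frees 40e18 of bridge
+  /// capacity for future lockOrBurn calls; attempting to release more than
+  /// s_currentBridged reverts with InvalidAmountToBurn.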
+  function releaseOrMint(
+    bytes memory,
+    address receiver,
+    uint256 amount,
+    uint64 remoteChainSelector,
+    bytes memory
+  ) external virtual override onlyOffRamp(remoteChainSelector) whenHealthy {
+    // This should never occur: the released amount should never exceed the current bridged amount
+    if (amount > s_currentBridged) revert InvalidAmountToBurn();
+    // Reduce bridged amount because tokens are back on the source chain
+    s_currentBridged -= amount;
+
+    _consumeInboundRateLimit(remoteChainSelector, amount);
+    getToken().safeTransfer(receiver, amount);
+    emit Released(msg.sender, receiver, amount);
+  }
+
+  /// @notice Returns the lock release interface flag used for EIP165 identification.
+  function getLockReleaseInterfaceId() public pure returns (bytes4) {
+    return LOCK_RELEASE_INTERFACE_ID;
+  }
+
+  /// @inheritdoc IERC165
+  function supportsInterface(bytes4 interfaceId) public pure virtual override returns (bool) {
+    return
+      interfaceId == LOCK_RELEASE_INTERFACE_ID ||
+      interfaceId == type(ILiquidityContainer).interfaceId ||
+      super.supportsInterface(interfaceId);
+  }
+
+  /// @notice Gets the Rebalancer, which can be address(0) if none is configured.
+  /// @return The current liquidity manager.
+  function getRebalancer() external view returns (address) {
+    return s_rebalancer;
+  }
+
+  /// @notice Sets the Rebalancer address.
+  /// @dev Only callable by the owner.
+  function setRebalancer(address rebalancer) external onlyOwner {
+    s_rebalancer = rebalancer;
+  }
+
+  /// @notice Sets the rate limiter admin address.
+  /// @dev Only callable by the owner.
+  /// @param rateLimitAdmin The new rate limiter admin address.
+  function setRateLimitAdmin(address rateLimitAdmin) external onlyOwner {
+    s_rateLimitAdmin = rateLimitAdmin;
+  }
+
+  /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out
+  /// @param newBridgeLimit The new bridge limit
+  function setBridgeLimit(uint256 newBridgeLimit) external onlyOwner {
+    uint256 oldBridgeLimit = s_bridgeLimit;
+    s_bridgeLimit = newBridgeLimit;
+    emit BridgeLimitUpdated(oldBridgeLimit, newBridgeLimit);
+  }
+
+  /// @notice Gets the bridge limit
+  /// @return The maximum amount of tokens that can be transferred out to other chains
+  function getBridgeLimit() external view virtual returns (uint256) {
+    return s_bridgeLimit;
+  }
+
+  /// @notice Gets the current bridged amount to other chains
+  /// @return The amount of tokens transferred out to other chains
+  function getCurrentBridgedAmount() external view virtual returns (uint256) {
+    return s_currentBridged;
+  }
+
+  /// @notice Gets the rate limiter admin address.
+  function getRateLimitAdmin() external view returns (address) {
+    return s_rateLimitAdmin;
+  }
+
+  /// @notice Checks if the pool can accept liquidity.
+  /// @return true if the pool can accept liquidity, false otherwise.
+  function canAcceptLiquidity() external view returns (bool) {
+    return i_acceptLiquidity;
+  }
+
+  /// @notice Adds liquidity to the pool. The tokens should be approved first.
+  /// @param amount The amount of liquidity to provide.
+  function provideLiquidity(uint256 amount) external {
+    if (!i_acceptLiquidity) revert LiquidityNotAccepted();
+    if (s_rebalancer != msg.sender) revert Unauthorized(msg.sender);
+
+    i_token.safeTransferFrom(msg.sender, address(this), amount);
+    emit LiquidityAdded(msg.sender, amount);
+  }
+
+  /// @notice Removes liquidity from the pool. The tokens will be sent to msg.sender.
+  /// @param amount The amount of liquidity to remove.
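+  /// @dev Usage sketch for the rebalancer (illustrative amounts; assumes the pool was
+  /// deployed with acceptLiquidity == true and the caller is s_rebalancer):
+  ///   token.approve(address(pool), 1_000e18);
+  ///   pool.provideLiquidity(1_000e18); // pulls tokens into the pool
+  ///   pool.withdrawLiquidity(400e18);  // sends 400e18 back to the caller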
+  function withdrawLiquidity(uint256 amount) external {
+    if (s_rebalancer != msg.sender) revert Unauthorized(msg.sender);
+
+    if (i_token.balanceOf(address(this)) < amount) revert InsufficientLiquidity();
+    i_token.safeTransfer(msg.sender, amount);
+    emit LiquidityRemoved(msg.sender, amount);
+  }
+
+  /// @notice Sets the chain rate limiter config.
+  /// @dev Only callable by the owner or the rate limiter admin. NOTE: overwrites the normal
+  /// onlyAdmin check in the base implementation to also allow the rate limiter admin.
+  /// @param remoteChainSelector The remote chain selector for which the rate limits apply.
+  /// @param outboundConfig The new outbound rate limiter config.
+  /// @param inboundConfig The new inbound rate limiter config.
+  function setChainRateLimiterConfig(
+    uint64 remoteChainSelector,
+    RateLimiter.Config memory outboundConfig,
+    RateLimiter.Config memory inboundConfig
+  ) external override {
+    if (msg.sender != s_rateLimitAdmin && msg.sender != owner()) revert Unauthorized(msg.sender);
+
+    _setRateLimitConfig(remoteChainSelector, outboundConfig, inboundConfig);
+  }
+
+  /// @notice Returns the revision number
+  /// @return The revision number
+  function REVISION() public pure virtual returns (uint256) {
+    return 1;
+  }
+
+  /// @inheritdoc VersionedInitializable
+  function getRevision() internal pure virtual override returns (uint256) {
+    return REVISION();
+  }
+}
diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableTokenPool.sol b/contracts/src/v0.8/ccip/pools/UpgradeableTokenPool.sol
new file mode 100644
index 0000000000..fcd8948098
--- /dev/null
+++ b/contracts/src/v0.8/ccip/pools/UpgradeableTokenPool.sol
@@ -0,0 +1,320 @@
+// SPDX-License-Identifier: BUSL-1.1
+pragma solidity ^0.8.0;
+
+import {IPool} from "../interfaces/pools/IPool.sol";
+import {IARM} from "../interfaces/IARM.sol";
+import {IRouter} from "../interfaces/IRouter.sol";
+
+import {OwnerIsCreator} from "../../shared/access/OwnerIsCreator.sol";
+import {RateLimiter} from "../libraries/RateLimiter.sol";
+
+import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol";
+import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol";
+import {EnumerableSet} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/structs/EnumerableSet.sol";
+
+/// @notice Base abstract class with common functions for all token pools.
+/// A token pool serves as an isolated place for holding tokens and token specific logic
+/// that may execute as tokens move across the bridge.
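+/// Ramps never hold funds themselves: the Router points each lane's onRamp/offRamp at
+/// this pool, and the onlyOnRamp/onlyOffRamp modifiers below verify the caller against
+/// the Router before any lock, burn, release or mint is executed.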
+abstract contract UpgradeableTokenPool is IPool, OwnerIsCreator, IERC165 {
+  using EnumerableSet for EnumerableSet.AddressSet;
+  using EnumerableSet for EnumerableSet.UintSet;
+  using RateLimiter for RateLimiter.TokenBucket;
+
+  error CallerIsNotARampOnRouter(address caller);
+  error ZeroAddressNotAllowed();
+  error SenderNotAllowed(address sender);
+  error AllowListNotEnabled();
+  error NonExistentChain(uint64 remoteChainSelector);
+  error ChainNotAllowed(uint64 remoteChainSelector);
+  error BadARMSignal();
+  error ChainAlreadyExists(uint64 chainSelector);
+
+  event Locked(address indexed sender, uint256 amount);
+  event Burned(address indexed sender, uint256 amount);
+  event Released(address indexed sender, address indexed recipient, uint256 amount);
+  event Minted(address indexed sender, address indexed recipient, uint256 amount);
+  event ChainAdded(
+    uint64 remoteChainSelector,
+    RateLimiter.Config outboundRateLimiterConfig,
+    RateLimiter.Config inboundRateLimiterConfig
+  );
+  event ChainConfigured(
+    uint64 remoteChainSelector,
+    RateLimiter.Config outboundRateLimiterConfig,
+    RateLimiter.Config inboundRateLimiterConfig
+  );
+  event ChainRemoved(uint64 remoteChainSelector);
+  event AllowListAdd(address sender);
+  event AllowListRemove(address sender);
+  event RouterUpdated(address oldRouter, address newRouter);
+
+  struct ChainUpdate {
+    uint64 remoteChainSelector; // ──╮ Remote chain selector
+    bool allowed; // ────────────────╯ Whether the chain is allowed
+    RateLimiter.Config outboundRateLimiterConfig; // Outbound rate limited config, meaning the rate limits for all of the onRamps for the given chain
+    RateLimiter.Config inboundRateLimiterConfig; // Inbound rate limited config, meaning the rate limits for all of the offRamps for the given chain
+  }
+
+  /// @dev The bridgeable token that is managed by this pool.
+  IERC20 internal immutable i_token;
+  /// @dev The address of the arm proxy
+  address internal immutable i_armProxy;
+  /// @dev The immutable flag that indicates if the pool is access-controlled.
+  bool internal immutable i_allowlistEnabled;
+  /// @dev A set of addresses allowed to trigger lockOrBurn as original senders.
+  /// Only takes effect if i_allowlistEnabled is true.
+  /// This can be used to ensure only token-issuer specified addresses can
+  /// move tokens.
+  EnumerableSet.AddressSet internal s_allowList;
+  /// @dev The address of the router
+  IRouter internal s_router;
+  /// @dev A set of allowed chain selectors. We want the allowlist to be enumerable to
+  /// be able to quickly determine (without parsing logs) who can access the pool.
+  /// @dev The chain selectors are in uint256 format because of the EnumerableSet implementation.
+  EnumerableSet.UintSet internal s_remoteChainSelectors;
+  /// @dev Outbound rate limits. Corresponds to the inbound rate limit for the pool
+  /// on the remote chain.
+  mapping(uint64 => RateLimiter.TokenBucket) internal s_outboundRateLimits;
+  /// @dev Inbound rate limits. This allows per destination chain
+  /// token issuer specified rate limiting (e.g.
issuers may trust chains to varying + /// degrees and prefer different limits) + mapping(uint64 => RateLimiter.TokenBucket) internal s_inboundRateLimits; + + constructor(IERC20 token, address armProxy, bool allowlistEnabled) { + if (address(token) == address(0)) revert ZeroAddressNotAllowed(); + i_token = token; + i_armProxy = armProxy; + i_allowlistEnabled = allowlistEnabled; + } + + /// @notice Get ARM proxy address + /// @return armProxy Address of arm proxy + function getArmProxy() public view returns (address armProxy) { + return i_armProxy; + } + + /// @inheritdoc IPool + function getToken() public view override returns (IERC20 token) { + return i_token; + } + + /// @notice Gets the pool's Router + /// @return router The pool's Router + function getRouter() public view returns (address router) { + return address(s_router); + } + + /// @notice Sets the pool's Router + /// @param newRouter The new Router + function setRouter(address newRouter) public onlyOwner { + if (newRouter == address(0)) revert ZeroAddressNotAllowed(); + address oldRouter = address(s_router); + s_router = IRouter(newRouter); + + emit RouterUpdated(oldRouter, newRouter); + } + + /// @inheritdoc IERC165 + function supportsInterface(bytes4 interfaceId) public pure virtual override returns (bool) { + return interfaceId == type(IPool).interfaceId || interfaceId == type(IERC165).interfaceId; + } + + // ================================================================ + // │ Chain permissions │ + // ================================================================ + + /// @notice Checks whether a chain selector is permissioned on this contract. + /// @return true if the given chain selector is a permissioned remote chain. + function isSupportedChain(uint64 remoteChainSelector) public view returns (bool) { + return s_remoteChainSelectors.contains(remoteChainSelector); + } + + /// @notice Get list of allowed chains + /// @return list of chains. + function getSupportedChains() public view returns (uint64[] memory) { + uint256[] memory uint256ChainSelectors = s_remoteChainSelectors.values(); + uint64[] memory chainSelectors = new uint64[](uint256ChainSelectors.length); + for (uint256 i = 0; i < uint256ChainSelectors.length; ++i) { + chainSelectors[i] = uint64(uint256ChainSelectors[i]); + } + + return chainSelectors; + } + + /// @notice Sets the permissions for a list of chains selectors. Actual senders for these chains + /// need to be allowed on the Router to interact with this pool. + /// @dev Only callable by the owner + /// @param chains A list of chains and their new permission status & rate limits. Rate limits + /// are only used when the chain is being added through `allowed` being true. 
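+  /// @dev Construction sketch (chain selector and limits are illustrative):
+  ///   ChainUpdate[] memory updates = new ChainUpdate[](1);
+  ///   updates[0] = ChainUpdate({
+  ///     remoteChainSelector: 12345,
+  ///     allowed: true,
+  ///     outboundRateLimiterConfig: RateLimiter.Config({isEnabled: true, capacity: 100e18, rate: 1e18}),
+  ///     inboundRateLimiterConfig: RateLimiter.Config({isEnabled: true, capacity: 100e18, rate: 1e18})
+  ///   });
+  ///   pool.applyChainUpdates(updates);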
+ function applyChainUpdates(ChainUpdate[] calldata chains) external virtual onlyOwner { + for (uint256 i = 0; i < chains.length; ++i) { + ChainUpdate memory update = chains[i]; + RateLimiter._validateTokenBucketConfig(update.outboundRateLimiterConfig, !update.allowed); + RateLimiter._validateTokenBucketConfig(update.inboundRateLimiterConfig, !update.allowed); + + if (update.allowed) { + // If the chain already exists, revert + if (!s_remoteChainSelectors.add(update.remoteChainSelector)) { + revert ChainAlreadyExists(update.remoteChainSelector); + } + + s_outboundRateLimits[update.remoteChainSelector] = RateLimiter.TokenBucket({ + rate: update.outboundRateLimiterConfig.rate, + capacity: update.outboundRateLimiterConfig.capacity, + tokens: update.outboundRateLimiterConfig.capacity, + lastUpdated: uint32(block.timestamp), + isEnabled: update.outboundRateLimiterConfig.isEnabled + }); + + s_inboundRateLimits[update.remoteChainSelector] = RateLimiter.TokenBucket({ + rate: update.inboundRateLimiterConfig.rate, + capacity: update.inboundRateLimiterConfig.capacity, + tokens: update.inboundRateLimiterConfig.capacity, + lastUpdated: uint32(block.timestamp), + isEnabled: update.inboundRateLimiterConfig.isEnabled + }); + emit ChainAdded(update.remoteChainSelector, update.outboundRateLimiterConfig, update.inboundRateLimiterConfig); + } else { + // If the chain doesn't exist, revert + if (!s_remoteChainSelectors.remove(update.remoteChainSelector)) { + revert NonExistentChain(update.remoteChainSelector); + } + + delete s_inboundRateLimits[update.remoteChainSelector]; + delete s_outboundRateLimits[update.remoteChainSelector]; + emit ChainRemoved(update.remoteChainSelector); + } + } + } + + // ================================================================ + // │ Rate limiting │ + // ================================================================ + + /// @notice Consumes outbound rate limiting capacity in this pool + function _consumeOutboundRateLimit(uint64 remoteChainSelector, uint256 amount) internal { + s_outboundRateLimits[remoteChainSelector]._consume(amount, address(i_token)); + } + + /// @notice Consumes inbound rate limiting capacity in this pool + function _consumeInboundRateLimit(uint64 remoteChainSelector, uint256 amount) internal { + s_inboundRateLimits[remoteChainSelector]._consume(amount, address(i_token)); + } + + /// @notice Gets the token bucket with its values for the block it was requested at. + /// @return The token bucket. + function getCurrentOutboundRateLimiterState( + uint64 remoteChainSelector + ) external view returns (RateLimiter.TokenBucket memory) { + return s_outboundRateLimits[remoteChainSelector]._currentTokenBucketState(); + } + + /// @notice Gets the token bucket with its values for the block it was requested at. + /// @return The token bucket. + function getCurrentInboundRateLimiterState( + uint64 remoteChainSelector + ) external view returns (RateLimiter.TokenBucket memory) { + return s_inboundRateLimits[remoteChainSelector]._currentTokenBucketState(); + } + + /// @notice Sets the chain rate limiter config. + /// @param remoteChainSelector The remote chain selector for which the rate limits apply. + /// @param outboundConfig The new outbound rate limiter config, meaning the onRamp rate limits for the given chain. + /// @param inboundConfig The new inbound rate limiter config, meaning the offRamp rate limits for the given chain. 
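+  /// @dev The configs are token buckets: `capacity` is the burst maximum and `rate` the
+  /// per-second refill. E.g. (illustrative) capacity 100e18 with rate 1e18 allows an
+  /// immediate 100e18 transfer, then refills to full over roughly 100 seconds.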
+ function setChainRateLimiterConfig( + uint64 remoteChainSelector, + RateLimiter.Config memory outboundConfig, + RateLimiter.Config memory inboundConfig + ) external virtual onlyOwner { + _setRateLimitConfig(remoteChainSelector, outboundConfig, inboundConfig); + } + + function _setRateLimitConfig( + uint64 remoteChainSelector, + RateLimiter.Config memory outboundConfig, + RateLimiter.Config memory inboundConfig + ) internal { + if (!isSupportedChain(remoteChainSelector)) revert NonExistentChain(remoteChainSelector); + RateLimiter._validateTokenBucketConfig(outboundConfig, false); + s_outboundRateLimits[remoteChainSelector]._setTokenBucketConfig(outboundConfig); + RateLimiter._validateTokenBucketConfig(inboundConfig, false); + s_inboundRateLimits[remoteChainSelector]._setTokenBucketConfig(inboundConfig); + emit ChainConfigured(remoteChainSelector, outboundConfig, inboundConfig); + } + + // ================================================================ + // │ Access │ + // ================================================================ + + /// @notice Checks whether remote chain selector is configured on this contract, and if the msg.sender + /// is a permissioned onRamp for the given chain on the Router. + modifier onlyOnRamp(uint64 remoteChainSelector) { + if (!isSupportedChain(remoteChainSelector)) revert ChainNotAllowed(remoteChainSelector); + if (!(msg.sender == s_router.getOnRamp(remoteChainSelector))) revert CallerIsNotARampOnRouter(msg.sender); + _; + } + + /// @notice Checks whether remote chain selector is configured on this contract, and if the msg.sender + /// is a permissioned offRamp for the given chain on the Router. + modifier onlyOffRamp(uint64 remoteChainSelector) { + if (!isSupportedChain(remoteChainSelector)) revert ChainNotAllowed(remoteChainSelector); + if (!s_router.isOffRamp(remoteChainSelector, msg.sender)) revert CallerIsNotARampOnRouter(msg.sender); + _; + } + + // ================================================================ + // │ Allowlist │ + // ================================================================ + + modifier checkAllowList(address sender) { + if (i_allowlistEnabled && !s_allowList.contains(sender)) revert SenderNotAllowed(sender); + _; + } + + /// @notice Gets whether the allowList functionality is enabled. + /// @return true is enabled, false if not. + function getAllowListEnabled() external view returns (bool) { + return i_allowlistEnabled; + } + + /// @notice Gets the allowed addresses. + /// @return The allowed addresses. + function getAllowList() external view returns (address[] memory) { + return s_allowList.values(); + } + + /// @notice Apply updates to the allow list. + /// @param removes The addresses to be removed. + /// @param adds The addresses to be added. + /// @dev allowListing will be removed before public launch + function applyAllowListUpdates(address[] calldata removes, address[] calldata adds) external onlyOwner { + _applyAllowListUpdates(removes, adds); + } + + /// @notice Internal version of applyAllowListUpdates to allow for reuse in the constructor. 
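+  /// @dev Processing order (see body below): removals are applied before additions,
+  /// zero addresses in `adds` are skipped silently, and events are only emitted when
+  /// set membership actually changes.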
+ function _applyAllowListUpdates(address[] memory removes, address[] memory adds) internal { + if (!i_allowlistEnabled) revert AllowListNotEnabled(); + + for (uint256 i = 0; i < removes.length; ++i) { + address toRemove = removes[i]; + if (s_allowList.remove(toRemove)) { + emit AllowListRemove(toRemove); + } + } + for (uint256 i = 0; i < adds.length; ++i) { + address toAdd = adds[i]; + if (toAdd == address(0)) { + continue; + } + if (s_allowList.add(toAdd)) { + emit AllowListAdd(toAdd); + } + } + } + + /// @notice Ensure that there is no active curse. + modifier whenHealthy() { + if (IARM(i_armProxy).isCursed()) revert BadARMSignal(); + _; + } +} diff --git a/contracts/src/v0.8/ccip/pools/VersionedInitializable.sol b/contracts/src/v0.8/ccip/pools/VersionedInitializable.sol new file mode 100644 index 0000000000..b9fb054fa0 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/VersionedInitializable.sol @@ -0,0 +1,77 @@ +// SPDX-License-Identifier: AGPL-3.0 +pragma solidity ^0.8.0; + +/** + * @title VersionedInitializable + * @author Aave, inspired by the OpenZeppelin Initializable contract + * @notice Helper contract to implement initializer functions. To use it, replace + * the constructor with a function that has the `initializer` modifier. + * @dev WARNING: Unlike constructors, initializer functions must be manually + * invoked. This applies both to deploying an Initializable contract, as well + * as extending an Initializable contract via inheritance. + * WARNING: When used with inheritance, manual care must be taken to not invoke + * a parent initializer twice, or ensure that all initializers are idempotent, + * because this is not dealt with automatically as with constructors. + */ +abstract contract VersionedInitializable { + /** + * @dev Indicates that the contract has been initialized. + */ + uint256 private lastInitializedRevision = 0; + + /** + * @dev Indicates that the contract is in the process of being initialized. + */ + bool private initializing; + + /** + * @dev Modifier to use in the initializer function of a contract. + */ + modifier initializer() { + uint256 revision = getRevision(); + require( + initializing || isConstructor() || revision > lastInitializedRevision, + "Contract instance has already been initialized" + ); + + bool isTopLevelCall = !initializing; + if (isTopLevelCall) { + initializing = true; + lastInitializedRevision = revision; + } + + _; + + if (isTopLevelCall) { + initializing = false; + } + } + + /** + * @notice Returns the revision number of the contract + * @dev Needs to be defined in the inherited class as a constant. + * @return The revision number + */ + function getRevision() internal pure virtual returns (uint256); + + /** + * @notice Returns true if and only if the function is running in the constructor + * @return True if the function is running in the constructor + */ + function isConstructor() private view returns (bool) { + // extcodesize checks the size of the code stored in an address, and + // address returns the current address. Since the code is still not + // deployed when running a constructor, any checks on its code size will + // yield zero, making it an effective way to detect if a contract is + // under construction or not. + uint256 cs; + //solium-disable-next-line + assembly { + cs := extcodesize(address()) + } + return cs == 0; + } + + // Reserved storage space to allow for layout changes in the future. 
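+  // Convention note: shrinking this gap by the number of state variables added in a
+  // future revision keeps the total reserved storage layout constant across upgrades.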
+ uint256[50] private ______gap; +} diff --git a/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol b/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol index 7b68418754..b02296f9ff 100644 --- a/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol +++ b/contracts/src/v0.8/shared/access/ConfirmedOwnerWithProposal.sol @@ -45,10 +45,7 @@ contract ConfirmedOwnerWithProposal is IOwnable { } /// @notice validate, transfer ownership, and emit relevant events - function _transferOwnership(address to) private { - // solhint-disable-next-line custom-errors - require(to != msg.sender, "Cannot transfer to self"); - + function _transferOwnership(address to) internal { s_pendingOwner = to; emit OwnershipTransferRequested(s_owner, to); From 24d277fd4532c52b7f6a01cc3a52a75e5b81740e Mon Sep 17 00:00:00 2001 From: miguelmtz <36620902+miguelmtzinf@users.noreply.github.com> Date: Mon, 20 May 2024 11:08:27 +0200 Subject: [PATCH 02/18] test: Add unit and e2e tests (#4) * test: Add unit and e2e tests * test: Add tests for bridge limit config * fix: Fix foundry toml --- contracts/src/v0.8/ccip/test/BaseTest.t.sol | 92 ++ .../v0.8/ccip/test/mocks/MockUpgradeable.sol | 36 + .../src/v0.8/ccip/test/pools/End2End.t.sol | 19 + .../test/pools/GHO/GHOTokenPoolEthereum.t.sol | 646 ++++++++++++++ .../GHO/GHOTokenPoolEthereumBridgeLimit.t.sol | 813 ++++++++++++++++++ ...GHOTokenPoolEthereumBridgeLimitSetup.t.sol | 224 +++++ .../pools/GHO/GHOTokenPoolEthereumE2E.t.sol | 408 +++++++++ .../pools/GHO/GHOTokenPoolEthereumSetup.t.sol | 72 ++ .../test/pools/GHO/GHOTokenPoolRemote.t.sol | 244 ++++++ .../pools/GHO/GHOTokenPoolRemoteE2E.t.sol | 416 +++++++++ .../pools/GHO/GHOTokenPoolRemoteSetup.t.sol | 78 ++ 11 files changed, 3048 insertions(+) create mode 100644 contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/End2End.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol diff --git a/contracts/src/v0.8/ccip/test/BaseTest.t.sol b/contracts/src/v0.8/ccip/test/BaseTest.t.sol index 33d2e649c4..e12746a802 100644 --- a/contracts/src/v0.8/ccip/test/BaseTest.t.sol +++ b/contracts/src/v0.8/ccip/test/BaseTest.t.sol @@ -5,6 +5,11 @@ import {Test, stdError} from "forge-std/Test.sol"; import {MockARM} from "./mocks/MockARM.sol"; import {StructFactory} from "./StructFactory.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; +import {UpgradeableLockReleaseTokenPool} from "../pools/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../pools/UpgradeableBurnMintTokenPool.sol"; +import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; + contract BaseTest is Test, StructFactory { bool private s_baseTestInitialized; @@ -26,4 +31,91 @@ contract BaseTest is Test, StructFactory { 
    s_mockARM = new MockARM();
  }
+
+  function _deployUpgradeableBurnMintTokenPool(
+    address ghoToken,
+    address arm,
+    address router,
+    address owner,
+    address proxyAdmin
+  ) internal returns (address) {
+    // Deploy BurnMintTokenPool for GHO token on source chain
+    UpgradeableBurnMintTokenPool tokenPoolImpl = new UpgradeableBurnMintTokenPool(ghoToken, arm, false);
+    // Initialize the implementation so it cannot be initialized again
+    address[] memory emptyArray = new address[](0);
+    tokenPoolImpl.initialize(owner, emptyArray, router);
+    // Deploy and initialize the proxy
+    bytes memory tokenPoolInitParams = abi.encodeWithSignature(
+      "initialize(address,address[],address)",
+      owner,
+      emptyArray,
+      router
+    );
+    TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy(
+      address(tokenPoolImpl),
+      proxyAdmin,
+      tokenPoolInitParams
+    );
+    // Manage ownership
+    vm.stopPrank();
+    vm.prank(owner);
+    UpgradeableBurnMintTokenPool(address(tokenPoolProxy)).acceptOwnership();
+    vm.startPrank(OWNER);
+
+    return address(tokenPoolProxy);
+  }
+
+  function _deployUpgradeableLockReleaseTokenPool(
+    address ghoToken,
+    address arm,
+    address router,
+    address owner,
+    uint256 bridgeLimit,
+    address proxyAdmin
+  ) internal returns (address) {
+    UpgradeableLockReleaseTokenPool tokenPoolImpl = new UpgradeableLockReleaseTokenPool(ghoToken, arm, false, true);
+    // Initialize the implementation so it cannot be initialized again
+    address[] memory emptyArray = new address[](0);
+    tokenPoolImpl.initialize(owner, emptyArray, router, bridgeLimit);
+    // Deploy and initialize the proxy
+    bytes memory tokenPoolInitParams = abi.encodeWithSignature(
+      "initialize(address,address[],address,uint256)",
+      owner,
+      emptyArray,
+      router,
+      bridgeLimit
+    );
+    TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy(
+      address(tokenPoolImpl),
+      proxyAdmin,
+      tokenPoolInitParams
+    );
+
+    // Manage ownership
+    vm.stopPrank();
+    vm.prank(owner);
+    UpgradeableLockReleaseTokenPool(address(tokenPoolProxy)).acceptOwnership();
+    vm.startPrank(OWNER);
+
+    return address(tokenPoolProxy);
+  }
+
+  function _inflateFacilitatorLevel(address tokenPool, address ghoToken, uint256 amount) internal {
+    vm.stopPrank();
+    vm.prank(tokenPool);
+    IBurnMintERC20(ghoToken).mint(address(0), amount);
+  }
+
+  function _getProxyAdminAddress(address proxy) internal view returns (address) {
+    bytes32 ERC1967_ADMIN_SLOT = 0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103;
+    bytes32 adminSlot = vm.load(proxy, ERC1967_ADMIN_SLOT);
+    return address(uint160(uint256(adminSlot)));
+  }
+
+  function _getProxyImplementationAddress(address proxy) internal view returns (address) {
+    bytes32 ERC1967_IMPLEMENTATION_SLOT = 0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc;
+    bytes32 implSlot = vm.load(proxy, ERC1967_IMPLEMENTATION_SLOT);
+    return address(uint160(uint256(implSlot)));
+  }
+
 }
diff --git a/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol
new file mode 100644
index 0000000000..e613768e6c
--- /dev/null
+++ b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol
@@ -0,0 +1,36 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.0;
+
+import {VersionedInitializable} from "../../pools/VersionedInitializable.sol";
+
+/**
+ * @dev Mock contract to test upgrades, not to be used in production.
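+ * A sketch of the intended test flow (assumes a TransparentUpgradeableProxy already
+ * points at a revision-1 pool): upgrade the proxy to this mock; because REVISION()
+ * returns 2 (> lastInitializedRevision), initialize() may be called exactly once more.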
+ */
+contract MockUpgradeable is VersionedInitializable {
+  /**
+   * @dev Constructor
+   */
+  constructor() {
+    // Intentionally left blank
+  }
+
+  /**
+   * @dev Initializer
+   */
+  function initialize() public initializer {
+    // Intentionally left blank
+  }
+
+  /**
+   * @notice Returns the revision number
+   * @return The revision number
+   */
+  function REVISION() public pure returns (uint256) {
+    return 2;
+  }
+
+  /// @inheritdoc VersionedInitializable
+  function getRevision() internal pure virtual override returns (uint256) {
+    return REVISION();
+  }
+}
diff --git a/contracts/src/v0.8/ccip/test/pools/End2End.t.sol b/contracts/src/v0.8/ccip/test/pools/End2End.t.sol
new file mode 100644
index 0000000000..9abbef2ac9
--- /dev/null
+++ b/contracts/src/v0.8/ccip/test/pools/End2End.t.sol
@@ -0,0 +1,19 @@
+// SPDX-License-Identifier: BUSL-1.1
+pragma solidity 0.8.19;
+
+import "../helpers/MerkleHelper.sol";
+import "../commitStore/CommitStore.t.sol";
+import "../onRamp/EVM2EVMOnRampSetup.t.sol";
+import "../offRamp/EVM2EVMOffRampSetup.t.sol";
+
+contract E2E is EVM2EVMOnRampSetup, CommitStoreSetup, EVM2EVMOffRampSetup {
+  using Internal for Internal.EVM2EVMMessage;
+
+  function setUp() public virtual override(EVM2EVMOnRampSetup, CommitStoreSetup, EVM2EVMOffRampSetup) {
+    EVM2EVMOnRampSetup.setUp();
+    CommitStoreSetup.setUp();
+    EVM2EVMOffRampSetup.setUp();
+
+    deployOffRamp(s_commitStore, s_destRouter, address(0));
+  }
+}
diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol
new file mode 100644
index 0000000000..aa85916c74
--- /dev/null
+++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol
@@ -0,0 +1,646 @@
+// SPDX-License-Identifier: BUSL-1.1
+pragma solidity 0.8.19;
+
+import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol";
+import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol";
+
+import {stdError} from "forge-std/Test.sol";
+import {MockUpgradeable} from "../../mocks/MockUpgradeable.sol";
+import {IPool} from "../../../interfaces/pools/IPool.sol";
+import {LockReleaseTokenPool} from "../../../pools/LockReleaseTokenPool.sol";
+import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol";
+import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol";
+import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol";
+import {RateLimiter} from "../../../libraries/RateLimiter.sol";
+import {IERC165} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol";
+import {GHOTokenPoolEthereumSetup} from "./GHOTokenPoolEthereumSetup.t.sol";
+
+contract GHOTokenPoolEthereum_setRebalancer is GHOTokenPoolEthereumSetup {
+  function testSetRebalancerSuccess() public {
+    assertEq(address(s_ghoTokenPool.getRebalancer()), OWNER);
+    changePrank(AAVE_DAO);
+    s_ghoTokenPool.setRebalancer(STRANGER);
+    assertEq(address(s_ghoTokenPool.getRebalancer()), STRANGER);
+  }
+
+  function testSetRebalancerReverts() public {
+    vm.startPrank(STRANGER);
+
+    vm.expectRevert("Only callable by owner");
+    s_ghoTokenPool.setRebalancer(STRANGER);
+  }
+}
+
+contract GHOTokenPoolEthereum_lockOrBurn is GHOTokenPoolEthereumSetup {
+  error SenderNotAllowed(address sender);
+
+  event Locked(address indexed sender, uint256 amount);
+  event TokensConsumed(uint256 tokens);
+
+  function testFuzz_LockOrBurnNoAllowListSuccess(uint256 amount, uint256 bridgedAmount) public {
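+    // Bounding rationale (mirrors the invariants under test): `amount` must fit both
+    // the outbound rate limit capacity and the bridge limit, while `bridgedAmount`
+    // pre-consumes part of the bridge limit before the asserted lock.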
uint256 maxAmount = getOutboundRateLimiterConfig().capacity < INITIAL_BRIDGE_LIMIT + ? getOutboundRateLimiterConfig().capacity + : INITIAL_BRIDGE_LIMIT; + amount = bound(amount, 1, maxAmount); + bridgedAmount = bound(bridgedAmount, 0, INITIAL_BRIDGE_LIMIT - amount); + + changePrank(s_allowedOnRamp); + if (bridgedAmount > 0) { + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), bridgedAmount, DEST_CHAIN_SELECTOR, bytes("")); + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), bridgedAmount); + } + + vm.expectEmit(); + emit TokensConsumed(amount); + vm.expectEmit(); + emit Locked(s_allowedOnRamp, amount); + + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), bridgedAmount + amount); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = getOutboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + // increase bridge limit to hit the rate limit error + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(amount); + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_token)) + ); + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + } + + function testTokenBridgeLimitExceededReverts() public { + uint256 bridgeLimit = s_ghoTokenPool.getBridgeLimit(); + uint256 amount = bridgeLimit + 1; + + vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, bridgeLimit)); + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + } +} + +contract GHOTokenPoolEthereum_releaseOrMint is GHOTokenPoolEthereumSetup { + event TokensConsumed(uint256 tokens); + event Released(address indexed sender, address indexed recipient, uint256 amount); + + function setUp() public virtual override { + GHOTokenPoolEthereumSetup.setUp(); + + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdate); + } + + function test_ReleaseOrMintSuccess() public { + uint256 amount = 100; + deal(address(s_token), address(s_ghoTokenPool), amount); + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + + vm.expectEmit(); + emit TokensConsumed(amount); + vm.expectEmit(); + emit Released(s_allowedOffRamp, OWNER, amount); + + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint(bytes(""), OWNER, amount, SOURCE_CHAIN_SELECTOR, bytes("")); + + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), 0); + } + + function testFuzz_ReleaseOrMintSuccess(address recipient, uint256 amount, uint256 bridgedAmount) public { + // Since the owner already has tokens this would break the checks + vm.assume(recipient != OWNER); + vm.assume(recipient != address(0)); + vm.assume(recipient != address(s_token)); + + amount = uint128(bound(amount, 2, type(uint128).max)); + bridgedAmount = 
uint128(bound(bridgedAmount, amount, type(uint128).max)); + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(bridgedAmount); + s_ghoTokenPool.setChainRateLimiterConfig( + DEST_CHAIN_SELECTOR, + RateLimiter.Config({isEnabled: true, capacity: type(uint128).max, rate: 1e15}), + RateLimiter.Config({isEnabled: true, capacity: type(uint128).max, rate: 1e15}) + ); + vm.warp(block.timestamp + 1e50); // wait to refill capacity + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), bridgedAmount, DEST_CHAIN_SELECTOR, bytes("")); + + // Makes sure the pool always has enough funds + deal(address(s_token), address(s_ghoTokenPool), amount); + vm.startPrank(s_allowedOffRamp); + + uint256 capacity = getInboundRateLimiterConfig().capacity; + uint256 bridgedAmountAfter = bridgedAmount; + // Determine if we hit the rate limit or the txs should succeed. + if (amount > capacity) { + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_token)) + ); + } else { + // Only rate limit if the amount is >0 + if (amount > 0) { + vm.expectEmit(); + emit TokensConsumed(amount); + } + + vm.expectEmit(); + emit Released(s_allowedOffRamp, recipient, amount); + + bridgedAmountAfter -= amount; + } + + s_ghoTokenPool.releaseOrMint(bytes(""), recipient, amount, SOURCE_CHAIN_SELECTOR, bytes("")); + + assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), bridgedAmountAfter); + } + + function testChainNotAllowedReverts() public { + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: false, + outboundRateLimiterConfig: RateLimiter.Config({isEnabled: false, capacity: 0, rate: 0}), + inboundRateLimiterConfig: RateLimiter.Config({isEnabled: false, capacity: 0, rate: 0}) + }); + + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdate); + vm.stopPrank(); + + vm.startPrank(s_allowedOffRamp); + + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.ChainNotAllowed.selector, SOURCE_CHAIN_SELECTOR)); + s_ghoTokenPool.releaseOrMint(bytes(""), OWNER, 1e5, SOURCE_CHAIN_SELECTOR, bytes("")); + } + + function testPoolMintNotHealthyReverts() public { + // Should not mint tokens if cursed. 
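+    // Voting to curse the mock ARM makes isCursed() return true, so the pool's
+    // whenHealthy modifier rejects the call with BadARMSignal.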
+ s_mockARM.voteToCurse(bytes32(0)); + uint256 before = s_token.balanceOf(OWNER); + vm.startPrank(s_allowedOffRamp); + vm.expectRevert(EVM2EVMOffRamp.BadARMSignal.selector); + s_ghoTokenPool.releaseOrMint(bytes(""), OWNER, 1e5, SOURCE_CHAIN_SELECTOR, bytes("")); + assertEq(s_token.balanceOf(OWNER), before); + } + + function testReleaseNoFundsReverts() public { + uint256 amount = 1; + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + + vm.expectRevert(stdError.arithmeticError); + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint(bytes(""), STRANGER, amount, SOURCE_CHAIN_SELECTOR, bytes("")); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = getInboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.startPrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimit(amount); + s_ghoTokenPool.setChainRateLimiterConfig( + DEST_CHAIN_SELECTOR, + RateLimiter.Config({isEnabled: true, capacity: type(uint128).max, rate: 1e15}), + getInboundRateLimiterConfig() + ); + vm.warp(block.timestamp + 1e50); // wait to refill capacity + vm.startPrank(s_allowedOnRamp); + s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_token)) + ); + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint(bytes(""), STRANGER, amount, SOURCE_CHAIN_SELECTOR, bytes("")); + } + + function testBridgedAmountNoEnoughReverts() public { + uint256 amount = 10; + vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.InvalidAmountToBurn.selector)); + vm.startPrank(s_allowedOffRamp); + s_ghoTokenPool.releaseOrMint(bytes(""), STRANGER, amount, SOURCE_CHAIN_SELECTOR, bytes("")); + } +} + +contract GHOTokenPoolEthereum_canAcceptLiquidity is GHOTokenPoolEthereumSetup { + function test_CanAcceptLiquiditySuccess() public { + assertEq(true, s_ghoTokenPool.canAcceptLiquidity()); + + s_ghoTokenPool = new UpgradeableLockReleaseTokenPool(address(s_token), address(s_mockARM), false, false); + + assertEq(false, s_ghoTokenPool.canAcceptLiquidity()); + } +} + +contract GHOTokenPoolEthereum_provideLiquidity is GHOTokenPoolEthereumSetup { + function testFuzz_ProvideLiquiditySuccess(uint256 amount) public { + vm.assume(amount < type(uint128).max); + + uint256 balancePre = s_token.balanceOf(OWNER); + s_token.approve(address(s_ghoTokenPool), amount); + + s_ghoTokenPool.provideLiquidity(amount); + + assertEq(s_token.balanceOf(OWNER), balancePre - amount); + assertEq(s_token.balanceOf(address(s_ghoTokenPool)), amount); + } + + // Reverts + + function test_UnauthorizedReverts() public { + vm.startPrank(STRANGER); + vm.expectRevert(abi.encodeWithSelector(LockReleaseTokenPool.Unauthorized.selector, STRANGER)); + + s_ghoTokenPool.provideLiquidity(1); + } + + function testFuzz_ExceedsAllowance(uint256 amount) public { + vm.assume(amount > 0); + vm.expectRevert(stdError.arithmeticError); + s_ghoTokenPool.provideLiquidity(amount); + } + + function testLiquidityNotAcceptedReverts() public { + s_ghoTokenPool = new UpgradeableLockReleaseTokenPool(address(s_token), address(s_mockARM), false, false); + + 
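+    // The pool above was redeployed with acceptLiquidity == false (last constructor
+    // argument), so any provideLiquidity call must revert.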
vm.expectRevert(LockReleaseTokenPool.LiquidityNotAccepted.selector); + s_ghoTokenPool.provideLiquidity(1); + } +} + +contract GHOTokenPoolEthereum_withdrawalLiquidity is GHOTokenPoolEthereumSetup { + function testFuzz_WithdrawalLiquiditySuccess(uint256 amount) public { + vm.assume(amount < type(uint128).max); + + uint256 balancePre = s_token.balanceOf(OWNER); + s_token.approve(address(s_ghoTokenPool), amount); + s_ghoTokenPool.provideLiquidity(amount); + + s_ghoTokenPool.withdrawLiquidity(amount); + + assertEq(s_token.balanceOf(OWNER), balancePre); + } + + // Reverts + + function test_UnauthorizedReverts() public { + vm.startPrank(STRANGER); + vm.expectRevert(abi.encodeWithSelector(LockReleaseTokenPool.Unauthorized.selector, STRANGER)); + + s_ghoTokenPool.withdrawLiquidity(1); + } + + function testInsufficientLiquidityReverts() public { + uint256 maxUint128 = 2 ** 128 - 1; + s_token.approve(address(s_ghoTokenPool), maxUint128); + s_ghoTokenPool.provideLiquidity(maxUint128); + + changePrank(address(s_ghoTokenPool)); + s_token.transfer(OWNER, maxUint128); + changePrank(OWNER); + + vm.expectRevert(LockReleaseTokenPool.InsufficientLiquidity.selector); + s_ghoTokenPool.withdrawLiquidity(1); + } +} + +contract GHOTokenPoolEthereum_supportsInterface is GHOTokenPoolEthereumSetup { + function testSupportsInterfaceSuccess() public { + assertTrue(s_ghoTokenPool.supportsInterface(s_ghoTokenPool.getLockReleaseInterfaceId())); + assertTrue(s_ghoTokenPool.supportsInterface(type(IPool).interfaceId)); + assertTrue(s_ghoTokenPool.supportsInterface(type(IERC165).interfaceId)); + } +} + +contract GHOTokenPoolEthereum_setChainRateLimiterConfig is GHOTokenPoolEthereumSetup { + event ConfigChanged(RateLimiter.Config); + event ChainConfigured( + uint64 chainSelector, + RateLimiter.Config outboundRateLimiterConfig, + RateLimiter.Config inboundRateLimiterConfig + ); + + uint64 internal s_remoteChainSelector; + + function setUp() public virtual override { + GHOTokenPoolEthereumSetup.setUp(); + UpgradeableTokenPool.ChainUpdate[] memory chainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + s_remoteChainSelector = 123124; + chainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: s_remoteChainSelector, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdates); + changePrank(OWNER); + } + + function testFuzz_SetChainRateLimiterConfigSuccess(uint128 capacity, uint128 rate, uint32 newTime) public { + // Cap the lower bound to 4 so 4/2 is still >= 2 + vm.assume(capacity >= 4); + // Cap the lower bound to 2 so 2/2 is still >= 1 + rate = uint128(bound(rate, 2, capacity - 2)); + // Bucket updates only work on increasing time + newTime = uint32(bound(newTime, block.timestamp + 1, type(uint32).max)); + vm.warp(newTime); + + uint256 oldOutboundTokens = s_ghoTokenPool.getCurrentOutboundRateLimiterState(s_remoteChainSelector).tokens; + uint256 oldInboundTokens = s_ghoTokenPool.getCurrentInboundRateLimiterState(s_remoteChainSelector).tokens; + + RateLimiter.Config memory newOutboundConfig = RateLimiter.Config({isEnabled: true, capacity: capacity, rate: rate}); + RateLimiter.Config memory newInboundConfig = RateLimiter.Config({ + isEnabled: true, + capacity: capacity / 2, + rate: rate / 2 + }); + + vm.expectEmit(); + emit ConfigChanged(newOutboundConfig); + vm.expectEmit(); + emit ConfigChanged(newInboundConfig); + vm.expectEmit(); + emit 
ChainConfigured(s_remoteChainSelector, newOutboundConfig, newInboundConfig);
+
+ changePrank(AAVE_DAO);
+ s_ghoTokenPool.setChainRateLimiterConfig(s_remoteChainSelector, newOutboundConfig, newInboundConfig);
+
+ uint256 expectedTokens = RateLimiter._min(newOutboundConfig.capacity, oldOutboundTokens);
+
+ RateLimiter.TokenBucket memory bucket = s_ghoTokenPool.getCurrentOutboundRateLimiterState(s_remoteChainSelector);
+ assertEq(bucket.capacity, newOutboundConfig.capacity);
+ assertEq(bucket.rate, newOutboundConfig.rate);
+ assertEq(bucket.tokens, expectedTokens);
+ assertEq(bucket.lastUpdated, newTime);
+
+ expectedTokens = RateLimiter._min(newInboundConfig.capacity, oldInboundTokens);
+
+ bucket = s_ghoTokenPool.getCurrentInboundRateLimiterState(s_remoteChainSelector);
+ assertEq(bucket.capacity, newInboundConfig.capacity);
+ assertEq(bucket.rate, newInboundConfig.rate);
+ assertEq(bucket.tokens, expectedTokens);
+ assertEq(bucket.lastUpdated, newTime);
+ }
+
+ function testOnlyOwnerOrRateLimitAdminSuccess() public {
+ address rateLimiterAdmin = address(28973509103597907);
+
+ changePrank(AAVE_DAO);
+ s_ghoTokenPool.setRateLimitAdmin(rateLimiterAdmin);
+
+ changePrank(rateLimiterAdmin);
+
+ s_ghoTokenPool.setChainRateLimiterConfig(
+ s_remoteChainSelector,
+ getOutboundRateLimiterConfig(),
+ getInboundRateLimiterConfig()
+ );
+
+ changePrank(AAVE_DAO);
+
+ s_ghoTokenPool.setChainRateLimiterConfig(
+ s_remoteChainSelector,
+ getOutboundRateLimiterConfig(),
+ getInboundRateLimiterConfig()
+ );
+ }
+
+ // Reverts
+
+ function testOnlyOwnerReverts() public {
+ changePrank(STRANGER);
+
+ vm.expectRevert(abi.encodeWithSelector(LockReleaseTokenPool.Unauthorized.selector, STRANGER));
+ s_ghoTokenPool.setChainRateLimiterConfig(
+ s_remoteChainSelector,
+ getOutboundRateLimiterConfig(),
+ getInboundRateLimiterConfig()
+ );
+ }
+
+ function testNonExistentChainReverts() public {
+ uint64 wrongChainSelector = 9084102894;
+
+ vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.NonExistentChain.selector, wrongChainSelector));
+ changePrank(AAVE_DAO);
+ s_ghoTokenPool.setChainRateLimiterConfig(
+ wrongChainSelector,
+ getOutboundRateLimiterConfig(),
+ getInboundRateLimiterConfig()
+ );
+ }
+}
+
+contract GHOTokenPoolEthereum_setRateLimitAdmin is GHOTokenPoolEthereumSetup {
+ function testSetRateLimitAdminSuccess() public {
+ assertEq(address(0), s_ghoTokenPool.getRateLimitAdmin());
+ changePrank(AAVE_DAO);
+ s_ghoTokenPool.setRateLimitAdmin(OWNER);
+ assertEq(OWNER, s_ghoTokenPool.getRateLimitAdmin());
+ }
+
+ // Reverts
+
+ function testSetRateLimitAdminReverts() public {
+ vm.startPrank(STRANGER);
+
+ vm.expectRevert("Only callable by owner");
+ s_ghoTokenPool.setRateLimitAdmin(STRANGER);
+ }
+}
+
+contract GHOTokenPoolEthereum_setBridgeLimit is GHOTokenPoolEthereumSetup {
+ event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit);
+
+ function testSetBridgeLimitSuccess() public {
+ assertEq(INITIAL_BRIDGE_LIMIT, s_ghoTokenPool.getBridgeLimit());
+
+ uint256 newBridgeLimit = INITIAL_BRIDGE_LIMIT * 2;
+
+ vm.expectEmit();
+ emit BridgeLimitUpdated(INITIAL_BRIDGE_LIMIT, newBridgeLimit);
+
+ vm.startPrank(AAVE_DAO);
+ s_ghoTokenPool.setBridgeLimit(newBridgeLimit);
+
+ assertEq(newBridgeLimit, s_ghoTokenPool.getBridgeLimit());
+ }
+
+ function testZeroBridgeLimitReverts() public {
+ vm.stopPrank();
+ vm.startPrank(AAVE_DAO);
+ s_ghoTokenPool.setBridgeLimit(0);
+
+ uint256 amount = 1;
+
+ 
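// With the bridge limit set to zero, any lockOrBurn amount exceeds it
+ 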
vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, 0));
+ vm.startPrank(s_allowedOnRamp);
+ s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes(""));
+ }
+
+ function testBridgeLimitBelowCurrent() public {
+ // Increase current bridged amount to 10e18
+ uint256 amount = 10e18;
+ vm.startPrank(s_allowedOnRamp);
+ s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes(""));
+
+ // Reduce bridge limit below current bridged amount
+ vm.startPrank(AAVE_DAO);
+ uint256 newBridgeLimit = amount - 1;
+ s_ghoTokenPool.setBridgeLimit(newBridgeLimit);
+ assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), amount);
+ assertEq(s_ghoTokenPool.getBridgeLimit(), newBridgeLimit);
+ assertGt(s_ghoTokenPool.getCurrentBridgedAmount(), s_ghoTokenPool.getBridgeLimit());
+
+ // Lock reverts due to maxed out bridge limit
+ vm.expectRevert(
+ abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, newBridgeLimit)
+ );
+ vm.startPrank(s_allowedOnRamp);
+ s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), 1, DEST_CHAIN_SELECTOR, bytes(""));
+
+ // Increase bridge limit to one above the current bridged amount
+ vm.startPrank(AAVE_DAO);
+ newBridgeLimit = amount + 1;
+ s_ghoTokenPool.setBridgeLimit(newBridgeLimit);
+ assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), amount);
+ assertEq(s_ghoTokenPool.getBridgeLimit(), newBridgeLimit);
+ assertGt(s_ghoTokenPool.getBridgeLimit(), s_ghoTokenPool.getCurrentBridgedAmount());
+
+ // Bridge limit maxed out again
+ vm.startPrank(s_allowedOnRamp);
+ s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), 1, DEST_CHAIN_SELECTOR, bytes(""));
+ assertEq(s_ghoTokenPool.getBridgeLimit(), s_ghoTokenPool.getCurrentBridgedAmount());
+ }
+
+ function testCurrentBridgedAmountRecover() public {
+ // Reach maximum
+ vm.startPrank(s_allowedOnRamp);
+ s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), INITIAL_BRIDGE_LIMIT, DEST_CHAIN_SELECTOR, bytes(""));
+ assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT);
+ assertEq(s_ghoTokenPool.getBridgeLimit(), s_ghoTokenPool.getCurrentBridgedAmount());
+
+ // Lock reverts due to maxed out bridge limit
+ vm.expectRevert(
+ abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.BridgeLimitExceeded.selector, INITIAL_BRIDGE_LIMIT)
+ );
+ s_ghoTokenPool.lockOrBurn(STRANGER, bytes(""), 1, DEST_CHAIN_SELECTOR, bytes(""));
+
+ // Amount available to bridge recovers thanks to liquidity coming back
+ UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1);
+ chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({
+ remoteChainSelector: SOURCE_CHAIN_SELECTOR,
+ allowed: true,
+ outboundRateLimiterConfig: getOutboundRateLimiterConfig(),
+ inboundRateLimiterConfig: getInboundRateLimiterConfig()
+ });
+
+ changePrank(AAVE_DAO);
+ s_ghoTokenPool.applyChainUpdates(chainUpdate);
+
+ uint256 amount = 10;
+ deal(address(s_token), address(s_ghoTokenPool), amount);
+ vm.startPrank(s_allowedOffRamp);
+ s_ghoTokenPool.releaseOrMint(bytes(""), OWNER, amount, SOURCE_CHAIN_SELECTOR, bytes(""));
+ assertEq(s_ghoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT - amount);
+ }
+
+ // Reverts
+
+ function testSetBridgeLimitReverts() public {
+ vm.startPrank(STRANGER);
+
+ vm.expectRevert("Only callable by owner");
+ s_ghoTokenPool.setBridgeLimit(0);
+ }
+}
+
+contract GHOTokenPoolEthereum_upgradeability is GHOTokenPoolEthereumSetup {
+ function testInitialization() public {
+ // Upgradeability
+ assertEq(s_ghoTokenPool.REVISION(), 
1);
+ vm.startPrank(PROXY_ADMIN);
+ (bool ok, bytes memory result) = address(s_ghoTokenPool).staticcall(
+ abi.encodeWithSelector(TransparentUpgradeableProxy.admin.selector)
+ );
+ assertTrue(ok, "proxy admin fetch failed");
+ address decodedProxyAdmin = abi.decode(result, (address));
+ assertEq(decodedProxyAdmin, PROXY_ADMIN, "proxy admin is wrong");
+ assertEq(decodedProxyAdmin, _getProxyAdminAddress(address(s_ghoTokenPool)), "proxy admin is wrong");
+
+ // TokenPool
+ vm.startPrank(OWNER);
+ assertEq(s_ghoTokenPool.getAllowList().length, 0);
+ assertEq(s_ghoTokenPool.getAllowListEnabled(), false);
+ assertEq(s_ghoTokenPool.getArmProxy(), address(s_mockARM));
+ assertEq(s_ghoTokenPool.getRouter(), address(s_sourceRouter));
+ assertEq(address(s_ghoTokenPool.getToken()), address(s_token));
+ assertEq(s_ghoTokenPool.owner(), AAVE_DAO, "owner is wrong");
+ }
+
+ function testUpgrade() public {
+ MockUpgradeable newImpl = new MockUpgradeable();
+ bytes memory mockImplParams = abi.encodeWithSignature("initialize()");
+ vm.startPrank(PROXY_ADMIN);
+ TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeToAndCall(address(newImpl), mockImplParams);
+
+ vm.startPrank(OWNER);
+ assertEq(s_ghoTokenPool.REVISION(), 2);
+ }
+
+ function testUpgradeAdminReverts() public {
+ vm.expectRevert();
+ TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeToAndCall(address(0), bytes(""));
+ assertEq(s_ghoTokenPool.REVISION(), 1);
+
+ vm.expectRevert();
+ TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeTo(address(0));
+ assertEq(s_ghoTokenPool.REVISION(), 1);
+ }
+
+ function testChangeAdmin() public {
+ assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), PROXY_ADMIN);
+
+ address newAdmin = makeAddr("newAdmin");
+ vm.startPrank(PROXY_ADMIN);
+ TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).changeAdmin(newAdmin);
+
+ assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), newAdmin, "Admin change failed");
+ }
+
+ function testChangeAdminAdminReverts() public {
+ assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), PROXY_ADMIN);
+
+ address newAdmin = makeAddr("newAdmin");
+ vm.expectRevert();
+ TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).changeAdmin(newAdmin);
+
+ assertEq(_getProxyAdminAddress(address(s_ghoTokenPool)), PROXY_ADMIN, "Unauthorized admin change");
+ }
+}
diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol
new file mode 100644
index 0000000000..83eae99a71
--- /dev/null
+++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol
@@ -0,0 +1,813 @@
+// SPDX-License-Identifier: BUSL-1.1
+pragma solidity 0.8.19;
+
+import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol";
+
+import {IPool} from "../../../interfaces/pools/IPool.sol";
+import {GHOTokenPoolEthereumBridgeLimitSetup} from "./GHOTokenPoolEthereumBridgeLimitSetup.t.sol";
+
+contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBridgeLimitSetup {
+ function setUp() public virtual override {
+ super.setUp();
+
+ // Arbitrum
+ _addBridge(1, INITIAL_BRIDGE_LIMIT);
+ _enableLane(0, 1);
+ }
+
+ function testFuzz_Bridge(uint256 amount) public {
+ uint256 maxAmount = _getMaxToBridgeOut(0);
+ amount = bound(amount, 1, maxAmount);
+
+ _assertInvariant();
+
+ assertEq(_getMaxToBridgeOut(0), maxAmount);
+ assertEq(_getMaxToBridgeIn(0), 0);
+ assertEq(_getMaxToBridgeOut(1), 0);
+ 
assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + deal(tokens[0], USER, amount); + _moveGhoOrigin(0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(0), amount); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + _moveGhoDestination(0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(0), amount); + assertEq(_getMaxToBridgeOut(1), bucketLevels[1]); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); + + _assertInvariant(); + } + + function testBridgeAll() public { + _assertInvariant(); + + uint256 maxAmount = _getMaxToBridgeOut(0); + assertEq(_getMaxToBridgeIn(0), 0); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + deal(tokens[0], USER, maxAmount); + _moveGhoOrigin(0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), maxAmount); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + _moveGhoDestination(0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), maxAmount); + assertEq(_getMaxToBridgeOut(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeIn(1), 0); + + _assertInvariant(); + } + + /// @dev Bridge out two times + function testFuzz_BridgeTwoSteps(uint256 amount1, uint256 amount2) public { + uint256 maxAmount = _getMaxToBridgeOut(0); + amount1 = bound(amount1, 1, maxAmount); + amount2 = bound(amount2, 1, maxAmount); + + _assertInvariant(); + + assertEq(_getMaxToBridgeOut(0), maxAmount); + assertEq(_getMaxToBridgeIn(0), 0); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + deal(tokens[0], USER, amount1); + _moveGhoOrigin(0, 1, USER, amount1); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount1); + assertEq(_getMaxToBridgeIn(0), amount1); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + _moveGhoDestination(0, 1, USER, amount1); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount1); + assertEq(_getMaxToBridgeIn(0), amount1); + assertEq(_getMaxToBridgeOut(1), bucketLevels[1]); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); + + _assertInvariant(); + + // Bridge up to bridge limit amount + if (amount1 + amount2 > maxAmount) { + vm.expectRevert(); + vm.prank(RAMP); + IPool(pools[0]).lockOrBurn(USER, bytes(""), amount2, uint64(1), bytes("")); + + amount2 = maxAmount - amount1; + } + + if (amount2 > 0) { + _assertInvariant(); + + uint256 acc = amount1 + amount2; + deal(tokens[0], USER, amount2); + _moveGhoOrigin(0, 1, USER, amount2); + + assertEq(_getMaxToBridgeOut(0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(0), acc); + assertEq(_getMaxToBridgeOut(1), amount1); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount1); + + _moveGhoDestination(0, 1, USER, amount2); + + assertEq(_getMaxToBridgeOut(0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(0), acc); + assertEq(_getMaxToBridgeOut(1), acc); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - acc); + + _assertInvariant(); + } + } + + /// @dev Bridge some tokens out and later, bridge them back in + function testFuzz_BridgeBackAndForth(uint256 amountOut, uint256 amountIn) public { + uint256 maxAmount = _getMaxToBridgeOut(0); + amountOut = bound(amountOut, 1, maxAmount); + amountIn = bound(amountIn, 1, _getCapacity(1)); + + _assertInvariant(); + 
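+ // Initial state: full headroom to bridge out of Ethereum, nothing to bridge back in yet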
+ assertEq(_getMaxToBridgeOut(0), maxAmount); + assertEq(_getMaxToBridgeIn(0), 0); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + deal(tokens[0], USER, amountOut); + _moveGhoOrigin(0, 1, USER, amountOut); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(0), amountOut); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + + _moveGhoDestination(0, 1, USER, amountOut); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(0), amountOut); + assertEq(_getMaxToBridgeOut(1), bucketLevels[1]); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); + + _assertInvariant(); + + // Bridge up to current bridged amount + if (amountIn > amountOut) { + // Simulate revert on destination + vm.expectRevert(); + vm.prank(RAMP); + IPool(pools[0]).releaseOrMint(bytes(""), USER, amountIn, uint64(1), bytes("")); + + amountIn = amountOut; + } + + if (amountIn > 0) { + _assertInvariant(); + + uint256 acc = amountOut - amountIn; + deal(tokens[1], USER, amountIn); + _moveGhoOrigin(1, 0, USER, amountIn); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(0), amountOut); + assertEq(_getMaxToBridgeOut(1), acc); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - acc); + + _moveGhoDestination(1, 0, USER, amountIn); + + assertEq(_getMaxToBridgeOut(0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(0), acc); + assertEq(_getMaxToBridgeOut(1), acc); + assertEq(_getMaxToBridgeIn(1), maxAmount - acc); + + _assertInvariant(); + } + } + + /// @dev Bridge from Ethereum to Arbitrum reverts if amount is higher than bridge limit + function testFuzz_BridgeBridgeLimitExceededSourceReverts(uint256 amount, uint256 bridgeAmount) public { + vm.assume(amount < type(uint128).max); + vm.assume(bridgeAmount < INITIAL_BRIDGE_LIMIT); + + // Inflate bridgeAmount + if (bridgeAmount > 0) { + deal(tokens[0], USER, bridgeAmount); + _bridgeGho(0, 1, USER, bridgeAmount); + } + + deal(tokens[0], USER, amount); + // Simulate CCIP pull of funds + vm.prank(USER); + GhoToken(tokens[0]).transfer(pools[0], amount); + + if (bridgeAmount + amount > INITIAL_BRIDGE_LIMIT) { + vm.expectRevert(); + } + vm.prank(RAMP); + IPool(pools[0]).lockOrBurn(USER, bytes(""), amount, uint64(1), bytes("")); + } + + /// @dev Bridge from Ethereum to Arbitrum reverts if amount is higher than capacity available + function testFuzz_BridgeCapacityExceededDestinationReverts(uint256 amount, uint256 level) public { + (uint256 capacity, ) = GhoToken(tokens[1]).getFacilitatorBucket(pools[1]); + vm.assume(level < capacity); + amount = bound(amount, 1, type(uint128).max); + + // Inflate level + if (level > 0) { + _inflateFacilitatorLevel(pools[1], tokens[1], level); + } + + // Skip origin move + + // Destination execution + if (amount > capacity - level) { + vm.expectRevert(); + } + vm.prank(RAMP); + IPool(pools[1]).releaseOrMint(bytes(""), USER, amount, uint64(0), bytes("")); + } + + /// @dev Bridge from Arbitrum To Ethereum reverts if Arbitrum level is lower than amount + function testFuzz_BridgeBackZeroLevelSourceReverts(uint256 amount, uint256 level) public { + (uint256 capacity, ) = GhoToken(tokens[1]).getFacilitatorBucket(pools[1]); + vm.assume(level < capacity); + amount = bound(amount, 1, capacity - level); + + // Inflate level + if (level > 0) { + _inflateFacilitatorLevel(pools[1], tokens[1], level); + } + + deal(tokens[1], USER, amount); + // Simulate CCIP 
pull of funds + vm.prank(USER); + GhoToken(tokens[1]).transfer(pools[1], amount); + + if (amount > level) { + vm.expectRevert(); + } + vm.prank(RAMP); + IPool(pools[1]).lockOrBurn(USER, bytes(""), amount, uint64(0), bytes("")); + } + + /// @dev Bridge from Arbitrum To Ethereum reverts if Ethereum current bridged amount is lower than amount + function testFuzz_BridgeBackZeroBridgeLimitDestinationReverts(uint256 amount, uint256 bridgeAmount) public { + (uint256 capacity, ) = GhoToken(tokens[1]).getFacilitatorBucket(pools[1]); + amount = bound(amount, 1, capacity); + bridgeAmount = bound(bridgeAmount, 0, capacity - amount); + + // Inflate bridgeAmount + if (bridgeAmount > 0) { + deal(tokens[0], USER, bridgeAmount); + _bridgeGho(0, 1, USER, bridgeAmount); + } + + // Inflate level on Arbitrum + _inflateFacilitatorLevel(pools[1], tokens[1], amount); + + // Skip origin move + + // Destination execution + if (amount > bridgeAmount) { + vm.expectRevert(); + } + vm.prank(RAMP); + IPool(pools[0]).releaseOrMint(bytes(""), USER, amount, uint64(1), bytes("")); + } + + /// @dev Bucket capacity reduction. Caution: bridge limit reduction must happen first + function testReduceBucketCapacity() public { + // Max out capacity + uint256 maxAmount = _getMaxToBridgeOut(0); + deal(tokens[0], USER, maxAmount); + _bridgeGho(0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(1), 0); + assertEq(_getCapacity(1), maxAmount); + assertEq(_getLevel(1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = bucketCapacities[1] - 10; + // 1. Reduce bridge limit + _updateBridgeLimit(newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(1), 0); + + // 2. Reduce bucket capacity + _updateBucketCapacity(1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(1), 0); + + // Maximum to bridge in is all minted on Arbitrum + assertEq(_getMaxToBridgeIn(0), maxAmount); + assertEq(_getMaxToBridgeOut(1), maxAmount); + + _bridgeGho(1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(0), 0); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), newBucketCapacity); + + _assertInvariant(); + } + + /// @dev Bucket capacity reduction, performed following wrong order procedure + function testReduceBucketCapacityIncorrectProcedure() public { + // Bridge a third of the capacity + uint256 amount = _getMaxToBridgeOut(0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(0) - amount; + + deal(tokens[0], USER, amount); + _bridgeGho(0, 1, USER, amount); + + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount); + assertEq(_getLevel(1), amount); + + _assertInvariant(); + + uint256 newBucketCapacity = bucketCapacities[1] - 10; + /// @dev INCORRECT ORDER PROCEDURE!! bridge limit reduction should happen first + // 1. 
Reduce bucket capacity + _updateBucketCapacity(1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), availableToBridge); // this is the UX issue + assertEq(_getMaxToBridgeIn(1), availableToBridge - 10); + + // User can come and try to max bridge on Arbitrum + // Transaction will succeed on Ethereum, but revert on Arbitrum + deal(tokens[0], USER, availableToBridge); + _moveGhoOrigin(0, 1, USER, availableToBridge); + assertEq(_getMaxToBridgeOut(0), 0); + + vm.expectRevert(); + vm.prank(RAMP); + IPool(pools[1]).releaseOrMint(bytes(""), USER, availableToBridge, uint64(0), bytes("")); + + // User can only bridge up to new bucket capacity (10 units less) + assertEq(_getMaxToBridgeIn(1), availableToBridge - 10); + vm.prank(RAMP); + IPool(pools[1]).releaseOrMint(bytes(""), USER, availableToBridge - 10, uint64(0), bytes("")); + assertEq(_getMaxToBridgeIn(1), 0); + + // 2. Reduce bridge limit + _updateBridgeLimit(newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(1), 0); + } + + /// @dev Bucket capacity reduction, with a bridge out in between + function testReduceBucketCapacityWithBridgeOutInBetween() public { + // Bridge a third of the capacity + uint256 amount = _getMaxToBridgeOut(0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(0) - amount; + + deal(tokens[0], USER, amount); + _bridgeGho(0, 1, USER, amount); + + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount); + assertEq(_getLevel(1), amount); + + _assertInvariant(); + + uint256 newBucketCapacity = bucketCapacities[1] - 10; + // 1. Reduce bridge limit + _updateBridgeLimit(newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), availableToBridge - 10); + assertEq(_getMaxToBridgeIn(1), availableToBridge); + + // User initiates bridge out action + uint256 amount2 = _getMaxToBridgeOut(0); + deal(tokens[0], USER, amount2); + _moveGhoOrigin(0, 1, USER, amount2); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), newBucketCapacity); + + // 2. Reduce bucket capacity + _updateBucketCapacity(1, newBucketCapacity); + // Destination execution can happen, no more bridge out actions can be initiated + assertEq(_getMaxToBridgeOut(1), amount); + assertEq(_getMaxToBridgeIn(1), amount2); + + // Finalize bridge out action + _moveGhoDestination(0, 1, USER, amount2); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(1), 0); + + _assertInvariant(); + } + + /// @dev Bucket capacity reduction, with a bridge in in between + function testReduceBucketCapacityWithBridgeInInBetween() public { + // Bridge max amount + uint256 maxAmount = _getMaxToBridgeOut(0); + + deal(tokens[0], USER, maxAmount); + _bridgeGho(0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(1), 0); + assertEq(_getCapacity(1), maxAmount); + assertEq(_getLevel(1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = bucketCapacities[1] - 10; + // 1. Reduce bridge limit + _updateBridgeLimit(newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(1), 0); + + // User initiates bridge in action + _moveGhoOrigin(1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), maxAmount); + + // 2. 
Reduce bucket capacity
+ _updateBucketCapacity(1, newBucketCapacity);
+ assertEq(_getMaxToBridgeOut(0), 0);
+ assertEq(_getMaxToBridgeIn(0), maxAmount);
+
+ // Finalize bridge in action
+ _moveGhoDestination(1, 0, USER, maxAmount);
+ assertEq(_getMaxToBridgeOut(0), newBucketCapacity);
+ assertEq(_getMaxToBridgeIn(0), 0);
+ assertEq(_getMaxToBridgeOut(1), 0);
+ assertEq(_getMaxToBridgeIn(1), newBucketCapacity);
+
+ _assertInvariant();
+ }
+
+ /// @dev Bucket capacity increase. Caution: bridge limit increase must happen afterwards
+ function testIncreaseBucketCapacity() public {
+ // Max out capacity
+ uint256 maxAmount = _getMaxToBridgeOut(0);
+ deal(tokens[0], USER, maxAmount);
+ _bridgeGho(0, 1, USER, maxAmount);
+
+ assertEq(_getMaxToBridgeIn(1), 0);
+ assertEq(_getCapacity(1), maxAmount);
+ assertEq(_getLevel(1), maxAmount);
+
+ _assertInvariant();
+
+ uint256 newBucketCapacity = bucketCapacities[1] + 10;
+ // 1. Increase bucket capacity
+ _updateBucketCapacity(1, newBucketCapacity);
+ assertEq(_getMaxToBridgeOut(0), 0);
+ assertEq(_getMaxToBridgeIn(1), 10);
+
+ // Reverts if a user tries to bridge out 10
+ vm.expectRevert();
+ vm.prank(RAMP);
+ IPool(pools[0]).lockOrBurn(USER, bytes(""), 10, uint64(1), bytes(""));
+
+ // 2. Increase bridge limit
+ _updateBridgeLimit(newBucketCapacity);
+ assertEq(_getMaxToBridgeOut(0), 10);
+ assertEq(_getMaxToBridgeIn(1), 10);
+
+ _assertInvariant();
+
+ // Now it is possible to bridge some again
+ _bridgeGho(1, 0, USER, maxAmount);
+ assertEq(_getMaxToBridgeOut(0), newBucketCapacity);
+ assertEq(_getMaxToBridgeIn(0), 0);
+ assertEq(_getMaxToBridgeOut(1), 0);
+ assertEq(_getMaxToBridgeIn(1), newBucketCapacity);
+
+ _assertInvariant();
+ }
+
+ /// @dev Bucket capacity increase, performed following wrong order procedure
+ function testIncreaseBucketCapacityIncorrectProcedure() public {
+ // Max out capacity
+ uint256 maxAmount = _getMaxToBridgeOut(0);
+ deal(tokens[0], USER, maxAmount);
+ _bridgeGho(0, 1, USER, maxAmount);
+
+ assertEq(_getMaxToBridgeIn(1), 0);
+ assertEq(_getCapacity(1), maxAmount);
+ assertEq(_getLevel(1), maxAmount);
+
+ _assertInvariant();
+
+ uint256 newBucketCapacity = bucketCapacities[1] + 10;
+
+ /// @dev INCORRECT ORDER PROCEDURE!! bucket capacity increase should happen first
+ // 1. Increase bridge limit
+ _updateBridgeLimit(newBucketCapacity);
+ assertEq(_getMaxToBridgeOut(0), 10);
+ assertEq(_getMaxToBridgeIn(1), 0); // this is the UX issue
+
+ // User can come and try to max bridge on Arbitrum
+ // Transaction will succeed on Ethereum, but revert on Arbitrum
+ deal(tokens[0], USER, 10);
+ _moveGhoOrigin(0, 1, USER, 10);
+ assertEq(_getMaxToBridgeOut(0), 0);
+ assertEq(_getMaxToBridgeIn(0), newBucketCapacity);
+
+ // Execution on destination will revert until bucket capacity gets increased
+ vm.expectRevert();
+ vm.prank(RAMP);
+ IPool(pools[1]).releaseOrMint(bytes(""), USER, 10, uint64(0), bytes(""));
+
+ // 2. 
Increase bucket capacity + _updateBucketCapacity(1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(1), maxAmount); + assertEq(_getMaxToBridgeIn(1), 10); + + // Now it is possible to execute on destination + _moveGhoDestination(0, 1, USER, 10); + + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(1), 0); + + _assertInvariant(); + } + + /// @dev Bucket capacity increase, with a bridge out in between + function testIncreaseBucketCapacityWithBridgeOutInBetween() public { + // Bridge a third of the capacity + uint256 amount = _getMaxToBridgeOut(0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(0) - amount; + deal(tokens[0], USER, amount); + _bridgeGho(0, 1, USER, amount); + + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount); + assertEq(_getLevel(1), amount); + + _assertInvariant(); + + uint256 newBucketCapacity = bucketCapacities[1] + 10; + // 1. Increase bucket capacity + _updateBucketCapacity(1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), availableToBridge); + assertEq(_getMaxToBridgeIn(1), availableToBridge + 10); + + // Reverts if a user tries to bridge out all up to new bucket capacity + vm.expectRevert(); + vm.prank(RAMP); + IPool(pools[0]).lockOrBurn(USER, bytes(""), availableToBridge + 10, uint64(1), bytes("")); + + // User initiates bridge out action + deal(tokens[0], USER, availableToBridge); + _bridgeGho(0, 1, USER, availableToBridge); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(1), 10); + + // 2. Increase bridge limit + _updateBridgeLimit(newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 10); + assertEq(_getMaxToBridgeIn(1), 10); + + _assertInvariant(); + + // Now it is possible to bridge some again + deal(tokens[0], USER, 10); + _bridgeGho(0, 1, USER, 10); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(1), 0); + + _assertInvariant(); + } + + /// @dev Bucket capacity increase, with a bridge in in between + function testIncreaseBucketCapacityWithBridgeInInBetween() public { + // Max out capacity + uint256 maxAmount = _getMaxToBridgeOut(0); + deal(tokens[0], USER, maxAmount); + _bridgeGho(0, 1, USER, maxAmount); + + assertEq(_getMaxToBridgeIn(1), 0); + assertEq(_getCapacity(1), maxAmount); + assertEq(_getLevel(1), maxAmount); + + _assertInvariant(); + + uint256 newBucketCapacity = bucketCapacities[1] + 10; + // 1. Increase bucket capacity + _updateBucketCapacity(1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), maxAmount); + assertEq(_getMaxToBridgeOut(1), maxAmount); + assertEq(_getMaxToBridgeIn(1), 10); + + // User initiates bridge in action + _moveGhoOrigin(1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), newBucketCapacity); + + // 2. 
Increase bridge limit + _updateBridgeLimit(newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 10); + assertEq(_getMaxToBridgeIn(0), maxAmount); + + // User finalizes bridge in action + _moveGhoDestination(1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(0), 0); + + _assertInvariant(); + + // Now it is possible to bridge new bucket capacity + deal(tokens[0], USER, newBucketCapacity); + _bridgeGho(0, 1, USER, newBucketCapacity); + assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeIn(0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(1), 0); + + _assertInvariant(); + } +} + +contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBridgeLimitSetup { + function setUp() public virtual override { + super.setUp(); + + // Arbitrum + _addBridge(1, INITIAL_BRIDGE_LIMIT); + _enableLane(0, 1); + + // Avalanche + _addBridge(2, INITIAL_BRIDGE_LIMIT); + _enableLane(1, 2); + _enableLane(0, 2); + } + + /// @dev Bridge out some tokens to third chain via second chain (Ethereum to Arbitrum, Arbitrum to Avalanche) + function testFuzz_BridgeToTwoToThree(uint256 amount) public { + uint256 maxAmount = _getMaxToBridgeOut(0); + amount = bound(amount, 1, maxAmount); + + _assertInvariant(); + + assertEq(_getMaxToBridgeOut(0), maxAmount); + assertEq(_getMaxToBridgeIn(0), 0); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(2), 0); + assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + + deal(tokens[0], USER, amount); + _moveGhoOrigin(0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(0), amount); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(2), 0); + assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + + _moveGhoDestination(0, 1, USER, amount); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(0), amount); + assertEq(_getMaxToBridgeOut(1), amount); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); + assertEq(_getMaxToBridgeOut(2), 0); + assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + + _assertInvariant(); + + _moveGhoOrigin(1, 2, USER, amount); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(0), amount); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(2), 0); + assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + + _moveGhoDestination(1, 2, USER, amount); + + assertEq(_getMaxToBridgeOut(0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(0), amount); + assertEq(_getMaxToBridgeOut(1), 0); + assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(2), amount); + assertEq(_getMaxToBridgeIn(2), bucketCapacities[2] - amount); + + _assertInvariant(); + } + + /// @dev Bridge out some tokens to second and third chain randomly + function testFuzz_BridgeRandomlyToTwoAndThree(uint64[] memory amounts) public { + vm.assume(amounts.length < 30); + + uint256 maxAmount = _getMaxToBridgeOut(0); + uint256 sourceAcc; + uint256 amount; + uint256 dest; + bool lastTime; + for (uint256 i = 0; i < amounts.length && !lastTime; i++) { + amount = amounts[i]; + + if (amount == 0) amount += 1; + if (sourceAcc + amount > maxAmount) { + amount = maxAmount - sourceAcc; + lastTime = true; + 
} + + dest = (amount % 2) + 1; + deal(tokens[0], USER, amount); + _bridgeGho(0, dest, USER, amount); + + sourceAcc += amount; + } + assertEq(sourceAcc, bridged); + + // Bridge all to Avalanche + uint256 toBridge = _getMaxToBridgeOut(1); + if (toBridge > 0) { + _bridgeGho(1, 2, USER, toBridge); + assertEq(sourceAcc, bridged); + assertEq(_getLevel(2), bridged); + assertEq(_getLevel(1), 0); + } + } + + /// @dev All remote liquidity is on one chain or the other + function testLiquidityUnbalanced() public { + uint256 amount; + + // Bridge all out to Arbitrum + amount = _getMaxToBridgeOut(0); + deal(tokens[0], USER, amount); + _bridgeGho(0, 1, USER, amount); + + // No more liquidity can go remotely + assertEq(_getMaxToBridgeOut(0), 0); + vm.expectRevert(); + vm.prank(RAMP); + IPool(pools[0]).lockOrBurn(USER, bytes(""), 1, uint64(1), bytes("")); + vm.prank(RAMP); + vm.expectRevert(); + IPool(pools[0]).lockOrBurn(USER, bytes(""), 1, uint64(2), bytes("")); + + // All liquidity on Arbitrum, 0 on Avalanche + assertEq(_getLevel(1), bridged); + assertEq(_getLevel(1), _getCapacity(1)); + assertEq(_getLevel(2), 0); + + // Move all liquidity to Avalanche + _bridgeGho(1, 2, USER, amount); + assertEq(_getLevel(1), 0); + assertEq(_getLevel(2), bridged); + assertEq(_getLevel(2), _getCapacity(2)); + + // Move all liquidity back to Ethereum + _bridgeGho(2, 0, USER, amount); + assertEq(_getLevel(1), 0); + assertEq(_getLevel(2), 0); + assertEq(bridged, 0); + assertEq(_getMaxToBridgeOut(0), amount); + } + + /// @dev Test showcasing incorrect bridge limit and bucket capacity configuration + function testIncorrectBridgeLimitBucketConfig() public { + // BridgeLimit 10, Arbitrum 9, Avalanche Bucket 10 + _updateBridgeLimit(10); + _updateBucketCapacity(1, 9); + _updateBucketCapacity(2, 10); + + assertEq(_getMaxToBridgeOut(0), 10); + assertEq(_getMaxToBridgeIn(1), 9); // here the issue + assertEq(_getMaxToBridgeIn(2), 10); + + // Possible to bridge 10 out to 2 + deal(tokens[0], USER, 10); + _bridgeGho(0, 2, USER, 10); + + // Liquidity comes back + _bridgeGho(2, 0, USER, 10); + + // Not possible to bridge 10 out to 1 + _moveGhoOrigin(0, 1, USER, 10); + // Reverts on destination + vm.expectRevert(); + vm.prank(RAMP); + IPool(pools[1]).releaseOrMint(bytes(""), USER, 10, uint64(0), bytes("")); + + // Only if bucket capacity gets increased, execution can succeed + _updateBucketCapacity(1, 10); + _moveGhoDestination(0, 1, USER, 10); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol new file mode 100644 index 0000000000..16cea99aeb --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol @@ -0,0 +1,224 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; + +import {BaseTest} from "../../BaseTest.t.sol"; +import {IPool} from "../../../interfaces/pools/IPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; + +contract GHOTokenPoolEthereumBridgeLimitSetup is BaseTest { + address internal ARM_PROXY = makeAddr("ARM_PROXY"); + address internal ROUTER = makeAddr("ROUTER"); + address internal 
RAMP = makeAddr("RAMP"); + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + address internal USER = makeAddr("USER"); + + uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; + + uint256[] internal chainsList; + mapping(uint256 => address) internal pools; // chainId => bridgeTokenPool + mapping(uint256 => address) internal tokens; // chainId => ghoToken + mapping(uint256 => uint256) internal bucketCapacities; // chainId => bucketCapacities + mapping(uint256 => uint256) internal bucketLevels; // chainId => bucketLevels + mapping(uint256 => uint256) internal liquidity; // chainId => liquidity + uint256 internal remoteLiquidity; + uint256 internal bridged; + + function setUp() public virtual override { + // Ethereum with id 0 + chainsList.push(0); + tokens[0] = address(new GhoToken(AAVE_DAO)); + pools[0] = _deployUpgradeableLockReleaseTokenPool( + tokens[0], + ARM_PROXY, + ROUTER, + OWNER, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ); + + // Mock calls for bridging + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), abi.encode(RAMP)); + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("isOffRamp(uint64,address)"))), abi.encode(true)); + vm.mockCall(ARM_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed()"))), abi.encode(false)); + } + + function _enableLane(uint256 fromId, uint256 toId) internal { + // from + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + RateLimiter.Config memory emptyRateConfig = RateLimiter.Config(false, 0, 0); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: uint64(toId), + allowed: true, + outboundRateLimiterConfig: emptyRateConfig, + inboundRateLimiterConfig: emptyRateConfig + }); + + vm.startPrank(OWNER); + UpgradeableTokenPool(pools[fromId]).applyChainUpdates(chainUpdate); + + // to + chainUpdate[0].remoteChainSelector = uint64(fromId); + UpgradeableTokenPool(pools[toId]).applyChainUpdates(chainUpdate); + vm.stopPrank(); + } + + function _addBridge(uint256 chainId, uint256 bucketCapacity) internal { + require(tokens[chainId] == address(0), "BRIDGE_ALREADY_EXISTS"); + + chainsList.push(chainId); + + // GHO Token + GhoToken ghoToken = new GhoToken(AAVE_DAO); + tokens[chainId] = address(ghoToken); + + // UpgradeableTokenPool + address bridgeTokenPool = _deployUpgradeableBurnMintTokenPool( + address(ghoToken), + ARM_PROXY, + ROUTER, + OWNER, + PROXY_ADMIN + ); + pools[chainId] = bridgeTokenPool; + + // Facilitator + bucketCapacities[chainId] = bucketCapacity; + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + ghoToken.grantRole(ghoToken.FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + ghoToken.addFacilitator(bridgeTokenPool, "UpgradeableTokenPool", uint128(bucketCapacity)); + vm.stopPrank(); + } + + function _updateBridgeLimit(uint256 newBridgeLimit) internal { + vm.prank(OWNER); + UpgradeableLockReleaseTokenPool(pools[0]).setBridgeLimit(newBridgeLimit); + } + + function _updateBucketCapacity(uint256 chainId, uint256 newBucketCapacity) internal { + bucketCapacities[chainId] = newBucketCapacity; + vm.startPrank(AAVE_DAO); + GhoToken(tokens[chainId]).grantRole(GhoToken(tokens[chainId]).BUCKET_MANAGER_ROLE(), AAVE_DAO); + GhoToken(tokens[chainId]).setFacilitatorBucketCapacity(pools[chainId], uint128(newBucketCapacity)); + vm.stopPrank(); + } + + function _getMaxToBridgeOut(uint256 fromChain) internal view returns (uint256) { + if (_isEthereumChain(fromChain)) { + UpgradeableLockReleaseTokenPool 
ethTokenPool = UpgradeableLockReleaseTokenPool(pools[0]); + uint256 bridgeLimit = ethTokenPool.getBridgeLimit(); + uint256 currentBridged = ethTokenPool.getCurrentBridgedAmount(); + return currentBridged > bridgeLimit ? 0 : bridgeLimit - currentBridged; + } else { + (, uint256 level) = GhoToken(tokens[fromChain]).getFacilitatorBucket(pools[fromChain]); + return level; + } + } + + function _getMaxToBridgeIn(uint256 toChain) internal view returns (uint256) { + if (_isEthereumChain(toChain)) { + UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(pools[0]); + return ethTokenPool.getCurrentBridgedAmount(); + } else { + (uint256 capacity, uint256 level) = GhoToken(tokens[toChain]).getFacilitatorBucket(pools[toChain]); + return level > capacity ? 0 : capacity - level; + } + } + + function _getCapacity(uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (uint256 capacity, ) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); + return capacity; + } + + function _getLevel(uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (, uint256 level) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); + return level; + } + + function _bridgeGho(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { + _moveGhoOrigin(fromChain, toChain, user, amount); + _moveGhoDestination(fromChain, toChain, user, amount); + } + + function _moveGhoOrigin(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { + // Simulate CCIP pull of funds + vm.prank(user); + GhoToken(tokens[fromChain]).transfer(pools[fromChain], amount); + + vm.prank(RAMP); + IPool(pools[fromChain]).lockOrBurn(user, bytes(""), amount, uint64(toChain), bytes("")); + + if (_isEthereumChain(fromChain)) { + // Lock + bridged += amount; + } else { + // Burn + bucketLevels[fromChain] -= amount; + liquidity[fromChain] -= amount; + remoteLiquidity -= amount; + } + } + + function _moveGhoDestination(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { + vm.prank(RAMP); + IPool(pools[toChain]).releaseOrMint(bytes(""), user, amount, uint64(fromChain), bytes("")); + + if (_isEthereumChain(toChain)) { + // Release + bridged -= amount; + } else { + // Mint + bucketLevels[toChain] += amount; + liquidity[toChain] += amount; + remoteLiquidity += amount; + } + } + + function _isEthereumChain(uint256 chainId) internal pure returns (bool) { + return chainId == 0; + } + + function _assertInvariant() internal { + // Check bridged + assertEq(UpgradeableLockReleaseTokenPool(pools[0]).getCurrentBridgedAmount(), bridged); + + // Check levels and buckets + uint256 sumLevels; + uint256 chainId; + uint256 capacity; + uint256 level; + for (uint i = 1; i < chainsList.length; i++) { + // not counting Ethereum -{0} + chainId = chainsList[i]; + (capacity, level) = GhoToken(tokens[chainId]).getFacilitatorBucket(pools[chainId]); + + // Aggregate levels + sumLevels += level; + + assertEq(capacity, bucketCapacities[chainId], "wrong bucket capacity"); + assertEq(level, bucketLevels[chainId], "wrong bucket level"); + + assertEq( + capacity, + UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit(), + "capacity must be equal to bridgeLimit" + ); + assertLe( + level, + UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit(), + "level cannot be higher than bridgeLimit" + ); + } + // Check bridged is equal to sum of levels + 
assertEq(UpgradeableLockReleaseTokenPool(pools[0]).getCurrentBridgedAmount(), sumLevels, "wrong bridged"); + assertEq(remoteLiquidity, sumLevels, "wrong bridged"); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol new file mode 100644 index 0000000000..82e236618b --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol @@ -0,0 +1,408 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; + +import "../../helpers/MerkleHelper.sol"; +import "../../commitStore/CommitStore.t.sol"; +import "../../onRamp/EVM2EVMOnRampSetup.t.sol"; +import "../../offRamp/EVM2EVMOffRampSetup.t.sol"; +import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {IPool} from "../../../interfaces/pools/IPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {E2E} from "../End2End.t.sol"; + +contract GHOTokenPoolEthereumE2E is E2E { + using Internal for Internal.EVM2EVMMessage; + + address internal USER = makeAddr("user"); + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + + uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; + + IBurnMintERC20 internal srcGhoToken; + IBurnMintERC20 internal dstGhoToken; + UpgradeableLockReleaseTokenPool internal srcGhoTokenPool; + UpgradeableBurnMintTokenPool internal dstGhoTokenPool; + + function setUp() public virtual override { + E2E.setUp(); + + // Deploy GHO Token on source chain + srcGhoToken = IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(srcGhoToken), OWNER, type(uint128).max); + // Add GHO token to source token list + s_sourceTokens.push(address(srcGhoToken)); + + // Deploy GHO Token on destination chain + dstGhoToken = IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(dstGhoToken), OWNER, type(uint128).max); + // Add GHO token to destination token list + s_destTokens.push(address(dstGhoToken)); + + // Deploy LockReleaseTokenPool for GHO token on source chain + srcGhoTokenPool = UpgradeableLockReleaseTokenPool( + _deployUpgradeableLockReleaseTokenPool( + address(srcGhoToken), + address(s_mockARM), + address(s_sourceRouter), + AAVE_DAO, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to source token pool list + s_sourcePools.push(address(srcGhoTokenPool)); + + // Deploy BurnMintTokenPool for GHO token on destination chain + dstGhoTokenPool = UpgradeableBurnMintTokenPool( + _deployUpgradeableBurnMintTokenPool( + address(dstGhoToken), + address(s_mockARM), + address(s_destRouter), + AAVE_DAO, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to destination token pool list + s_destPools.push(address(dstGhoTokenPool)); + + // Give mint and burn privileges to destination UpgradeableTokenPool (GHO-specific related) + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(address(dstGhoToken)).grantRole(GhoToken(address(dstGhoToken)).FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + 
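// With the facilitator manager role granted, AAVE_DAO can register the destination pool as a GHO facilitator
+ 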
GhoToken(address(dstGhoToken)).addFacilitator(address(dstGhoTokenPool), "UpgradeableTokenPool", type(uint128).max); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Add config for source and destination chains + UpgradeableTokenPool.ChainUpdate[] memory srcChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + srcChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + UpgradeableTokenPool.ChainUpdate[] memory dstChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + dstChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.applyChainUpdates(srcChainUpdates); + dstGhoTokenPool.applyChainUpdates(dstChainUpdates); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Update GHO Token price on source PriceRegistry + EVM2EVMOnRamp.DynamicConfig memory onRampDynamicConfig = s_onRamp.getDynamicConfig(); + PriceRegistry onRampPriceRegistry = PriceRegistry(onRampDynamicConfig.priceRegistry); + onRampPriceRegistry.updatePrices(getSingleTokenPriceUpdateStruct(address(srcGhoToken), 1e18)); + + // Update GHO Token price on destination PriceRegistry + EVM2EVMOffRamp.DynamicConfig memory offRampDynamicConfig = s_offRamp.getDynamicConfig(); + PriceRegistry offRampPriceRegistry = PriceRegistry(offRampDynamicConfig.priceRegistry); + offRampPriceRegistry.updatePrices(getSingleTokenPriceUpdateStruct(address(dstGhoToken), 1e18)); + + // Add UpgradeableTokenPool to OnRamp + address[] memory srcTokens = new address[](1); + IPool[] memory srcPools = new IPool[](1); + srcTokens[0] = address(srcGhoToken); + srcPools[0] = IPool(address(srcGhoTokenPool)); + s_onRamp.applyPoolUpdates(new Internal.PoolUpdate[](0), getTokensAndPools(srcTokens, srcPools)); + + // Add UpgradeableTokenPool to OffRamp, matching source token with destination UpgradeableTokenPool + IPool[] memory dstPools = new IPool[](1); + dstPools[0] = IPool(address(dstGhoTokenPool)); + s_offRamp.applyPoolUpdates(new Internal.PoolUpdate[](0), getTokensAndPools(srcTokens, dstPools)); + } + + function testE2E_MessagesSuccess_gas() public { + vm.pauseGasMetering(); + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + uint256 preBridgedAmount = srcGhoTokenPool.getCurrentBridgedAmount(); + uint256 preBridgeLimit = srcGhoTokenPool.getBridgeLimit(); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](1); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. 
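+ // (1000 GHO locked in the source pool and accounted for in the bridged amount)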
+ assertEq(preGhoTokenBalanceOwner - 1000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool + 1000 * 1e18, srcGhoToken.balanceOf(address(srcGhoTokenPool))); + assertGt(expectedFee, 0); + + assertEq(preBridgedAmount + 1000 * 1e18, srcGhoTokenPool.getCurrentBridgedAmount()); + assertEq(preBridgeLimit, srcGhoTokenPool.getBridgeLimit()); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](1); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[0].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so when execute would e.g. use the current + // block time instead of the committed block time the value would be + // incorrect in the checks below. + vm.warp(BLOCK_TIME + 2000); + + vm.expectEmit(); + emit ExecutionStateChanged( + messages[0].sequenceNumber, + messages[0].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages); + + uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + + vm.resumeGasMetering(); + s_offRamp.execute(execReport, new uint256[](0)); + vm.pauseGasMetering(); + + assertEq(preGhoTokenBalanceUser + 1000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination"); + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel + 1000 * 1e18, postLevel, "wrong facilitator bucket level"); + } + + function testE2E_3MessagesSuccess_gas() public { + vm.pauseGasMetering(); + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + uint256 preBridgedAmount = srcGhoTokenPool.getCurrentBridgedAmount(); + uint256 preBridgeLimit = srcGhoTokenPool.getBridgeLimit(); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](3); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + messages[1] = sendRequestGho(2, 2000 * 1e18, false, false); + messages[2] = sendRequestGho(3, 3000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. 
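+ // (1000 + 2000 + 3000 GHO locked across the three messages)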
+ assertEq(preGhoTokenBalanceOwner - 6000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool + 6000 * 1e18, srcGhoToken.balanceOf(address(srcGhoTokenPool))); + assertGt(expectedFee, 0); + + assertEq(preBridgedAmount + 6000 * 1e18, srcGhoTokenPool.getCurrentBridgedAmount()); + assertEq(preBridgeLimit, srcGhoTokenPool.getBridgeLimit()); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](3); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + hashedMessages[1] = messages[1]._hash(metaDataHash); + messages[1].messageId = hashedMessages[1]; + hashedMessages[2] = messages[2]._hash(metaDataHash); + messages[2].messageId = hashedMessages[2]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[2].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so when execute would e.g. use the current + // block time instead of the committed block time the value would be + // incorrect in the checks below. + vm.warp(BLOCK_TIME + 2000); + + vm.expectEmit(); + emit ExecutionStateChanged( + messages[0].sequenceNumber, + messages[0].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + vm.expectEmit(); + emit ExecutionStateChanged( + messages[1].sequenceNumber, + messages[1].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + vm.expectEmit(); + emit ExecutionStateChanged( + messages[2].sequenceNumber, + messages[2].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages); + + uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + + vm.resumeGasMetering(); + s_offRamp.execute(execReport, new uint256[](0)); + vm.pauseGasMetering(); + + assertEq(preGhoTokenBalanceUser + 6000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination"); + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(dstGhoToken)).getFacilitatorBucket( + address(dstGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel + 6000 * 1e18, postLevel, "wrong facilitator bucket level"); + } + + function testRevertRateLimitReached() public { + // increase bridge limit to hit the rate limit error + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.setBridgeLimit(type(uint256).max); + vm.startPrank(OWNER); + + RateLimiter.Config memory rateLimiterConfig = getOutboundRateLimiterConfig(); + + // will revert due to rate limit of tokenPool + sendRequestGho(1, rateLimiterConfig.capacity + 1, true, false); + + // max capacity, won't revert + sendRequestGho(1, rateLimiterConfig.capacity, false, false); + + // revert due to capacity exceed + sendRequestGho(2, 100, true, false); + + // increase blocktime 
+    // advance the block time to refill capacity
+    vm.warp(BLOCK_TIME + 1);
+
+    // won't revert due to refill
+    sendRequestGho(2, 100, false, false);
+  }
+
+  function testRevertOnLessTokenToCoverFee() public {
+    sendRequestGho(1, 1000, false, true);
+  }
+
+  function testRevertBridgeLimitReached() public {
+    // raise the ccip rate limit so that only the bridge limit can be hit
+    vm.startPrank(AAVE_DAO);
+    srcGhoTokenPool.setChainRateLimiterConfig(
+      DEST_CHAIN_SELECTOR,
+      RateLimiter.Config({isEnabled: true, capacity: uint128(INITIAL_BRIDGE_LIMIT * 2), rate: 1e15}),
+      getInboundRateLimiterConfig()
+    );
+    vm.warp(block.timestamp + 100); // wait to refill capacity
+    vm.startPrank(OWNER);
+
+    // will revert due to the bridge limit
+    sendRequestGho(1, uint128(INITIAL_BRIDGE_LIMIT + 1), true, false);
+
+    // max bridge limit, won't revert
+    sendRequestGho(1, uint128(INITIAL_BRIDGE_LIMIT), false, false);
+    assertEq(srcGhoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT);
+
+    // reverts because the bridge limit is exceeded
+    sendRequestGho(2, 1, true, false);
+
+    // increase bridge limit
+    vm.startPrank(AAVE_DAO);
+    srcGhoTokenPool.setBridgeLimit(INITIAL_BRIDGE_LIMIT + 1);
+    vm.startPrank(OWNER);
+
+    // won't revert now that the bridge limit is higher
+    sendRequestGho(2, 1, false, false);
+    assertEq(srcGhoTokenPool.getCurrentBridgedAmount(), INITIAL_BRIDGE_LIMIT + 1);
+  }
+
+  function sendRequestGho(
+    uint64 expectedSeqNum,
+    uint256 amount,
+    bool expectRevert,
+    bool sendLessFee
+  ) public returns (Internal.EVM2EVMMessage memory) {
+    Client.EVM2AnyMessage memory message = _generateSingleTokenMessage(address(srcGhoToken), amount);
+    uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, message);
+
+    // fee error management: sending less than the required fee must always revert
+    uint256 feeToSend = sendLessFee ? expectedFee - 1 : expectedFee;
+    expectRevert = sendLessFee ? true : expectRevert;
+
+    IERC20(s_sourceTokens[0]).approve(address(s_sourceRouter), feeToSend); // fee
+    IERC20(srcGhoToken).approve(address(s_sourceRouter), amount); // amount
+
+    message.receiver = abi.encode(USER);
+    Internal.EVM2EVMMessage memory geEvent = _messageToEvent(
+      message,
+      expectedSeqNum,
+      expectedSeqNum,
+      expectedFee,
+      OWNER
+    );
+
+    if (!expectRevert) {
+      vm.expectEmit();
+      emit CCIPSendRequested(geEvent);
+    } else {
+      vm.expectRevert();
+    }
+    vm.resumeGasMetering();
+    s_sourceRouter.ccipSend(DEST_CHAIN_SELECTOR, message);
+    vm.pauseGasMetering();
+
+    return geEvent;
+  }
+}
diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol
new file mode 100644
index 0000000000..89d27aaf9f
--- /dev/null
+++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol
@@ -0,0 +1,72 @@
+// SPDX-License-Identifier: BUSL-1.1
+pragma solidity 0.8.19;
+
+import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol";
+import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol";
+
+import {stdError} from "forge-std/Test.sol";
+import {BaseTest} from "../../BaseTest.t.sol";
+import {IPool} from "../../../interfaces/pools/IPool.sol";
+import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol";
+import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol";
+import {EVM2EVMOnRamp} from "../../../onRamp/EVM2EVMOnRamp.sol";
+import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol";
+import {RateLimiter} from "../../../libraries/RateLimiter.sol";
+import {BurnMintERC677} from "../../../../shared/token/ERC677/BurnMintERC677.sol";
"../../../../shared/token/ERC677/BurnMintERC677.sol"; +import {Router} from "../../../Router.sol"; +import {IERC165} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; +import {IERC20} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {RouterSetup} from "../../router/RouterSetup.t.sol"; + +contract GHOTokenPoolEthereumSetup is RouterSetup { + IERC20 internal s_token; + UpgradeableLockReleaseTokenPool internal s_ghoTokenPool; + + address internal s_allowedOnRamp = address(123); + address internal s_allowedOffRamp = address(234); + + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + + uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; + + function setUp() public virtual override { + RouterSetup.setUp(); + + // GHO deployment + GhoToken ghoToken = new GhoToken(AAVE_DAO); + s_token = IERC20(address(ghoToken)); + deal(address(s_token), OWNER, type(uint128).max); + + // Set up UpgradeableTokenPool with permission to mint/burn + s_ghoTokenPool = UpgradeableLockReleaseTokenPool( + _deployUpgradeableLockReleaseTokenPool( + address(s_token), + address(s_mockARM), + address(s_sourceRouter), + AAVE_DAO, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ) + ); + + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + + changePrank(AAVE_DAO); + s_ghoTokenPool.applyChainUpdates(chainUpdate); + s_ghoTokenPool.setRebalancer(OWNER); + changePrank(OWNER); + + Router.OnRamp[] memory onRampUpdates = new Router.OnRamp[](1); + Router.OffRamp[] memory offRampUpdates = new Router.OffRamp[](1); + onRampUpdates[0] = Router.OnRamp({destChainSelector: DEST_CHAIN_SELECTOR, onRamp: s_allowedOnRamp}); + offRampUpdates[0] = Router.OffRamp({sourceChainSelector: SOURCE_CHAIN_SELECTOR, offRamp: s_allowedOffRamp}); + s_sourceRouter.applyRampUpdates(onRampUpdates, new Router.OffRamp[](0), offRampUpdates); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol new file mode 100644 index 0000000000..3e2696bbf9 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol @@ -0,0 +1,244 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; + +import {stdError} from "forge-std/Test.sol"; +import {MockUpgradeable} from "../../mocks/MockUpgradeable.sol"; +import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {EVM2EVMOnRamp} from "../../../onRamp/EVM2EVMOnRamp.sol"; +import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; +import {BurnMintTokenPool} from "../../../pools/BurnMintTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {GHOTokenPoolRemoteSetup} from "./GHOTokenPoolRemoteSetup.t.sol"; + +contract GHOTokenPoolRemote_lockOrBurn is GHOTokenPoolRemoteSetup { + function testSetupSuccess() public { + 
assertEq(address(s_burnMintERC677), address(s_pool.getToken())); + assertEq(address(s_mockARM), s_pool.getArmProxy()); + assertEq(false, s_pool.getAllowListEnabled()); + assertEq("BurnMintTokenPool 1.4.0", s_pool.typeAndVersion()); + } + + function testPoolBurnSuccess() public { + uint256 burnAmount = 20_000e18; + // inflate facilitator level + _inflateFacilitatorLevel(address(s_pool), address(s_burnMintERC677), burnAmount); + + deal(address(s_burnMintERC677), address(s_pool), burnAmount); + assertEq(s_burnMintERC677.balanceOf(address(s_pool)), burnAmount); + + vm.startPrank(s_burnMintOnRamp); + + vm.expectEmit(); + emit TokensConsumed(burnAmount); + + vm.expectEmit(); + emit Transfer(address(s_pool), address(0), burnAmount); + + vm.expectEmit(); + emit Burned(address(s_burnMintOnRamp), burnAmount); + + bytes4 expectedSignature = bytes4(keccak256("burn(uint256)")); + vm.expectCall(address(s_burnMintERC677), abi.encodeWithSelector(expectedSignature, burnAmount)); + + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket(address(s_pool)); + + s_pool.lockOrBurn(OWNER, bytes(""), burnAmount, DEST_CHAIN_SELECTOR, bytes("")); + + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket( + address(s_pool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel - burnAmount, postLevel, "wrong facilitator bucket level"); + + assertEq(s_burnMintERC677.balanceOf(address(s_pool)), 0); + } + + // Should not burn tokens if cursed. + function testPoolBurnRevertNotHealthyReverts() public { + s_mockARM.voteToCurse(bytes32(0)); + uint256 before = s_burnMintERC677.balanceOf(address(s_pool)); + vm.startPrank(s_burnMintOnRamp); + + vm.expectRevert(EVM2EVMOnRamp.BadARMSignal.selector); + s_pool.lockOrBurn(OWNER, bytes(""), 1e5, DEST_CHAIN_SELECTOR, bytes("")); + + assertEq(s_burnMintERC677.balanceOf(address(s_pool)), before); + } + + function testChainNotAllowedReverts() public { + uint64 wrongChainSelector = 8838833; + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.ChainNotAllowed.selector, wrongChainSelector)); + s_pool.lockOrBurn(OWNER, bytes(""), 1, wrongChainSelector, bytes("")); + } + + function testPoolBurnNoPrivilegesReverts() public { + // Remove privileges + vm.startPrank(AAVE_DAO); + GhoToken(address(s_burnMintERC677)).removeFacilitator(address(s_pool)); + vm.stopPrank(); + + uint256 amount = 1; + vm.startPrank(s_burnMintOnRamp); + vm.expectRevert(stdError.arithmeticError); + s_pool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + } + + function testBucketLevelNotEnoughReverts() public { + (, uint256 bucketLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket(address(s_pool)); + assertEq(bucketLevel, 0); + + uint256 amount = 1; + vm.expectCall(address(s_burnMintERC677), abi.encodeWithSelector(GhoToken.burn.selector, amount)); + vm.expectRevert(stdError.arithmeticError); + vm.startPrank(s_burnMintOnRamp); + s_pool.lockOrBurn(STRANGER, bytes(""), amount, DEST_CHAIN_SELECTOR, bytes("")); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = getOutboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_burnMintERC677)) + ); + vm.startPrank(s_burnMintOnRamp); + s_pool.lockOrBurn(STRANGER, bytes(""), amount, 
DEST_CHAIN_SELECTOR, bytes("")); + } +} + +contract GHOTokenPoolRemote_releaseOrMint is GHOTokenPoolRemoteSetup { + function testPoolMintSuccess() public { + uint256 amount = 1e19; + vm.startPrank(s_burnMintOffRamp); + vm.expectEmit(); + emit Transfer(address(0), OWNER, amount); + s_pool.releaseOrMint(bytes(""), OWNER, amount, DEST_CHAIN_SELECTOR, bytes("")); + assertEq(s_burnMintERC677.balanceOf(OWNER), amount); + } + + function testPoolMintNotHealthyReverts() public { + // Should not mint tokens if cursed. + s_mockARM.voteToCurse(bytes32(0)); + uint256 before = s_burnMintERC677.balanceOf(OWNER); + vm.startPrank(s_burnMintOffRamp); + vm.expectRevert(EVM2EVMOffRamp.BadARMSignal.selector); + s_pool.releaseOrMint(bytes(""), OWNER, 1e5, DEST_CHAIN_SELECTOR, bytes("")); + assertEq(s_burnMintERC677.balanceOf(OWNER), before); + } + + function testChainNotAllowedReverts() public { + uint64 wrongChainSelector = 8838833; + vm.expectRevert(abi.encodeWithSelector(UpgradeableTokenPool.ChainNotAllowed.selector, wrongChainSelector)); + s_pool.releaseOrMint(bytes(""), STRANGER, 1, wrongChainSelector, bytes("")); + } + + function testPoolMintNoPrivilegesReverts() public { + // Remove privileges + vm.startPrank(AAVE_DAO); + GhoToken(address(s_burnMintERC677)).removeFacilitator(address(s_pool)); + vm.stopPrank(); + + uint256 amount = 1; + vm.startPrank(s_burnMintOffRamp); + vm.expectRevert("FACILITATOR_BUCKET_CAPACITY_EXCEEDED"); + s_pool.releaseOrMint(bytes(""), STRANGER, amount, DEST_CHAIN_SELECTOR, bytes("")); + } + + function testBucketCapacityExceededReverts() public { + // Mint all the bucket capacity + (uint256 bucketCapacity, ) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket(address(s_pool)); + _inflateFacilitatorLevel(address(s_pool), address(s_burnMintERC677), bucketCapacity); + (uint256 currCapacity, uint256 currLevel) = GhoToken(address(s_burnMintERC677)).getFacilitatorBucket( + address(s_pool) + ); + assertEq(currCapacity, currLevel); + + uint256 amount = 1; + vm.expectCall(address(s_burnMintERC677), abi.encodeWithSelector(GhoToken.mint.selector, STRANGER, amount)); + vm.expectRevert("FACILITATOR_BUCKET_CAPACITY_EXCEEDED"); + vm.startPrank(s_burnMintOffRamp); + s_pool.releaseOrMint(bytes(""), STRANGER, amount, DEST_CHAIN_SELECTOR, bytes("")); + } + + function testTokenMaxCapacityExceededReverts() public { + RateLimiter.Config memory rateLimiterConfig = getInboundRateLimiterConfig(); + uint256 capacity = rateLimiterConfig.capacity; + uint256 amount = 10 * capacity; + + vm.expectRevert( + abi.encodeWithSelector(RateLimiter.TokenMaxCapacityExceeded.selector, capacity, amount, address(s_burnMintERC677)) + ); + vm.startPrank(s_burnMintOffRamp); + s_pool.releaseOrMint(bytes(""), STRANGER, amount, DEST_CHAIN_SELECTOR, bytes("")); + } +} + +contract GHOTokenPoolEthereum_upgradeability is GHOTokenPoolRemoteSetup { + function testInitialization() public { + // Upgradeability + assertEq(s_pool.REVISION(), 1); + vm.startPrank(PROXY_ADMIN); + (bool ok, bytes memory result) = address(s_pool).staticcall( + abi.encodeWithSelector(TransparentUpgradeableProxy.admin.selector) + ); + assertTrue(ok, "proxy admin fetch failed"); + address decodedProxyAdmin = abi.decode(result, (address)); + assertEq(decodedProxyAdmin, PROXY_ADMIN, "proxy admin is wrong"); + assertEq(decodedProxyAdmin, _getProxyAdminAddress(address(s_pool)), "proxy admin is wrong"); + + // TokenPool + vm.startPrank(OWNER); + assertEq(s_pool.getAllowList().length, 0); + assertEq(s_pool.getAllowListEnabled(), false); + 
assertEq(s_pool.getArmProxy(), address(s_mockARM)); + assertEq(s_pool.getRouter(), address(s_sourceRouter)); + assertEq(address(s_pool.getToken()), address(s_burnMintERC677)); + assertEq(s_pool.owner(), AAVE_DAO, "owner is wrong"); + } + + function testUpgrade() public { + MockUpgradeable newImpl = new MockUpgradeable(); + bytes memory mockImpleParams = abi.encodeWithSignature("initialize()"); + vm.startPrank(PROXY_ADMIN); + TransparentUpgradeableProxy(payable(address(s_pool))).upgradeToAndCall(address(newImpl), mockImpleParams); + + vm.startPrank(OWNER); + assertEq(s_pool.REVISION(), 2); + } + + function testUpgradeAdminReverts() public { + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_pool))).upgradeToAndCall(address(0), bytes("")); + assertEq(s_pool.REVISION(), 1); + + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_pool))).upgradeTo(address(0)); + assertEq(s_pool.REVISION(), 1); + } + + function testChangeAdmin() public { + assertEq(_getProxyAdminAddress(address(s_pool)), PROXY_ADMIN); + + address newAdmin = makeAddr("newAdmin"); + vm.startPrank(PROXY_ADMIN); + TransparentUpgradeableProxy(payable(address(s_pool))).changeAdmin(newAdmin); + + assertEq(_getProxyAdminAddress(address(s_pool)), newAdmin, "Admin change failed"); + } + + function testChangeAdminAdminReverts() public { + assertEq(_getProxyAdminAddress(address(s_pool)), PROXY_ADMIN); + + address newAdmin = makeAddr("newAdmin"); + vm.expectRevert(); + TransparentUpgradeableProxy(payable(address(s_pool))).changeAdmin(newAdmin); + + assertEq(_getProxyAdminAddress(address(s_pool)), PROXY_ADMIN, "Unauthorized admin change"); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol new file mode 100644 index 0000000000..62d6f5235b --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol @@ -0,0 +1,416 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; + +import "../../helpers/MerkleHelper.sol"; +import "../../commitStore/CommitStore.t.sol"; +import "../../onRamp/EVM2EVMOnRampSetup.t.sol"; +import "../../offRamp/EVM2EVMOffRampSetup.t.sol"; +import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {IPool} from "../../../interfaces/pools/IPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {E2E} from "../End2End.t.sol"; + +contract GHOTokenPoolRemoteE2E is E2E { + using Internal for Internal.EVM2EVMMessage; + + address internal USER = makeAddr("user"); + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + + uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; + + IBurnMintERC20 internal srcGhoToken; + IBurnMintERC20 internal dstGhoToken; + UpgradeableBurnMintTokenPool internal srcGhoTokenPool; + UpgradeableLockReleaseTokenPool internal dstGhoTokenPool; + + function setUp() public virtual override { + E2E.setUp(); + + // Deploy GHO Token on source chain + srcGhoToken = 
IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(srcGhoToken), OWNER, type(uint128).max); + // Add GHO token to source token list + s_sourceTokens.push(address(srcGhoToken)); + + // Deploy GHO Token on destination chain + dstGhoToken = IBurnMintERC20(address(new GhoToken(AAVE_DAO))); + deal(address(dstGhoToken), OWNER, type(uint128).max); + // Add GHO token to destination token list + s_destTokens.push(address(dstGhoToken)); + + // Deploy BurnMintTokenPool for GHO token on source chain + srcGhoTokenPool = UpgradeableBurnMintTokenPool( + _deployUpgradeableBurnMintTokenPool( + address(srcGhoToken), + address(s_mockARM), + address(s_sourceRouter), + AAVE_DAO, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to source token pool list + s_sourcePools.push(address(srcGhoTokenPool)); + + // Deploy LockReleaseTokenPool for GHO token on destination chain + dstGhoTokenPool = UpgradeableLockReleaseTokenPool( + _deployUpgradeableLockReleaseTokenPool( + address(dstGhoToken), + address(s_mockARM), + address(s_destRouter), + AAVE_DAO, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ) + ); + + // Add GHO UpgradeableTokenPool to destination token pool list + s_destPools.push(address(dstGhoTokenPool)); + + // Give mint and burn privileges to source UpgradeableTokenPool (GHO-specific related) + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(address(srcGhoToken)).grantRole(GhoToken(address(srcGhoToken)).FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + GhoToken(address(srcGhoToken)).addFacilitator(address(srcGhoTokenPool), "UpgradeableTokenPool", type(uint128).max); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Add config for source and destination chains + UpgradeableTokenPool.ChainUpdate[] memory srcChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + srcChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + UpgradeableTokenPool.ChainUpdate[] memory dstChainUpdates = new UpgradeableTokenPool.ChainUpdate[](1); + dstChainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: SOURCE_CHAIN_SELECTOR, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + srcGhoTokenPool.applyChainUpdates(srcChainUpdates); + dstGhoTokenPool.applyChainUpdates(dstChainUpdates); + vm.stopPrank(); + vm.startPrank(OWNER); + + // Update GHO Token price on source PriceRegistry + EVM2EVMOnRamp.DynamicConfig memory onRampDynamicConfig = s_onRamp.getDynamicConfig(); + PriceRegistry onRampPriceRegistry = PriceRegistry(onRampDynamicConfig.priceRegistry); + onRampPriceRegistry.updatePrices(getSingleTokenPriceUpdateStruct(address(srcGhoToken), 1e18)); + + // Update GHO Token price on destination PriceRegistry + EVM2EVMOffRamp.DynamicConfig memory offRampDynamicConfig = s_offRamp.getDynamicConfig(); + PriceRegistry offRampPriceRegistry = PriceRegistry(offRampDynamicConfig.priceRegistry); + offRampPriceRegistry.updatePrices(getSingleTokenPriceUpdateStruct(address(dstGhoToken), 1e18)); + + // Add UpgradeableTokenPool to OnRamp + address[] memory srcTokens = new address[](1); + IPool[] memory srcPools = new IPool[](1); + srcTokens[0] = address(srcGhoToken); + srcPools[0] = IPool(address(srcGhoTokenPool)); + s_onRamp.applyPoolUpdates(new Internal.PoolUpdate[](0), getTokensAndPools(srcTokens, 
srcPools)); + + // Add UpgradeableTokenPool to OffRamp, matching source token with destination UpgradeableTokenPool + IPool[] memory dstPools = new IPool[](1); + dstPools[0] = IPool(address(dstGhoTokenPool)); + s_offRamp.applyPoolUpdates(new Internal.PoolUpdate[](0), getTokensAndPools(srcTokens, dstPools)); + + address[] memory dstTokens = new address[](1); + dstTokens[0] = address(dstGhoToken); + s_onRamp.applyPoolUpdates(new Internal.PoolUpdate[](0), getTokensAndPools(dstTokens, dstPools)); + } + + function testE2E_MessagesSuccess_gas() public { + vm.pauseGasMetering(); + + // Mint some GHO to inflate UpgradeableBurnMintTokenPool facilitator level + _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), 1000 * 1e18); + vm.startPrank(OWNER); + + // Lock some GHO on destination so it can be released later on + dstGhoToken.transfer(address(dstGhoTokenPool), 1000 * 1e18); + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.stopPrank(); + vm.startPrank(address(s_onRamp)); + vm.mockCall( + address(s_destRouter), + abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), + abi.encode(s_onRamp) + ); + dstGhoTokenPool.lockOrBurn(STRANGER, bytes(""), 1000 * 1e18, SOURCE_CHAIN_SELECTOR, bytes("")); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 1000 * 1e18); + vm.startPrank(address(OWNER)); + + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](1); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. + assertEq(preGhoTokenBalanceOwner - 1000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool, srcGhoToken.balanceOf(address(srcGhoTokenPool))); // GHO gets burned + assertGt(expectedFee, 0); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 1000 * 1e18); + + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel - 1000 * 1e18, postLevel, "wrong facilitator bucket level"); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](1); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[0].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so when execute would e.g. 
use the current + // block time instead of the committed block time the value would be + // incorrect in the checks below. + vm.warp(BLOCK_TIME + 2000); + + vm.expectEmit(); + emit ExecutionStateChanged( + messages[0].sequenceNumber, + messages[0].messageId, + Internal.MessageExecutionState.SUCCESS, + "" + ); + + Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages); + + uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER); + + vm.resumeGasMetering(); + s_offRamp.execute(execReport, new uint256[](0)); + vm.pauseGasMetering(); + + assertEq(preGhoTokenBalanceUser + 1000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination"); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 0); + } + + function testE2E_3MessagesSuccess_gas() public { + vm.pauseGasMetering(); + + // Mint some GHO to inflate UpgradeableTokenPool facilitator level + _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), 6000 * 1e18); + vm.startPrank(OWNER); + + // Lock some GHO on destination so it can be released later on + dstGhoToken.transfer(address(dstGhoTokenPool), 6000 * 1e18); + // Inflate current bridged amount so it can be reduced in `releaseOrMint` function + vm.stopPrank(); + vm.startPrank(address(s_onRamp)); + vm.mockCall( + address(s_destRouter), + abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), + abi.encode(s_onRamp) + ); + dstGhoTokenPool.lockOrBurn(STRANGER, bytes(""), 6000 * 1e18, SOURCE_CHAIN_SELECTOR, bytes("")); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 6000 * 1e18); + vm.startPrank(address(OWNER)); + + uint256 preGhoTokenBalanceOwner = srcGhoToken.balanceOf(OWNER); + uint256 preGhoTokenBalancePool = srcGhoToken.balanceOf(address(srcGhoTokenPool)); + (uint256 preCapacity, uint256 preLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + + Internal.EVM2EVMMessage[] memory messages = new Internal.EVM2EVMMessage[](3); + messages[0] = sendRequestGho(1, 1000 * 1e18, false, false); + messages[1] = sendRequestGho(2, 2000 * 1e18, false, false); + messages[2] = sendRequestGho(3, 3000 * 1e18, false, false); + + uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, _generateTokenMessage()); + // Asserts that the tokens have been sent and the fee has been paid. 
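+    // The source pool burns what it pulls in, so its own balance stays flat while its GHO
+    // facilitator bucket level drops by the bridged amount (checked further below).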
+ assertEq(preGhoTokenBalanceOwner - 6000 * 1e18, srcGhoToken.balanceOf(OWNER)); + assertEq(preGhoTokenBalancePool, srcGhoToken.balanceOf(address(srcGhoTokenPool))); // GHO gets burned + assertGt(expectedFee, 0); + assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 6000 * 1e18); + + // Facilitator checks + (uint256 postCapacity, uint256 postLevel) = GhoToken(address(srcGhoToken)).getFacilitatorBucket( + address(srcGhoTokenPool) + ); + assertEq(postCapacity, preCapacity); + assertEq(preLevel - 6000 * 1e18, postLevel, "wrong facilitator bucket level"); + + bytes32 metaDataHash = s_offRamp.metadataHash(); + + bytes32[] memory hashedMessages = new bytes32[](3); + hashedMessages[0] = messages[0]._hash(metaDataHash); + messages[0].messageId = hashedMessages[0]; + hashedMessages[1] = messages[1]._hash(metaDataHash); + messages[1].messageId = hashedMessages[1]; + hashedMessages[2] = messages[2]._hash(metaDataHash); + messages[2].messageId = hashedMessages[2]; + + bytes32[] memory merkleRoots = new bytes32[](1); + merkleRoots[0] = MerkleHelper.getMerkleRoot(hashedMessages); + + address[] memory onRamps = new address[](1); + onRamps[0] = ON_RAMP_ADDRESS; + + bytes memory commitReport = abi.encode( + CommitStore.CommitReport({ + priceUpdates: getEmptyPriceUpdates(), + interval: CommitStore.Interval(messages[0].sequenceNumber, messages[2].sequenceNumber), + merkleRoot: merkleRoots[0] + }) + ); + + vm.resumeGasMetering(); + s_commitStore.report(commitReport, ++s_latestEpochAndRound); + vm.pauseGasMetering(); + + bytes32[] memory proofs = new bytes32[](0); + uint256 timestamp = s_commitStore.verify(merkleRoots, proofs, 2 ** 2 - 1); + assertEq(BLOCK_TIME, timestamp); + + // We change the block time so when execute would e.g. use the current + // block time instead of the committed block time the value would be + // incorrect in the checks below. 
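+    // The exact offset is arbitrary; it only needs to differ from the committed BLOCK_TIME.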
+    vm.warp(BLOCK_TIME + 2000);
+
+    vm.expectEmit();
+    emit ExecutionStateChanged(
+      messages[0].sequenceNumber,
+      messages[0].messageId,
+      Internal.MessageExecutionState.SUCCESS,
+      ""
+    );
+
+    vm.expectEmit();
+    emit ExecutionStateChanged(
+      messages[1].sequenceNumber,
+      messages[1].messageId,
+      Internal.MessageExecutionState.SUCCESS,
+      ""
+    );
+
+    vm.expectEmit();
+    emit ExecutionStateChanged(
+      messages[2].sequenceNumber,
+      messages[2].messageId,
+      Internal.MessageExecutionState.SUCCESS,
+      ""
+    );
+
+    Internal.ExecutionReport memory execReport = _generateReportFromMessages(messages);
+
+    uint256 preGhoTokenBalanceUser = dstGhoToken.balanceOf(USER);
+
+    vm.resumeGasMetering();
+    s_offRamp.execute(execReport, new uint256[](0));
+    vm.pauseGasMetering();
+
+    assertEq(preGhoTokenBalanceUser + 6000 * 1e18, dstGhoToken.balanceOf(USER), "Wrong balance on destination");
+    assertEq(dstGhoTokenPool.getCurrentBridgedAmount(), 0);
+  }
+
+  function testRevertRateLimitReached() public {
+    RateLimiter.Config memory rateLimiterConfig = getOutboundRateLimiterConfig();
+
+    // will revert due to the token pool's rate limit
+    sendRequestGho(1, rateLimiterConfig.capacity + 1, true, false);
+
+    // max capacity, won't revert
+    // Mint some GHO to inflate UpgradeableTokenPool facilitator level
+    _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), rateLimiterConfig.capacity);
+    vm.startPrank(OWNER);
+    sendRequestGho(1, rateLimiterConfig.capacity, false, false);
+
+    // reverts because capacity is exceeded
+    sendRequestGho(2, 100, true, false);
+
+    // advance the block time to refill capacity
+    vm.warp(BLOCK_TIME + 1);
+
+    // won't revert due to refill
+    _inflateFacilitatorLevel(address(srcGhoTokenPool), address(srcGhoToken), 100);
+    vm.startPrank(OWNER);
+    sendRequestGho(2, 100, false, false);
+  }
+
+  function testRevertOnLessTokenToCoverFee() public {
+    sendRequestGho(1, 1000, false, true);
+  }
+
+  function sendRequestGho(
+    uint64 expectedSeqNum,
+    uint256 amount,
+    bool expectRevert,
+    bool sendLessFee
+  ) public returns (Internal.EVM2EVMMessage memory) {
+    Client.EVM2AnyMessage memory message = _generateSingleTokenMessage(address(srcGhoToken), amount);
+    uint256 expectedFee = s_sourceRouter.getFee(DEST_CHAIN_SELECTOR, message);
+
+    // fee error management: sending less than the required fee must always revert
+    uint256 feeToSend = sendLessFee ? expectedFee - 1 : expectedFee;
+    expectRevert = sendLessFee ? 
true : expectRevert; + + IERC20(s_sourceTokens[0]).approve(address(s_sourceRouter), feeToSend); // fee + IERC20(srcGhoToken).approve(address(s_sourceRouter), amount); // amount + + message.receiver = abi.encode(USER); + Internal.EVM2EVMMessage memory geEvent = _messageToEvent( + message, + expectedSeqNum, + expectedSeqNum, + expectedFee, + OWNER + ); + + if (!expectRevert) { + vm.expectEmit(); + emit CCIPSendRequested(geEvent); + } else { + vm.expectRevert(); + } + vm.resumeGasMetering(); + s_sourceRouter.ccipSend(DEST_CHAIN_SELECTOR, message); + vm.pauseGasMetering(); + + return geEvent; + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol new file mode 100644 index 0000000000..529715aaf2 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol @@ -0,0 +1,78 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; + +import {stdError} from "forge-std/Test.sol"; +import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {Router} from "../../../Router.sol"; +import {BurnMintERC677} from "../../../../shared/token/ERC677/BurnMintERC677.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; +import {RouterSetup} from "../../router/RouterSetup.t.sol"; + +contract GHOTokenPoolRemoteSetup is RouterSetup { + event Transfer(address indexed from, address indexed to, uint256 value); + event TokensConsumed(uint256 tokens); + event Burned(address indexed sender, uint256 amount); + + BurnMintERC677 internal s_burnMintERC677; + address internal s_burnMintOffRamp = makeAddr("burn_mint_offRamp"); + address internal s_burnMintOnRamp = makeAddr("burn_mint_onRamp"); + + UpgradeableBurnMintTokenPool internal s_pool; + + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + + function setUp() public virtual override { + RouterSetup.setUp(); + + // GHO deployment + GhoToken ghoToken = new GhoToken(AAVE_DAO); + s_burnMintERC677 = BurnMintERC677(address(ghoToken)); + + s_pool = UpgradeableBurnMintTokenPool( + _deployUpgradeableBurnMintTokenPool( + address(s_burnMintERC677), + address(s_mockARM), + address(s_sourceRouter), + AAVE_DAO, + PROXY_ADMIN + ) + ); + + // Give mint and burn privileges to source UpgradeableTokenPool (GHO-specific related) + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(address(s_burnMintERC677)).grantRole( + GhoToken(address(s_burnMintERC677)).FACILITATOR_MANAGER_ROLE(), + AAVE_DAO + ); + GhoToken(address(s_burnMintERC677)).addFacilitator(address(s_pool), "UpgradeableTokenPool", type(uint128).max); + vm.stopPrank(); + + _applyChainUpdates(address(s_pool)); + } + + function _applyChainUpdates(address pool) internal { + UpgradeableTokenPool.ChainUpdate[] memory chains = new UpgradeableTokenPool.ChainUpdate[](1); + chains[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: DEST_CHAIN_SELECTOR, + allowed: true, + outboundRateLimiterConfig: getOutboundRateLimiterConfig(), + inboundRateLimiterConfig: getInboundRateLimiterConfig() + }); + + vm.startPrank(AAVE_DAO); + UpgradeableBurnMintTokenPool(pool).applyChainUpdates(chains); + vm.stopPrank(); + vm.startPrank(OWNER); + + Router.OnRamp[] memory onRampUpdates = new 
Router.OnRamp[](1); + onRampUpdates[0] = Router.OnRamp({destChainSelector: DEST_CHAIN_SELECTOR, onRamp: s_burnMintOnRamp}); + Router.OffRamp[] memory offRampUpdates = new Router.OffRamp[](1); + offRampUpdates[0] = Router.OffRamp({sourceChainSelector: DEST_CHAIN_SELECTOR, offRamp: s_burnMintOffRamp}); + s_sourceRouter.applyRampUpdates(onRampUpdates, new Router.OffRamp[](0), offRampUpdates); + } +} From e25ffef1bcbd27823c24b045b47780790e86aba0 Mon Sep 17 00:00:00 2001 From: miguelmtz <36620902+miguelmtzinf@users.noreply.github.com> Date: Mon, 20 May 2024 11:39:36 +0200 Subject: [PATCH 03/18] feat: Add Bridge limit admin (#6) * test: Add unit and e2e tests * test: Add tests for bridge limit config * fix: Fix foundry toml * feat: Add bridge limit admin --- .../pools/UpgradeableLockReleaseTokenPool.sol | 19 +++++++++- .../test/pools/GHO/GHOTokenPoolEthereum.t.sol | 38 +++++++++++++++++-- 2 files changed, 53 insertions(+), 4 deletions(-) diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol index f1abee7c86..89c4edc9f1 100644 --- a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol @@ -57,6 +57,9 @@ contract UpgradeableLockReleaseTokenPool is /// @notice Amount of tokens bridged (transferred out) /// @dev Must always be equal to or below the bridge limit uint256 private s_currentBridged; + /// @notice The address of the bridge limit admin. + /// @dev Can be address(0) if none is configured. + address internal s_bridgeLimitAdmin; /// @dev Constructor /// @param token The bridgeable token that is managed by this pool. @@ -180,13 +183,22 @@ contract UpgradeableLockReleaseTokenPool is } /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out + /// @dev Only callable by the owner or the bridge limit admin. /// @param newBridgeLimit The new bridge limit - function setBridgeLimit(uint256 newBridgeLimit) external onlyOwner { + function setBridgeLimit(uint256 newBridgeLimit) external { + if (msg.sender != s_bridgeLimitAdmin && msg.sender != owner()) revert Unauthorized(msg.sender); uint256 oldBridgeLimit = s_bridgeLimit; s_bridgeLimit = newBridgeLimit; emit BridgeLimitUpdated(oldBridgeLimit, newBridgeLimit); } + /// @notice Sets the bridge limit admin address. + /// @dev Only callable by the owner. + /// @param bridgeLimitAdmin The new bridge limit admin address. + function setBridgeLimitAdmin(address bridgeLimitAdmin) external onlyOwner { + s_bridgeLimitAdmin = bridgeLimitAdmin; + } + /// @notice Gets the bridge limit /// @return The maximum amount of tokens that can be transferred out to other chains function getBridgeLimit() external view virtual returns (uint256) { @@ -204,6 +216,11 @@ contract UpgradeableLockReleaseTokenPool is return s_rateLimitAdmin; } + /// @notice Gets the bridge limiter admin address. + function getBridgeLimitAdmin() external view returns (address) { + return s_bridgeLimitAdmin; + } + /// @notice Checks if the pool can accept liquidity. /// @return true if the pool can accept liquidity, false otherwise. 
function canAcceptLiquidity() external view returns (bool) { diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol index aa85916c74..b76a5c0328 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol @@ -408,7 +408,7 @@ contract GHOTokenPoolEthereum_setChainRateLimiterConfig is GHOTokenPoolEthereumS assertEq(bucket.lastUpdated, newTime); } - function testOnlyOwnerOrRateLimitAdminReverts() public { + function testOnlyOwnerOrRateLimitAdminSuccess() public { address rateLimiterAdmin = address(28973509103597907); changePrank(AAVE_DAO); @@ -490,6 +490,17 @@ contract GHOTokenPoolEthereum_setBridgeLimit is GHOTokenPoolEthereumSetup { s_ghoTokenPool.setBridgeLimit(newBridgeLimit); assertEq(newBridgeLimit, s_ghoTokenPool.getBridgeLimit()); + + // Bridge Limit Admin + address bridgeLimitAdmin = address(28973509103597907); + s_ghoTokenPool.setBridgeLimitAdmin(bridgeLimitAdmin); + + vm.startPrank(bridgeLimitAdmin); + newBridgeLimit += 1; + + s_ghoTokenPool.setBridgeLimit(newBridgeLimit); + + assertEq(newBridgeLimit, s_ghoTokenPool.getBridgeLimit()); } function testZeroBridgeLimitReverts() public { @@ -573,14 +584,35 @@ contract GHOTokenPoolEthereum_setBridgeLimit is GHOTokenPoolEthereumSetup { // Reverts - function testSetRateLimitAdminReverts() public { + function testSetBridgeLimitAdminReverts() public { vm.startPrank(STRANGER); - vm.expectRevert("Only callable by owner"); + vm.expectRevert(abi.encodeWithSelector(LockReleaseTokenPool.Unauthorized.selector, STRANGER)); s_ghoTokenPool.setBridgeLimit(0); } } +contract GHOTokenPoolEthereum_setBridgeLimitAdmin is GHOTokenPoolEthereumSetup { + function testSetBridgeLimitAdminSuccess() public { + assertEq(address(0), s_ghoTokenPool.getBridgeLimitAdmin()); + + address bridgeLimitAdmin = address(28973509103597907); + changePrank(AAVE_DAO); + s_ghoTokenPool.setBridgeLimitAdmin(bridgeLimitAdmin); + + assertEq(bridgeLimitAdmin, s_ghoTokenPool.getBridgeLimitAdmin()); + } + + // Reverts + + function testSetBridgeLimitAdminReverts() public { + vm.startPrank(STRANGER); + + vm.expectRevert("Only callable by owner"); + s_ghoTokenPool.setBridgeLimitAdmin(STRANGER); + } +} + contract GHOTokenPoolEthereum_upgradeability is GHOTokenPoolEthereumSetup { function testInitialization() public { // Upgradeability From 13afb4015b6ea6997f804e65da1515b450f4f7c5 Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Mon, 20 May 2024 14:19:22 +0200 Subject: [PATCH 04/18] fix: Rename error --- .../pools/UpgradeableLockReleaseTokenPool.sol | 4 +- contracts/src/v0.8/ccip/script.s.sol | 83 +++++++++++++++++++ .../test/pools/GHO/GHOTokenPoolEthereum.t.sol | 2 +- 3 files changed, 86 insertions(+), 3 deletions(-) create mode 100644 contracts/src/v0.8/ccip/script.s.sol diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol index 89c4edc9f1..f8cfde86ad 100644 --- a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol @@ -33,7 +33,7 @@ contract UpgradeableLockReleaseTokenPool is error Unauthorized(address caller); error BridgeLimitExceeded(uint256 bridgeLimit); - error InvalidAmountToBurn(); + error NotEnoughBridgedAmount(); event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); string public constant override 
typeAndVersion = "LockReleaseTokenPool 1.4.0"; @@ -141,7 +141,7 @@ contract UpgradeableLockReleaseTokenPool is bytes memory ) external virtual override onlyOffRamp(remoteChainSelector) whenHealthy { // This should never occur. Amount should never exceed the current bridged amount - if (amount > s_currentBridged) revert InvalidAmountToBurn(); + if (amount > s_currentBridged) revert NotEnoughBridgedAmount(); // Reduce bridged amount because tokens are back to source chain s_currentBridged -= amount; diff --git a/contracts/src/v0.8/ccip/script.s.sol b/contracts/src/v0.8/ccip/script.s.sol new file mode 100644 index 0000000000..95ef0c509b --- /dev/null +++ b/contracts/src/v0.8/ccip/script.s.sol @@ -0,0 +1,83 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.0; + +import {Script, console2} from 'forge-std/Script.sol'; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; +import {UpgradeableLockReleaseTokenPool} from "./pools/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "./pools/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "./pools/UpgradeableTokenPool.sol"; + + +contract DeployLockReleaseTokenPool is Script { + // ETH SEPOLIA - 11155111 + address GHO_TOKEN = 0xc4bF5CbDaBE595361438F8c6a187bDc330539c60; + address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00; + address ARM_PROXY = 0xba3f6251de62dED61Ff98590cB2fDf6871FbB991; + address ROUTER = 0x0BF3dE8c5D3e8A2B34D2BEeB17ABfCeBaf363A59; + address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE; + + // ARB SEPOLIA - 421614 + // address GHO_TOKEN = 0xb13Cfa6f8B2Eed2C37fB00fF0c1A59807C585810; + // address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00; + // address ARM_PROXY = 0x9527E2d01A3064ef6b50c1Da1C0cC523803BCFF2; + // address ROUTER = 0x2a9C5afB0d0e4BAb2BCdaE109EC4b0c4Be15a165; + // address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE; + + // BASE SEPOLIA - 84532 + // address GHO_TOKEN = 0x7CFa3f3d1cded0Da930881c609D4Dbf0012c14Bb; + // address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00; + // address ARM_PROXY = 0x99360767a4705f68CcCb9533195B761648d6d807; + // address ROUTER = 0xD3b06cEbF099CE7DA4AcCf578aaebFDBd6e88a93; + // address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE; + + // FUJI - 43113 + // address GHO_TOKEN = 0x9c04928Cc678776eC1C1C0E46ecC03a5F47A7723; + // address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00; + // address ARM_PROXY = 0xAc8CFc3762a979628334a0E4C1026244498E821b; + // address ROUTER = 0xF694E193200268f9a4868e4Aa017A0118C9a8177; + // address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE; + + function run() external { + console2.log('Block Number: ', block.number); + vm.startBroadcast(); + + UpgradeableLockReleaseTokenPool tokenPoolImpl = new UpgradeableLockReleaseTokenPool(GHO_TOKEN, ARM_PROXY, false, true); + // Imple init + address[] memory emptyArray = new address[](0); + tokenPoolImpl.initialize(TOKEN_POOL_OWNER, emptyArray, ROUTER, 10e18); + // proxy deploy and init + bytes memory tokenPoolInitParams = abi.encodeWithSignature( + "initialize(address,address[],address,uint256)", + TOKEN_POOL_OWNER, + emptyArray, + ROUTER, + 10e18 + ); + TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy( + address(tokenPoolImpl), + PROXY_ADMIN, + tokenPoolInitParams + ); + + vm.stopBroadcast(); + // Manage ownership + // 
UpgradeableLockReleaseTokenPool(address(tokenPoolProxy)).acceptOwnership(); + + } +} + +contract Accept is Script { + + function run() external { + console2.log('Block Number: ', block.number); + vm.startBroadcast(); + + console2.log(UpgradeableLockReleaseTokenPool(0x50A715d63bDcd5455a3308932a624263d170Dd74).getBridgeLimit()); + + // Manage ownership + UpgradeableLockReleaseTokenPool(0x50A715d63bDcd5455a3308932a624263d170Dd74).acceptOwnership(); + vm.stopBroadcast(); + + } +} + diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol index b76a5c0328..6ab3e47ecd 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol @@ -241,7 +241,7 @@ contract GHOTokenPoolEthereum_releaseOrMint is GHOTokenPoolEthereumSetup { function testBridgedAmountNoEnoughReverts() public { uint256 amount = 10; - vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.InvalidAmountToBurn.selector)); + vm.expectRevert(abi.encodeWithSelector(UpgradeableLockReleaseTokenPool.NotEnoughBridgedAmount.selector)); vm.startPrank(s_allowedOffRamp); s_ghoTokenPool.releaseOrMint(bytes(""), STRANGER, amount, SOURCE_CHAIN_SELECTOR, bytes("")); } From 64cc73d6312fb302d65a668fd7dd1c6602edbfc8 Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Wed, 22 May 2024 11:07:39 +0200 Subject: [PATCH 05/18] doc: Add caution docs to setBridgeLimit --- .../src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol | 1 + 1 file changed, 1 insertion(+) diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol index f8cfde86ad..7ca3d5f389 100644 --- a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol @@ -184,6 +184,7 @@ contract UpgradeableLockReleaseTokenPool is /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out /// @dev Only callable by the owner or the bridge limit admin. 
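+  /// @dev Reducing the limit below the current bridged amount does not revert, but it blocks new
+  /// outbound transfers until enough tokens are bridged back to the source chain.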
+ /// @dev Bridge limit changes should be carefully managed, specially when reducing below the current bridged amount /// @param newBridgeLimit The new bridge limit function setBridgeLimit(uint256 newBridgeLimit) external { if (msg.sender != s_bridgeLimitAdmin && msg.sender != owner()) revert Unauthorized(msg.sender); From a666880b9a61ae397a0d87a9214f10f59f6a93ec Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Wed, 22 May 2024 13:45:36 +0200 Subject: [PATCH 06/18] fix: Fix github action dir name --- .../action.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/actions/{setup-create-base64-upgrade-config => setup-create-base64-upgrade-config}/action.yml (100%) diff --git a/.github/actions/setup-create-base64-upgrade-config /action.yml b/.github/actions/setup-create-base64-upgrade-config/action.yml similarity index 100% rename from .github/actions/setup-create-base64-upgrade-config /action.yml rename to .github/actions/setup-create-base64-upgrade-config/action.yml From b1443cc46439a06c0a1af00d865302bafad2992d Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Thu, 23 May 2024 17:44:17 +0200 Subject: [PATCH 07/18] fix: Add code diffs between custom token pools and standard implementation --- .../diffs/BurnMintTokenPoolAbstract_diff.md | 21 ++ .../pools/diffs/BurnMintTokenPool_diff.md | 87 ++++++++ .../pools/diffs/LockReleaseTokenPool_diff.md | 201 ++++++++++++++++++ .../v0.8/ccip/pools/diffs/TokenPool_diff.md | 51 +++++ 4 files changed, 360 insertions(+) create mode 100644 contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPoolAbstract_diff.md create mode 100644 contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPool_diff.md create mode 100644 contracts/src/v0.8/ccip/pools/diffs/LockReleaseTokenPool_diff.md create mode 100644 contracts/src/v0.8/ccip/pools/diffs/TokenPool_diff.md diff --git a/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPoolAbstract_diff.md b/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPoolAbstract_diff.md new file mode 100644 index 0000000000..ab584e1849 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPoolAbstract_diff.md @@ -0,0 +1,21 @@ +```diff +diff --git a/src/v0.8/ccip/pools/BurnMintTokenPoolAbstract.sol b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol +index f5eb135186..651965e40b 100644 +--- a/src/v0.8/ccip/pools/BurnMintTokenPoolAbstract.sol ++++ b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol +@@ -1,11 +1,11 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.19; ++pragma solidity ^0.8.0; + + import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; + +-import {TokenPool} from "./TokenPool.sol"; ++import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + +-abstract contract BurnMintTokenPoolAbstract is TokenPool { ++abstract contract UpgradeableBurnMintTokenPoolAbstract is UpgradeableTokenPool { + /// @notice Contains the specific burn call for a pool. + /// @dev overriding this method allows us to create pools with different burn signatures + /// without duplicating the underlying logic. 
+``` diff --git a/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPool_diff.md new file mode 100644 index 0000000000..3fdf2d83da --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPool_diff.md @@ -0,0 +1,87 @@ +```diff +diff --git a/src/v0.8/ccip/pools/BurnMintTokenPool.sol b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol +index 9af0f22f4c..f07f8c3a28 100644 +--- a/src/v0.8/ccip/pools/BurnMintTokenPool.sol ++++ b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol +@@ -1,29 +1,66 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.19; ++pragma solidity ^0.8.0; + + import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; + import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; + +-import {TokenPool} from "./TokenPool.sol"; +-import {BurnMintTokenPoolAbstract} from "./BurnMintTokenPoolAbstract.sol"; ++import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; ++import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; + +-/// @notice This pool mints and burns a 3rd-party token. +-/// @dev Pool whitelisting mode is set in the constructor and cannot be modified later. +-/// It either accepts any address as originalSender, or only accepts whitelisted originalSender. +-/// The only way to change whitelisting mode is to deploy a new pool. +-/// If that is expected, please make sure the token's burner/minter roles are adjustable. +-contract BurnMintTokenPool is BurnMintTokenPoolAbstract, ITypeAndVersion { ++import {IRouter} from "../interfaces/IRouter.sol"; ++import {VersionedInitializable} from "./VersionedInitializable.sol"; ++ ++/// @title UpgradeableBurnMintTokenPool ++/// @author Aave Labs ++/// @notice Upgradeable version of Chainlink's CCIP BurnMintTokenPool ++/// @dev Contract adaptations: ++/// - Implementation of VersionedInitializable to allow upgrades ++/// - Move of allowlist and router definition to initialization stage ++contract UpgradeableBurnMintTokenPool is VersionedInitializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { + string public constant override typeAndVersion = "BurnMintTokenPool 1.4.0"; + ++ /// @dev Constructor ++ /// @param token The bridgeable token that is managed by this pool. ++ /// @param armProxy The address of the arm proxy ++ /// @param allowlistEnabled True if pool is set to access-controlled mode, false otherwise + constructor( +- IBurnMintERC20 token, +- address[] memory allowlist, ++ address token, + address armProxy, +- address router +- ) TokenPool(token, allowlist, armProxy, router) {} ++ bool allowlistEnabled ++ ) UpgradeableTokenPool(IBurnMintERC20(token), armProxy, allowlistEnabled) {} + +- /// @inheritdoc BurnMintTokenPoolAbstract ++ /// @dev Initializer ++ /// @dev The address passed as `owner` must accept ownership after initialization. 
++ /// @dev The `allowlist` is only effective if pool is set to access-controlled mode ++ /// @param owner The address of the owner ++ /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders ++ /// @param router The address of the router ++ function initialize(address owner, address[] memory allowlist, address router) public virtual initializer { ++ if (owner == address(0)) revert ZeroAddressNotAllowed(); ++ if (router == address(0)) revert ZeroAddressNotAllowed(); ++ _transferOwnership(owner); ++ ++ s_router = IRouter(router); ++ ++ // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. ++ if (i_allowlistEnabled) { ++ _applyAllowListUpdates(new address[](0), allowlist); ++ } ++ } ++ ++ /// @inheritdoc UpgradeableBurnMintTokenPoolAbstract + function _burn(uint256 amount) internal virtual override { + IBurnMintERC20(address(i_token)).burn(amount); + } ++ ++ /// @notice Returns the revision number ++ /// @return The revision number ++ function REVISION() public pure virtual returns (uint256) { ++ return 1; ++ } ++ ++ /// @inheritdoc VersionedInitializable ++ function getRevision() internal pure virtual override returns (uint256) { ++ return REVISION(); ++ } + } +``` diff --git a/contracts/src/v0.8/ccip/pools/diffs/LockReleaseTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/diffs/LockReleaseTokenPool_diff.md new file mode 100644 index 0000000000..3a81233289 --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/diffs/LockReleaseTokenPool_diff.md @@ -0,0 +1,201 @@ +```diff +diff --git a/src/v0.8/ccip/pools/LockReleaseTokenPool.sol b/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol +index 1a17fa0398..7ca3d5f389 100644 +--- a/src/v0.8/ccip/pools/LockReleaseTokenPool.sol ++++ b/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol +@@ -1,26 +1,41 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.19; ++pragma solidity ^0.8.0; + + import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; + import {ILiquidityContainer} from "../../rebalancer/interfaces/ILiquidityContainer.sol"; + +-import {TokenPool} from "./TokenPool.sol"; ++import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; + import {RateLimiter} from "../libraries/RateLimiter.sol"; + + import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; + import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; + +-/// @notice Token pool used for tokens on their native chain. This uses a lock and release mechanism. +-/// Because of lock/unlock requiring liquidity, this pool contract also has function to add and remove +-/// liquidity. This allows for proper bookkeeping for both user and liquidity provider balances. +-/// @dev One token per LockReleaseTokenPool. 
+-contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion { ++import {IRouter} from "../interfaces/IRouter.sol"; ++import {VersionedInitializable} from "./VersionedInitializable.sol"; ++ ++/// @title UpgradeableLockReleaseTokenPool ++/// @author Aave Labs ++/// @notice Upgradeable version of Chainlink's CCIP LockReleaseTokenPool ++/// @dev Contract adaptations: ++/// - Implementation of VersionedInitializable to allow upgrades ++/// - Move of allowlist and router definition to initialization stage ++/// - Addition of a bridge limit to regulate the maximum amount of tokens that can be transferred out (burned/locked) ++contract UpgradeableLockReleaseTokenPool is ++ VersionedInitializable, ++ UpgradeableTokenPool, ++ ILiquidityContainer, ++ ITypeAndVersion ++{ + using SafeERC20 for IERC20; + + error InsufficientLiquidity(); + error LiquidityNotAccepted(); + error Unauthorized(address caller); + ++ error BridgeLimitExceeded(uint256 bridgeLimit); ++ error NotEnoughBridgedAmount(); ++ event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); ++ + string public constant override typeAndVersion = "LockReleaseTokenPool 1.4.0"; + + /// @dev The unique lock release pool flag to signal through EIP 165. +@@ -37,16 +52,55 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion + /// @dev Can be address(0) if none is configured. + address internal s_rateLimitAdmin; + ++ /// @notice Maximum amount of tokens that can be bridged to other chains ++ uint256 private s_bridgeLimit; ++ /// @notice Amount of tokens bridged (transferred out) ++ /// @dev Must always be equal to or below the bridge limit ++ uint256 private s_currentBridged; ++ /// @notice The address of the bridge limit admin. ++ /// @dev Can be address(0) if none is configured. ++ address internal s_bridgeLimitAdmin; ++ ++ /// @dev Constructor ++ /// @param token The bridgeable token that is managed by this pool. ++ /// @param armProxy The address of the arm proxy ++ /// @param allowlistEnabled True if pool is set to access-controlled mode, false otherwise ++ /// @param acceptLiquidity True if the pool accepts liquidity, false otherwise + constructor( +- IERC20 token, +- address[] memory allowlist, ++ address token, + address armProxy, +- bool acceptLiquidity, +- address router +- ) TokenPool(token, allowlist, armProxy, router) { ++ bool allowlistEnabled, ++ bool acceptLiquidity ++ ) UpgradeableTokenPool(IERC20(token), armProxy, allowlistEnabled) { + i_acceptLiquidity = acceptLiquidity; + } + ++ /// @dev Initializer ++ /// @dev The address passed as `owner` must accept ownership after initialization. ++ /// @dev The `allowlist` is only effective if pool is set to access-controlled mode ++ /// @param owner The address of the owner ++ /// @param allowlist A set of addresses allowed to trigger lockOrBurn as original senders ++ /// @param router The address of the router ++ /// @param bridgeLimit The maximum amount of tokens that can be bridged to other chains ++ function initialize( ++ address owner, ++ address[] memory allowlist, ++ address router, ++ uint256 bridgeLimit ++ ) public virtual initializer { ++ if (owner == address(0)) revert ZeroAddressNotAllowed(); ++ if (router == address(0)) revert ZeroAddressNotAllowed(); ++ _transferOwnership(owner); ++ ++ s_router = IRouter(router); ++ ++ // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. 
++    if (i_allowlistEnabled) {
++      _applyAllowListUpdates(new address[](0), allowlist);
++    }
++    s_bridgeLimit = bridgeLimit;
++  }
++
+   /// @notice Locks the token in the pool
+   /// @param amount Amount to lock
+   /// @dev The whenHealthy check is important to ensure that even if a ramp is compromised
+@@ -66,6 +120,9 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion
+     whenHealthy
+     returns (bytes memory)
+   {
++    // Increase bridged amount because tokens are leaving the source chain
++    if ((s_currentBridged += amount) > s_bridgeLimit) revert BridgeLimitExceeded(s_bridgeLimit);
++
+     _consumeOutboundRateLimit(remoteChainSelector, amount);
+     emit Locked(msg.sender, amount);
+     return "";
+@@ -83,6 +140,11 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion
+     uint64 remoteChainSelector,
+     bytes memory
+   ) external virtual override onlyOffRamp(remoteChainSelector) whenHealthy {
++    // This should never occur. Amount should never exceed the current bridged amount
++    if (amount > s_currentBridged) revert NotEnoughBridgedAmount();
++    // Reduce bridged amount because tokens are back to source chain
++    s_currentBridged -= amount;
++
+     _consumeInboundRateLimit(remoteChainSelector, amount);
+     getToken().safeTransfer(receiver, amount);
+     emit Released(msg.sender, receiver, amount);
+@@ -120,11 +182,46 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion
+     s_rateLimitAdmin = rateLimitAdmin;
+   }
+
++  /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out
++  /// @dev Only callable by the owner or the bridge limit admin.
++  /// @dev Bridge limit changes should be carefully managed, especially when reducing below the current bridged amount
++  /// @param newBridgeLimit The new bridge limit
++  function setBridgeLimit(uint256 newBridgeLimit) external {
++    if (msg.sender != s_bridgeLimitAdmin && msg.sender != owner()) revert Unauthorized(msg.sender);
++    uint256 oldBridgeLimit = s_bridgeLimit;
++    s_bridgeLimit = newBridgeLimit;
++    emit BridgeLimitUpdated(oldBridgeLimit, newBridgeLimit);
++  }
++
++  /// @notice Sets the bridge limit admin address.
++  /// @dev Only callable by the owner.
++  /// @param bridgeLimitAdmin The new bridge limit admin address.
++  function setBridgeLimitAdmin(address bridgeLimitAdmin) external onlyOwner {
++    s_bridgeLimitAdmin = bridgeLimitAdmin;
++  }
++
++  /// @notice Gets the bridge limit
++  /// @return The maximum amount of tokens that can be transferred out to other chains
++  function getBridgeLimit() external view virtual returns (uint256) {
++    return s_bridgeLimit;
++  }
++
++  /// @notice Gets the current bridged amount to other chains
++  /// @return The amount of tokens transferred out to other chains
++  function getCurrentBridgedAmount() external view virtual returns (uint256) {
++    return s_currentBridged;
++  }
++
+   /// @notice Gets the rate limiter admin address.
+   function getRateLimitAdmin() external view returns (address) {
+     return s_rateLimitAdmin;
+   }
+
++  /// @notice Gets the bridge limit admin address.
++  function getBridgeLimitAdmin() external view returns (address) {
++    return s_bridgeLimitAdmin;
++  }
++
+   /// @notice Checks if the pool can accept liquidity.
+   /// @return true if the pool can accept liquidity, false otherwise.
+ function canAcceptLiquidity() external view returns (bool) { +@@ -166,4 +263,15 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion + + _setRateLimitConfig(remoteChainSelector, outboundConfig, inboundConfig); + } ++ ++ /// @notice Returns the revision number ++ /// @return The revision number ++ function REVISION() public pure virtual returns (uint256) { ++ return 1; ++ } ++ ++ /// @inheritdoc VersionedInitializable ++ function getRevision() internal pure virtual override returns (uint256) { ++ return REVISION(); ++ } + } +``` diff --git a/contracts/src/v0.8/ccip/pools/diffs/TokenPool_diff.md b/contracts/src/v0.8/ccip/pools/diffs/TokenPool_diff.md new file mode 100644 index 0000000000..4029fe88bd --- /dev/null +++ b/contracts/src/v0.8/ccip/pools/diffs/TokenPool_diff.md @@ -0,0 +1,51 @@ +```diff +diff --git a/src/v0.8/ccip/pools/TokenPool.sol b/src/v0.8/ccip/pools/UpgradeableTokenPool.sol +index b3571bb449..fcd8948098 100644 +--- a/src/v0.8/ccip/pools/TokenPool.sol ++++ b/src/v0.8/ccip/pools/UpgradeableTokenPool.sol +@@ -1,5 +1,5 @@ + // SPDX-License-Identifier: BUSL-1.1 +-pragma solidity 0.8.19; ++pragma solidity ^0.8.0; + + import {IPool} from "../interfaces/pools/IPool.sol"; + import {IARM} from "../interfaces/IARM.sol"; +@@ -15,7 +15,7 @@ import {EnumerableSet} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts + /// @notice Base abstract class with common functions for all token pools. + /// A token pool serves as isolated place for holding tokens and token specific logic + /// that may execute as tokens move across the bridge. +-abstract contract TokenPool is IPool, OwnerIsCreator, IERC165 { ++abstract contract UpgradeableTokenPool is IPool, OwnerIsCreator, IERC165 { + using EnumerableSet for EnumerableSet.AddressSet; + using EnumerableSet for EnumerableSet.UintSet; + using RateLimiter for RateLimiter.TokenBucket; +@@ -74,23 +74,17 @@ abstract contract TokenPool is IPool, OwnerIsCreator, IERC165 { + EnumerableSet.UintSet internal s_remoteChainSelectors; + /// @dev Outbound rate limits. Corresponds to the inbound rate limit for the pool + /// on the remote chain. +- mapping(uint64 remoteChainSelector => RateLimiter.TokenBucket) internal s_outboundRateLimits; ++ mapping(uint64 => RateLimiter.TokenBucket) internal s_outboundRateLimits; + /// @dev Inbound rate limits. This allows per destination chain + /// token issuer specified rate limiting (e.g. issuers may trust chains to varying + /// degrees and prefer different limits) +- mapping(uint64 remoteChainSelector => RateLimiter.TokenBucket) internal s_inboundRateLimits; ++ mapping(uint64 => RateLimiter.TokenBucket) internal s_inboundRateLimits; + +- constructor(IERC20 token, address[] memory allowlist, address armProxy, address router) { +- if (address(token) == address(0) || router == address(0)) revert ZeroAddressNotAllowed(); ++ constructor(IERC20 token, address armProxy, bool allowlistEnabled) { ++ if (address(token) == address(0)) revert ZeroAddressNotAllowed(); + i_token = token; + i_armProxy = armProxy; +- s_router = IRouter(router); +- +- // Pool can be set as permissioned or permissionless at deployment time only to save hot-path gas. 
+- i_allowlistEnabled = allowlist.length > 0; +- if (i_allowlistEnabled) { +- _applyAllowListUpdates(new address[](0), allowlist); +- } ++ i_allowlistEnabled = allowlistEnabled; + } + + /// @notice Get ARM proxy address +``` From ffea18d05d2b0630c03e32f1d178f7ad0eadc7c9 Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Thu, 23 May 2024 18:21:30 +0200 Subject: [PATCH 08/18] fix: Directory restructure --- README.md | 5 + .../UpgradeableBurnMintTokenPool.sol | 6 +- .../UpgradeableBurnMintTokenPoolAbstract.sol | 2 +- .../UpgradeableLockReleaseTokenPool.sol | 12 +- .../pools/{ => GHO}/UpgradeableTokenPool.sol | 16 +- .../{ => GHO}/VersionedInitializable.sol | 0 .../diffs/BurnMintTokenPoolAbstract_diff.md | 6 +- .../{ => GHO}/diffs/BurnMintTokenPool_diff.md | 10 +- .../diffs/LockReleaseTokenPool_diff.md | 24 +- .../pools/{ => GHO}/diffs/TokenPool_diff.md | 6 +- contracts/src/v0.8/ccip/test/BaseTest.t.sol | 7 +- .../v0.8/ccip/test/mocks/MockUpgradeable.sol | 2 +- .../test/pools/GHO/GHOTokenPoolEthereum.t.sol | 4 +- .../GHO/GHOTokenPoolEthereumBridgeLimit.t.sol | 41 ++- ...okenPoolEthereumBridgeLimitInvariant.t.sol | 313 ++++++++++++++++++ ...GHOTokenPoolEthereumBridgeLimitSetup.t.sol | 26 +- .../pools/GHO/GHOTokenPoolEthereumE2E.t.sol | 6 +- .../pools/GHO/GHOTokenPoolEthereumSetup.t.sol | 4 +- .../test/pools/GHO/GHOTokenPoolRemote.t.sol | 4 +- .../pools/GHO/GHOTokenPoolRemoteE2E.t.sol | 6 +- .../pools/GHO/GHOTokenPoolRemoteSetup.t.sol | 4 +- 21 files changed, 431 insertions(+), 73 deletions(-) rename contracts/src/v0.8/ccip/pools/{ => GHO}/UpgradeableBurnMintTokenPool.sol (92%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/UpgradeableBurnMintTokenPoolAbstract.sol (95%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/UpgradeableLockReleaseTokenPool.sol (95%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/UpgradeableTokenPool.sol (95%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/VersionedInitializable.sol (100%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/diffs/BurnMintTokenPoolAbstract_diff.md (99%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/diffs/BurnMintTokenPool_diff.md (99%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/diffs/LockReleaseTokenPool_diff.md (99%) rename contracts/src/v0.8/ccip/pools/{ => GHO}/diffs/TokenPool_diff.md (99%) create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol diff --git a/README.md b/README.md index eff798d46b..2e90af3864 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +> Forked repository of CCIP contracts ([version 2.8.0 release](https://github.com/smartcontractkit/ccip/tree/v2.8.0-ccip1.4.0-release)) includes modifications for developing custom TokenPool contracts tailored for the GHO cross-chain strategy. All relevant code and tests are located in the [GHO pools directory](./contracts/v0.8/ccip/pools/GHO). +

@@ -232,9 +234,11 @@ flowchart RL github.com/smartcontractkit/chainlink/core/scripts --> github.com/smartcontractkit/chainlink/v2 ``` + The `integration-tests` and `core/scripts` modules import the root module using a relative replace in their `go.mod` files, so dependency changes in the root `go.mod` often require changes in those modules as well. After making a change, `go mod tidy` can be run on all three modules using: + ``` make gomodtidy ``` @@ -254,6 +258,7 @@ pnpm i ```bash pnpm test ``` + NOTE: Chainlink is currently in the process of migrating to Foundry and contains both Foundry and Hardhat tests in some versions. More information can be found here: [Chainlink Foundry Documentation](https://github.com/smartcontractkit/chainlink/blob/develop/contracts/foundry.md). Any 't.sol' files associated with Foundry tests, contained within the src directories will be ignored by Hardhat. diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol similarity index 92% rename from contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol rename to contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol index f07f8c3a28..cc0f24af39 100644 --- a/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol @@ -1,13 +1,13 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.0; -import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; -import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; +import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; +import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; -import {IRouter} from "../interfaces/IRouter.sol"; +import {IRouter} from "../../interfaces/IRouter.sol"; import {VersionedInitializable} from "./VersionedInitializable.sol"; /// @title UpgradeableBurnMintTokenPool diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol similarity index 95% rename from contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol rename to contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol index 651965e40b..e228732855 100644 --- a/contracts/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.0; -import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; +import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol similarity index 95% rename from contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol rename to contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol index 7ca3d5f389..0fac98c708 100644 --- a/contracts/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol @@ -1,16 +1,16 @@ // 
SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.0; -import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; -import {ILiquidityContainer} from "../../rebalancer/interfaces/ILiquidityContainer.sol"; +import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; +import {ILiquidityContainer} from "../../../rebalancer/interfaces/ILiquidityContainer.sol"; import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; -import {RateLimiter} from "../libraries/RateLimiter.sol"; +import {RateLimiter} from "../../libraries/RateLimiter.sol"; -import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; -import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; +import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {SafeERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; -import {IRouter} from "../interfaces/IRouter.sol"; +import {IRouter} from "../../interfaces/IRouter.sol"; import {VersionedInitializable} from "./VersionedInitializable.sol"; /// @title UpgradeableLockReleaseTokenPool diff --git a/contracts/src/v0.8/ccip/pools/UpgradeableTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol similarity index 95% rename from contracts/src/v0.8/ccip/pools/UpgradeableTokenPool.sol rename to contracts/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol index fcd8948098..ee359ac1f8 100644 --- a/contracts/src/v0.8/ccip/pools/UpgradeableTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol @@ -1,16 +1,16 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.0; -import {IPool} from "../interfaces/pools/IPool.sol"; -import {IARM} from "../interfaces/IARM.sol"; -import {IRouter} from "../interfaces/IRouter.sol"; +import {IPool} from "../../interfaces/pools/IPool.sol"; +import {IARM} from "../../interfaces/IARM.sol"; +import {IRouter} from "../../interfaces/IRouter.sol"; -import {OwnerIsCreator} from "../../shared/access/OwnerIsCreator.sol"; -import {RateLimiter} from "../libraries/RateLimiter.sol"; +import {OwnerIsCreator} from "../../../shared/access/OwnerIsCreator.sol"; +import {RateLimiter} from "../../libraries/RateLimiter.sol"; -import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; -import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; -import {EnumerableSet} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/structs/EnumerableSet.sol"; +import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +import {IERC165} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; +import {EnumerableSet} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/structs/EnumerableSet.sol"; /// @notice Base abstract class with common functions for all token pools. 
/// A token pool serves as isolated place for holding tokens and token specific logic diff --git a/contracts/src/v0.8/ccip/pools/VersionedInitializable.sol b/contracts/src/v0.8/ccip/pools/GHO/VersionedInitializable.sol similarity index 100% rename from contracts/src/v0.8/ccip/pools/VersionedInitializable.sol rename to contracts/src/v0.8/ccip/pools/GHO/VersionedInitializable.sol diff --git a/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPoolAbstract_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPoolAbstract_diff.md similarity index 99% rename from contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPoolAbstract_diff.md rename to contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPoolAbstract_diff.md index ab584e1849..11c20c0a6d 100644 --- a/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPoolAbstract_diff.md +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPoolAbstract_diff.md @@ -7,12 +7,12 @@ index f5eb135186..651965e40b 100644 // SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; +pragma solidity ^0.8.0; - + import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; - + -import {TokenPool} from "./TokenPool.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; - + -abstract contract BurnMintTokenPoolAbstract is TokenPool { +abstract contract UpgradeableBurnMintTokenPoolAbstract is UpgradeableTokenPool { /// @notice Contains the specific burn call for a pool. diff --git a/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPool_diff.md similarity index 99% rename from contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPool_diff.md rename to contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPool_diff.md index 3fdf2d83da..1dfabb1e60 100644 --- a/contracts/src/v0.8/ccip/pools/diffs/BurnMintTokenPool_diff.md +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPool_diff.md @@ -7,15 +7,15 @@ index 9af0f22f4c..f07f8c3a28 100644 // SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; +pragma solidity ^0.8.0; - + import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; - + -import {TokenPool} from "./TokenPool.sol"; -import {BurnMintTokenPoolAbstract} from "./BurnMintTokenPoolAbstract.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; +import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; - + -/// @notice This pool mints and burns a 3rd-party token. -/// @dev Pool whitelisting mode is set in the constructor and cannot be modified later. -/// It either accepts any address as originalSender, or only accepts whitelisted originalSender. @@ -33,7 +33,7 @@ index 9af0f22f4c..f07f8c3a28 100644 +/// - Move of allowlist and router definition to initialization stage +contract UpgradeableBurnMintTokenPool is VersionedInitializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { string public constant override typeAndVersion = "BurnMintTokenPool 1.4.0"; - + + /// @dev Constructor + /// @param token The bridgeable token that is managed by this pool. 
+ /// @param armProxy The address of the arm proxy @@ -47,7 +47,7 @@ index 9af0f22f4c..f07f8c3a28 100644 - ) TokenPool(token, allowlist, armProxy, router) {} + bool allowlistEnabled + ) UpgradeableTokenPool(IBurnMintERC20(token), armProxy, allowlistEnabled) {} - + - /// @inheritdoc BurnMintTokenPoolAbstract + /// @dev Initializer + /// @dev The address passed as `owner` must accept ownership after initialization. diff --git a/contracts/src/v0.8/ccip/pools/diffs/LockReleaseTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/LockReleaseTokenPool_diff.md similarity index 99% rename from contracts/src/v0.8/ccip/pools/diffs/LockReleaseTokenPool_diff.md rename to contracts/src/v0.8/ccip/pools/GHO/diffs/LockReleaseTokenPool_diff.md index 3a81233289..ac5d7bf30e 100644 --- a/contracts/src/v0.8/ccip/pools/diffs/LockReleaseTokenPool_diff.md +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/LockReleaseTokenPool_diff.md @@ -7,17 +7,17 @@ index 1a17fa0398..7ca3d5f389 100644 // SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; +pragma solidity ^0.8.0; - + import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {ILiquidityContainer} from "../../rebalancer/interfaces/ILiquidityContainer.sol"; - + -import {TokenPool} from "./TokenPool.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; import {RateLimiter} from "../libraries/RateLimiter.sol"; - + import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; - + -/// @notice Token pool used for tokens on their native chain. This uses a lock and release mechanism. -/// Because of lock/unlock requiring liquidity, this pool contract also has function to add and remove -/// liquidity. This allows for proper bookkeeping for both user and liquidity provider balances. @@ -40,22 +40,22 @@ index 1a17fa0398..7ca3d5f389 100644 + ITypeAndVersion +{ using SafeERC20 for IERC20; - + error InsufficientLiquidity(); error LiquidityNotAccepted(); error Unauthorized(address caller); - + + error BridgeLimitExceeded(uint256 bridgeLimit); + error NotEnoughBridgedAmount(); + event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); + string public constant override typeAndVersion = "LockReleaseTokenPool 1.4.0"; - + /// @dev The unique lock release pool flag to signal through EIP 165. @@ -37,16 +52,55 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion /// @dev Can be address(0) if none is configured. address internal s_rateLimitAdmin; - + + /// @notice Maximum amount of tokens that can be bridged to other chains + uint256 private s_bridgeLimit; + /// @notice Amount of tokens bridged (transferred out) @@ -83,7 +83,7 @@ index 1a17fa0398..7ca3d5f389 100644 + ) UpgradeableTokenPool(IERC20(token), armProxy, allowlistEnabled) { i_acceptLiquidity = acceptLiquidity; } - + + /// @dev Initializer + /// @dev The address passed as `owner` must accept ownership after initialization. + /// @dev The `allowlist` is only effective if pool is set to access-controlled mode @@ -138,7 +138,7 @@ index 1a17fa0398..7ca3d5f389 100644 @@ -120,11 +182,46 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion s_rateLimitAdmin = rateLimitAdmin; } - + + /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out + /// @dev Only callable by the owner or the bridge limit admin. 
+ /// @dev Bridge limit changes should be carefully managed, especially when reducing below the current bridged amount
@@ -173,7 +173,7 @@ index 1a17fa0398..7ca3d5f389 100644
    function getRateLimitAdmin() external view returns (address) {
      return s_rateLimitAdmin;
    }
- 
+
 + /// @notice Gets the bridge limit admin address.
 + function getBridgeLimitAdmin() external view returns (address) {
 +   return s_bridgeLimitAdmin;
@@ -183,7 +183,7 @@ index 1a17fa0398..7ca3d5f389 100644
    /// @return true if the pool can accept liquidity, false otherwise.
    function canAcceptLiquidity() external view returns (bool) {
 @@ -166,4 +263,15 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion
- 
+
    _setRateLimitConfig(remoteChainSelector, outboundConfig, inboundConfig);
  }
+
diff --git a/contracts/src/v0.8/ccip/pools/diffs/TokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/TokenPool_diff.md
similarity index 99%
rename from contracts/src/v0.8/ccip/pools/diffs/TokenPool_diff.md
rename to contracts/src/v0.8/ccip/pools/GHO/diffs/TokenPool_diff.md
index 4029fe88bd..6ff8893172 100644
--- a/contracts/src/v0.8/ccip/pools/diffs/TokenPool_diff.md
+++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/TokenPool_diff.md
@@ -7,7 +7,7 @@ index b3571bb449..fcd8948098 100644
  // SPDX-License-Identifier: BUSL-1.1
 -pragma solidity 0.8.19;
 +pragma solidity ^0.8.0;
- 
+
  import {IPool} from "../interfaces/pools/IPool.sol";
  import {IARM} from "../interfaces/IARM.sol";
@@ -15,7 +15,7 @@ import {EnumerableSet} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts
@@ -30,7 +30,7 @@ index b3571bb449..fcd8948098 100644
  /// degrees and prefer different limits)
 - mapping(uint64 remoteChainSelector => RateLimiter.TokenBucket) internal s_inboundRateLimits;
 + mapping(uint64 => RateLimiter.TokenBucket) internal s_inboundRateLimits;
- 
+
 - constructor(IERC20 token, address[] memory allowlist, address armProxy, address router) {
 -   if (address(token) == address(0) || router == address(0)) revert ZeroAddressNotAllowed();
 + constructor(IERC20 token, address armProxy, bool allowlistEnabled) {
@@ -46,6 +46,6 @@ index b3571bb449..fcd8948098 100644
 - }
 + i_allowlistEnabled = allowlistEnabled;
  }
- 
+
  /// @notice Get ARM proxy address
 ```
diff --git a/contracts/src/v0.8/ccip/test/BaseTest.t.sol b/contracts/src/v0.8/ccip/test/BaseTest.t.sol
index e12746a802..f645a6e612 100644
--- a/contracts/src/v0.8/ccip/test/BaseTest.t.sol
+++ b/contracts/src/v0.8/ccip/test/BaseTest.t.sol
@@ -2,12 +2,15 @@ pragma solidity 0.8.19;
 import {Test, stdError} from "forge-std/Test.sol";
+import {StdInvariant} from "forge-std/StdInvariant.sol";
+import {StdCheats} from "forge-std/StdCheats.sol";
+import {StdUtils} from "forge-std/StdUtils.sol";
 import {MockARM} from "./mocks/MockARM.sol";
 import {StructFactory} from "./StructFactory.sol";
 import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol";
-import {UpgradeableLockReleaseTokenPool} from "../pools/UpgradeableLockReleaseTokenPool.sol";
-import {UpgradeableBurnMintTokenPool} from "../pools/UpgradeableBurnMintTokenPool.sol";
+import {UpgradeableLockReleaseTokenPool} from "../pools/GHO/UpgradeableLockReleaseTokenPool.sol";
+import {UpgradeableBurnMintTokenPool} from "../pools/GHO/UpgradeableBurnMintTokenPool.sol";
 import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol";
 contract BaseTest is Test, StructFactory {
diff --git a/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol 
b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol index e613768e6c..bd80dee812 100644 --- a/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol +++ b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {VersionedInitializable} from "../../pools/VersionedInitializable.sol"; +import {VersionedInitializable} from "../../pools/GHO/VersionedInitializable.sol"; /** * @dev Mock contract to test upgrades, not to be used in production. diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol index 6ab3e47ecd..8eacd4232e 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol @@ -8,8 +8,8 @@ import {stdError} from "forge-std/Test.sol"; import {MockUpgradeable} from "../../mocks/MockUpgradeable.sol"; import {IPool} from "../../../interfaces/pools/IPool.sol"; import {LockReleaseTokenPool} from "../../../pools/LockReleaseTokenPool.sol"; -import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol"; -import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {IERC165} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol index 83eae99a71..6bf367d5a1 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol @@ -746,10 +746,8 @@ contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBr /// @dev All remote liquidity is on one chain or the other function testLiquidityUnbalanced() public { - uint256 amount; - // Bridge all out to Arbitrum - amount = _getMaxToBridgeOut(0); + uint256 amount = _getMaxToBridgeOut(0); deal(tokens[0], USER, amount); _bridgeGho(0, 1, USER, amount); @@ -810,4 +808,41 @@ contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBr _updateBucketCapacity(1, 10); _moveGhoDestination(0, 1, USER, 10); } + + /// @dev Test showcasing a user locked due to a bridge limit reduction below current bridged amount + function testUserLockedBridgeLimitReductionBelowLevel() public { + // Bridge all out to Arbitrum + uint256 amount = _getMaxToBridgeOut(0); + deal(tokens[0], USER, amount); + _bridgeGho(0, 1, USER, amount); + + // Reduce bridge limit below current bridged amount + uint256 newBridgeLimit = amount / 2; + _updateBridgeLimit(newBridgeLimit); + _updateBucketCapacity(1, newBridgeLimit); + // _updateBucketCapacity(2, newBridgeLimit); + + // assertEq(_getMaxToBridgeIn(2), newBridgeLimit); + + // Reverts + _bridgeGho(1, 2, USER, amount); + } + + /// @dev Test showcasing a user locked due to a bridge limit reduction below current bridged amount + function testUserLockedBridgeLimitReductionBelowLevel2() public { + // Bridge all out to Arbitrum + uint256 amount = _getMaxToBridgeOut(0); 
+ deal(tokens[0], USER, amount); + _bridgeGho(0, 1, USER, amount); + + // Reduce bridge limit below current bridged amount + uint256 newBridgeLimit = amount / 2; + _updateBridgeLimit(newBridgeLimit); + _updateBucketCapacity(2, newBridgeLimit); + + // assertEq(_getMaxToBridgeIn(2), newBridgeLimit); + + // Reverts + _bridgeGho(1, 2, USER, amount); + } } diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol new file mode 100644 index 0000000000..300b2caf21 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol @@ -0,0 +1,313 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; + +import {IPool} from "../../../interfaces/pools/IPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; + +import {StdInvariant} from "forge-std/StdInvariant.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; + +import {console2} from "forge-std/console2.sol"; + +contract GHOTokenPoolHandler is BaseTest { + address internal ARM_PROXY = makeAddr("ARM_PROXY"); + address internal ROUTER = makeAddr("ROUTER"); + address internal RAMP = makeAddr("RAMP"); + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + address internal USER = makeAddr("USER"); + + uint256 public immutable INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; + + uint256[] public chainsList; + mapping(uint256 => address) public pools; // chainId => bridgeTokenPool + mapping(uint256 => address) public tokens; // chainId => ghoToken + mapping(uint256 => uint256) public bucketCapacities; // chainId => bucketCapacities + mapping(uint256 => uint256) public bucketLevels; // chainId => bucketLevels + mapping(uint256 => uint256) public liquidity; // chainId => liquidity + uint256 public remoteLiquidity; + uint256 public bridged; + bool public capacityBelowLevelUpdate; + + constructor() { + // Ethereum with id 0 + chainsList.push(0); + tokens[0] = address(new GhoToken(AAVE_DAO)); + pools[0] = _deployUpgradeableLockReleaseTokenPool( + tokens[0], + ARM_PROXY, + ROUTER, + OWNER, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ); + + // Mock calls for bridging + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), abi.encode(RAMP)); + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("isOffRamp(uint64,address)"))), abi.encode(true)); + vm.mockCall(ARM_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed()"))), abi.encode(false)); + + // Arbitrum + _addBridge(1, INITIAL_BRIDGE_LIMIT); + _enableLane(0, 1); + + // Avalanche + _addBridge(2, INITIAL_BRIDGE_LIMIT); + _enableLane(0, 2); + _enableLane(1, 2); + } + + /// forge-config: ccip.fuzz.runs = 500 + function bridgeGho(uint256 fromChain, uint256 toChain, uint256 amount) public { + fromChain = bound(fromChain, 0, 2); + toChain = bound(toChain, 0, 2); + vm.assume(fromChain != toChain); + uint256 maxBalance = GhoToken(tokens[fromChain]).balanceOf(address(this)); + uint256 maxToBridge = _getMaxToBridgeOut(fromChain); + uint256 maxAmount = maxBalance > maxToBridge ? 
maxToBridge : maxBalance; + amount = bound(amount, 0, maxAmount); + + console2.log("bridgeGho", fromChain, toChain, amount); + console2.log("bridgeLimit", UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit()); + console2.log("currentBridged", UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit()); + if (!_isEthereumChain(fromChain)) { + console2.log("bucket from", fromChain, _getCapacity(fromChain), _getLevel(fromChain)); + } + if (!_isEthereumChain(toChain)) { + console2.log("bucket to", toChain, _getCapacity(toChain), _getLevel(toChain)); + } + + if (amount > 0) { + _bridgeGho(fromChain, toChain, address(this), amount); + } + } + + /// forge-config: ccip.fuzz.runs = 500 + function updateBucketCapacity(uint256 chain, uint128 newCapacity) public { + chain = bound(chain, 1, 2); + uint256 otherChain = (chain % 2) + 1; + vm.assume(newCapacity >= bridged); + + uint256 oldCapacity = bucketCapacities[chain]; + + console2.log("updateBucketCapacity", chain, oldCapacity, newCapacity); + if (newCapacity < bucketLevels[chain]) { + capacityBelowLevelUpdate = true; + } else { + capacityBelowLevelUpdate = false; + } + + if (newCapacity > oldCapacity) { + // Increase + _updateBucketCapacity(chain, newCapacity); + // keep bridge limit as the minimum bucket capacity + if (newCapacity < bucketCapacities[otherChain]) { + _updateBridgeLimit(newCapacity); + } + } else { + // Reduction + // keep bridge limit as the minimum bucket capacity + if (newCapacity < bucketCapacities[otherChain]) { + _updateBridgeLimit(newCapacity); + } + _updateBucketCapacity(chain, newCapacity); + } + } + + function _enableLane(uint256 fromId, uint256 toId) internal { + // from + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + RateLimiter.Config memory emptyRateConfig = RateLimiter.Config(false, 0, 0); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: uint64(toId), + allowed: true, + outboundRateLimiterConfig: emptyRateConfig, + inboundRateLimiterConfig: emptyRateConfig + }); + + vm.startPrank(OWNER); + UpgradeableTokenPool(pools[fromId]).applyChainUpdates(chainUpdate); + + // to + chainUpdate[0].remoteChainSelector = uint64(fromId); + UpgradeableTokenPool(pools[toId]).applyChainUpdates(chainUpdate); + vm.stopPrank(); + } + + function _addBridge(uint256 chainId, uint256 bucketCapacity) internal { + require(tokens[chainId] == address(0), "BRIDGE_ALREADY_EXISTS"); + + chainsList.push(chainId); + + // GHO Token + GhoToken ghoToken = new GhoToken(AAVE_DAO); + tokens[chainId] = address(ghoToken); + + // UpgradeableTokenPool + address bridgeTokenPool = _deployUpgradeableBurnMintTokenPool( + address(ghoToken), + ARM_PROXY, + ROUTER, + OWNER, + PROXY_ADMIN + ); + pools[chainId] = bridgeTokenPool; + + // Facilitator + bucketCapacities[chainId] = bucketCapacity; + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + ghoToken.grantRole(ghoToken.FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + ghoToken.addFacilitator(bridgeTokenPool, "UpgradeableTokenPool", uint128(bucketCapacity)); + vm.stopPrank(); + } + + function _updateBridgeLimit(uint256 newBridgeLimit) internal { + vm.stopPrank(); + vm.startPrank(OWNER); + UpgradeableLockReleaseTokenPool(pools[0]).setBridgeLimit(newBridgeLimit); + vm.stopPrank(); + } + + function _updateBucketCapacity(uint256 chainId, uint256 newBucketCapacity) internal { + bucketCapacities[chainId] = newBucketCapacity; + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(tokens[chainId]).grantRole(GhoToken(tokens[chainId]).BUCKET_MANAGER_ROLE(), 
AAVE_DAO); + GhoToken(tokens[chainId]).setFacilitatorBucketCapacity(pools[chainId], uint128(newBucketCapacity)); + vm.stopPrank(); + } + + function _getCapacity(uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (uint256 capacity, ) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); + return capacity; + } + + function _getLevel(uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (, uint256 level) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); + return level; + } + + function _getMaxToBridgeOut(uint256 fromChain) internal view returns (uint256) { + if (_isEthereumChain(fromChain)) { + UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(pools[0]); + uint256 bridgeLimit = ethTokenPool.getBridgeLimit(); + uint256 currentBridged = ethTokenPool.getCurrentBridgedAmount(); + return currentBridged > bridgeLimit ? 0 : bridgeLimit - currentBridged; + } else { + (, uint256 level) = GhoToken(tokens[fromChain]).getFacilitatorBucket(pools[fromChain]); + return level; + } + } + + function _bridgeGho(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { + _moveGhoOrigin(fromChain, toChain, user, amount); + _moveGhoDestination(fromChain, toChain, user, amount); + } + + function _moveGhoOrigin(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { + // Simulate CCIP pull of funds + vm.startPrank(user); + GhoToken(tokens[fromChain]).transfer(pools[fromChain], amount); + + vm.startPrank(RAMP); + IPool(pools[fromChain]).lockOrBurn(user, bytes(""), amount, uint64(toChain), bytes("")); + + if (_isEthereumChain(fromChain)) { + // Lock + bridged += amount; + } else { + // Burn + bucketLevels[fromChain] -= amount; + liquidity[fromChain] -= amount; + remoteLiquidity -= amount; + } + } + + function _moveGhoDestination(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { + vm.startPrank(RAMP); + IPool(pools[toChain]).releaseOrMint(bytes(""), user, amount, uint64(fromChain), bytes("")); + + if (_isEthereumChain(toChain)) { + // Release + bridged -= amount; + } else { + // Mint + bucketLevels[toChain] += amount; + liquidity[toChain] += amount; + remoteLiquidity += amount; + } + } + + function _isEthereumChain(uint256 chainId) internal pure returns (bool) { + return chainId == 0; + } + + function getChainsList() public view returns (uint256[] memory) { + return chainsList; + } +} + +contract GHOTokenPoolEthereumBridgeLimitInvariant is BaseTest { + GHOTokenPoolHandler handler; + + function setUp() public override { + super.setUp(); + + handler = new GHOTokenPoolHandler(); + handler.getChainsList(); + deal(handler.tokens(0), address(handler), handler.INITIAL_BRIDGE_LIMIT()); + + targetContract(address(handler)); + } + + /// forge-config: ccip.invariant.fail-on-revert = true + /// forge-config: ccip.invariant.runs = 2000 + /// forge-config: ccip.invariant.depth = 50 + function invariant_bridgeLimit() public { + // Check bridged + assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), handler.bridged()); + + // Check levels and buckets + uint256 sumLevels; + uint256 chainId; + uint256 capacity; + uint256 level; + uint256[] memory chainsListLocal = handler.getChainsList(); + for (uint i = 1; i < chainsListLocal.length; i++) { + // not counting Ethereum -{0} + chainId = chainsListLocal[i]; + (capacity, level) = 
GhoToken(handler.tokens(chainId)).getFacilitatorBucket(handler.pools(chainId));
+
+      // Aggregate levels
+      sumLevels += level;
+
+      assertEq(capacity, handler.bucketCapacities(chainId), "wrong bucket capacity");
+      assertEq(level, handler.bucketLevels(chainId), "wrong bucket level");
+
+      assertGe(
+        capacity,
+        UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(),
+        "capacity must be greater than or equal to bridgeLimit"
+      );
+
+      // This invariant only holds if there were no bridge limit reductions below the current bridged amount
+      if (!handler.capacityBelowLevelUpdate()) {
+        assertLe(
+          level,
+          UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(),
+          "level cannot be higher than bridgeLimit"
+        );
+      }
+    }
+    // Check bridged is equal to sum of levels
+    assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), sumLevels, "wrong bridged");
+    assertEq(handler.remoteLiquidity(), sumLevels, "wrong bridged");
+  }
+}
diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol
index 16cea99aeb..5e9b5f147a 100644
--- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol
+++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol
@@ -5,11 +5,12 @@ import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol";
 import {BaseTest} from "../../BaseTest.t.sol";
 import {IPool} from "../../../interfaces/pools/IPool.sol";
-import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol";
-import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol";
-import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol";
+import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol";
+import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol";
+import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol";
 import {RateLimiter} from "../../../libraries/RateLimiter.sol";
+import {console2} from "forge-std/console2.sol";
 contract GHOTokenPoolEthereumBridgeLimitSetup is BaseTest {
   address internal ARM_PROXY = makeAddr("ARM_PROXY");
   address internal ROUTER = makeAddr("ROUTER");
@@ -18,18 +19,19 @@ contract GHOTokenPoolEthereumBridgeLimitSetup is BaseTest {
   address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN");
   address internal USER = makeAddr("USER");
-  uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18;
+  uint256 public immutable INITIAL_BRIDGE_LIMIT = 100e6 * 1e18;
-  uint256[] internal chainsList;
-  mapping(uint256 => address) internal pools; // chainId => bridgeTokenPool
-  mapping(uint256 => address) internal tokens; // chainId => ghoToken
-  mapping(uint256 => uint256) internal bucketCapacities; // chainId => bucketCapacities
-  mapping(uint256 => uint256) internal bucketLevels; // chainId => bucketLevels
-  mapping(uint256 => uint256) internal liquidity; // chainId => liquidity
-  uint256 internal remoteLiquidity;
-  uint256 internal bridged;
+  uint256[] public chainsList;
+  mapping(uint256 => address) public pools; // chainId => bridgeTokenPool
+  mapping(uint256 => address) public tokens; // chainId => ghoToken
+  mapping(uint256 => uint256) public bucketCapacities; // chainId => bucketCapacities
+  mapping(uint256 => uint256) public bucketLevels; // chainId => bucketLevels
+  mapping(uint256 => uint256) public liquidity; // chainId => 
liquidity + uint256 public remoteLiquidity; + uint256 public bridged; function setUp() public virtual override { + console2.log("ENTRA"); // Ethereum with id 0 chainsList.push(0); tokens[0] = address(new GhoToken(AAVE_DAO)); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol index 82e236618b..52ef9f5d6d 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol @@ -9,9 +9,9 @@ import "../../commitStore/CommitStore.t.sol"; import "../../onRamp/EVM2EVMOnRampSetup.t.sol"; import "../../offRamp/EVM2EVMOffRampSetup.t.sol"; import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; -import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol"; -import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; -import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {IPool} from "../../../interfaces/pools/IPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {E2E} from "../End2End.t.sol"; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol index 89d27aaf9f..44038b9eb4 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol @@ -7,8 +7,8 @@ import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent- import {stdError} from "forge-std/Test.sol"; import {BaseTest} from "../../BaseTest.t.sol"; import {IPool} from "../../../interfaces/pools/IPool.sol"; -import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol"; -import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {EVM2EVMOnRamp} from "../../../onRamp/EVM2EVMOnRamp.sol"; import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol index 3e2696bbf9..b1027365c2 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol @@ -6,11 +6,11 @@ import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent- import {stdError} from "forge-std/Test.sol"; import {MockUpgradeable} from "../../mocks/MockUpgradeable.sol"; -import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {EVM2EVMOnRamp} from "../../../onRamp/EVM2EVMOnRamp.sol"; import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; import {BurnMintTokenPool} from 
"../../../pools/BurnMintTokenPool.sol"; -import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {GHOTokenPoolRemoteSetup} from "./GHOTokenPoolRemoteSetup.t.sol"; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol index 62d6f5235b..ccad39ce6c 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol @@ -9,9 +9,9 @@ import "../../commitStore/CommitStore.t.sol"; import "../../onRamp/EVM2EVMOnRampSetup.t.sol"; import "../../offRamp/EVM2EVMOffRampSetup.t.sol"; import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; -import {UpgradeableLockReleaseTokenPool} from "../../../pools/UpgradeableLockReleaseTokenPool.sol"; -import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; -import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {IPool} from "../../../interfaces/pools/IPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {E2E} from "../End2End.t.sol"; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol index 529715aaf2..402ca41b17 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol @@ -5,10 +5,10 @@ import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; import {stdError} from "forge-std/Test.sol"; -import {UpgradeableTokenPool} from "../../../pools/UpgradeableTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {Router} from "../../../Router.sol"; import {BurnMintERC677} from "../../../../shared/token/ERC677/BurnMintERC677.sol"; -import {UpgradeableBurnMintTokenPool} from "../../../pools/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; import {RouterSetup} from "../../router/RouterSetup.t.sol"; contract GHOTokenPoolRemoteSetup is RouterSetup { From bf5ae088cbbf18bb5524c5a9ee5473b7a1527cfc Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Thu, 23 May 2024 18:25:25 +0200 Subject: [PATCH 09/18] fix: Fix README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2e90af3864..db9ffe5360 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -> Forked repository of CCIP contracts ([version 2.8.0 release](https://github.com/smartcontractkit/ccip/tree/v2.8.0-ccip1.4.0-release)) includes modifications for developing custom TokenPool contracts tailored for the GHO cross-chain strategy. All relevant code and tests are located in the [GHO pools directory](./contracts/v0.8/ccip/pools/GHO). 
+> ❗️ Forked repository of CCIP contracts ([version 2.8.0 release](https://github.com/smartcontractkit/ccip/tree/v2.8.0-ccip1.4.0-release)) that includes modifications for developing custom TokenPool contracts tailored for the [GHO cross-chain strategy](https://governance.aave.com/t/arfc-gho-cross-chain-launch/17616). All relevant code and tests are located in the [GHO pools directory](./contracts/v0.8/ccip/pools/GHO).

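The commit below removes an ad-hoc deployment script, but the flow it exercised is how these pools are meant to be wired up: the constructor fixes immutable configuration (token, ARM proxy, allowlist mode, liquidity acceptance) while mutable configuration (owner, allowlist, router, bridge limit) goes through `initialize` behind a TransparentUpgradeableProxy. A minimal sketch of that flow, assuming placeholder addresses and a placeholder bridge limit (none of the constants below are real deployment values):

```solidity
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.0;

import {Script} from "forge-std/Script.sol";
import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol";
import {UpgradeableLockReleaseTokenPool} from "./pools/GHO/UpgradeableLockReleaseTokenPool.sol";

/// Sketch only: GHO_TOKEN, ARM_PROXY, ROUTER, PROXY_ADMIN, OWNER and
/// BRIDGE_LIMIT are placeholders, not values from this repository.
contract DeployLockReleasePoolSketch is Script {
  address constant GHO_TOKEN = address(0x1111);
  address constant ARM_PROXY = address(0x2222);
  address constant ROUTER = address(0x3333);
  address constant PROXY_ADMIN = address(0x4444);
  address constant OWNER = address(0x5555);
  uint256 constant BRIDGE_LIMIT = 100e6 * 1e18;

  function run() external {
    vm.startBroadcast();

    // Constructor only fixes immutable config: token, ARM proxy,
    // allowlist mode and liquidity acceptance.
    UpgradeableLockReleaseTokenPool impl =
      new UpgradeableLockReleaseTokenPool(GHO_TOKEN, ARM_PROXY, false, true);

    // Initialize the implementation itself so nobody else can claim it.
    address[] memory noAllowlist = new address[](0);
    impl.initialize(OWNER, noAllowlist, ROUTER, BRIDGE_LIMIT);

    // Mutable config (owner, allowlist, router, bridge limit) is set
    // through the proxy via the initializer calldata.
    bytes memory initParams = abi.encodeWithSignature(
      "initialize(address,address[],address,uint256)",
      OWNER,
      noAllowlist,
      ROUTER,
      BRIDGE_LIMIT
    );
    new TransparentUpgradeableProxy(address(impl), PROXY_ADMIN, initParams);

    vm.stopBroadcast();
  }
}
```

Ownership is handed over with the two-step ConfirmedOwner flow, so the designated owner must still call `acceptOwnership` on the proxy afterwards, as the removed script's `Accept` contract did.
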
From e2eac9f5a5ce3b609e4b47021b48c38ebf7d8d5a Mon Sep 17 00:00:00 2001
From: miguelmtzinf
Date: Thu, 23 May 2024 18:29:15 +0200
Subject: [PATCH 10/18] fix: Remove unnecessary files

---
 contracts/src/v0.8/ccip/script.s.sol | 83 ----------------------------
 1 file changed, 83 deletions(-)
 delete mode 100644 contracts/src/v0.8/ccip/script.s.sol

diff --git a/contracts/src/v0.8/ccip/script.s.sol b/contracts/src/v0.8/ccip/script.s.sol
deleted file mode 100644
index 95ef0c509b..0000000000
--- a/contracts/src/v0.8/ccip/script.s.sol
+++ /dev/null
@@ -1,83 +0,0 @@
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity ^0.8.0;
-
-import {Script, console2} from 'forge-std/Script.sol';
-import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol";
-import {UpgradeableLockReleaseTokenPool} from "./pools/UpgradeableLockReleaseTokenPool.sol";
-import {UpgradeableBurnMintTokenPool} from "./pools/UpgradeableBurnMintTokenPool.sol";
-import {UpgradeableTokenPool} from "./pools/UpgradeableTokenPool.sol";
-
-
-contract DeployLockReleaseTokenPool is Script {
-  // ETH SEPOLIA - 11155111
-  address GHO_TOKEN = 0xc4bF5CbDaBE595361438F8c6a187bDc330539c60;
-  address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00;
-  address ARM_PROXY = 0xba3f6251de62dED61Ff98590cB2fDf6871FbB991;
-  address ROUTER = 0x0BF3dE8c5D3e8A2B34D2BEeB17ABfCeBaf363A59;
-  address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE;
-
-  // ARB SEPOLIA - 421614
-  // address GHO_TOKEN = 0xb13Cfa6f8B2Eed2C37fB00fF0c1A59807C585810;
-  // address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00;
-  // address ARM_PROXY = 0x9527E2d01A3064ef6b50c1Da1C0cC523803BCFF2;
-  // address ROUTER = 0x2a9C5afB0d0e4BAb2BCdaE109EC4b0c4Be15a165;
-  // address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE;
-
-  // BASE SEPOLIA - 84532
-  // address GHO_TOKEN = 0x7CFa3f3d1cded0Da930881c609D4Dbf0012c14Bb;
-  // address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00;
-  // address ARM_PROXY = 0x99360767a4705f68CcCb9533195B761648d6d807;
-  // address ROUTER = 0xD3b06cEbF099CE7DA4AcCf578aaebFDBd6e88a93;
-  // address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE;
-
-  // FUJI - 43113
-  // address GHO_TOKEN = 0x9c04928Cc678776eC1C1C0E46ecC03a5F47A7723;
-  // address PROXY_ADMIN = 0xfA0e305E0f46AB04f00ae6b5f4560d61a2183E00;
-  // address ARM_PROXY = 0xAc8CFc3762a979628334a0E4C1026244498E821b;
-  // address ROUTER = 0xF694E193200268f9a4868e4Aa017A0118C9a8177;
-  // address TOKEN_POOL_OWNER = 0xa4b184737418B3014b3B1b1f0bE6700Bd9640FfE;
-
-  function run() external {
-    console2.log('Block Number: ', block.number);
-    vm.startBroadcast();
-
-    UpgradeableLockReleaseTokenPool tokenPoolImpl = new UpgradeableLockReleaseTokenPool(GHO_TOKEN, ARM_PROXY, false, true);
-    // Imple init
-    address[] memory emptyArray = new address[](0);
-    tokenPoolImpl.initialize(TOKEN_POOL_OWNER, emptyArray, ROUTER, 10e18);
-    // proxy deploy and init
-    bytes memory tokenPoolInitParams = abi.encodeWithSignature(
-      "initialize(address,address[],address,uint256)",
-      TOKEN_POOL_OWNER,
-      emptyArray,
-      ROUTER,
-      10e18
-    );
-    TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy(
-      address(tokenPoolImpl),
-      PROXY_ADMIN,
-      tokenPoolInitParams
-    );
-
-    vm.stopBroadcast();
-    // Manage ownership
-    // UpgradeableLockReleaseTokenPool(address(tokenPoolProxy)).acceptOwnership();
-
-  }
-}
-
-contract Accept is Script {
-
-  function run() external {
-    console2.log('Block Number: ', 
block.number); - vm.startBroadcast(); - - console2.log(UpgradeableLockReleaseTokenPool(0x50A715d63bDcd5455a3308932a624263d170Dd74).getBridgeLimit()); - - // Manage ownership - UpgradeableLockReleaseTokenPool(0x50A715d63bDcd5455a3308932a624263d170Dd74).acceptOwnership(); - vm.stopBroadcast(); - - } -} - From 16972b1e1396483d754fe4dea1c251d5d3d2d303 Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Fri, 24 May 2024 09:16:25 +0200 Subject: [PATCH 11/18] fix: fix README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index db9ffe5360..ae8760ca6b 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -> ❗️ Forked repository of CCIP contracts ([version 2.8.0 release](https://github.com/smartcontractkit/ccip/tree/v2.8.0-ccip1.4.0-release)) that includes modifications for developing custom TokenPool contracts tailored for the [GHO cross-chain strategy](https://governance.aave.com/t/arfc-gho-cross-chain-launch/17616). All relevant code and tests are located in the [GHO pools directory](./contracts/v0.8/ccip/pools/GHO). +> ❗️ Forked repository of CCIP contracts ([version 2.8.0 release](https://github.com/smartcontractkit/ccip/tree/v2.8.0-ccip1.4.0-release)) that includes modifications for developing custom TokenPool contracts tailored for the [GHO cross-chain strategy](https://governance.aave.com/t/arfc-gho-cross-chain-launch/17616). All relevant code and tests are located in the [GHO pools directory](./contracts/src/v0.8/ccip/pools/GHO).

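Before the invariant-test refactor in the next commit, it helps to restate the accounting those tests check: the Ethereum lock/release pool tracks the amount currently bridged out against a configurable ceiling, while each remote burn/mint pool is a GHO facilitator whose bucket level mirrors the supply minted there, so the locked amount must always equal the sum of remote bucket levels. A condensed sketch of the pool-side bookkeeping (`_onLock`/`_onRelease` are illustrative names; in the contracts this logic lives inline in `lockOrBurn` and `releaseOrMint`):

```solidity
// SPDX-License-Identifier: BUSL-1.1
pragma solidity ^0.8.0;

/// Illustrative sketch of the bridge-limit accounting, not the actual
/// contract: the real checks live inline in lockOrBurn/releaseOrMint.
abstract contract BridgeLimitAccountingSketch {
  error BridgeLimitExceeded(uint256 bridgeLimit);
  error NotEnoughBridgedAmount();

  uint256 internal s_bridgeLimit; // max amount allowed out of Ethereum
  uint256 internal s_currentBridged; // amount currently locked (bridged out)

  function _onLock(uint256 amount) internal {
    // Tokens leave the source chain: grow the counter, capped by the limit.
    if ((s_currentBridged += amount) > s_bridgeLimit) revert BridgeLimitExceeded(s_bridgeLimit);
  }

  function _onRelease(uint256 amount) internal {
    // Tokens return to the source chain: the counter can never underflow.
    if (amount > s_currentBridged) revert NotEnoughBridgedAmount();
    s_currentBridged -= amount;
  }
}
```

The handler mirrors this bookkeeping off-chain in `bridged`, `bucketLevels` and `remoteLiquidity`, which is what lets `invariant_bridgeLimit` compare contract state against the model after every fuzzed action.
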
From 89ab0e8edb47f86ade3e673046e6e158d45179a3 Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Mon, 27 May 2024 14:34:39 +0200 Subject: [PATCH 12/18] test: Refactor invariant tests --- .../GHO/GHOTokenPoolEthereumBridgeLimit.t.sol | 29 ++----- ...GHOTokenPoolEthereumBridgeLimitSetup.t.sol | 2 - ...okenPoolEthereumBridgeLimitInvariant.t.sol | 65 +++++++++++++++ .../GHOTokenPoolHandler.t.sol} | 83 ++----------------- 4 files changed, 78 insertions(+), 101 deletions(-) create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolEthereumBridgeLimitInvariant.t.sol => invariant/GHOTokenPoolHandler.t.sol} (72%) diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol index 6bf367d5a1..ee1fa8df49 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol @@ -820,29 +820,16 @@ contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBr uint256 newBridgeLimit = amount / 2; _updateBridgeLimit(newBridgeLimit); _updateBucketCapacity(1, newBridgeLimit); - // _updateBucketCapacity(2, newBridgeLimit); - // assertEq(_getMaxToBridgeIn(2), newBridgeLimit); - - // Reverts + // Moving to Avalanche is not a problem because bucket capacity is higher than bridge limit + assertGt(_getMaxToBridgeIn(2), newBridgeLimit); _bridgeGho(1, 2, USER, amount); - } - - /// @dev Test showcasing a user locked due to a bridge limit reduction below current bridged amount - function testUserLockedBridgeLimitReductionBelowLevel2() public { - // Bridge all out to Arbitrum - uint256 amount = _getMaxToBridgeOut(0); - deal(tokens[0], USER, amount); - _bridgeGho(0, 1, USER, amount); - // Reduce bridge limit below current bridged amount - uint256 newBridgeLimit = amount / 2; - _updateBridgeLimit(newBridgeLimit); - _updateBucketCapacity(2, newBridgeLimit); - - // assertEq(_getMaxToBridgeIn(2), newBridgeLimit); - - // Reverts - _bridgeGho(1, 2, USER, amount); + // Moving back to Arbitrum reverts on destination + assertEq(_getMaxToBridgeIn(1), newBridgeLimit); + _moveGhoOrigin(2, 1, USER, amount); + vm.expectRevert(); + vm.prank(RAMP); + IPool(pools[1]).releaseOrMint(bytes(""), USER, amount, uint64(2), bytes("")); } } diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol index 5e9b5f147a..bfb248b11f 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol @@ -10,7 +10,6 @@ import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMi import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; -import {console2} from "forge-std/console2.sol"; contract GHOTokenPoolEthereumBridgeLimitSetup is BaseTest { address internal ARM_PROXY = makeAddr("ARM_PROXY"); address internal ROUTER = makeAddr("ROUTER"); @@ -31,7 +30,6 @@ contract GHOTokenPoolEthereumBridgeLimitSetup is BaseTest { uint256 public bridged; function setUp() public virtual override { - console2.log("ENTRA"); // Ethereum with id 0 chainsList.push(0); tokens[0] = address(new 
GhoToken(AAVE_DAO)); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol new file mode 100644 index 0000000000..3e3a847bc2 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol @@ -0,0 +1,65 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; + +import {UpgradeableLockReleaseTokenPool} from "../../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {BaseTest} from "../../../BaseTest.t.sol"; +import {GHOTokenPoolHandler} from "./GHOTokenPoolHandler.t.sol"; + +contract GHOTokenPoolEthereumBridgeLimitInvariant is BaseTest { + GHOTokenPoolHandler handler; + + function setUp() public override { + super.setUp(); + + handler = new GHOTokenPoolHandler(); + deal(handler.tokens(0), address(handler), handler.INITIAL_BRIDGE_LIMIT()); + + targetContract(address(handler)); + } + + /// forge-config: ccip.invariant.fail-on-revert = true + /// forge-config: ccip.invariant.runs = 2000 + /// forge-config: ccip.invariant.depth = 50 + function invariant_bridgeLimit() public { + // Check bridged + assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), handler.bridged()); + + // Check levels and buckets + uint256 sumLevels; + uint256 chainId; + uint256 capacity; + uint256 level; + uint256[] memory chainsListLocal = handler.getChainsList(); + for (uint i = 1; i < chainsListLocal.length; i++) { + // not counting Ethereum (chain id 0) + chainId = chainsListLocal[i]; + (capacity, level) = GhoToken(handler.tokens(chainId)).getFacilitatorBucket(handler.pools(chainId)); + + // Aggregate levels + sumLevels += level; + + assertEq(capacity, handler.bucketCapacities(chainId), "wrong bucket capacity"); + assertEq(level, handler.bucketLevels(chainId), "wrong bucket level"); + + assertGe( + capacity, + UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(), + "capacity must be greater than or equal to bridgeLimit" + ); + + // This invariant only holds if there were no bridge limit reductions below the current bridged amount + if (!handler.capacityBelowLevelUpdate()) { + assertLe( + level, + UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(), + "level cannot be higher than bridgeLimit" + ); + } + } + // Check bridged is equal to sum of levels + assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), sumLevels, "wrong bridged"); + assertEq(handler.remoteLiquidity(), sumLevels, "wrong remote liquidity"); + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol similarity index 72% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol index 300b2caf21..de07af3ca1 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol @@ -3,15 +3,11 @@ pragma solidity 0.8.19; import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; -import {IPool} from "../../../interfaces/pools/IPool.sol"; -import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; -import 
{UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; -import {RateLimiter} from "../../../libraries/RateLimiter.sol"; - -import {StdInvariant} from "forge-std/StdInvariant.sol"; -import {BaseTest} from "../../BaseTest.t.sol"; - -import {console2} from "forge-std/console2.sol"; +import {IPool} from "../../../../interfaces/pools/IPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../../pools/GHO/UpgradeableTokenPool.sol"; +import {RateLimiter} from "../../../../libraries/RateLimiter.sol"; +import {BaseTest} from "../../../BaseTest.t.sol"; contract GHOTokenPoolHandler is BaseTest { address internal ARM_PROXY = makeAddr("ARM_PROXY"); @@ -71,16 +67,6 @@ contract GHOTokenPoolHandler is BaseTest { uint256 maxAmount = maxBalance > maxToBridge ? maxToBridge : maxBalance; amount = bound(amount, 0, maxAmount); - console2.log("bridgeGho", fromChain, toChain, amount); - console2.log("bridgeLimit", UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit()); - console2.log("currentBridged", UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit()); - if (!_isEthereumChain(fromChain)) { - console2.log("bucket from", fromChain, _getCapacity(fromChain), _getLevel(fromChain)); - } - if (!_isEthereumChain(toChain)) { - console2.log("bucket to", toChain, _getCapacity(toChain), _getLevel(toChain)); - } - if (amount > 0) { _bridgeGho(fromChain, toChain, address(this), amount); } @@ -94,7 +80,6 @@ contract GHOTokenPoolHandler is BaseTest { uint256 oldCapacity = bucketCapacities[chain]; - console2.log("updateBucketCapacity", chain, oldCapacity, newCapacity); if (newCapacity < bucketLevels[chain]) { capacityBelowLevelUpdate = true; } else { @@ -253,61 +238,3 @@ contract GHOTokenPoolHandler is BaseTest { return chainsList; } } - -contract GHOTokenPoolEthereumBridgeLimitInvariant is BaseTest { - GHOTokenPoolHandler handler; - - function setUp() public override { - super.setUp(); - - handler = new GHOTokenPoolHandler(); - handler.getChainsList(); - deal(handler.tokens(0), address(handler), handler.INITIAL_BRIDGE_LIMIT()); - - targetContract(address(handler)); - } - - /// forge-config: ccip.invariant.fail-on-revert = true - /// forge-config: ccip.invariant.runs = 2000 - /// forge-config: ccip.invariant.depth = 50 - function invariant_bridgeLimit() public { - // Check bridged - assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), handler.bridged()); - - // Check levels and buckets - uint256 sumLevels; - uint256 chainId; - uint256 capacity; - uint256 level; - uint256[] memory chainsListLocal = handler.getChainsList(); - for (uint i = 1; i < chainsListLocal.length; i++) { - // not counting Ethereum -{0} - chainId = chainsListLocal[i]; - (capacity, level) = GhoToken(handler.tokens(chainId)).getFacilitatorBucket(handler.pools(chainId)); - - // Aggregate levels - sumLevels += level; - - assertEq(capacity, handler.bucketCapacities(chainId), "wrong bucket capacity"); - assertEq(level, handler.bucketLevels(chainId), "wrong bucket level"); - - assertGe( - capacity, - UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(), - "capacity must be equal to bridgeLimit" - ); - - // This invariant only holds if there were no bridge limit reductions below the current bridged amount - if (!handler.capacityBelowLevelUpdate()) { - assertLe( - level, - UpgradeableLockReleaseTokenPool(handler.pools(0)).getBridgeLimit(), - "level cannot be higher than bridgeLimit" - 
); - } - } - // Check bridged is equal to sum of levels - assertEq(UpgradeableLockReleaseTokenPool(handler.pools(0)).getCurrentBridgedAmount(), sumLevels, "wrong bridged"); - assertEq(handler.remoteLiquidity(), sumLevels, "wrong bridged"); - } -} From 1d47b0593ba0a373d19e4e66fcbda353479363ce Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Mon, 27 May 2024 18:32:09 +0200 Subject: [PATCH 13/18] test: Refactor tests --- contracts/src/v0.8/ccip/test/BaseTest.t.sol | 89 -- .../ccip/test/pools/GHO/GHOBaseTest.t.sol | 281 ++++++ .../GHO/GHOTokenPoolEthereumBridgeLimit.t.sol | 822 ++++++++++-------- ...GHOTokenPoolEthereumBridgeLimitSetup.t.sol | 224 ----- .../pools/GHO/GHOTokenPoolEthereumE2E.t.sol | 12 +- .../pools/GHO/GHOTokenPoolEthereumSetup.t.sol | 11 +- .../pools/GHO/GHOTokenPoolRemoteE2E.t.sol | 12 +- .../pools/GHO/GHOTokenPoolRemoteSetup.t.sol | 9 +- .../GHO/invariant/GHOTokenPoolHandler.t.sol | 203 +---- 9 files changed, 784 insertions(+), 879 deletions(-) create mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOBaseTest.t.sol delete mode 100644 contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol diff --git a/contracts/src/v0.8/ccip/test/BaseTest.t.sol b/contracts/src/v0.8/ccip/test/BaseTest.t.sol index f645a6e612..0dd265092b 100644 --- a/contracts/src/v0.8/ccip/test/BaseTest.t.sol +++ b/contracts/src/v0.8/ccip/test/BaseTest.t.sol @@ -8,10 +8,6 @@ import {StdUtils} from "forge-std/StdUtils.sol"; import {MockARM} from "./mocks/MockARM.sol"; import {StructFactory} from "./StructFactory.sol"; -import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; -import {UpgradeableLockReleaseTokenPool} from "../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; -import {UpgradeableBurnMintTokenPool} from "../pools/GHO/UpgradeableBurnMintTokenPool.sol"; -import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; contract BaseTest is Test, StructFactory { bool private s_baseTestInitialized; @@ -35,90 +31,5 @@ contract BaseTest is Test, StructFactory { s_mockARM = new MockARM(); } - function _deployUpgradeableBurnMintTokenPool( - address ghoToken, - address arm, - address router, - address owner, - address proxyAdmin - ) internal returns (address) { - // Deploy BurnMintTokenPool for GHO token on source chain - UpgradeableBurnMintTokenPool tokenPoolImpl = new UpgradeableBurnMintTokenPool(ghoToken, arm, false); - // Imple init - address[] memory emptyArray = new address[](0); - tokenPoolImpl.initialize(owner, emptyArray, router); - // proxy deploy and init - bytes memory tokenPoolInitParams = abi.encodeWithSignature( - "initialize(address,address[],address)", - owner, - emptyArray, - router - ); - TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy( - address(tokenPoolImpl), - proxyAdmin, - tokenPoolInitParams - ); - // Manage ownership - vm.stopPrank(); - vm.prank(owner); - UpgradeableBurnMintTokenPool(address(tokenPoolProxy)).acceptOwnership(); - vm.startPrank(OWNER); - - return address(tokenPoolProxy); - } - - function _deployUpgradeableLockReleaseTokenPool( - address ghoToken, - address arm, - address router, - address owner, - uint256 bridgeLimit, - address proxyAdmin - ) internal returns (address) { - UpgradeableLockReleaseTokenPool tokenPoolImpl = new UpgradeableLockReleaseTokenPool(ghoToken, arm, false, true); - // Imple init - address[] memory emptyArray = new address[](0); - tokenPoolImpl.initialize(owner, emptyArray, router, bridgeLimit); - // proxy deploy and 
init - bytes memory tokenPoolInitParams = abi.encodeWithSignature( - "initialize(address,address[],address,uint256)", - owner, - emptyArray, - router, - bridgeLimit - ); - TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy( - address(tokenPoolImpl), - proxyAdmin, - tokenPoolInitParams - ); - - // Manage ownership - vm.stopPrank(); - vm.prank(owner); - UpgradeableLockReleaseTokenPool(address(tokenPoolProxy)).acceptOwnership(); - vm.startPrank(OWNER); - - return address(tokenPoolProxy); - } - - function _inflateFacilitatorLevel(address tokenPool, address ghoToken, uint256 amount) internal { - vm.stopPrank(); - vm.prank(tokenPool); - IBurnMintERC20(ghoToken).mint(address(0), amount); - } - - function _getProxyAdminAddress(address proxy) internal view returns (address) { - bytes32 ERC1967_ADMIN_SLOT = 0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103; - bytes32 adminSlot = vm.load(proxy, ERC1967_ADMIN_SLOT); - return address(uint160(uint256(adminSlot))); - } - - function _getProxyImplementationAddress(address proxy) internal view returns (address) { - bytes32 ERC1967_IMPLEMENTATION_SLOT = 0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc; - bytes32 implSlot = vm.load(proxy, ERC1967_IMPLEMENTATION_SLOT); - return address(uint160(uint256(implSlot))); - } } diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOBaseTest.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOBaseTest.t.sol new file mode 100644 index 0000000000..8893095015 --- /dev/null +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOBaseTest.t.sol @@ -0,0 +1,281 @@ +// SPDX-License-Identifier: BUSL-1.1 +pragma solidity 0.8.19; + +import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; +import {TransparentUpgradeableProxy} from "solidity-utils/contracts/transparent-proxy/TransparentUpgradeableProxy.sol"; + +import {IBurnMintERC20} from "../../../../shared/token/ERC20/IBurnMintERC20.sol"; +import {IPool} from "../../../interfaces/pools/IPool.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; + +abstract contract GHOBaseTest is BaseTest { + address internal ARM_PROXY = makeAddr("ARM_PROXY"); + address internal ROUTER = makeAddr("ROUTER"); + address internal RAMP = makeAddr("RAMP"); + address internal AAVE_DAO = makeAddr("AAVE_DAO"); + address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); + address internal USER = makeAddr("USER"); + + uint256 public immutable INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; + + struct UtilsStorage { + uint256[] chainsList; + mapping(uint256 => address) pools; // chainId => bridgeTokenPool + mapping(uint256 => address) tokens; // chainId => ghoToken + mapping(uint256 => uint256) bucketCapacities; // chainId => bucketCapacities + mapping(uint256 => uint256) bucketLevels; // chainId => bucketLevels + mapping(uint256 => uint256) liquidity; // chainId => liquidity + uint256 remoteLiquidity; + uint256 bridged; + bool capacityBelowLevelUpdate; + } + + function _deployUpgradeableBurnMintTokenPool( + address ghoToken, + address arm, + address router, + address owner, + address proxyAdmin + ) internal returns (address) { + // Deploy BurnMintTokenPool for GHO token on source chain + UpgradeableBurnMintTokenPool 
tokenPoolImpl = new UpgradeableBurnMintTokenPool(ghoToken, arm, false); + // Initialize implementation + address[] memory emptyArray = new address[](0); + tokenPoolImpl.initialize(owner, emptyArray, router); + // Deploy proxy and initialize + bytes memory tokenPoolInitParams = abi.encodeWithSignature( + "initialize(address,address[],address)", + owner, + emptyArray, + router + ); + TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy( + address(tokenPoolImpl), + proxyAdmin, + tokenPoolInitParams + ); + // Manage ownership + vm.stopPrank(); + vm.prank(owner); + UpgradeableBurnMintTokenPool(address(tokenPoolProxy)).acceptOwnership(); + vm.startPrank(OWNER); + + return address(tokenPoolProxy); + } + + function _deployUpgradeableLockReleaseTokenPool( + address ghoToken, + address arm, + address router, + address owner, + uint256 bridgeLimit, + address proxyAdmin + ) internal returns (address) { + UpgradeableLockReleaseTokenPool tokenPoolImpl = new UpgradeableLockReleaseTokenPool(ghoToken, arm, false, true); + // Initialize implementation + address[] memory emptyArray = new address[](0); + tokenPoolImpl.initialize(owner, emptyArray, router, bridgeLimit); + // Deploy proxy and initialize + bytes memory tokenPoolInitParams = abi.encodeWithSignature( + "initialize(address,address[],address,uint256)", + owner, + emptyArray, + router, + bridgeLimit + ); + TransparentUpgradeableProxy tokenPoolProxy = new TransparentUpgradeableProxy( + address(tokenPoolImpl), + proxyAdmin, + tokenPoolInitParams + ); + + // Manage ownership + vm.stopPrank(); + vm.prank(owner); + UpgradeableLockReleaseTokenPool(address(tokenPoolProxy)).acceptOwnership(); + vm.startPrank(OWNER); + + return address(tokenPoolProxy); + } + + function _inflateFacilitatorLevel(address tokenPool, address ghoToken, uint256 amount) internal { + vm.stopPrank(); + vm.prank(tokenPool); + IBurnMintERC20(ghoToken).mint(address(0), amount); + } + + function _getProxyAdminAddress(address proxy) internal view returns (address) { + bytes32 ERC1967_ADMIN_SLOT = 0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103; + bytes32 adminSlot = vm.load(proxy, ERC1967_ADMIN_SLOT); + return address(uint160(uint256(adminSlot))); + } + + function _getProxyImplementationAddress(address proxy) internal view returns (address) { + bytes32 ERC1967_IMPLEMENTATION_SLOT = 0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc; + bytes32 implSlot = vm.load(proxy, ERC1967_IMPLEMENTATION_SLOT); + return address(uint160(uint256(implSlot))); + } + + function _enableLane(UtilsStorage storage s, uint256 fromId, uint256 toId) internal { + // from + UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); + RateLimiter.Config memory emptyRateConfig = RateLimiter.Config(false, 0, 0); + chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ + remoteChainSelector: uint64(toId), + allowed: true, + outboundRateLimiterConfig: emptyRateConfig, + inboundRateLimiterConfig: emptyRateConfig + }); + + vm.startPrank(OWNER); + UpgradeableTokenPool(s.pools[fromId]).applyChainUpdates(chainUpdate); + + // to + chainUpdate[0].remoteChainSelector = uint64(fromId); + UpgradeableTokenPool(s.pools[toId]).applyChainUpdates(chainUpdate); + vm.stopPrank(); + } + + function _addBridge(UtilsStorage storage s, uint256 chainId, uint256 bucketCapacity) internal { + require(s.tokens[chainId] == address(0), "BRIDGE_ALREADY_EXISTS"); + + s.chainsList.push(chainId); + + // GHO Token + GhoToken ghoToken = new GhoToken(AAVE_DAO); + s.tokens[chainId] = address(ghoToken); + + 
// UpgradeableTokenPool + address bridgeTokenPool = _deployUpgradeableBurnMintTokenPool( + address(ghoToken), + ARM_PROXY, + ROUTER, + OWNER, + PROXY_ADMIN + ); + s.pools[chainId] = bridgeTokenPool; + + // Facilitator + s.bucketCapacities[chainId] = bucketCapacity; + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + ghoToken.grantRole(ghoToken.FACILITATOR_MANAGER_ROLE(), AAVE_DAO); + ghoToken.addFacilitator(bridgeTokenPool, "UpgradeableTokenPool", uint128(bucketCapacity)); + vm.stopPrank(); + } + + function _updateBridgeLimit(UtilsStorage storage s, uint256 newBridgeLimit) internal { + vm.stopPrank(); + vm.startPrank(OWNER); + UpgradeableLockReleaseTokenPool(s.pools[0]).setBridgeLimit(newBridgeLimit); + vm.stopPrank(); + } + + function _updateBucketCapacity(UtilsStorage storage s, uint256 chainId, uint256 newBucketCapacity) internal { + s.bucketCapacities[chainId] = newBucketCapacity; + vm.stopPrank(); + vm.startPrank(AAVE_DAO); + GhoToken(s.tokens[chainId]).grantRole(GhoToken(s.tokens[chainId]).BUCKET_MANAGER_ROLE(), AAVE_DAO); + GhoToken(s.tokens[chainId]).setFacilitatorBucketCapacity(s.pools[chainId], uint128(newBucketCapacity)); + vm.stopPrank(); + } + + function _getCapacity(UtilsStorage storage s, uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (uint256 capacity, ) = GhoToken(s.tokens[chain]).getFacilitatorBucket(s.pools[chain]); + return capacity; + } + + function _getLevel(UtilsStorage storage s, uint256 chain) internal view returns (uint256) { + require(!_isEthereumChain(chain), "No bucket on Ethereum"); + (, uint256 level) = GhoToken(s.tokens[chain]).getFacilitatorBucket(s.pools[chain]); + return level; + } + + function _getMaxToBridgeOut(UtilsStorage storage s, uint256 fromChain) internal view returns (uint256) { + if (_isEthereumChain(fromChain)) { + UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(s.pools[0]); + uint256 bridgeLimit = ethTokenPool.getBridgeLimit(); + uint256 currentBridged = ethTokenPool.getCurrentBridgedAmount(); + return currentBridged > bridgeLimit ? 0 : bridgeLimit - currentBridged; + } else { + (, uint256 level) = GhoToken(s.tokens[fromChain]).getFacilitatorBucket(s.pools[fromChain]); + return level; + } + } + + function _getMaxToBridgeIn(UtilsStorage storage s, uint256 toChain) internal view returns (uint256) { + if (_isEthereumChain(toChain)) { + UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(s.pools[0]); + return ethTokenPool.getCurrentBridgedAmount(); + } else { + (uint256 capacity, uint256 level) = GhoToken(s.tokens[toChain]).getFacilitatorBucket(s.pools[toChain]); + return level > capacity ? 
0 : capacity - level; + } + } + + function _bridgeGho( + UtilsStorage storage s, + uint256 fromChain, + uint256 toChain, + address user, + uint256 amount + ) internal { + _moveGhoOrigin(s, fromChain, toChain, user, amount); + _moveGhoDestination(s, fromChain, toChain, user, amount); + } + + function _moveGhoOrigin( + UtilsStorage storage s, + uint256 fromChain, + uint256 toChain, + address user, + uint256 amount + ) internal { + // Simulate CCIP pull of funds + vm.startPrank(user); + GhoToken(s.tokens[fromChain]).transfer(s.pools[fromChain], amount); + + vm.startPrank(RAMP); + IPool(s.pools[fromChain]).lockOrBurn(user, bytes(""), amount, uint64(toChain), bytes("")); + + if (_isEthereumChain(fromChain)) { + // Lock + s.bridged += amount; + } else { + // Burn + s.bucketLevels[fromChain] -= amount; + s.liquidity[fromChain] -= amount; + s.remoteLiquidity -= amount; + } + } + + function _moveGhoDestination( + UtilsStorage storage s, + uint256 fromChain, + uint256 toChain, + address user, + uint256 amount + ) internal { + vm.startPrank(RAMP); + IPool(s.pools[toChain]).releaseOrMint(bytes(""), user, amount, uint64(fromChain), bytes("")); + + if (_isEthereumChain(toChain)) { + // Release + s.bridged -= amount; + } else { + // Mint + s.bucketLevels[toChain] += amount; + s.liquidity[toChain] += amount; + s.remoteLiquidity += amount; + } + } + + function _isEthereumChain(uint256 chainId) internal pure returns (bool) { + return chainId == 0; + } +} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol index ee1fa8df49..7ed07ad79c 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol @@ -3,43 +3,107 @@ pragma solidity 0.8.19; import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; import {IPool} from "../../../interfaces/pools/IPool.sol"; -import {GHOTokenPoolEthereumBridgeLimitSetup} from "./GHOTokenPoolEthereumBridgeLimitSetup.t.sol"; +import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; +import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; +import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; +import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {GHOBaseTest} from "./GHOBaseTest.t.sol"; + +contract GHOTokenPoolEthereumBridgeLimitSetup is GHOBaseTest { + UtilsStorage public s; + + function setUp() public virtual override { + // Ethereum with id 0 + s.chainsList.push(0); + s.tokens[0] = address(new GhoToken(AAVE_DAO)); + s.pools[0] = _deployUpgradeableLockReleaseTokenPool( + s.tokens[0], + ARM_PROXY, + ROUTER, + OWNER, + INITIAL_BRIDGE_LIMIT, + PROXY_ADMIN + ); + + // Mock calls for bridging + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), abi.encode(RAMP)); + vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("isOffRamp(uint64,address)"))), abi.encode(true)); + vm.mockCall(ARM_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed()"))), abi.encode(false)); + } + + function _assertInvariant() internal { + // Check bridged + assertEq(UpgradeableLockReleaseTokenPool(s.pools[0]).getCurrentBridgedAmount(), s.bridged); + + // Check levels and buckets + uint256 sumLevels; + uint256 chainId; + uint256 capacity; + uint256 level; + for (uint 
i = 1; i < s.chainsList.length; i++) { + // not counting Ethereum (chain id 0) + chainId = s.chainsList[i]; + (capacity, level) = GhoToken(s.tokens[chainId]).getFacilitatorBucket(s.pools[chainId]); + + // Aggregate levels + sumLevels += level; + + assertEq(capacity, s.bucketCapacities[chainId], "wrong bucket capacity"); + assertEq(level, s.bucketLevels[chainId], "wrong bucket level"); + + assertEq( + capacity, + UpgradeableLockReleaseTokenPool(s.pools[0]).getBridgeLimit(), + "capacity must be equal to bridgeLimit" + ); + assertLe( + level, + UpgradeableLockReleaseTokenPool(s.pools[0]).getBridgeLimit(), + "level cannot be higher than bridgeLimit" + ); + } + // Check bridged is equal to sum of levels + assertEq(UpgradeableLockReleaseTokenPool(s.pools[0]).getCurrentBridgedAmount(), sumLevels, "wrong bridged"); + assertEq(s.remoteLiquidity, sumLevels, "wrong remote liquidity"); + } +} contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBridgeLimitSetup { function setUp() public virtual override { super.setUp(); // Arbitrum - _addBridge(1, INITIAL_BRIDGE_LIMIT); - _enableLane(0, 1); + _addBridge(s, 1, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 1); } function testFuzz_Bridge(uint256 amount) public { - uint256 maxAmount = _getMaxToBridgeOut(0); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); amount = bound(amount, 1, maxAmount); _assertInvariant(); - assertEq(_getMaxToBridgeOut(0), maxAmount); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - deal(tokens[0], USER, amount); - _moveGhoOrigin(0, 1, USER, amount); + deal(s.tokens[0], USER, amount); + _moveGhoOrigin(s, 0, 1, USER, amount); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount); - assertEq(_getMaxToBridgeIn(0), amount); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - _moveGhoDestination(0, 1, USER, amount); + _moveGhoDestination(s, 0, 1, USER, amount); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount); - assertEq(_getMaxToBridgeIn(0), amount); - assertEq(_getMaxToBridgeOut(1), bucketLevels[1]); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketLevels[1]); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); _assertInvariant(); } @@ -47,56 +111,56 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr function testBridgeAll() public { _assertInvariant(); - uint256 maxAmount = _getMaxToBridgeOut(0); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - deal(tokens[0], USER, maxAmount); - _moveGhoOrigin(0, 1, USER, maxAmount); + deal(s.tokens[0], USER, maxAmount); + _moveGhoOrigin(s, 0, 1, USER, maxAmount); - 
assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), maxAmount); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - _moveGhoDestination(0, 1, USER, maxAmount); + _moveGhoDestination(s, 0, 1, USER, maxAmount); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), maxAmount); - assertEq(_getMaxToBridgeOut(1), bucketCapacities[1]); - assertEq(_getMaxToBridgeIn(1), 0); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeIn(s, 1), 0); _assertInvariant(); } /// @dev Bridge out two times function testFuzz_BridgeTwoSteps(uint256 amount1, uint256 amount2) public { - uint256 maxAmount = _getMaxToBridgeOut(0); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); amount1 = bound(amount1, 1, maxAmount); amount2 = bound(amount2, 1, maxAmount); _assertInvariant(); - assertEq(_getMaxToBridgeOut(0), maxAmount); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - deal(tokens[0], USER, amount1); - _moveGhoOrigin(0, 1, USER, amount1); + deal(s.tokens[0], USER, amount1); + _moveGhoOrigin(s, 0, 1, USER, amount1); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount1); - assertEq(_getMaxToBridgeIn(0), amount1); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount1); + assertEq(_getMaxToBridgeIn(s, 0), amount1); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - _moveGhoDestination(0, 1, USER, amount1); + _moveGhoDestination(s, 0, 1, USER, amount1); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount1); - assertEq(_getMaxToBridgeIn(0), amount1); - assertEq(_getMaxToBridgeOut(1), bucketLevels[1]); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount1); + assertEq(_getMaxToBridgeIn(s, 0), amount1); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketLevels[1]); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); _assertInvariant(); @@ -104,7 +168,7 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr if (amount1 + amount2 > maxAmount) { vm.expectRevert(); vm.prank(RAMP); - IPool(pools[0]).lockOrBurn(USER, bytes(""), amount2, uint64(1), bytes("")); + IPool(s.pools[0]).lockOrBurn(USER, bytes(""), amount2, uint64(1), bytes("")); amount2 = maxAmount - amount1; } @@ -113,20 +177,20 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr _assertInvariant(); uint256 acc = amount1 + amount2; - deal(tokens[0], USER, amount2); - _moveGhoOrigin(0, 1, USER, amount2); + deal(s.tokens[0], USER, amount2); + _moveGhoOrigin(s, 0, 1, USER, amount2); - assertEq(_getMaxToBridgeOut(0), maxAmount - acc); - assertEq(_getMaxToBridgeIn(0), acc); - assertEq(_getMaxToBridgeOut(1), amount1); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount1); + assertEq(_getMaxToBridgeOut(s, 0), 
maxAmount - acc); + assertEq(_getMaxToBridgeIn(s, 0), acc); + assertEq(_getMaxToBridgeOut(s, 1), amount1); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount1); - _moveGhoDestination(0, 1, USER, amount2); + _moveGhoDestination(s, 0, 1, USER, amount2); - assertEq(_getMaxToBridgeOut(0), maxAmount - acc); - assertEq(_getMaxToBridgeIn(0), acc); - assertEq(_getMaxToBridgeOut(1), acc); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - acc); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(s, 0), acc); + assertEq(_getMaxToBridgeOut(s, 1), acc); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - acc); _assertInvariant(); } @@ -134,31 +198,31 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr /// @dev Bridge some tokens out and later, bridge them back in function testFuzz_BridgeBackAndForth(uint256 amountOut, uint256 amountIn) public { - uint256 maxAmount = _getMaxToBridgeOut(0); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); amountOut = bound(amountOut, 1, maxAmount); - amountIn = bound(amountIn, 1, _getCapacity(1)); + amountIn = bound(amountIn, 1, _getCapacity(s, 1)); _assertInvariant(); - assertEq(_getMaxToBridgeOut(0), maxAmount); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - deal(tokens[0], USER, amountOut); - _moveGhoOrigin(0, 1, USER, amountOut); + deal(s.tokens[0], USER, amountOut); + _moveGhoOrigin(s, 0, 1, USER, amountOut); - assertEq(_getMaxToBridgeOut(0), maxAmount - amountOut); - assertEq(_getMaxToBridgeIn(0), amountOut); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(s, 0), amountOut); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); - _moveGhoDestination(0, 1, USER, amountOut); + _moveGhoDestination(s, 0, 1, USER, amountOut); - assertEq(_getMaxToBridgeOut(0), maxAmount - amountOut); - assertEq(_getMaxToBridgeIn(0), amountOut); - assertEq(_getMaxToBridgeOut(1), bucketLevels[1]); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(s, 0), amountOut); + assertEq(_getMaxToBridgeOut(s, 1), s.bucketLevels[1]); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); _assertInvariant(); @@ -167,7 +231,7 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr // Simulate revert on destination vm.expectRevert(); vm.prank(RAMP); - IPool(pools[0]).releaseOrMint(bytes(""), USER, amountIn, uint64(1), bytes("")); + IPool(s.pools[0]).releaseOrMint(bytes(""), USER, amountIn, uint64(1), bytes("")); amountIn = amountOut; } @@ -176,20 +240,20 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr _assertInvariant(); uint256 acc = amountOut - amountIn; - deal(tokens[1], USER, amountIn); - _moveGhoOrigin(1, 0, USER, amountIn); + deal(s.tokens[1], USER, amountIn); + _moveGhoOrigin(s, 1, 0, USER, amountIn); - assertEq(_getMaxToBridgeOut(0), maxAmount - amountOut); - assertEq(_getMaxToBridgeIn(0), amountOut); - assertEq(_getMaxToBridgeOut(1), acc); - 
assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - acc); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amountOut); + assertEq(_getMaxToBridgeIn(s, 0), amountOut); + assertEq(_getMaxToBridgeOut(s, 1), acc); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - acc); - _moveGhoDestination(1, 0, USER, amountIn); + _moveGhoDestination(s, 1, 0, USER, amountIn); - assertEq(_getMaxToBridgeOut(0), maxAmount - acc); - assertEq(_getMaxToBridgeIn(0), acc); - assertEq(_getMaxToBridgeOut(1), acc); - assertEq(_getMaxToBridgeIn(1), maxAmount - acc); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - acc); + assertEq(_getMaxToBridgeIn(s, 0), acc); + assertEq(_getMaxToBridgeOut(s, 1), acc); + assertEq(_getMaxToBridgeIn(s, 1), maxAmount - acc); _assertInvariant(); } @@ -202,31 +266,31 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr // Inflate bridgeAmount if (bridgeAmount > 0) { - deal(tokens[0], USER, bridgeAmount); - _bridgeGho(0, 1, USER, bridgeAmount); + deal(s.tokens[0], USER, bridgeAmount); + _bridgeGho(s, 0, 1, USER, bridgeAmount); } - deal(tokens[0], USER, amount); + deal(s.tokens[0], USER, amount); // Simulate CCIP pull of funds - vm.prank(USER); - GhoToken(tokens[0]).transfer(pools[0], amount); + vm.startPrank(USER); + GhoToken(s.tokens[0]).transfer(s.pools[0], amount); if (bridgeAmount + amount > INITIAL_BRIDGE_LIMIT) { vm.expectRevert(); } - vm.prank(RAMP); - IPool(pools[0]).lockOrBurn(USER, bytes(""), amount, uint64(1), bytes("")); + vm.startPrank(RAMP); + IPool(s.pools[0]).lockOrBurn(USER, bytes(""), amount, uint64(1), bytes("")); } /// @dev Bridge from Ethereum to Arbitrum reverts if amount is higher than capacity available function testFuzz_BridgeCapacityExceededDestinationReverts(uint256 amount, uint256 level) public { - (uint256 capacity, ) = GhoToken(tokens[1]).getFacilitatorBucket(pools[1]); + (uint256 capacity, ) = GhoToken(s.tokens[1]).getFacilitatorBucket(s.pools[1]); vm.assume(level < capacity); amount = bound(amount, 1, type(uint128).max); // Inflate level if (level > 0) { - _inflateFacilitatorLevel(pools[1], tokens[1], level); + _inflateFacilitatorLevel(s.pools[1], s.tokens[1], level); } // Skip origin move @@ -236,46 +300,46 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr vm.expectRevert(); } vm.prank(RAMP); - IPool(pools[1]).releaseOrMint(bytes(""), USER, amount, uint64(0), bytes("")); + IPool(s.pools[1]).releaseOrMint(bytes(""), USER, amount, uint64(0), bytes("")); } /// @dev Bridge from Arbitrum To Ethereum reverts if Arbitrum level is lower than amount function testFuzz_BridgeBackZeroLevelSourceReverts(uint256 amount, uint256 level) public { - (uint256 capacity, ) = GhoToken(tokens[1]).getFacilitatorBucket(pools[1]); + (uint256 capacity, ) = GhoToken(s.tokens[1]).getFacilitatorBucket(s.pools[1]); vm.assume(level < capacity); amount = bound(amount, 1, capacity - level); // Inflate level if (level > 0) { - _inflateFacilitatorLevel(pools[1], tokens[1], level); + _inflateFacilitatorLevel(s.pools[1], s.tokens[1], level); } - deal(tokens[1], USER, amount); + deal(s.tokens[1], USER, amount); // Simulate CCIP pull of funds vm.prank(USER); - GhoToken(tokens[1]).transfer(pools[1], amount); + GhoToken(s.tokens[1]).transfer(s.pools[1], amount); if (amount > level) { vm.expectRevert(); } vm.prank(RAMP); - IPool(pools[1]).lockOrBurn(USER, bytes(""), amount, uint64(0), bytes("")); + IPool(s.pools[1]).lockOrBurn(USER, bytes(""), amount, uint64(0), bytes("")); } /// @dev Bridge from Arbitrum To Ethereum reverts if 
Ethereum current bridged amount is lower than amount function testFuzz_BridgeBackZeroBridgeLimitDestinationReverts(uint256 amount, uint256 bridgeAmount) public { - (uint256 capacity, ) = GhoToken(tokens[1]).getFacilitatorBucket(pools[1]); + (uint256 capacity, ) = GhoToken(s.tokens[1]).getFacilitatorBucket(s.pools[1]); amount = bound(amount, 1, capacity); bridgeAmount = bound(bridgeAmount, 0, capacity - amount); // Inflate bridgeAmount if (bridgeAmount > 0) { - deal(tokens[0], USER, bridgeAmount); - _bridgeGho(0, 1, USER, bridgeAmount); + deal(s.tokens[0], USER, bridgeAmount); + _bridgeGho(s, 0, 1, USER, bridgeAmount); } // Inflate level on Arbitrum - _inflateFacilitatorLevel(pools[1], tokens[1], amount); + _inflateFacilitatorLevel(s.pools[1], s.tokens[1], amount); // Skip origin move @@ -284,42 +348,42 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr vm.expectRevert(); } vm.prank(RAMP); - IPool(pools[0]).releaseOrMint(bytes(""), USER, amount, uint64(1), bytes("")); + IPool(s.pools[0]).releaseOrMint(bytes(""), USER, amount, uint64(1), bytes("")); } /// @dev Bucket capacity reduction. Caution: bridge limit reduction must happen first function testReduceBucketCapacity() public { // Max out capacity - uint256 maxAmount = _getMaxToBridgeOut(0); - deal(tokens[0], USER, maxAmount); - _bridgeGho(0, 1, USER, maxAmount); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); - assertEq(_getMaxToBridgeIn(1), 0); - assertEq(_getCapacity(1), maxAmount); - assertEq(_getLevel(1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] - 10; + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; // 1. Reduce bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(1), 0); + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); // 2. 
Reduce bucket capacity - _updateBucketCapacity(1, newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(1), 0); + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); // Maximum to bridge in is all minted on Arbitrum - assertEq(_getMaxToBridgeIn(0), maxAmount); - assertEq(_getMaxToBridgeOut(1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), maxAmount); - _bridgeGho(1, 0, USER, maxAmount); - assertEq(_getMaxToBridgeOut(0), newBucketCapacity); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), newBucketCapacity); + _bridgeGho(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); _assertInvariant(); } @@ -327,85 +391,85 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr /// @dev Bucket capacity reduction, performed following wrong order procedure function testReduceBucketCapacityIncorrectProcedure() public { // Bridge a third of the capacity - uint256 amount = _getMaxToBridgeOut(0) / 3; - uint256 availableToBridge = _getMaxToBridgeOut(0) - amount; + uint256 amount = _getMaxToBridgeOut(s, 0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(s, 0) - amount; - deal(tokens[0], USER, amount); - _bridgeGho(0, 1, USER, amount); + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount); - assertEq(_getLevel(1), amount); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount); + assertEq(_getLevel(s, 1), amount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] - 10; + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; /// @dev INCORRECT ORDER PROCEDURE!! bridge limit reduction should happen first // 1. Reduce bucket capacity - _updateBucketCapacity(1, newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), availableToBridge); // this is the UX issue - assertEq(_getMaxToBridgeIn(1), availableToBridge - 10); + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), availableToBridge); // this is the UX issue + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge - 10); // User can come and try to max bridge on Arbitrum // Transaction will succeed on Ethereum, but revert on Arbitrum - deal(tokens[0], USER, availableToBridge); - _moveGhoOrigin(0, 1, USER, availableToBridge); - assertEq(_getMaxToBridgeOut(0), 0); + deal(s.tokens[0], USER, availableToBridge); + _moveGhoOrigin(s, 0, 1, USER, availableToBridge); + assertEq(_getMaxToBridgeOut(s, 0), 0); vm.expectRevert(); vm.prank(RAMP); - IPool(pools[1]).releaseOrMint(bytes(""), USER, availableToBridge, uint64(0), bytes("")); + IPool(s.pools[1]).releaseOrMint(bytes(""), USER, availableToBridge, uint64(0), bytes("")); // User can only bridge up to new bucket capacity (10 units less) - assertEq(_getMaxToBridgeIn(1), availableToBridge - 10); + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge - 10); vm.prank(RAMP); - IPool(pools[1]).releaseOrMint(bytes(""), USER, availableToBridge - 10, uint64(0), bytes("")); - assertEq(_getMaxToBridgeIn(1), 0); + IPool(s.pools[1]).releaseOrMint(bytes(""), USER, availableToBridge - 10, uint64(0), bytes("")); + assertEq(_getMaxToBridgeIn(s, 1), 0); // 2. 
Reduce bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(1), 0); + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); } /// @dev Bucket capacity reduction, with a bridge out in between function testReduceBucketCapacityWithBridgeOutInBetween() public { // Bridge a third of the capacity - uint256 amount = _getMaxToBridgeOut(0) / 3; - uint256 availableToBridge = _getMaxToBridgeOut(0) - amount; + uint256 amount = _getMaxToBridgeOut(s, 0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(s, 0) - amount; - deal(tokens[0], USER, amount); - _bridgeGho(0, 1, USER, amount); + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount); - assertEq(_getLevel(1), amount); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount); + assertEq(_getLevel(s, 1), amount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] - 10; + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; // 1. Reduce bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), availableToBridge - 10); - assertEq(_getMaxToBridgeIn(1), availableToBridge); + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), availableToBridge - 10); + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge); // User initiates bridge out action - uint256 amount2 = _getMaxToBridgeOut(0); - deal(tokens[0], USER, amount2); - _moveGhoOrigin(0, 1, USER, amount2); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), newBucketCapacity); + uint256 amount2 = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, amount2); + _moveGhoOrigin(s, 0, 1, USER, amount2); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); // 2. 
Reduce bucket capacity - _updateBucketCapacity(1, newBucketCapacity); + _updateBucketCapacity(s, 1, newBucketCapacity); // Destination execution can happen, no more bridge out actions can be initiated - assertEq(_getMaxToBridgeOut(1), amount); - assertEq(_getMaxToBridgeIn(1), amount2); + assertEq(_getMaxToBridgeOut(s, 1), amount); + assertEq(_getMaxToBridgeIn(s, 1), amount2); // Finalize bridge out action - _moveGhoDestination(0, 1, USER, amount2); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), newBucketCapacity); - assertEq(_getMaxToBridgeOut(1), newBucketCapacity); - assertEq(_getMaxToBridgeIn(1), 0); + _moveGhoDestination(s, 0, 1, USER, amount2); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); _assertInvariant(); } @@ -413,39 +477,39 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr /// @dev Bucket capacity reduction, with a bridge in in between function testReduceBucketCapacityWithBridgeInInBetween() public { // Bridge max amount - uint256 maxAmount = _getMaxToBridgeOut(0); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); - deal(tokens[0], USER, maxAmount); - _bridgeGho(0, 1, USER, maxAmount); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); - assertEq(_getMaxToBridgeIn(1), 0); - assertEq(_getCapacity(1), maxAmount); - assertEq(_getLevel(1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] - 10; + uint256 newBucketCapacity = s.bucketCapacities[1] - 10; // 1. Reduce bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(1), 0); + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 0); // User initiates bridge in action - _moveGhoOrigin(1, 0, USER, maxAmount); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), maxAmount); + _moveGhoOrigin(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), maxAmount); // 2. Reduce bucket capacity - _updateBucketCapacity(1, newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), maxAmount); + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); // Finalize bridge in action - _moveGhoDestination(1, 0, USER, maxAmount); - assertEq(_getMaxToBridgeOut(0), newBucketCapacity); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), newBucketCapacity); + _moveGhoDestination(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); _assertInvariant(); } @@ -453,40 +517,40 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr /// @dev Bucket capacity increase. 
Caution: bridge limit increase must happen afterwards function testIncreaseBucketCapacity() public { // Max out capacity - uint256 maxAmount = _getMaxToBridgeOut(0); - deal(tokens[0], USER, maxAmount); - _bridgeGho(0, 1, USER, maxAmount); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); - assertEq(_getMaxToBridgeIn(1), 0); - assertEq(_getCapacity(1), maxAmount); - assertEq(_getLevel(1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] + 10; + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; // 2. Increase bucket capacity - _updateBucketCapacity(1, newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(1), 10); + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 10); // Reverts if a user tries to bridge out 10 vm.expectRevert(); vm.prank(RAMP); - IPool(pools[0]).lockOrBurn(USER, bytes(""), 10, uint64(1), bytes("")); + IPool(s.pools[0]).lockOrBurn(USER, bytes(""), 10, uint64(1), bytes("")); // 2. Increase bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 10); - assertEq(_getMaxToBridgeIn(1), 10); + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 10); _assertInvariant(); // Now it is possible to bridge some again - _bridgeGho(1, 0, USER, maxAmount); - assertEq(_getMaxToBridgeOut(0), newBucketCapacity); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), newBucketCapacity); + _bridgeGho(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); _assertInvariant(); } @@ -494,48 +558,48 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr /// @dev Bucket capacity increase, performed following wrong order procedure function testIncreaseBucketCapacityIncorrectProcedure() public { // Max out capacity - uint256 maxAmount = _getMaxToBridgeOut(0); - deal(tokens[0], USER, maxAmount); - _bridgeGho(0, 1, USER, maxAmount); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); - assertEq(_getMaxToBridgeIn(1), 0); - assertEq(_getCapacity(1), maxAmount); - assertEq(_getLevel(1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] + 10; + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; /// @dev INCORRECT ORDER PROCEDURE!! bucket capacity increase should happen first // 1. 
Increase bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 10); - assertEq(_getMaxToBridgeIn(1), 0); // this is the UX issue + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 0); // this is the UX issue // User can come and try to max bridge on Arbitrum // Transaction will succeed on Ethereum, but revert on Arbitrum - deal(tokens[0], USER, 10); - _moveGhoOrigin(0, 1, USER, 10); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), newBucketCapacity); + deal(s.tokens[0], USER, 10); + _moveGhoOrigin(s, 0, 1, USER, 10); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); // Execution on destination will revert until bucket capacity gets increased vm.expectRevert(); vm.prank(RAMP); - IPool(pools[1]).releaseOrMint(bytes(""), USER, 10, uint64(0), bytes("")); + IPool(s.pools[1]).releaseOrMint(bytes(""), USER, 10, uint64(0), bytes("")); // 2. Increase bucket capacity - _updateBucketCapacity(1, newBucketCapacity); - assertEq(_getMaxToBridgeOut(1), maxAmount); - assertEq(_getMaxToBridgeIn(1), 10); + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 10); // Now it is possible to execute on destination - _moveGhoDestination(0, 1, USER, 10); + _moveGhoDestination(s, 0, 1, USER, 10); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), newBucketCapacity); - assertEq(_getMaxToBridgeOut(1), newBucketCapacity); - assertEq(_getMaxToBridgeIn(1), 0); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); _assertInvariant(); } @@ -543,47 +607,47 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr /// @dev Bucket capacity increase, with a bridge out in between function testIncreaseBucketCapacityWithBridgeOutInBetween() public { // Bridge a third of the capacity - uint256 amount = _getMaxToBridgeOut(0) / 3; - uint256 availableToBridge = _getMaxToBridgeOut(0) - amount; - deal(tokens[0], USER, amount); - _bridgeGho(0, 1, USER, amount); + uint256 amount = _getMaxToBridgeOut(s, 0) / 3; + uint256 availableToBridge = _getMaxToBridgeOut(s, 0) - amount; + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - amount); - assertEq(_getLevel(1), amount); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - amount); + assertEq(_getLevel(s, 1), amount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] + 10; + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; // 1. 
Increase bucket capacity - _updateBucketCapacity(1, newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), availableToBridge); - assertEq(_getMaxToBridgeIn(1), availableToBridge + 10); + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), availableToBridge); + assertEq(_getMaxToBridgeIn(s, 1), availableToBridge + 10); // Reverts if a user tries to bridge out all up to new bucket capacity vm.expectRevert(); vm.prank(RAMP); - IPool(pools[0]).lockOrBurn(USER, bytes(""), availableToBridge + 10, uint64(1), bytes("")); + IPool(s.pools[0]).lockOrBurn(USER, bytes(""), availableToBridge + 10, uint64(1), bytes("")); // User initiates bridge out action - deal(tokens[0], USER, availableToBridge); - _bridgeGho(0, 1, USER, availableToBridge); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(1), 10); + deal(s.tokens[0], USER, availableToBridge); + _bridgeGho(s, 0, 1, USER, availableToBridge); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 1), 10); // 2. Increase bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 10); - assertEq(_getMaxToBridgeIn(1), 10); + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 10); _assertInvariant(); // Now it is possible to bridge some again - deal(tokens[0], USER, 10); - _bridgeGho(0, 1, USER, 10); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), newBucketCapacity); - assertEq(_getMaxToBridgeOut(1), newBucketCapacity); - assertEq(_getMaxToBridgeIn(1), 0); + deal(s.tokens[0], USER, 10); + _bridgeGho(s, 0, 1, USER, 10); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); _assertInvariant(); } @@ -591,48 +655,48 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr /// @dev Bucket capacity increase, with a bridge in in between function testIncreaseBucketCapacityWithBridgeInInBetween() public { // Max out capacity - uint256 maxAmount = _getMaxToBridgeOut(0); - deal(tokens[0], USER, maxAmount); - _bridgeGho(0, 1, USER, maxAmount); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, maxAmount); + _bridgeGho(s, 0, 1, USER, maxAmount); - assertEq(_getMaxToBridgeIn(1), 0); - assertEq(_getCapacity(1), maxAmount); - assertEq(_getLevel(1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 0); + assertEq(_getCapacity(s, 1), maxAmount); + assertEq(_getLevel(s, 1), maxAmount); _assertInvariant(); - uint256 newBucketCapacity = bucketCapacities[1] + 10; + uint256 newBucketCapacity = s.bucketCapacities[1] + 10; // 1. Increase bucket capacity - _updateBucketCapacity(1, newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), maxAmount); - assertEq(_getMaxToBridgeOut(1), maxAmount); - assertEq(_getMaxToBridgeIn(1), 10); + _updateBucketCapacity(s, 1, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), maxAmount); + assertEq(_getMaxToBridgeIn(s, 1), 10); // User initiates bridge in action - _moveGhoOrigin(1, 0, USER, maxAmount); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), newBucketCapacity); + _moveGhoOrigin(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), newBucketCapacity); // 2. 
Increase bridge limit - _updateBridgeLimit(newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 10); - assertEq(_getMaxToBridgeIn(0), maxAmount); + _updateBridgeLimit(s, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 0), maxAmount); // User finalizes bridge in action - _moveGhoDestination(1, 0, USER, maxAmount); - assertEq(_getMaxToBridgeOut(0), newBucketCapacity); - assertEq(_getMaxToBridgeIn(0), 0); + _moveGhoDestination(s, 1, 0, USER, maxAmount); + assertEq(_getMaxToBridgeOut(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 0), 0); _assertInvariant(); // Now it is possible to bridge new bucket capacity - deal(tokens[0], USER, newBucketCapacity); - _bridgeGho(0, 1, USER, newBucketCapacity); - assertEq(_getMaxToBridgeOut(0), 0); - assertEq(_getMaxToBridgeIn(0), newBucketCapacity); - assertEq(_getMaxToBridgeOut(1), newBucketCapacity); - assertEq(_getMaxToBridgeIn(1), 0); + deal(s.tokens[0], USER, newBucketCapacity); + _bridgeGho(s, 0, 1, USER, newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 0), 0); + assertEq(_getMaxToBridgeIn(s, 0), newBucketCapacity); + assertEq(_getMaxToBridgeOut(s, 1), newBucketCapacity); + assertEq(_getMaxToBridgeIn(s, 1), 0); _assertInvariant(); } @@ -643,67 +707,67 @@ contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBr super.setUp(); // Arbitrum - _addBridge(1, INITIAL_BRIDGE_LIMIT); - _enableLane(0, 1); + _addBridge(s, 1, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 1); // Avalanche - _addBridge(2, INITIAL_BRIDGE_LIMIT); - _enableLane(1, 2); - _enableLane(0, 2); + _addBridge(s, 2, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 1, 2); + _enableLane(s, 0, 2); } /// @dev Bridge out some tokens to third chain via second chain (Ethereum to Arbitrum, Arbitrum to Avalanche) function testFuzz_BridgeToTwoToThree(uint256 amount) public { - uint256 maxAmount = _getMaxToBridgeOut(0); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); amount = bound(amount, 1, maxAmount); _assertInvariant(); - assertEq(_getMaxToBridgeOut(0), maxAmount); - assertEq(_getMaxToBridgeIn(0), 0); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); - assertEq(_getMaxToBridgeOut(2), 0); - assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount); + assertEq(_getMaxToBridgeIn(s, 0), 0); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); - deal(tokens[0], USER, amount); - _moveGhoOrigin(0, 1, USER, amount); + deal(s.tokens[0], USER, amount); + _moveGhoOrigin(s, 0, 1, USER, amount); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount); - assertEq(_getMaxToBridgeIn(0), amount); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); - assertEq(_getMaxToBridgeOut(2), 0); - assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); - _moveGhoDestination(0, 1, USER, amount); + _moveGhoDestination(s, 0, 1, USER, amount); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount); - assertEq(_getMaxToBridgeIn(0), amount); - assertEq(_getMaxToBridgeOut(1), amount); - 
assertEq(_getMaxToBridgeIn(1), bucketCapacities[1] - bucketLevels[1]); - assertEq(_getMaxToBridgeOut(2), 0); - assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), amount); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1] - s.bucketLevels[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); _assertInvariant(); - _moveGhoOrigin(1, 2, USER, amount); + _moveGhoOrigin(s, 1, 2, USER, amount); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount); - assertEq(_getMaxToBridgeIn(0), amount); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); - assertEq(_getMaxToBridgeOut(2), 0); - assertEq(_getMaxToBridgeIn(2), bucketCapacities[2]); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), 0); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2]); - _moveGhoDestination(1, 2, USER, amount); + _moveGhoDestination(s, 1, 2, USER, amount); - assertEq(_getMaxToBridgeOut(0), maxAmount - amount); - assertEq(_getMaxToBridgeIn(0), amount); - assertEq(_getMaxToBridgeOut(1), 0); - assertEq(_getMaxToBridgeIn(1), bucketCapacities[1]); - assertEq(_getMaxToBridgeOut(2), amount); - assertEq(_getMaxToBridgeIn(2), bucketCapacities[2] - amount); + assertEq(_getMaxToBridgeOut(s, 0), maxAmount - amount); + assertEq(_getMaxToBridgeIn(s, 0), amount); + assertEq(_getMaxToBridgeOut(s, 1), 0); + assertEq(_getMaxToBridgeIn(s, 1), s.bucketCapacities[1]); + assertEq(_getMaxToBridgeOut(s, 2), amount); + assertEq(_getMaxToBridgeIn(s, 2), s.bucketCapacities[2] - amount); _assertInvariant(); } @@ -712,7 +776,7 @@ contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBr function testFuzz_BridgeRandomlyToTwoAndThree(uint64[] memory amounts) public { vm.assume(amounts.length < 30); - uint256 maxAmount = _getMaxToBridgeOut(0); + uint256 maxAmount = _getMaxToBridgeOut(s, 0); uint256 sourceAcc; uint256 amount; uint256 dest; @@ -727,109 +791,109 @@ contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBr } dest = (amount % 2) + 1; - deal(tokens[0], USER, amount); - _bridgeGho(0, dest, USER, amount); + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, dest, USER, amount); sourceAcc += amount; } - assertEq(sourceAcc, bridged); + assertEq(sourceAcc, s.bridged); // Bridge all to Avalanche - uint256 toBridge = _getMaxToBridgeOut(1); + uint256 toBridge = _getMaxToBridgeOut(s, 1); if (toBridge > 0) { - _bridgeGho(1, 2, USER, toBridge); - assertEq(sourceAcc, bridged); - assertEq(_getLevel(2), bridged); - assertEq(_getLevel(1), 0); + _bridgeGho(s, 1, 2, USER, toBridge); + assertEq(sourceAcc, s.bridged); + assertEq(_getLevel(s, 2), s.bridged); + assertEq(_getLevel(s, 1), 0); } } /// @dev All remote liquidity is on one chain or the other function testLiquidityUnbalanced() public { // Bridge all out to Arbitrum - uint256 amount = _getMaxToBridgeOut(0); - deal(tokens[0], USER, amount); - _bridgeGho(0, 1, USER, amount); + uint256 amount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); // No more liquidity can go remotely - assertEq(_getMaxToBridgeOut(0), 0); + assertEq(_getMaxToBridgeOut(s, 0), 0); vm.expectRevert(); 
vm.prank(RAMP); - IPool(pools[0]).lockOrBurn(USER, bytes(""), 1, uint64(1), bytes("")); + IPool(s.pools[0]).lockOrBurn(USER, bytes(""), 1, uint64(1), bytes("")); vm.prank(RAMP); vm.expectRevert(); - IPool(pools[0]).lockOrBurn(USER, bytes(""), 1, uint64(2), bytes("")); + IPool(s.pools[0]).lockOrBurn(USER, bytes(""), 1, uint64(2), bytes("")); // All liquidity on Arbitrum, 0 on Avalanche - assertEq(_getLevel(1), bridged); - assertEq(_getLevel(1), _getCapacity(1)); - assertEq(_getLevel(2), 0); + assertEq(_getLevel(s, 1), s.bridged); + assertEq(_getLevel(s, 1), _getCapacity(s, 1)); + assertEq(_getLevel(s, 2), 0); // Move all liquidity to Avalanche - _bridgeGho(1, 2, USER, amount); - assertEq(_getLevel(1), 0); - assertEq(_getLevel(2), bridged); - assertEq(_getLevel(2), _getCapacity(2)); + _bridgeGho(s, 1, 2, USER, amount); + assertEq(_getLevel(s, 1), 0); + assertEq(_getLevel(s, 2), s.bridged); + assertEq(_getLevel(s, 2), _getCapacity(s, 2)); // Move all liquidity back to Ethereum - _bridgeGho(2, 0, USER, amount); - assertEq(_getLevel(1), 0); - assertEq(_getLevel(2), 0); - assertEq(bridged, 0); - assertEq(_getMaxToBridgeOut(0), amount); + _bridgeGho(s, 2, 0, USER, amount); + assertEq(_getLevel(s, 1), 0); + assertEq(_getLevel(s, 2), 0); + assertEq(s.bridged, 0); + assertEq(_getMaxToBridgeOut(s, 0), amount); } /// @dev Test showcasing incorrect bridge limit and bucket capacity configuration function testIncorrectBridgeLimitBucketConfig() public { // BridgeLimit 10, Arbitrum 9, Avalanche Bucket 10 - _updateBridgeLimit(10); - _updateBucketCapacity(1, 9); - _updateBucketCapacity(2, 10); + _updateBridgeLimit(s, 10); + _updateBucketCapacity(s, 1, 9); + _updateBucketCapacity(s, 2, 10); - assertEq(_getMaxToBridgeOut(0), 10); - assertEq(_getMaxToBridgeIn(1), 9); // here the issue - assertEq(_getMaxToBridgeIn(2), 10); + assertEq(_getMaxToBridgeOut(s, 0), 10); + assertEq(_getMaxToBridgeIn(s, 1), 9); // here the issue + assertEq(_getMaxToBridgeIn(s, 2), 10); // Possible to bridge 10 out to 2 - deal(tokens[0], USER, 10); - _bridgeGho(0, 2, USER, 10); + deal(s.tokens[0], USER, 10); + _bridgeGho(s, 0, 2, USER, 10); // Liquidity comes back - _bridgeGho(2, 0, USER, 10); + _bridgeGho(s, 2, 0, USER, 10); // Not possible to bridge 10 out to 1 - _moveGhoOrigin(0, 1, USER, 10); + _moveGhoOrigin(s, 0, 1, USER, 10); // Reverts on destination vm.expectRevert(); vm.prank(RAMP); - IPool(pools[1]).releaseOrMint(bytes(""), USER, 10, uint64(0), bytes("")); + IPool(s.pools[1]).releaseOrMint(bytes(""), USER, 10, uint64(0), bytes("")); // Only if bucket capacity gets increased, execution can succeed - _updateBucketCapacity(1, 10); - _moveGhoDestination(0, 1, USER, 10); + _updateBucketCapacity(s, 1, 10); + _moveGhoDestination(s, 0, 1, USER, 10); } /// @dev Test showcasing a user locked due to a bridge limit reduction below current bridged amount function testUserLockedBridgeLimitReductionBelowLevel() public { // Bridge all out to Arbitrum - uint256 amount = _getMaxToBridgeOut(0); - deal(tokens[0], USER, amount); - _bridgeGho(0, 1, USER, amount); + uint256 amount = _getMaxToBridgeOut(s, 0); + deal(s.tokens[0], USER, amount); + _bridgeGho(s, 0, 1, USER, amount); // Reduce bridge limit below current bridged amount uint256 newBridgeLimit = amount / 2; - _updateBridgeLimit(newBridgeLimit); - _updateBucketCapacity(1, newBridgeLimit); + _updateBridgeLimit(s, newBridgeLimit); + _updateBucketCapacity(s, 1, newBridgeLimit); // Moving to Avalanche is not a problem because bucket capacity is higher than bridge limit - 
assertGt(_getMaxToBridgeIn(2), newBridgeLimit); - _bridgeGho(1, 2, USER, amount); + assertGt(_getMaxToBridgeIn(s, 2), newBridgeLimit); + _bridgeGho(s, 1, 2, USER, amount); // Moving back to Arbitrum reverts on destination - assertEq(_getMaxToBridgeIn(1), newBridgeLimit); - _moveGhoOrigin(2, 1, USER, amount); + assertEq(_getMaxToBridgeIn(s, 1), newBridgeLimit); + _moveGhoOrigin(s, 2, 1, USER, amount); vm.expectRevert(); vm.prank(RAMP); - IPool(pools[1]).releaseOrMint(bytes(""), USER, amount, uint64(2), bytes("")); + IPool(s.pools[1]).releaseOrMint(bytes(""), USER, amount, uint64(2), bytes("")); } } diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol deleted file mode 100644 index bfb248b11f..0000000000 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimitSetup.t.sol +++ /dev/null @@ -1,224 +0,0 @@ -// SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; - -import {GhoToken} from "@aave/gho-core/gho/GhoToken.sol"; - -import {BaseTest} from "../../BaseTest.t.sol"; -import {IPool} from "../../../interfaces/pools/IPool.sol"; -import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; -import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; -import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; -import {RateLimiter} from "../../../libraries/RateLimiter.sol"; - -contract GHOTokenPoolEthereumBridgeLimitSetup is BaseTest { - address internal ARM_PROXY = makeAddr("ARM_PROXY"); - address internal ROUTER = makeAddr("ROUTER"); - address internal RAMP = makeAddr("RAMP"); - address internal AAVE_DAO = makeAddr("AAVE_DAO"); - address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); - address internal USER = makeAddr("USER"); - - uint256 public immutable INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; - - uint256[] public chainsList; - mapping(uint256 => address) public pools; // chainId => bridgeTokenPool - mapping(uint256 => address) public tokens; // chainId => ghoToken - mapping(uint256 => uint256) public bucketCapacities; // chainId => bucketCapacities - mapping(uint256 => uint256) public bucketLevels; // chainId => bucketLevels - mapping(uint256 => uint256) public liquidity; // chainId => liquidity - uint256 public remoteLiquidity; - uint256 public bridged; - - function setUp() public virtual override { - // Ethereum with id 0 - chainsList.push(0); - tokens[0] = address(new GhoToken(AAVE_DAO)); - pools[0] = _deployUpgradeableLockReleaseTokenPool( - tokens[0], - ARM_PROXY, - ROUTER, - OWNER, - INITIAL_BRIDGE_LIMIT, - PROXY_ADMIN - ); - - // Mock calls for bridging - vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("getOnRamp(uint64)"))), abi.encode(RAMP)); - vm.mockCall(ROUTER, abi.encodeWithSelector(bytes4(keccak256("isOffRamp(uint64,address)"))), abi.encode(true)); - vm.mockCall(ARM_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed()"))), abi.encode(false)); - } - - function _enableLane(uint256 fromId, uint256 toId) internal { - // from - UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); - RateLimiter.Config memory emptyRateConfig = RateLimiter.Config(false, 0, 0); - chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ - remoteChainSelector: uint64(toId), - allowed: true, - outboundRateLimiterConfig: emptyRateConfig, - inboundRateLimiterConfig: emptyRateConfig - }); - - 
vm.startPrank(OWNER); - UpgradeableTokenPool(pools[fromId]).applyChainUpdates(chainUpdate); - - // to - chainUpdate[0].remoteChainSelector = uint64(fromId); - UpgradeableTokenPool(pools[toId]).applyChainUpdates(chainUpdate); - vm.stopPrank(); - } - - function _addBridge(uint256 chainId, uint256 bucketCapacity) internal { - require(tokens[chainId] == address(0), "BRIDGE_ALREADY_EXISTS"); - - chainsList.push(chainId); - - // GHO Token - GhoToken ghoToken = new GhoToken(AAVE_DAO); - tokens[chainId] = address(ghoToken); - - // UpgradeableTokenPool - address bridgeTokenPool = _deployUpgradeableBurnMintTokenPool( - address(ghoToken), - ARM_PROXY, - ROUTER, - OWNER, - PROXY_ADMIN - ); - pools[chainId] = bridgeTokenPool; - - // Facilitator - bucketCapacities[chainId] = bucketCapacity; - vm.stopPrank(); - vm.startPrank(AAVE_DAO); - ghoToken.grantRole(ghoToken.FACILITATOR_MANAGER_ROLE(), AAVE_DAO); - ghoToken.addFacilitator(bridgeTokenPool, "UpgradeableTokenPool", uint128(bucketCapacity)); - vm.stopPrank(); - } - - function _updateBridgeLimit(uint256 newBridgeLimit) internal { - vm.prank(OWNER); - UpgradeableLockReleaseTokenPool(pools[0]).setBridgeLimit(newBridgeLimit); - } - - function _updateBucketCapacity(uint256 chainId, uint256 newBucketCapacity) internal { - bucketCapacities[chainId] = newBucketCapacity; - vm.startPrank(AAVE_DAO); - GhoToken(tokens[chainId]).grantRole(GhoToken(tokens[chainId]).BUCKET_MANAGER_ROLE(), AAVE_DAO); - GhoToken(tokens[chainId]).setFacilitatorBucketCapacity(pools[chainId], uint128(newBucketCapacity)); - vm.stopPrank(); - } - - function _getMaxToBridgeOut(uint256 fromChain) internal view returns (uint256) { - if (_isEthereumChain(fromChain)) { - UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(pools[0]); - uint256 bridgeLimit = ethTokenPool.getBridgeLimit(); - uint256 currentBridged = ethTokenPool.getCurrentBridgedAmount(); - return currentBridged > bridgeLimit ? 0 : bridgeLimit - currentBridged; - } else { - (, uint256 level) = GhoToken(tokens[fromChain]).getFacilitatorBucket(pools[fromChain]); - return level; - } - } - - function _getMaxToBridgeIn(uint256 toChain) internal view returns (uint256) { - if (_isEthereumChain(toChain)) { - UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(pools[0]); - return ethTokenPool.getCurrentBridgedAmount(); - } else { - (uint256 capacity, uint256 level) = GhoToken(tokens[toChain]).getFacilitatorBucket(pools[toChain]); - return level > capacity ? 
0 : capacity - level; - } - } - - function _getCapacity(uint256 chain) internal view returns (uint256) { - require(!_isEthereumChain(chain), "No bucket on Ethereum"); - (uint256 capacity, ) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); - return capacity; - } - - function _getLevel(uint256 chain) internal view returns (uint256) { - require(!_isEthereumChain(chain), "No bucket on Ethereum"); - (, uint256 level) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); - return level; - } - - function _bridgeGho(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { - _moveGhoOrigin(fromChain, toChain, user, amount); - _moveGhoDestination(fromChain, toChain, user, amount); - } - - function _moveGhoOrigin(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { - // Simulate CCIP pull of funds - vm.prank(user); - GhoToken(tokens[fromChain]).transfer(pools[fromChain], amount); - - vm.prank(RAMP); - IPool(pools[fromChain]).lockOrBurn(user, bytes(""), amount, uint64(toChain), bytes("")); - - if (_isEthereumChain(fromChain)) { - // Lock - bridged += amount; - } else { - // Burn - bucketLevels[fromChain] -= amount; - liquidity[fromChain] -= amount; - remoteLiquidity -= amount; - } - } - - function _moveGhoDestination(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { - vm.prank(RAMP); - IPool(pools[toChain]).releaseOrMint(bytes(""), user, amount, uint64(fromChain), bytes("")); - - if (_isEthereumChain(toChain)) { - // Release - bridged -= amount; - } else { - // Mint - bucketLevels[toChain] += amount; - liquidity[toChain] += amount; - remoteLiquidity += amount; - } - } - - function _isEthereumChain(uint256 chainId) internal pure returns (bool) { - return chainId == 0; - } - - function _assertInvariant() internal { - // Check bridged - assertEq(UpgradeableLockReleaseTokenPool(pools[0]).getCurrentBridgedAmount(), bridged); - - // Check levels and buckets - uint256 sumLevels; - uint256 chainId; - uint256 capacity; - uint256 level; - for (uint i = 1; i < chainsList.length; i++) { - // not counting Ethereum -{0} - chainId = chainsList[i]; - (capacity, level) = GhoToken(tokens[chainId]).getFacilitatorBucket(pools[chainId]); - - // Aggregate levels - sumLevels += level; - - assertEq(capacity, bucketCapacities[chainId], "wrong bucket capacity"); - assertEq(level, bucketLevels[chainId], "wrong bucket level"); - - assertEq( - capacity, - UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit(), - "capacity must be equal to bridgeLimit" - ); - assertLe( - level, - UpgradeableLockReleaseTokenPool(pools[0]).getBridgeLimit(), - "level cannot be higher than bridgeLimit" - ); - } - // Check bridged is equal to sum of levels - assertEq(UpgradeableLockReleaseTokenPool(pools[0]).getCurrentBridgedAmount(), sumLevels, "wrong bridged"); - assertEq(remoteLiquidity, sumLevels, "wrong bridged"); - } -} diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol index 52ef9f5d6d..a3432a53ed 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol @@ -14,23 +14,19 @@ import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMi import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {IPool} from "../../../interfaces/pools/IPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; 
+import {BaseTest} from "../../BaseTest.t.sol"; import {E2E} from "../End2End.t.sol"; +import {GHOBaseTest} from "./GHOBaseTest.t.sol"; -contract GHOTokenPoolEthereumE2E is E2E { +contract GHOTokenPoolEthereumE2E is E2E, GHOBaseTest { using Internal for Internal.EVM2EVMMessage; - address internal USER = makeAddr("user"); - address internal AAVE_DAO = makeAddr("AAVE_DAO"); - address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); - - uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; - IBurnMintERC20 internal srcGhoToken; IBurnMintERC20 internal dstGhoToken; UpgradeableLockReleaseTokenPool internal srcGhoTokenPool; UpgradeableBurnMintTokenPool internal dstGhoTokenPool; - function setUp() public virtual override { + function setUp() public virtual override(E2E, BaseTest) { E2E.setUp(); // Deploy GHO Token on source chain diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol index 44038b9eb4..3ce21cef05 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol @@ -17,20 +17,17 @@ import {Router} from "../../../Router.sol"; import {IERC165} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; import {IERC20} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; import {RouterSetup} from "../../router/RouterSetup.t.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; +import {GHOBaseTest} from "./GHOBaseTest.t.sol"; -contract GHOTokenPoolEthereumSetup is RouterSetup { +contract GHOTokenPoolEthereumSetup is RouterSetup, GHOBaseTest { IERC20 internal s_token; UpgradeableLockReleaseTokenPool internal s_ghoTokenPool; address internal s_allowedOnRamp = address(123); address internal s_allowedOffRamp = address(234); - address internal AAVE_DAO = makeAddr("AAVE_DAO"); - address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); - - uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; - - function setUp() public virtual override { + function setUp() public virtual override(RouterSetup, BaseTest) { RouterSetup.setUp(); // GHO deployment diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol index ccad39ce6c..017b19427d 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol @@ -14,23 +14,19 @@ import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMi import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {IPool} from "../../../interfaces/pools/IPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; import {E2E} from "../End2End.t.sol"; +import {GHOBaseTest} from "./GHOBaseTest.t.sol"; -contract GHOTokenPoolRemoteE2E is E2E { +contract GHOTokenPoolRemoteE2E is E2E, GHOBaseTest { using Internal for Internal.EVM2EVMMessage; - address internal USER = makeAddr("user"); - address internal AAVE_DAO = makeAddr("AAVE_DAO"); - address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); - - uint256 internal INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; - IBurnMintERC20 internal srcGhoToken; IBurnMintERC20 internal dstGhoToken; UpgradeableBurnMintTokenPool internal srcGhoTokenPool; UpgradeableLockReleaseTokenPool internal 
dstGhoTokenPool; - function setUp() public virtual override { + function setUp() public virtual override(E2E, BaseTest) { E2E.setUp(); // Deploy GHO Token on source chain diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol index 402ca41b17..6492e35d94 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol @@ -10,8 +10,10 @@ import {Router} from "../../../Router.sol"; import {BurnMintERC677} from "../../../../shared/token/ERC677/BurnMintERC677.sol"; import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; import {RouterSetup} from "../../router/RouterSetup.t.sol"; +import {BaseTest} from "../../BaseTest.t.sol"; +import {GHOBaseTest} from "./GHOBaseTest.t.sol"; -contract GHOTokenPoolRemoteSetup is RouterSetup { +contract GHOTokenPoolRemoteSetup is RouterSetup, GHOBaseTest { event Transfer(address indexed from, address indexed to, uint256 value); event TokensConsumed(uint256 tokens); event Burned(address indexed sender, uint256 amount); @@ -22,10 +24,7 @@ contract GHOTokenPoolRemoteSetup is RouterSetup { UpgradeableBurnMintTokenPool internal s_pool; - address internal AAVE_DAO = makeAddr("AAVE_DAO"); - address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); - - function setUp() public virtual override { + function setUp() public virtual override(RouterSetup, BaseTest) { RouterSetup.setUp(); // GHO deployment diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol index de07af3ca1..03725edab0 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol @@ -8,33 +8,17 @@ import {UpgradeableLockReleaseTokenPool} from "../../../../pools/GHO/Upgradeable import {UpgradeableTokenPool} from "../../../../pools/GHO/UpgradeableTokenPool.sol"; import {RateLimiter} from "../../../../libraries/RateLimiter.sol"; import {BaseTest} from "../../../BaseTest.t.sol"; +import {GHOBaseTest} from "../GHOBaseTest.t.sol"; -contract GHOTokenPoolHandler is BaseTest { - address internal ARM_PROXY = makeAddr("ARM_PROXY"); - address internal ROUTER = makeAddr("ROUTER"); - address internal RAMP = makeAddr("RAMP"); - address internal AAVE_DAO = makeAddr("AAVE_DAO"); - address internal PROXY_ADMIN = makeAddr("PROXY_ADMIN"); - address internal USER = makeAddr("USER"); - - uint256 public immutable INITIAL_BRIDGE_LIMIT = 100e6 * 1e18; - - uint256[] public chainsList; - mapping(uint256 => address) public pools; // chainId => bridgeTokenPool - mapping(uint256 => address) public tokens; // chainId => ghoToken - mapping(uint256 => uint256) public bucketCapacities; // chainId => bucketCapacities - mapping(uint256 => uint256) public bucketLevels; // chainId => bucketLevels - mapping(uint256 => uint256) public liquidity; // chainId => liquidity - uint256 public remoteLiquidity; - uint256 public bridged; - bool public capacityBelowLevelUpdate; +contract GHOTokenPoolHandler is GHOBaseTest { + UtilsStorage public s; constructor() { // Ethereum with id 0 - chainsList.push(0); - tokens[0] = address(new GhoToken(AAVE_DAO)); - pools[0] = _deployUpgradeableLockReleaseTokenPool( - tokens[0], + s.chainsList.push(0); + s.tokens[0] = address(new GhoToken(AAVE_DAO)); + s.pools[0] = 
_deployUpgradeableLockReleaseTokenPool( + s.tokens[0], ARM_PROXY, ROUTER, OWNER, @@ -48,13 +32,13 @@ contract GHOTokenPoolHandler is BaseTest { vm.mockCall(ARM_PROXY, abi.encodeWithSelector(bytes4(keccak256("isCursed()"))), abi.encode(false)); // Arbitrum - _addBridge(1, INITIAL_BRIDGE_LIMIT); - _enableLane(0, 1); + _addBridge(s, 1, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 1); // Avalanche - _addBridge(2, INITIAL_BRIDGE_LIMIT); - _enableLane(0, 2); - _enableLane(1, 2); + _addBridge(s, 2, INITIAL_BRIDGE_LIMIT); + _enableLane(s, 0, 2); + _enableLane(s, 1, 2); } /// forge-config: ccip.fuzz.runs = 500 @@ -62,13 +46,13 @@ contract GHOTokenPoolHandler is BaseTest { fromChain = bound(fromChain, 0, 2); toChain = bound(toChain, 0, 2); vm.assume(fromChain != toChain); - uint256 maxBalance = GhoToken(tokens[fromChain]).balanceOf(address(this)); - uint256 maxToBridge = _getMaxToBridgeOut(fromChain); + uint256 maxBalance = GhoToken(s.tokens[fromChain]).balanceOf(address(this)); + uint256 maxToBridge = _getMaxToBridgeOut(s, fromChain); uint256 maxAmount = maxBalance > maxToBridge ? maxToBridge : maxBalance; amount = bound(amount, 0, maxAmount); if (amount > 0) { - _bridgeGho(fromChain, toChain, address(this), amount); + _bridgeGho(s, fromChain, toChain, address(this), amount); } } @@ -76,165 +60,66 @@ contract GHOTokenPoolHandler is BaseTest { function updateBucketCapacity(uint256 chain, uint128 newCapacity) public { chain = bound(chain, 1, 2); uint256 otherChain = (chain % 2) + 1; - vm.assume(newCapacity >= bridged); + vm.assume(newCapacity >= s.bridged); - uint256 oldCapacity = bucketCapacities[chain]; + uint256 oldCapacity = s.bucketCapacities[chain]; - if (newCapacity < bucketLevels[chain]) { - capacityBelowLevelUpdate = true; + if (newCapacity < s.bucketLevels[chain]) { + s.capacityBelowLevelUpdate = true; } else { - capacityBelowLevelUpdate = false; + s.capacityBelowLevelUpdate = false; } if (newCapacity > oldCapacity) { // Increase - _updateBucketCapacity(chain, newCapacity); + _updateBucketCapacity(s, chain, newCapacity); // keep bridge limit as the minimum bucket capacity - if (newCapacity < bucketCapacities[otherChain]) { - _updateBridgeLimit(newCapacity); + if (newCapacity < s.bucketCapacities[otherChain]) { + _updateBridgeLimit(s, newCapacity); } } else { // Reduction // keep bridge limit as the minimum bucket capacity - if (newCapacity < bucketCapacities[otherChain]) { - _updateBridgeLimit(newCapacity); + if (newCapacity < s.bucketCapacities[otherChain]) { + _updateBridgeLimit(s, newCapacity); } - _updateBucketCapacity(chain, newCapacity); + _updateBucketCapacity(s, chain, newCapacity); } } - function _enableLane(uint256 fromId, uint256 toId) internal { - // from - UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); - RateLimiter.Config memory emptyRateConfig = RateLimiter.Config(false, 0, 0); - chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ - remoteChainSelector: uint64(toId), - allowed: true, - outboundRateLimiterConfig: emptyRateConfig, - inboundRateLimiterConfig: emptyRateConfig - }); - - vm.startPrank(OWNER); - UpgradeableTokenPool(pools[fromId]).applyChainUpdates(chainUpdate); - - // to - chainUpdate[0].remoteChainSelector = uint64(fromId); - UpgradeableTokenPool(pools[toId]).applyChainUpdates(chainUpdate); - vm.stopPrank(); - } - - function _addBridge(uint256 chainId, uint256 bucketCapacity) internal { - require(tokens[chainId] == address(0), "BRIDGE_ALREADY_EXISTS"); - - chainsList.push(chainId); - - // GHO Token - GhoToken 
ghoToken = new GhoToken(AAVE_DAO); - tokens[chainId] = address(ghoToken); - - // UpgradeableTokenPool - address bridgeTokenPool = _deployUpgradeableBurnMintTokenPool( - address(ghoToken), - ARM_PROXY, - ROUTER, - OWNER, - PROXY_ADMIN - ); - pools[chainId] = bridgeTokenPool; - - // Facilitator - bucketCapacities[chainId] = bucketCapacity; - vm.stopPrank(); - vm.startPrank(AAVE_DAO); - ghoToken.grantRole(ghoToken.FACILITATOR_MANAGER_ROLE(), AAVE_DAO); - ghoToken.addFacilitator(bridgeTokenPool, "UpgradeableTokenPool", uint128(bucketCapacity)); - vm.stopPrank(); - } - - function _updateBridgeLimit(uint256 newBridgeLimit) internal { - vm.stopPrank(); - vm.startPrank(OWNER); - UpgradeableLockReleaseTokenPool(pools[0]).setBridgeLimit(newBridgeLimit); - vm.stopPrank(); - } - - function _updateBucketCapacity(uint256 chainId, uint256 newBucketCapacity) internal { - bucketCapacities[chainId] = newBucketCapacity; - vm.stopPrank(); - vm.startPrank(AAVE_DAO); - GhoToken(tokens[chainId]).grantRole(GhoToken(tokens[chainId]).BUCKET_MANAGER_ROLE(), AAVE_DAO); - GhoToken(tokens[chainId]).setFacilitatorBucketCapacity(pools[chainId], uint128(newBucketCapacity)); - vm.stopPrank(); + function getChainsList() public view returns (uint256[] memory) { + return s.chainsList; } - function _getCapacity(uint256 chain) internal view returns (uint256) { - require(!_isEthereumChain(chain), "No bucket on Ethereum"); - (uint256 capacity, ) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); - return capacity; + function pools(uint256 i) public view returns (address) { + return s.pools[i]; } - function _getLevel(uint256 chain) internal view returns (uint256) { - require(!_isEthereumChain(chain), "No bucket on Ethereum"); - (, uint256 level) = GhoToken(tokens[chain]).getFacilitatorBucket(pools[chain]); - return level; + function tokens(uint256 i) public view returns (address) { + return s.tokens[i]; } - function _getMaxToBridgeOut(uint256 fromChain) internal view returns (uint256) { - if (_isEthereumChain(fromChain)) { - UpgradeableLockReleaseTokenPool ethTokenPool = UpgradeableLockReleaseTokenPool(pools[0]); - uint256 bridgeLimit = ethTokenPool.getBridgeLimit(); - uint256 currentBridged = ethTokenPool.getCurrentBridgedAmount(); - return currentBridged > bridgeLimit ? 
0 : bridgeLimit - currentBridged; - } else { - (, uint256 level) = GhoToken(tokens[fromChain]).getFacilitatorBucket(pools[fromChain]); - return level; - } + function bucketCapacities(uint256 i) public view returns (uint256) { + return s.bucketCapacities[i]; } - function _bridgeGho(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { - _moveGhoOrigin(fromChain, toChain, user, amount); - _moveGhoDestination(fromChain, toChain, user, amount); + function bucketLevels(uint256 i) public view returns (uint256) { + return s.bucketLevels[i]; } - function _moveGhoOrigin(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { - // Simulate CCIP pull of funds - vm.startPrank(user); - GhoToken(tokens[fromChain]).transfer(pools[fromChain], amount); - - vm.startPrank(RAMP); - IPool(pools[fromChain]).lockOrBurn(user, bytes(""), amount, uint64(toChain), bytes("")); - - if (_isEthereumChain(fromChain)) { - // Lock - bridged += amount; - } else { - // Burn - bucketLevels[fromChain] -= amount; - liquidity[fromChain] -= amount; - remoteLiquidity -= amount; - } + function liquidity(uint256 i) public view returns (uint256) { + return s.liquidity[i]; } - function _moveGhoDestination(uint256 fromChain, uint256 toChain, address user, uint256 amount) internal { - vm.startPrank(RAMP); - IPool(pools[toChain]).releaseOrMint(bytes(""), user, amount, uint64(fromChain), bytes("")); - - if (_isEthereumChain(toChain)) { - // Release - bridged -= amount; - } else { - // Mint - bucketLevels[toChain] += amount; - liquidity[toChain] += amount; - remoteLiquidity += amount; - } + function remoteLiquidity() public view returns (uint256) { + return s.remoteLiquidity; } - function _isEthereumChain(uint256 chainId) internal pure returns (bool) { - return chainId == 0; + function bridged() public view returns (uint256) { + return s.bridged; } - function getChainsList() public view returns (uint256[] memory) { - return chainsList; + function capacityBelowLevelUpdate() public view returns (bool) { + return s.capacityBelowLevelUpdate; } } From f0f91047e50ad4e9310bc2ab31ab8c9a8f4d05e7 Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Mon, 27 May 2024 18:36:16 +0200 Subject: [PATCH 14/18] fix: Rename test files --- .../{GHOBaseTest.t.sol => GhoBaseTest.t.sol} | 2 +- ...ereum.t.sol => GhoTokenPoolEthereum.t.sol} | 30 +++++++++---------- ... 
=> GhoTokenPoolEthereumBridgeLimit.t.sol} | 8 ++--- ...2E.t.sol => GhoTokenPoolEthereumE2E.t.sol} | 4 +-- ....t.sol => GhoTokenPoolEthereumSetup.t.sol} | 4 +-- ...lRemote.t.sol => GhoTokenPoolRemote.t.sol} | 8 ++--- ...eE2E.t.sol => GhoTokenPoolRemoteE2E.t.sol} | 4 +-- ...up.t.sol => GhoTokenPoolRemoteSetup.t.sol} | 4 +-- ...kenPoolEthereumBridgeLimitInvariant.t.sol} | 8 ++--- ...andler.t.sol => GhoTokenPoolHandler.t.sol} | 4 +-- 10 files changed, 38 insertions(+), 38 deletions(-) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOBaseTest.t.sol => GhoBaseTest.t.sol} (99%) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolEthereum.t.sol => GhoTokenPoolEthereum.t.sol} (96%) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolEthereumBridgeLimit.t.sol => GhoTokenPoolEthereumBridgeLimit.t.sol} (99%) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolEthereumE2E.t.sol => GhoTokenPoolEthereumE2E.t.sol} (99%) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolEthereumSetup.t.sol => GhoTokenPoolEthereumSetup.t.sol} (96%) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolRemote.t.sol => GhoTokenPoolRemote.t.sol} (97%) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolRemoteE2E.t.sol => GhoTokenPoolRemoteE2E.t.sol} (99%) rename contracts/src/v0.8/ccip/test/pools/GHO/{GHOTokenPoolRemoteSetup.t.sol => GhoTokenPoolRemoteSetup.t.sol} (96%) rename contracts/src/v0.8/ccip/test/pools/GHO/invariant/{GHOTokenPoolEthereumBridgeLimitInvariant.t.sol => GhoTokenPoolEthereumBridgeLimitInvariant.t.sol} (91%) rename contracts/src/v0.8/ccip/test/pools/GHO/invariant/{GHOTokenPoolHandler.t.sol => GhoTokenPoolHandler.t.sol} (97%) diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOBaseTest.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol similarity index 99% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOBaseTest.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol index 8893095015..f14e67fff9 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOBaseTest.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol @@ -12,7 +12,7 @@ import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol" import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {BaseTest} from "../../BaseTest.t.sol"; -abstract contract GHOBaseTest is BaseTest { +abstract contract GhoBaseTest is BaseTest { address internal ARM_PROXY = makeAddr("ARM_PROXY"); address internal ROUTER = makeAddr("ROUTER"); address internal RAMP = makeAddr("RAMP"); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol similarity index 96% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol index 8eacd4232e..46da97244c 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereum.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol @@ -13,9 +13,9 @@ import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol" import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {IERC165} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; -import {GHOTokenPoolEthereumSetup} from "./GHOTokenPoolEthereumSetup.t.sol"; +import {GhoTokenPoolEthereumSetup} from 
"./GhoTokenPoolEthereumSetup.t.sol"; -contract GHOTokenPoolEthereum_setRebalancer is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_setRebalancer is GhoTokenPoolEthereumSetup { function testSetRebalancerSuccess() public { assertEq(address(s_ghoTokenPool.getRebalancer()), OWNER); changePrank(AAVE_DAO); @@ -31,7 +31,7 @@ contract GHOTokenPoolEthereum_setRebalancer is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_lockOrBurn is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_lockOrBurn is GhoTokenPoolEthereumSetup { error SenderNotAllowed(address sender); event Locked(address indexed sender, uint256 amount); @@ -86,12 +86,12 @@ contract GHOTokenPoolEthereum_lockOrBurn is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_releaseOrMint is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_releaseOrMint is GhoTokenPoolEthereumSetup { event TokensConsumed(uint256 tokens); event Released(address indexed sender, address indexed recipient, uint256 amount); function setUp() public virtual override { - GHOTokenPoolEthereumSetup.setUp(); + GhoTokenPoolEthereumSetup.setUp(); UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); chainUpdate[0] = UpgradeableTokenPool.ChainUpdate({ @@ -247,7 +247,7 @@ contract GHOTokenPoolEthereum_releaseOrMint is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_canAcceptLiquidity is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_canAcceptLiquidity is GhoTokenPoolEthereumSetup { function test_CanAcceptLiquiditySuccess() public { assertEq(true, s_ghoTokenPool.canAcceptLiquidity()); @@ -257,7 +257,7 @@ contract GHOTokenPoolEthereum_canAcceptLiquidity is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_provideLiquidity is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_provideLiquidity is GhoTokenPoolEthereumSetup { function testFuzz_ProvideLiquiditySuccess(uint256 amount) public { vm.assume(amount < type(uint128).max); @@ -293,7 +293,7 @@ contract GHOTokenPoolEthereum_provideLiquidity is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_withdrawalLiquidity is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_withdrawalLiquidity is GhoTokenPoolEthereumSetup { function testFuzz_WithdrawalLiquiditySuccess(uint256 amount) public { vm.assume(amount < type(uint128).max); @@ -329,7 +329,7 @@ contract GHOTokenPoolEthereum_withdrawalLiquidity is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_supportsInterface is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_supportsInterface is GhoTokenPoolEthereumSetup { function testSupportsInterfaceSuccess() public { assertTrue(s_ghoTokenPool.supportsInterface(s_ghoTokenPool.getLockReleaseInterfaceId())); assertTrue(s_ghoTokenPool.supportsInterface(type(IPool).interfaceId)); @@ -337,7 +337,7 @@ contract GHOTokenPoolEthereum_supportsInterface is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_setChainRateLimiterConfig is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_setChainRateLimiterConfig is GhoTokenPoolEthereumSetup { event ConfigChanged(RateLimiter.Config); event ChainConfigured( uint64 chainSelector, @@ -348,7 +348,7 @@ contract GHOTokenPoolEthereum_setChainRateLimiterConfig is GHOTokenPoolEthereumS uint64 internal s_remoteChainSelector; function setUp() public virtual override { - GHOTokenPoolEthereumSetup.setUp(); + GhoTokenPoolEthereumSetup.setUp(); UpgradeableTokenPool.ChainUpdate[] memory chainUpdates = new 
UpgradeableTokenPool.ChainUpdate[](1); s_remoteChainSelector = 123124; chainUpdates[0] = UpgradeableTokenPool.ChainUpdate({ @@ -457,7 +457,7 @@ contract GHOTokenPoolEthereum_setChainRateLimiterConfig is GHOTokenPoolEthereumS } } -contract GHOTokenPoolEthereum_setRateLimitAdmin is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_setRateLimitAdmin is GhoTokenPoolEthereumSetup { function testSetRateLimitAdminSuccess() public { assertEq(address(0), s_ghoTokenPool.getRateLimitAdmin()); changePrank(AAVE_DAO); @@ -475,7 +475,7 @@ contract GHOTokenPoolEthereum_setRateLimitAdmin is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_setBridgeLimit is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_setBridgeLimit is GhoTokenPoolEthereumSetup { event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); function testSetBridgeLimitAdminSuccess() public { @@ -592,7 +592,7 @@ contract GHOTokenPoolEthereum_setBridgeLimit is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_setBridgeLimitAdmin is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_setBridgeLimitAdmin is GhoTokenPoolEthereumSetup { function testSetBridgeLimitAdminSuccess() public { assertEq(address(0), s_ghoTokenPool.getBridgeLimitAdmin()); @@ -613,7 +613,7 @@ contract GHOTokenPoolEthereum_setBridgeLimitAdmin is GHOTokenPoolEthereumSetup { } } -contract GHOTokenPoolEthereum_upgradeability is GHOTokenPoolEthereumSetup { +contract GhoTokenPoolEthereum_upgradeability is GhoTokenPoolEthereumSetup { function testInitialization() public { // Upgradeability assertEq(s_ghoTokenPool.REVISION(), 1); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumBridgeLimit.t.sol similarity index 99% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumBridgeLimit.t.sol index 7ed07ad79c..fe65e7fae0 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumBridgeLimit.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumBridgeLimit.t.sol @@ -9,9 +9,9 @@ import {UpgradeableLockReleaseTokenPool} from "../../../pools/GHO/UpgradeableLoc import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; import {UpgradeableTokenPool} from "../../../pools/GHO/UpgradeableTokenPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; -import {GHOBaseTest} from "./GHOBaseTest.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; -contract GHOTokenPoolEthereumBridgeLimitSetup is GHOBaseTest { +contract GhoTokenPoolEthereumBridgeLimitSetup is GhoBaseTest { UtilsStorage public s; function setUp() public virtual override { @@ -70,7 +70,7 @@ contract GHOTokenPoolEthereumBridgeLimitSetup is GHOBaseTest { } } -contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBridgeLimitSetup { +contract GhoTokenPoolEthereumBridgeLimitSimpleScenario is GhoTokenPoolEthereumBridgeLimitSetup { function setUp() public virtual override { super.setUp(); @@ -702,7 +702,7 @@ contract GHOTokenPoolEthereumBridgeLimitSimpleScenario is GHOTokenPoolEthereumBr } } -contract GHOTokenPoolEthereumBridgeLimitTripleScenario is GHOTokenPoolEthereumBridgeLimitSetup { +contract GhoTokenPoolEthereumBridgeLimitTripleScenario is GhoTokenPoolEthereumBridgeLimitSetup { function setUp() public virtual override { super.setUp(); diff --git 
a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumE2E.t.sol similarity index 99% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumE2E.t.sol index a3432a53ed..7cc15b8498 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumE2E.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumE2E.t.sol @@ -16,9 +16,9 @@ import {IPool} from "../../../interfaces/pools/IPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {BaseTest} from "../../BaseTest.t.sol"; import {E2E} from "../End2End.t.sol"; -import {GHOBaseTest} from "./GHOBaseTest.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; -contract GHOTokenPoolEthereumE2E is E2E, GHOBaseTest { +contract GhoTokenPoolEthereumE2E is E2E, GhoBaseTest { using Internal for Internal.EVM2EVMMessage; IBurnMintERC20 internal srcGhoToken; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumSetup.t.sol similarity index 96% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumSetup.t.sol index 3ce21cef05..b0ffeb3259 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolEthereumSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereumSetup.t.sol @@ -18,9 +18,9 @@ import {IERC165} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts import {IERC20} from "../../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; import {RouterSetup} from "../../router/RouterSetup.t.sol"; import {BaseTest} from "../../BaseTest.t.sol"; -import {GHOBaseTest} from "./GHOBaseTest.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; -contract GHOTokenPoolEthereumSetup is RouterSetup, GHOBaseTest { +contract GhoTokenPoolEthereumSetup is RouterSetup, GhoBaseTest { IERC20 internal s_token; UpgradeableLockReleaseTokenPool internal s_ghoTokenPool; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol similarity index 97% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol index b1027365c2..bba33d8066 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemote.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol @@ -12,9 +12,9 @@ import {EVM2EVMOffRamp} from "../../../offRamp/EVM2EVMOffRamp.sol"; import {BurnMintTokenPool} from "../../../pools/BurnMintTokenPool.sol"; import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; -import {GHOTokenPoolRemoteSetup} from "./GHOTokenPoolRemoteSetup.t.sol"; +import {GhoTokenPoolRemoteSetup} from "./GhoTokenPoolRemoteSetup.t.sol"; -contract GHOTokenPoolRemote_lockOrBurn is GHOTokenPoolRemoteSetup { +contract GhoTokenPoolRemote_lockOrBurn is GhoTokenPoolRemoteSetup { function testSetupSuccess() public { assertEq(address(s_burnMintERC677), address(s_pool.getToken())); assertEq(address(s_mockARM), s_pool.getArmProxy()); @@ -112,7 +112,7 @@ contract GHOTokenPoolRemote_lockOrBurn is GHOTokenPoolRemoteSetup { } } -contract 
GHOTokenPoolRemote_releaseOrMint is GHOTokenPoolRemoteSetup { +contract GhoTokenPoolRemote_releaseOrMint is GhoTokenPoolRemoteSetup { function testPoolMintSuccess() public { uint256 amount = 1e19; vm.startPrank(s_burnMintOffRamp); @@ -179,7 +179,7 @@ contract GHOTokenPoolRemote_releaseOrMint is GHOTokenPoolRemoteSetup { } } -contract GHOTokenPoolEthereum_upgradeability is GHOTokenPoolRemoteSetup { +contract GhoTokenPoolEthereum_upgradeability is GhoTokenPoolRemoteSetup { function testInitialization() public { // Upgradeability assertEq(s_pool.REVISION(), 1); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteE2E.t.sol similarity index 99% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteE2E.t.sol index 017b19427d..c77457506b 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteE2E.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteE2E.t.sol @@ -16,9 +16,9 @@ import {IPool} from "../../../interfaces/pools/IPool.sol"; import {RateLimiter} from "../../../libraries/RateLimiter.sol"; import {BaseTest} from "../../BaseTest.t.sol"; import {E2E} from "../End2End.t.sol"; -import {GHOBaseTest} from "./GHOBaseTest.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; -contract GHOTokenPoolRemoteE2E is E2E, GHOBaseTest { +contract GhoTokenPoolRemoteE2E is E2E, GhoBaseTest { using Internal for Internal.EVM2EVMMessage; IBurnMintERC20 internal srcGhoToken; diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteSetup.t.sol similarity index 96% rename from contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteSetup.t.sol index 6492e35d94..8e7ab65a49 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GHOTokenPoolRemoteSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemoteSetup.t.sol @@ -11,9 +11,9 @@ import {BurnMintERC677} from "../../../../shared/token/ERC677/BurnMintERC677.sol import {UpgradeableBurnMintTokenPool} from "../../../pools/GHO/UpgradeableBurnMintTokenPool.sol"; import {RouterSetup} from "../../router/RouterSetup.t.sol"; import {BaseTest} from "../../BaseTest.t.sol"; -import {GHOBaseTest} from "./GHOBaseTest.t.sol"; +import {GhoBaseTest} from "./GhoBaseTest.t.sol"; -contract GHOTokenPoolRemoteSetup is RouterSetup, GHOBaseTest { +contract GhoTokenPoolRemoteSetup is RouterSetup, GhoBaseTest { event Transfer(address indexed from, address indexed to, uint256 value); event TokensConsumed(uint256 tokens); event Burned(address indexed sender, uint256 amount); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol similarity index 91% rename from contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol index 3e3a847bc2..4d928e5fba 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolEthereumBridgeLimitInvariant.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol @@ -5,15 +5,15 @@ import {GhoToken} from 
"@aave/gho-core/gho/GhoToken.sol"; import {UpgradeableLockReleaseTokenPool} from "../../../../pools/GHO/UpgradeableLockReleaseTokenPool.sol"; import {BaseTest} from "../../../BaseTest.t.sol"; -import {GHOTokenPoolHandler} from "./GHOTokenPoolHandler.t.sol"; +import {GhoTokenPoolHandler} from "./GhoTokenPoolHandler.t.sol"; -contract GHOTokenPoolEthereumBridgeLimitInvariant is BaseTest { - GHOTokenPoolHandler handler; +contract GhoTokenPoolEthereumBridgeLimitInvariant is BaseTest { + GhoTokenPoolHandler handler; function setUp() public override { super.setUp(); - handler = new GHOTokenPoolHandler(); + handler = new GhoTokenPoolHandler(); deal(handler.tokens(0), address(handler), handler.INITIAL_BRIDGE_LIMIT()); targetContract(address(handler)); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol similarity index 97% rename from contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol rename to contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol index 03725edab0..3c4e0e951f 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GHOTokenPoolHandler.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol @@ -8,9 +8,9 @@ import {UpgradeableLockReleaseTokenPool} from "../../../../pools/GHO/Upgradeable import {UpgradeableTokenPool} from "../../../../pools/GHO/UpgradeableTokenPool.sol"; import {RateLimiter} from "../../../../libraries/RateLimiter.sol"; import {BaseTest} from "../../../BaseTest.t.sol"; -import {GHOBaseTest} from "../GHOBaseTest.t.sol"; +import {GhoBaseTest} from "../GhoBaseTest.t.sol"; -contract GHOTokenPoolHandler is GHOBaseTest { +contract GhoTokenPoolHandler is GhoBaseTest { UtilsStorage public s; constructor() { From 9041c4a8a479a6c582054c17dc362a170cab0c3a Mon Sep 17 00:00:00 2001 From: Nissan Levi <124057587+nisnislevi@users.noreply.github.com> Date: Tue, 28 May 2024 14:28:51 +0300 Subject: [PATCH 15/18] Certora review (#10) * remove space at the end of file name * initial version of certora directory * spec for CCIP * clean and add yml * fix yml * fix yml * clean and document spec file * for PR --- .github/workflows/certora.yml | 55 +++++++++++++++ certora/Makefile | 24 +++++++ certora/confs/ccip.conf | 18 +++++ certora/harness/SimpleERC20.sol | 58 ++++++++++++++++ certora/munged/.gitignore | 2 + certora/specs/ccip.spec | 119 ++++++++++++++++++++++++++++++++ 6 files changed, 276 insertions(+) create mode 100644 .github/workflows/certora.yml create mode 100644 certora/Makefile create mode 100644 certora/confs/ccip.conf create mode 100644 certora/harness/SimpleERC20.sol create mode 100644 certora/munged/.gitignore create mode 100644 certora/specs/ccip.spec diff --git a/.github/workflows/certora.yml b/.github/workflows/certora.yml new file mode 100644 index 0000000000..e3a89ebf27 --- /dev/null +++ b/.github/workflows/certora.yml @@ -0,0 +1,55 @@ +name: certora + +on: + push: + branches: + - main + - certora + - ccip-gho + pull_request: + branches: + - main + - certora + - ccip-gho + + workflow_dispatch: + +jobs: + verify: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Install python + uses: actions/setup-python@v2 + with: { python-version: 3.9 } + + - name: Install java + uses: actions/setup-java@v1 + with: { java-version: '11', java-package: jre } + + - name: Install certora cli + run: pip install certora-cli==7.6.3 + + - name: 
Install solc + run: | + wget https://github.com/ethereum/solidity/releases/download/v0.8.10/solc-static-linux + chmod +x solc-static-linux + sudo mv solc-static-linux /usr/local/bin/solc8.10 + + - name: Verify rule ${{ matrix.rule }} + run: | + echo "key length" ${#CERTORAKEY} + certoraRun certora/confs/${{ matrix.rule }} + env: + CERTORAKEY: ${{ secrets.CERTORAKEY }} + + strategy: + fail-fast: false + max-parallel: 16 + matrix: + rule: + - ccip.conf diff --git a/certora/Makefile b/certora/Makefile new file mode 100644 index 0000000000..0e33459cab --- /dev/null +++ b/certora/Makefile @@ -0,0 +1,24 @@ +default: help + +PATCH = applyHarness.patch +CONTRACTS_DIR = ../contracts +MUNGED_DIR = munged + +help: + @echo "usage:" + @echo " make clean: remove all generated files (those ignored by git)" + @echo " make $(MUNGED_DIR): create $(MUNGED_DIR) directory by applying the patch file to $(CONTRACTS_DIR)" + @echo " make record: record a new patch file capturing the differences between $(CONTRACTS_DIR) and $(MUNGED_DIR)" + +munged: $(wildcard $(CONTRACTS_DIR)/*.sol) $(PATCH) + rm -rf $@ + cp -r $(CONTRACTS_DIR) $@ + patch -p0 -d $@ < $(PATCH) + +record: + diff -ruN $(CONTRACTS_DIR) $(MUNGED_DIR) | sed 's+\.\./contracts/++g' | sed 's+munged/++g' > $(PATCH) + +clean: + git clean -fdX + touch $(PATCH) + diff --git a/certora/confs/ccip.conf b/certora/confs/ccip.conf new file mode 100644 index 0000000000..003f089502 --- /dev/null +++ b/certora/confs/ccip.conf @@ -0,0 +1,18 @@ +{ + "files": [ + "contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol", + "certora/harness/SimpleERC20.sol" + ], + "link": [ + "UpgradeableLockReleaseTokenPool:i_token=SimpleERC20" + ], + "optimistic_loop": true, + "process": "emv", + "prover_args": ["-depth 10","-mediumTimeout 700"], + "smt_timeout": "600", + "solc": "solc8.10", + "verify": "UpgradeableLockReleaseTokenPool:certora/specs/ccip.spec", + "rule_sanity": "basic", + "msg": "CCIP" +} + diff --git a/certora/harness/SimpleERC20.sol b/certora/harness/SimpleERC20.sol new file mode 100644 index 0000000000..f9d14a7ff6 --- /dev/null +++ b/certora/harness/SimpleERC20.sol @@ -0,0 +1,58 @@ +// SPDX-License-Identifier: agpl-3.0 +pragma solidity ^0.8.0; + +import {IERC20} from "../../contracts/src/v0.8/vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; + +/** +A simple ERC implementation used as the underlying_asset for the verification process. 
+ */ +contract SimpleERC20 is IERC20 { + uint256 t; + mapping(address => uint256) b; + mapping(address => mapping(address => uint256)) a; + + function add(uint a, uint b) internal pure returns (uint256) { + uint c = a + b; + require(c >= a); + return c; + } + + function sub(uint a, uint b) internal pure returns (uint256) { + require(a >= b); + return a - b; + } + + function totalSupply() external view override returns (uint256) { + return t; + } + + function balanceOf(address account) external view override returns (uint256) { + return b[account]; + } + + function transfer(address recipient, uint256 amount) external override returns (bool) { + b[msg.sender] = sub(b[msg.sender], amount); + b[recipient] = add(b[recipient], amount); + return true; + } + + function allowance(address owner, address spender) external view override returns (uint256) { + return a[owner][spender]; + } + + function approve(address spender, uint256 amount) external override returns (bool) { + a[msg.sender][spender] = amount; + return true; + } + + function transferFrom( + address sender, + address recipient, + uint256 amount + ) external override returns (bool) { + b[sender] = sub(b[sender], amount); + b[recipient] = add(b[recipient], amount); + a[sender][msg.sender] = sub(a[sender][msg.sender], amount); + return true; + } +} diff --git a/certora/munged/.gitignore b/certora/munged/.gitignore new file mode 100644 index 0000000000..d6b7ef32c8 --- /dev/null +++ b/certora/munged/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/certora/specs/ccip.spec b/certora/specs/ccip.spec new file mode 100644 index 0000000000..e88cb8dab6 --- /dev/null +++ b/certora/specs/ccip.spec @@ -0,0 +1,119 @@ +/* + This is a Specification File for Smart Contract Verification with the Certora Prover. + Contract name: UpgradeableLockReleaseTokenPool +*/ + +using SimpleERC20 as erc20; + +methods { + function getCurrentBridgedAmount() external returns (uint256) envfree; + function getBridgeLimit() external returns (uint256) envfree; + function owner() external returns (address) envfree; +} + + +rule sanity { + env e; + calldataarg arg; + method f; + f(e, arg); + satisfy true; +} + + + +/* ============================================================================== + invariant: currentBridge_LEQ_bridgeLimit. + Description: The value of s_currentBridged is less than or equal to the value of s_bridgeLimit. + Note: this may be violated if one calls setBridgeLimit(newBridgeLimit) with + newBridgeLimit < s_currentBridged. + ============================================================================*/ +invariant currentBridge_LEQ_bridgeLimit() + getCurrentBridgedAmount() <= getBridgeLimit() + filtered { f -> + !f.isView && + f.selector != sig:setBridgeLimit(uint256).selector} + { + preserved initialize(address owner, address[] allowlist, address router, uint256 bridgeLimit) with (env e2) { + require getCurrentBridgedAmount()==0; + } + } + + +/* ============================================================================== + rule: withdrawLiquidity_correctness + description: The rule checks that the balance of the contract is as expected. + ============================================================================*/ +rule withdrawLiquidity_correctness(env e) { + uint256 amount; + + require e.msg.sender != currentContract; + uint256 bal_before = erc20.balanceOf(e, currentContract); + withdrawLiquidity(e, amount); + uint256 bal_after = erc20.balanceOf(e, currentContract); + + assert (to_mathint(bal_after) == bal_before - amount); +} + + +/* ============================================================================== + rule: provideLiquidity_correctness + description: The rule checks that the balance of the contract is as expected. + ============================================================================*/ +rule provideLiquidity_correctness(env e) { + uint256 amount; + + require e.msg.sender != currentContract; + uint256 bal_before = erc20.balanceOf(e, currentContract); + provideLiquidity(e, amount); + uint256 bal_after = erc20.balanceOf(e, currentContract); + + assert (to_mathint(bal_after) == bal_before + amount); +} + + +/* ============================================================================== + rule: only_lockOrBurn_can_increase_currentBridged + ============================================================================*/ +rule only_lockOrBurn_can_increase_currentBridged(env e) { + method f; + calldataarg args; + + uint256 curr_bridge_before = getCurrentBridgedAmount(); + f(e, args); + uint256 curr_bridge_after = getCurrentBridgedAmount(); + + assert + curr_bridge_after > curr_bridge_before => + f.selector==sig:lockOrBurn(address,bytes calldata,uint256,uint64,bytes calldata).selector; +} + + +/* ============================================================================== + rule: only_releaseOrMint_can_decrease_currentBridged + ============================================================================*/ +rule only_releaseOrMint_can_decrease_currentBridged(env e) { + method f; + calldataarg args; + + uint256 curr_bridge_before = getCurrentBridgedAmount(); + f(e, args); + uint256 curr_bridge_after = getCurrentBridgedAmount(); + + assert + curr_bridge_after < curr_bridge_before => + f.selector==sig:releaseOrMint(bytes memory,address,uint256,uint64,bytes memory).selector; +} + + +/* ============================================================================== + rule: only_bridgeLimitAdmin_or_owner_can_call_setBridgeLimit + ============================================================================*/ +rule only_bridgeLimitAdmin_or_owner_can_call_setBridgeLimit(env e) { + uint256 newBridgeLimit; + + setBridgeLimit(e, newBridgeLimit); + + assert e.msg.sender==getBridgeLimitAdmin(e) || e.msg.sender==owner(); +} + From f72c0f788c6e7f2600039afae9f7344ad630425e Mon Sep 17 00:00:00 2001 From: miguelmtzinf Date: Tue, 4 Jun 2024 17:35:59 +0200 Subject: [PATCH 16/18] . 
From 7c777f926b9c63b75ad4728e4e5da7cb9441c3ca Mon Sep 17 00:00:00 2001 From: miguelmtz <36620902+miguelmtzinf@users.noreply.github.com> Date: Tue, 11 Jun 2024 10:28:29 +0200 Subject: [PATCH 17/18] fix: Apply fixes based on code review (#11) * fix: Use modern version of Initializable * fix: Update diffs * fix: Add event emission on bridge limit admin update * fix: Rebuild GHO diffs * fix: Fix certora and test config files * fix: Remove unneeded revision getter --- .gitmodules | 3 + certora/confs/ccip.conf | 3 + contracts/foundry-lib/gho-core | 1 + contracts/remappings.txt | 2 +- .../GHO/UpgradeableBurnMintTokenPool.sol | 18 +--- .../GHO/UpgradeableLockReleaseTokenPool.sol | 27 ++---- .../ccip/pools/GHO/VersionedInitializable.sol | 77 ---------------- ...radeableBurnMintTokenPoolAbstract_diff.md} | 15 ++-- ...d => UpgradeableBurnMintTokenPool_diff.md} | 49 +++++------ ...> UpgradeableLockReleaseTokenPool_diff.md} | 87 ++++++++----------- ...l_diff.md => UpgradeableTokenPool_diff.md} | 36 +++++--- .../v0.8/ccip/test/mocks/MockUpgradeable.sol | 19 +--- .../ccip/test/pools/GHO/GhoBaseTest.t.sol | 13 +-- .../test/pools/GHO/GhoTokenPoolEthereum.t.sol | 19 +++- .../test/pools/GHO/GhoTokenPoolRemote.t.sol | 8 +- ...okenPoolEthereumBridgeLimitInvariant.t.sol | 4 + .../GHO/invariant/GhoTokenPoolHandler.t.sol | 19 ++-- 17 files changed, 155 insertions(+), 245 deletions(-) create mode 160000 contracts/foundry-lib/gho-core delete mode 100644 contracts/src/v0.8/ccip/pools/GHO/VersionedInitializable.sol rename contracts/src/v0.8/ccip/pools/GHO/diffs/{BurnMintTokenPoolAbstract_diff.md => UpgradeableBurnMintTokenPoolAbstract_diff.md} (68%) rename contracts/src/v0.8/ccip/pools/GHO/diffs/{BurnMintTokenPool_diff.md => UpgradeableBurnMintTokenPool_diff.md} (76%) rename contracts/src/v0.8/ccip/pools/GHO/diffs/{LockReleaseTokenPool_diff.md => UpgradeableLockReleaseTokenPool_diff.md} (80%) rename contracts/src/v0.8/ccip/pools/GHO/diffs/{TokenPool_diff.md => UpgradeableTokenPool_diff.md} (61%) diff --git a/.gitmodules b/.gitmodules index 21fec76324..a8b5a9aeaf 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,3 +4,6 @@ [submodule "contracts/foundry-lib/solidity-utils"] path = contracts/foundry-lib/solidity-utils url = https://github.com/bgd-labs/solidity-utils +[submodule "contracts/foundry-lib/gho-core"] + path = contracts/foundry-lib/gho-core + url = https://github.com/aave/gho-core diff --git a/certora/confs/ccip.conf b/certora/confs/ccip.conf index 003f089502..0bd201b8d4 100644 --- a/certora/confs/ccip.conf +++ b/certora/confs/ccip.conf @@ -3,6 +3,9 @@ "contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol", "certora/harness/SimpleERC20.sol" ], + "packages": [ + "solidity-utils/=contracts/foundry-lib/solidity-utils/src/" + ], "link": [ "UpgradeableLockReleaseTokenPool:i_token=SimpleERC20" ], diff --git a/contracts/foundry-lib/gho-core b/contracts/foundry-lib/gho-core new file mode 160000 index 0000000000..a8d05e6e72 --- /dev/null +++ b/contracts/foundry-lib/gho-core @@ -0,0 +1 @@ +Subproject commit a8d05e6e72409aa5ea6fd84d8a3c41e13887654d diff --git a/contracts/remappings.txt b/contracts/remappings.txt index 8fbfa33413..665a6d78a0 100644 --- a/contracts/remappings.txt +++ b/contracts/remappings.txt @@ -6,5 +6,5 @@ forge-std/=foundry-lib/forge-std/src/ hardhat/=node_modules/hardhat/ @eth-optimism/=node_modules/@eth-optimism/ @scroll-tech/=node_modules/@scroll-tech/ -@aave/gho-core/=node_modules/@aave/gho/src/contracts/ +@aave/gho-core/=foundry-lib/gho-core/src/contracts/ 
solidity-utils/=foundry-lib/solidity-utils/src/ diff --git a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol index cc0f24af39..58be87812f 100644 --- a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol @@ -1,6 +1,8 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.0; +import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; + import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; @@ -8,15 +10,14 @@ import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; import {IRouter} from "../../interfaces/IRouter.sol"; -import {VersionedInitializable} from "./VersionedInitializable.sol"; /// @title UpgradeableBurnMintTokenPool /// @author Aave Labs /// @notice Upgradeable version of Chainlink's CCIP BurnMintTokenPool /// @dev Contract adaptations: -/// - Implementation of VersionedInitializable to allow upgrades +/// - Implementation of Initializable to allow upgrades /// - Move of allowlist and router definition to initialization stage -contract UpgradeableBurnMintTokenPool is VersionedInitializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { +contract UpgradeableBurnMintTokenPool is Initializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { string public constant override typeAndVersion = "BurnMintTokenPool 1.4.0"; /// @dev Constructor @@ -52,15 +53,4 @@ contract UpgradeableBurnMintTokenPool is VersionedInitializable, UpgradeableBurn function _burn(uint256 amount) internal virtual override { IBurnMintERC20(address(i_token)).burn(amount); } - - /// @notice Returns the revision number - /// @return The revision number - function REVISION() public pure virtual returns (uint256) { - return 1; - } - - /// @inheritdoc VersionedInitializable - function getRevision() internal pure virtual override returns (uint256) { - return REVISION(); - } } diff --git a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol index 0fac98c708..9a30b1e977 100644 --- a/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol +++ b/contracts/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol @@ -1,6 +1,8 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.0; +import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; + import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; import {ILiquidityContainer} from "../../../rebalancer/interfaces/ILiquidityContainer.sol"; @@ -11,21 +13,15 @@ import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/tok import {SafeERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; import {IRouter} from "../../interfaces/IRouter.sol"; -import {VersionedInitializable} from "./VersionedInitializable.sol"; /// @title UpgradeableLockReleaseTokenPool /// @author Aave Labs /// @notice Upgradeable version of Chainlink's CCIP LockReleaseTokenPool /// @dev Contract adaptations: -/// - Implementation of VersionedInitializable to allow upgrades +/// - Implementation of Initializable to allow upgrades /// - Move of allowlist 
and router definition to initialization stage /// - Addition of a bridge limit to regulate the maximum amount of tokens that can be transferred out (burned/locked) -contract UpgradeableLockReleaseTokenPool is - VersionedInitializable, - UpgradeableTokenPool, - ILiquidityContainer, - ITypeAndVersion -{ +contract UpgradeableLockReleaseTokenPool is Initializable, UpgradeableTokenPool, ILiquidityContainer, ITypeAndVersion { using SafeERC20 for IERC20; error InsufficientLiquidity(); @@ -34,7 +30,9 @@ contract UpgradeableLockReleaseTokenPool is error BridgeLimitExceeded(uint256 bridgeLimit); error NotEnoughBridgedAmount(); + event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); + event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); string public constant override typeAndVersion = "LockReleaseTokenPool 1.4.0"; @@ -197,7 +195,9 @@ contract UpgradeableLockReleaseTokenPool is /// @dev Only callable by the owner. /// @param bridgeLimitAdmin The new bridge limit admin address. function setBridgeLimitAdmin(address bridgeLimitAdmin) external onlyOwner { + address oldAdmin = s_bridgeLimitAdmin; s_bridgeLimitAdmin = bridgeLimitAdmin; + emit BridgeLimitAdminUpdated(oldAdmin, bridgeLimitAdmin); } /// @notice Gets the bridge limit @@ -263,15 +263,4 @@ contract UpgradeableLockReleaseTokenPool is _setRateLimitConfig(remoteChainSelector, outboundConfig, inboundConfig); } - - /// @notice Returns the revision number - /// @return The revision number - function REVISION() public pure virtual returns (uint256) { - return 1; - } - - /// @inheritdoc VersionedInitializable - function getRevision() internal pure virtual override returns (uint256) { - return REVISION(); - } } diff --git a/contracts/src/v0.8/ccip/pools/GHO/VersionedInitializable.sol b/contracts/src/v0.8/ccip/pools/GHO/VersionedInitializable.sol deleted file mode 100644 index b9fb054fa0..0000000000 --- a/contracts/src/v0.8/ccip/pools/GHO/VersionedInitializable.sol +++ /dev/null @@ -1,77 +0,0 @@ -// SPDX-License-Identifier: AGPL-3.0 -pragma solidity ^0.8.0; - -/** - * @title VersionedInitializable - * @author Aave, inspired by the OpenZeppelin Initializable contract - * @notice Helper contract to implement initializer functions. To use it, replace - * the constructor with a function that has the `initializer` modifier. - * @dev WARNING: Unlike constructors, initializer functions must be manually - * invoked. This applies both to deploying an Initializable contract, as well - * as extending an Initializable contract via inheritance. - * WARNING: When used with inheritance, manual care must be taken to not invoke - * a parent initializer twice, or ensure that all initializers are idempotent, - * because this is not dealt with automatically as with constructors. - */ -abstract contract VersionedInitializable { - /** - * @dev Indicates that the contract has been initialized. - */ - uint256 private lastInitializedRevision = 0; - - /** - * @dev Indicates that the contract is in the process of being initialized. - */ - bool private initializing; - - /** - * @dev Modifier to use in the initializer function of a contract. 
- */ - modifier initializer() { - uint256 revision = getRevision(); - require( - initializing || isConstructor() || revision > lastInitializedRevision, - "Contract instance has already been initialized" - ); - - bool isTopLevelCall = !initializing; - if (isTopLevelCall) { - initializing = true; - lastInitializedRevision = revision; - } - - _; - - if (isTopLevelCall) { - initializing = false; - } - } - - /** - * @notice Returns the revision number of the contract - * @dev Needs to be defined in the inherited class as a constant. - * @return The revision number - */ - function getRevision() internal pure virtual returns (uint256); - - /** - * @notice Returns true if and only if the function is running in the constructor - * @return True if the function is running in the constructor - */ - function isConstructor() private view returns (bool) { - // extcodesize checks the size of the code stored in an address, and - // address returns the current address. Since the code is still not - // deployed when running a constructor, any checks on its code size will - // yield zero, making it an effective way to detect if a contract is - // under construction or not. - uint256 cs; - //solium-disable-next-line - assembly { - cs := extcodesize(address()) - } - return cs == 0; - } - - // Reserved storage space to allow for layout changes in the future. - uint256[50] private ______gap; -} diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPoolAbstract_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPoolAbstract_diff.md similarity index 68% rename from contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPoolAbstract_diff.md rename to contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPoolAbstract_diff.md index 11c20c0a6d..2255b2ca44 100644 --- a/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPoolAbstract_diff.md +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPoolAbstract_diff.md @@ -1,18 +1,19 @@ ```diff -diff --git a/src/v0.8/ccip/pools/BurnMintTokenPoolAbstract.sol b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol -index f5eb135186..651965e40b 100644 +diff --git a/src/v0.8/ccip/pools/BurnMintTokenPoolAbstract.sol b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol +index f5eb135186..e228732855 100644 --- a/src/v0.8/ccip/pools/BurnMintTokenPoolAbstract.sol -+++ b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPoolAbstract.sol ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPoolAbstract.sol @@ -1,11 +1,11 @@ // SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; +pragma solidity ^0.8.0; - - import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; - + +-import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; ++import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; + -import {TokenPool} from "./TokenPool.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; - + -abstract contract BurnMintTokenPoolAbstract is TokenPool { +abstract contract UpgradeableBurnMintTokenPoolAbstract is UpgradeableTokenPool { /// @notice Contains the specific burn call for a pool. 
diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPool_diff.md similarity index 76% rename from contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPool_diff.md rename to contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPool_diff.md index 1dfabb1e60..05c3be4d06 100644 --- a/contracts/src/v0.8/ccip/pools/GHO/diffs/BurnMintTokenPool_diff.md +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableBurnMintTokenPool_diff.md @@ -1,39 +1,42 @@ ```diff -diff --git a/src/v0.8/ccip/pools/BurnMintTokenPool.sol b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol -index 9af0f22f4c..f07f8c3a28 100644 +diff --git a/src/v0.8/ccip/pools/BurnMintTokenPool.sol b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol +index 9af0f22f4c..58be87812f 100644 --- a/src/v0.8/ccip/pools/BurnMintTokenPool.sol -+++ b/src/v0.8/ccip/pools/UpgradeableBurnMintTokenPool.sol -@@ -1,29 +1,66 @@ ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableBurnMintTokenPool.sol +@@ -1,28 +1,55 @@ // SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; +pragma solidity ^0.8.0; - - import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; - import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; - + +-import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; +-import {IBurnMintERC20} from "../../shared/token/ERC20/IBurnMintERC20.sol"; ++import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; + -import {TokenPool} from "./TokenPool.sol"; -import {BurnMintTokenPoolAbstract} from "./BurnMintTokenPoolAbstract.sol"; -+import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; -+import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; - ++import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; ++import {IBurnMintERC20} from "../../../shared/token/ERC20/IBurnMintERC20.sol"; + -/// @notice This pool mints and burns a 3rd-party token. -/// @dev Pool whitelisting mode is set in the constructor and cannot be modified later. -/// It either accepts any address as originalSender, or only accepts whitelisted originalSender. -/// The only way to change whitelisting mode is to deploy a new pool. -/// If that is expected, please make sure the token's burner/minter roles are adjustable. 
-contract BurnMintTokenPool is BurnMintTokenPoolAbstract, ITypeAndVersion { -+import {IRouter} from "../interfaces/IRouter.sol"; -+import {VersionedInitializable} from "./VersionedInitializable.sol"; ++import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; ++import {UpgradeableBurnMintTokenPoolAbstract} from "./UpgradeableBurnMintTokenPoolAbstract.sol"; ++ ++import {IRouter} from "../../interfaces/IRouter.sol"; + +/// @title UpgradeableBurnMintTokenPool +/// @author Aave Labs +/// @notice Upgradeable version of Chainlink's CCIP BurnMintTokenPool +/// @dev Contract adaptations: -+/// - Implementation of VersionedInitializable to allow upgrades ++/// - Implementation of Initializable to allow upgrades +/// - Move of allowlist and router definition to initialization stage -+contract UpgradeableBurnMintTokenPool is VersionedInitializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { ++contract UpgradeableBurnMintTokenPool is Initializable, UpgradeableBurnMintTokenPoolAbstract, ITypeAndVersion { string public constant override typeAndVersion = "BurnMintTokenPool 1.4.0"; - + + /// @dev Constructor + /// @param token The bridgeable token that is managed by this pool. + /// @param armProxy The address of the arm proxy @@ -47,7 +50,7 @@ index 9af0f22f4c..f07f8c3a28 100644 - ) TokenPool(token, allowlist, armProxy, router) {} + bool allowlistEnabled + ) UpgradeableTokenPool(IBurnMintERC20(token), armProxy, allowlistEnabled) {} - + - /// @inheritdoc BurnMintTokenPoolAbstract + /// @dev Initializer + /// @dev The address passed as `owner` must accept ownership after initialization. @@ -72,16 +75,4 @@ index 9af0f22f4c..f07f8c3a28 100644 function _burn(uint256 amount) internal virtual override { IBurnMintERC20(address(i_token)).burn(amount); } -+ -+ /// @notice Returns the revision number -+ /// @return The revision number -+ function REVISION() public pure virtual returns (uint256) { -+ return 1; -+ } -+ -+ /// @inheritdoc VersionedInitializable -+ function getRevision() internal pure virtual override returns (uint256) { -+ return REVISION(); -+ } - } ``` diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/LockReleaseTokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableLockReleaseTokenPool_diff.md similarity index 80% rename from contracts/src/v0.8/ccip/pools/GHO/diffs/LockReleaseTokenPool_diff.md rename to contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableLockReleaseTokenPool_diff.md index ac5d7bf30e..1e738e3bdf 100644 --- a/contracts/src/v0.8/ccip/pools/GHO/diffs/LockReleaseTokenPool_diff.md +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableLockReleaseTokenPool_diff.md @@ -1,61 +1,64 @@ ```diff -diff --git a/src/v0.8/ccip/pools/LockReleaseTokenPool.sol b/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol -index 1a17fa0398..7ca3d5f389 100644 +diff --git a/src/v0.8/ccip/pools/LockReleaseTokenPool.sol b/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol +index 1a17fa0398..9a30b1e977 100644 --- a/src/v0.8/ccip/pools/LockReleaseTokenPool.sol -+++ b/src/v0.8/ccip/pools/UpgradeableLockReleaseTokenPool.sol -@@ -1,26 +1,41 @@ ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableLockReleaseTokenPool.sol +@@ -1,26 +1,39 @@ // SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; +pragma solidity ^0.8.0; - - import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; - import {ILiquidityContainer} from "../../rebalancer/interfaces/ILiquidityContainer.sol"; - + +-import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; 
+-import {ILiquidityContainer} from "../../rebalancer/interfaces/ILiquidityContainer.sol"; ++import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; + -import {TokenPool} from "./TokenPool.sol"; +-import {RateLimiter} from "../libraries/RateLimiter.sol"; ++import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; ++import {ILiquidityContainer} from "../../../rebalancer/interfaces/ILiquidityContainer.sol"; + +-import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +-import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; +import {UpgradeableTokenPool} from "./UpgradeableTokenPool.sol"; - import {RateLimiter} from "../libraries/RateLimiter.sol"; - - import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; - import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; - ++import {RateLimiter} from "../../libraries/RateLimiter.sol"; + -/// @notice Token pool used for tokens on their native chain. This uses a lock and release mechanism. -/// Because of lock/unlock requiring liquidity, this pool contract also has function to add and remove -/// liquidity. This allows for proper bookkeeping for both user and liquidity provider balances. -/// @dev One token per LockReleaseTokenPool. -contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion { -+import {IRouter} from "../interfaces/IRouter.sol"; -+import {VersionedInitializable} from "./VersionedInitializable.sol"; ++import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; ++import {SafeERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; ++ ++import {IRouter} from "../../interfaces/IRouter.sol"; + +/// @title UpgradeableLockReleaseTokenPool +/// @author Aave Labs +/// @notice Upgradeable version of Chainlink's CCIP LockReleaseTokenPool +/// @dev Contract adaptations: -+/// - Implementation of VersionedInitializable to allow upgrades ++/// - Implementation of Initializable to allow upgrades +/// - Move of allowlist and router definition to initialization stage +/// - Addition of a bridge limit to regulate the maximum amount of tokens that can be transferred out (burned/locked) -+contract UpgradeableLockReleaseTokenPool is -+ VersionedInitializable, -+ UpgradeableTokenPool, -+ ILiquidityContainer, -+ ITypeAndVersion -+{ ++contract UpgradeableLockReleaseTokenPool is Initializable, UpgradeableTokenPool, ILiquidityContainer, ITypeAndVersion { using SafeERC20 for IERC20; - + error InsufficientLiquidity(); error LiquidityNotAccepted(); error Unauthorized(address caller); - + + error BridgeLimitExceeded(uint256 bridgeLimit); + error NotEnoughBridgedAmount(); ++ + event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); ++ event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); + string public constant override typeAndVersion = "LockReleaseTokenPool 1.4.0"; - + /// @dev The unique lock release pool flag to signal through EIP 165. -@@ -37,16 +52,55 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion +@@ -37,16 +50,55 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion /// @dev Can be address(0) if none is configured. 
address internal s_rateLimitAdmin; - + + /// @notice Maximum amount of tokens that can be bridged to other chains + uint256 private s_bridgeLimit; + /// @notice Amount of tokens bridged (transferred out) @@ -83,7 +86,7 @@ index 1a17fa0398..7ca3d5f389 100644 + ) UpgradeableTokenPool(IERC20(token), armProxy, allowlistEnabled) { i_acceptLiquidity = acceptLiquidity; } - + + /// @dev Initializer + /// @dev The address passed as `owner` must accept ownership after initialization. + /// @dev The `allowlist` is only effective if pool is set to access-controlled mode @@ -113,7 +116,7 @@ index 1a17fa0398..7ca3d5f389 100644 /// @notice Locks the token in the pool /// @param amount Amount to lock /// @dev The whenHealthy check is important to ensure that even if a ramp is compromised -@@ -66,6 +120,9 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion +@@ -66,6 +118,9 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion whenHealthy returns (bytes memory) { @@ -123,7 +126,7 @@ index 1a17fa0398..7ca3d5f389 100644 _consumeOutboundRateLimit(remoteChainSelector, amount); emit Locked(msg.sender, amount); return ""; -@@ -83,6 +140,11 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion +@@ -83,6 +138,11 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion uint64 remoteChainSelector, bytes memory ) external virtual override onlyOffRamp(remoteChainSelector) whenHealthy { @@ -135,10 +138,10 @@ index 1a17fa0398..7ca3d5f389 100644 _consumeInboundRateLimit(remoteChainSelector, amount); getToken().safeTransfer(receiver, amount); emit Released(msg.sender, receiver, amount); -@@ -120,11 +182,46 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion +@@ -120,11 +180,48 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion s_rateLimitAdmin = rateLimitAdmin; } - + + /// @notice Sets the bridge limit, the maximum amount of tokens that can be bridged out + /// @dev Only callable by the owner or the bridge limit admin. + /// @dev Bridge limit changes should be carefully managed, specially when reducing below the current bridged amount @@ -154,7 +157,9 @@ index 1a17fa0398..7ca3d5f389 100644 + /// @dev Only callable by the owner. + /// @param bridgeLimitAdmin The new bridge limit admin address. + function setBridgeLimitAdmin(address bridgeLimitAdmin) external onlyOwner { ++ address oldAdmin = s_bridgeLimitAdmin; + s_bridgeLimitAdmin = bridgeLimitAdmin; ++ emit BridgeLimitAdminUpdated(oldAdmin, bridgeLimitAdmin); + } + + /// @notice Gets the bridge limit @@ -173,7 +178,7 @@ index 1a17fa0398..7ca3d5f389 100644 function getRateLimitAdmin() external view returns (address) { return s_rateLimitAdmin; } - + + /// @notice Gets the bridge limiter admin address. + function getBridgeLimitAdmin() external view returns (address) { + return s_bridgeLimitAdmin; @@ -182,20 +187,4 @@ index 1a17fa0398..7ca3d5f389 100644 /// @notice Checks if the pool can accept liquidity. /// @return true if the pool can accept liquidity, false otherwise. 
function canAcceptLiquidity() external view returns (bool) { -@@ -166,4 +263,15 @@ contract LockReleaseTokenPool is TokenPool, ILiquidityContainer, ITypeAndVersion - - _setRateLimitConfig(remoteChainSelector, outboundConfig, inboundConfig); - } -+ -+ /// @notice Returns the revision number -+ /// @return The revision number -+ function REVISION() public pure virtual returns (uint256) { -+ return 1; -+ } -+ -+ /// @inheritdoc VersionedInitializable -+ function getRevision() internal pure virtual override returns (uint256) { -+ return REVISION(); -+ } - } ``` diff --git a/contracts/src/v0.8/ccip/pools/GHO/diffs/TokenPool_diff.md b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableTokenPool_diff.md similarity index 61% rename from contracts/src/v0.8/ccip/pools/GHO/diffs/TokenPool_diff.md rename to contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableTokenPool_diff.md index 6ff8893172..fcdc197580 100644 --- a/contracts/src/v0.8/ccip/pools/GHO/diffs/TokenPool_diff.md +++ b/contracts/src/v0.8/ccip/pools/GHO/diffs/UpgradeableTokenPool_diff.md @@ -1,16 +1,32 @@ ```diff -diff --git a/src/v0.8/ccip/pools/TokenPool.sol b/src/v0.8/ccip/pools/UpgradeableTokenPool.sol -index b3571bb449..fcd8948098 100644 +diff --git a/src/v0.8/ccip/pools/TokenPool.sol b/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol +index b3571bb449..ee359ac1f8 100644 --- a/src/v0.8/ccip/pools/TokenPool.sol -+++ b/src/v0.8/ccip/pools/UpgradeableTokenPool.sol -@@ -1,5 +1,5 @@ ++++ b/src/v0.8/ccip/pools/GHO/UpgradeableTokenPool.sol +@@ -1,21 +1,21 @@ // SPDX-License-Identifier: BUSL-1.1 -pragma solidity 0.8.19; +pragma solidity ^0.8.0; - - import {IPool} from "../interfaces/pools/IPool.sol"; - import {IARM} from "../interfaces/IARM.sol"; -@@ -15,7 +15,7 @@ import {EnumerableSet} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts + +-import {IPool} from "../interfaces/pools/IPool.sol"; +-import {IARM} from "../interfaces/IARM.sol"; +-import {IRouter} from "../interfaces/IRouter.sol"; ++import {IPool} from "../../interfaces/pools/IPool.sol"; ++import {IARM} from "../../interfaces/IARM.sol"; ++import {IRouter} from "../../interfaces/IRouter.sol"; + +-import {OwnerIsCreator} from "../../shared/access/OwnerIsCreator.sol"; +-import {RateLimiter} from "../libraries/RateLimiter.sol"; ++import {OwnerIsCreator} from "../../../shared/access/OwnerIsCreator.sol"; ++import {RateLimiter} from "../../libraries/RateLimiter.sol"; + +-import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; +-import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; +-import {EnumerableSet} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/structs/EnumerableSet.sol"; ++import {IERC20} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/IERC20.sol"; ++import {IERC165} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/introspection/IERC165.sol"; ++import {EnumerableSet} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/structs/EnumerableSet.sol"; + /// @notice Base abstract class with common functions for all token pools. /// A token pool serves as isolated place for holding tokens and token specific logic /// that may execute as tokens move across the bridge. 
@@ -30,7 +46,7 @@ index b3571bb449..fcd8948098 100644 /// degrees and prefer different limits) - mapping(uint64 remoteChainSelector => RateLimiter.TokenBucket) internal s_inboundRateLimits; + mapping(uint64 => RateLimiter.TokenBucket) internal s_inboundRateLimits; - + - constructor(IERC20 token, address[] memory allowlist, address armProxy, address router) { - if (address(token) == address(0) || router == address(0)) revert ZeroAddressNotAllowed(); + constructor(IERC20 token, address armProxy, bool allowlistEnabled) { @@ -46,6 +62,6 @@ index b3571bb449..fcd8948098 100644 - } + i_allowlistEnabled = allowlistEnabled; } - + /// @notice Get ARM proxy address ``` diff --git a/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol index bd80dee812..45eeb5c5d7 100644 --- a/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol +++ b/contracts/src/v0.8/ccip/test/mocks/MockUpgradeable.sol @@ -1,12 +1,12 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {VersionedInitializable} from "../../pools/GHO/VersionedInitializable.sol"; +import {Initializable} from "solidity-utils/contracts/transparent-proxy/Initializable.sol"; /** * @dev Mock contract to test upgrades, not to be used in production. */ -contract MockUpgradeable is VersionedInitializable { +contract MockUpgradeable is Initializable { /** * @dev Constructor */ @@ -17,20 +17,7 @@ contract MockUpgradeable is VersionedInitializable { /** * @dev Initializer */ - function initialize() public initializer { + function initialize() public reinitializer(2) { // Intentionally left bank } - - /** - * @notice Returns the revision number - * @return The revision number - */ - function REVISION() public pure returns (uint256) { - return 2; - } - - /// @inheritdoc VersionedInitializable - function getRevision() internal pure virtual override returns (uint256) { - return REVISION(); - } } diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol index f14e67fff9..66d6fc63b5 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoBaseTest.t.sol @@ -43,10 +43,8 @@ abstract contract GhoBaseTest is BaseTest { ) internal returns (address) { // Deploy BurnMintTokenPool for GHO token on source chain UpgradeableBurnMintTokenPool tokenPoolImpl = new UpgradeableBurnMintTokenPool(ghoToken, arm, false); - // Imple init - address[] memory emptyArray = new address[](0); - tokenPoolImpl.initialize(owner, emptyArray, router); // proxy deploy and init + address[] memory emptyArray = new address[](0); bytes memory tokenPoolInitParams = abi.encodeWithSignature( "initialize(address,address[],address)", owner, @@ -76,10 +74,8 @@ abstract contract GhoBaseTest is BaseTest { address proxyAdmin ) internal returns (address) { UpgradeableLockReleaseTokenPool tokenPoolImpl = new UpgradeableLockReleaseTokenPool(ghoToken, arm, false, true); - // Imple init - address[] memory emptyArray = new address[](0); - tokenPoolImpl.initialize(owner, emptyArray, router, bridgeLimit); // proxy deploy and init + address[] memory emptyArray = new address[](0); bytes memory tokenPoolInitParams = abi.encodeWithSignature( "initialize(address,address[],address,uint256)", owner, @@ -120,6 +116,11 @@ abstract contract GhoBaseTest is BaseTest { return address(uint160(uint256(implSlot))); } + function _getUpgradeableVersion(address proxy) internal view returns (uint8) { + // version is 1st slot + return 
uint8(uint256(vm.load(proxy, bytes32(uint256(0))))); + } + function _enableLane(UtilsStorage storage s, uint256 fromId, uint256 toId) internal { // from UpgradeableTokenPool.ChainUpdate[] memory chainUpdate = new UpgradeableTokenPool.ChainUpdate[](1); diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol index 46da97244c..f2824d7b6f 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolEthereum.t.sol @@ -477,6 +477,7 @@ contract GhoTokenPoolEthereum_setRateLimitAdmin is GhoTokenPoolEthereumSetup { contract GhoTokenPoolEthereum_setBridgeLimit is GhoTokenPoolEthereumSetup { event BridgeLimitUpdated(uint256 oldBridgeLimit, uint256 newBridgeLimit); + event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); function testSetBridgeLimitAdminSuccess() public { assertEq(INITIAL_BRIDGE_LIMIT, s_ghoTokenPool.getBridgeLimit()); @@ -493,6 +494,10 @@ contract GhoTokenPoolEthereum_setBridgeLimit is GhoTokenPoolEthereumSetup { // Bridge Limit Admin address bridgeLimitAdmin = address(28973509103597907); + + vm.expectEmit(); + emit BridgeLimitAdminUpdated(address(0), bridgeLimitAdmin); + s_ghoTokenPool.setBridgeLimitAdmin(bridgeLimitAdmin); vm.startPrank(bridgeLimitAdmin); @@ -593,11 +598,17 @@ contract GhoTokenPoolEthereum_setBridgeLimit is GhoTokenPoolEthereumSetup { } contract GhoTokenPoolEthereum_setBridgeLimitAdmin is GhoTokenPoolEthereumSetup { + event BridgeLimitAdminUpdated(address indexed oldAdmin, address indexed newAdmin); + function testSetBridgeLimitAdminSuccess() public { assertEq(address(0), s_ghoTokenPool.getBridgeLimitAdmin()); address bridgeLimitAdmin = address(28973509103597907); changePrank(AAVE_DAO); + + vm.expectEmit(); + emit BridgeLimitAdminUpdated(address(0), bridgeLimitAdmin); + s_ghoTokenPool.setBridgeLimitAdmin(bridgeLimitAdmin); assertEq(bridgeLimitAdmin, s_ghoTokenPool.getBridgeLimitAdmin()); @@ -616,7 +627,7 @@ contract GhoTokenPoolEthereum_setBridgeLimitAdmin is GhoTokenPoolEthereumSetup { contract GhoTokenPoolEthereum_upgradeability is GhoTokenPoolEthereumSetup { function testInitialization() public { // Upgradeability - assertEq(s_ghoTokenPool.REVISION(), 1); + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 1); vm.startPrank(PROXY_ADMIN); (bool ok, bytes memory result) = address(s_ghoTokenPool).staticcall( abi.encodeWithSelector(TransparentUpgradeableProxy.admin.selector) @@ -643,17 +654,17 @@ contract GhoTokenPoolEthereum_upgradeability is GhoTokenPoolEthereumSetup { TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeToAndCall(address(newImpl), mockImpleParams); vm.startPrank(OWNER); - assertEq(s_ghoTokenPool.REVISION(), 2); + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 2); } function testUpgradeAdminReverts() public { vm.expectRevert(); TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeToAndCall(address(0), bytes("")); - assertEq(s_ghoTokenPool.REVISION(), 1); + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 1); vm.expectRevert(); TransparentUpgradeableProxy(payable(address(s_ghoTokenPool))).upgradeTo(address(0)); - assertEq(s_ghoTokenPool.REVISION(), 1); + assertEq(_getUpgradeableVersion(address(s_ghoTokenPool)), 1); } function testChangeAdmin() public { diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol index 
bba33d8066..dd784e68c1 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/GhoTokenPoolRemote.t.sol @@ -182,7 +182,7 @@ contract GhoTokenPoolRemote_releaseOrMint is GhoTokenPoolRemoteSetup { contract GhoTokenPoolEthereum_upgradeability is GhoTokenPoolRemoteSetup { function testInitialization() public { // Upgradeability - assertEq(s_pool.REVISION(), 1); + assertEq(_getUpgradeableVersion(address(s_pool)), 1); vm.startPrank(PROXY_ADMIN); (bool ok, bytes memory result) = address(s_pool).staticcall( abi.encodeWithSelector(TransparentUpgradeableProxy.admin.selector) @@ -209,17 +209,17 @@ contract GhoTokenPoolEthereum_upgradeability is GhoTokenPoolRemoteSetup { TransparentUpgradeableProxy(payable(address(s_pool))).upgradeToAndCall(address(newImpl), mockImpleParams); vm.startPrank(OWNER); - assertEq(s_pool.REVISION(), 2); + assertEq(_getUpgradeableVersion(address(s_pool)), 2); } function testUpgradeAdminReverts() public { vm.expectRevert(); TransparentUpgradeableProxy(payable(address(s_pool))).upgradeToAndCall(address(0), bytes("")); - assertEq(s_pool.REVISION(), 1); + assertEq(_getUpgradeableVersion(address(s_pool)), 1); vm.expectRevert(); TransparentUpgradeableProxy(payable(address(s_pool))).upgradeTo(address(0)); - assertEq(s_pool.REVISION(), 1); + assertEq(_getUpgradeableVersion(address(s_pool)), 1); } function testChangeAdmin() public { diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol index 4d928e5fba..2b730bab1e 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolEthereumBridgeLimitInvariant.t.sol @@ -17,6 +17,10 @@ contract GhoTokenPoolEthereumBridgeLimitInvariant is BaseTest { deal(handler.tokens(0), address(handler), handler.INITIAL_BRIDGE_LIMIT()); targetContract(address(handler)); + bytes4[] memory selectors = new bytes4[](2); + selectors[0] = GhoTokenPoolHandler.bridgeGho.selector; + selectors[1] = GhoTokenPoolHandler.updateBucketCapacity.selector; + targetSelector(FuzzSelector({addr: address(handler), selectors: selectors})); } /// forge-config: ccip.invariant.fail-on-revert = true diff --git a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol index 3c4e0e951f..2b3491a7d4 100644 --- a/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol +++ b/contracts/src/v0.8/ccip/test/pools/GHO/invariant/GhoTokenPoolHandler.t.sol @@ -45,14 +45,15 @@ contract GhoTokenPoolHandler is GhoBaseTest { function bridgeGho(uint256 fromChain, uint256 toChain, uint256 amount) public { fromChain = bound(fromChain, 0, 2); toChain = bound(toChain, 0, 2); - vm.assume(fromChain != toChain); - uint256 maxBalance = GhoToken(s.tokens[fromChain]).balanceOf(address(this)); - uint256 maxToBridge = _getMaxToBridgeOut(s, fromChain); - uint256 maxAmount = maxBalance > maxToBridge ? maxToBridge : maxBalance; - amount = bound(amount, 0, maxAmount); - - if (amount > 0) { - _bridgeGho(s, fromChain, toChain, address(this), amount); + if (fromChain != toChain) { + uint256 maxBalance = GhoToken(s.tokens[fromChain]).balanceOf(address(this)); + uint256 maxToBridge = _getMaxToBridgeOut(s, fromChain); + uint256 maxAmount = maxBalance > maxToBridge ? 
maxToBridge : maxBalance; + amount = bound(amount, 0, maxAmount); + + if (amount > 0) { + _bridgeGho(s, fromChain, toChain, address(this), amount); + } } } @@ -60,7 +61,7 @@ contract GhoTokenPoolHandler is GhoBaseTest { function updateBucketCapacity(uint256 chain, uint128 newCapacity) public { chain = bound(chain, 1, 2); uint256 otherChain = (chain % 2) + 1; - vm.assume(newCapacity >= s.bridged); + newCapacity = uint128(bound(newCapacity, s.bridged, type(uint128).max)); uint256 oldCapacity = s.bucketCapacities[chain]; From aaa94e0a3b5f9e6ebec7eaa62a031f5c1f090ed9 Mon Sep 17 00:00:00 2001 From: miguelmtz <36620902+miguelmtzinf@users.noreply.github.com> Date: Tue, 11 Jun 2024 10:42:14 +0200 Subject: [PATCH 18/18] ci: Remove unneeded ci actions (#12) * fix: Use modern version of Initializable * fix: Update diffs * fix: Add event emission on bridge limit admin update * fix: Rebuild GHO diffs * ci: Remove unneeded ci actions * ci: Remove unneeded ci actions * fix: Add dep * fix: Fix certora config file * test: Force selectors in invariant tests * fix: Fix config files * test: Fix test assume * test: Fix test assume * fix: Fix certora config file --- .github/ISSUE_TEMPLATE/config.yml | 5 - .github/ISSUE_TEMPLATE/development.md | 22 - .github/ISSUE_TEMPLATE/faucet.md | 19 - .github/ISSUE_TEMPLATE/feature-request.md | 19 - .github/ISSUE_TEMPLATE/node-operator.md | 30 - .github/ISSUE_TEMPLATE/smart-contract.md | 19 - .../actions/build-chainlink-image/action.yml | 48 - .../build-sign-publish-chainlink/action.yml | 300 -- .github/actions/build-test-image/action.yml | 123 - .github/actions/delete-deployments/action.yml | 59 - .github/actions/delete-deployments/index.ts | 232 -- .../actions/delete-deployments/package.json | 25 - .../actions/delete-deployments/pnpm-lock.yaml | 350 --- .github/actions/delete-deployments/test.sh | 9 - .../actions/delete-deployments/tsconfig.json | 104 - .github/actions/golangci-lint/action.yml | 72 - .../goreleaser-build-sign-publish/README.md | 131 - .../goreleaser-build-sign-publish/action.yml | 111 - .../action_utils | 78 - .../notify-slack-jobs-result/README.md | 37 - .../notify-slack-jobs-result/action.yml | 110 - .../notify-slack-jobs-result/image.png | Bin 47298 -> 0 bytes .../action.yml | 95 - .../action.yml | 130 - .../setup-create-base64-config/action.yml | 122 - .../action.yml | 61 - .github/actions/setup-go/action.yml | 64 - .github/actions/setup-hardhat/action.yaml | 31 - .../setup-merge-base64-config/action.yml | 54 - .github/actions/setup-nodejs/action.yaml | 2 +- .../setup-parse-base64-config/action.yml | 38 - .github/actions/setup-postgres/.env | 5 - .github/actions/setup-postgres/action.yml | 13 - .../actions/setup-postgres/docker-compose.yml | 16 - .../wait-for-healthy-postgres.sh | 25 - .github/actions/setup-solana/action.yml | 21 - .github/actions/setup-wasmd/action.yml | 22 - .github/actions/split-tests/.npmrc | 2 - .github/actions/split-tests/action.yaml | 35 - .github/actions/split-tests/jest.config.js | 15 - .github/actions/split-tests/mjs-resolver.ts | 15 - .github/actions/split-tests/package.json | 26 - .github/actions/split-tests/pnpm-lock.yaml | 2675 ----------------- .github/actions/split-tests/src/index.mts | 74 - .github/actions/split-tests/src/sieve.mts | 27 - .github/actions/split-tests/src/splitter.mts | 43 - .github/actions/split-tests/src/types.mts | 75 - .../test/__snapshots__/sieve.test.ts.snap | 99 - .../test/__snapshots__/splitter.test.ts.snap | 119 - .github/actions/split-tests/test/fixtures.mts | 20 -
.../actions/split-tests/test/sieve.test.ts | 15 - .../actions/split-tests/test/splitter.test.ts | 21 - .github/actions/split-tests/tsconfig.json | 104 - .github/actions/version-file-bump/action.yml | 51 - .github/cr.yaml | 2 - .github/pull_request_template.md | 4 - .github/scripts/functions.sh | 17 - .github/tracing/README.md | 112 - .github/tracing/grafana-datasources.yaml | 18 - .../tracing/local-smoke-docker-compose.yaml | 48 - .github/tracing/otel-collector-ci.yaml | 22 - .github/tracing/otel-collector-dev.yaml | 20 - .github/tracing/replay.sh | 6 - .github/tracing/tempo.yaml | 24 - .github/workflows/auto-update.yml | 17 - .../workflows/automation-benchmark-tests.yml | 99 - .github/workflows/automation-load-tests.yml | 97 - .../workflows/automation-nightly-tests.yml | 262 -- .../workflows/automation-ondemand-tests.yml | 262 -- .github/workflows/bash-scripts.yml | 37 - .github/workflows/build-publish-pr.yml | 112 - .github/workflows/build.yml | 32 - .github/workflows/ccip-chaos-tests.yml | 231 -- .github/workflows/ccip-live-testnet-tests.yml | 302 -- .github/workflows/ccip-load-tests.yml | 280 -- .github/workflows/certora.yml | 16 +- .github/workflows/changelog.yml | 40 - .github/workflows/ci-core.yml | 278 -- .github/workflows/ci-scripts.yml | 46 - .../workflows/client-compatibility-tests.yml | 333 -- .github/workflows/codeql-analysis.yml | 54 - .github/workflows/delete-deployments.yml | 33 - .github/workflows/dependency-check.yml | 56 - .../goreleaser-build-publish-develop.yml | 89 - .github/workflows/helm-chart-publish.yml | 39 - .github/workflows/helm-chart.yml | 25 - .github/workflows/integration-chaos-tests.yml | 148 - .../workflows/integration-staging-tests.yml | 132 - .../workflows/integration-tests-publish.yml | 101 - .github/workflows/integration-tests.yml | 1171 -------- .github/workflows/lint-gh-workflows.yml | 22 - .github/workflows/live-testnet-tests.yml | 957 ------ .github/workflows/on-demand-log-poller.yml | 34 - .github/workflows/on-demand-ocr-soak-test.yml | 94 - .../on-demand-vrfv2-eth2-clients-test.yml | 63 - .../on-demand-vrfv2-performance-test.yml | 84 - .../on-demand-vrfv2plus-eth2-clients-test.yml | 63 - .../on-demand-vrfv2plus-performance-test.yml | 85 - .github/workflows/operator-ui-cd.yml | 59 - .github/workflows/operator-ui-ci.yml | 46 - .github/workflows/pr-labels.yml | 54 - .github/workflows/publish.yml | 81 - .github/workflows/readme.yml | 40 - .github/workflows/sigscanner.yml | 35 - .github/workflows/solidity-foundry.yml | 164 - .github/workflows/solidity-hardhat.yml | 184 -- .github/workflows/solidity.yml | 247 -- .github/workflows/stale.yml | 26 - ...evelop-from-smartcontractkit-chainlink.yml | 39 - .github/workflows/tests.yml | 62 + certora/confs/ccip.conf | 2 +- 111 files changed, 66 insertions(+), 12958 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/config.yml delete mode 100644 .github/ISSUE_TEMPLATE/development.md delete mode 100644 .github/ISSUE_TEMPLATE/faucet.md delete mode 100644 .github/ISSUE_TEMPLATE/feature-request.md delete mode 100644 .github/ISSUE_TEMPLATE/node-operator.md delete mode 100644 .github/ISSUE_TEMPLATE/smart-contract.md delete mode 100644 .github/actions/build-chainlink-image/action.yml delete mode 100644 .github/actions/build-sign-publish-chainlink/action.yml delete mode 100644 .github/actions/build-test-image/action.yml delete mode 100644 .github/actions/delete-deployments/action.yml delete mode 100644 .github/actions/delete-deployments/index.ts delete mode 100644 .github/actions/delete-deployments/package.json 
delete mode 100644 .github/actions/delete-deployments/pnpm-lock.yaml delete mode 100755 .github/actions/delete-deployments/test.sh delete mode 100644 .github/actions/delete-deployments/tsconfig.json delete mode 100644 .github/actions/golangci-lint/action.yml delete mode 100644 .github/actions/goreleaser-build-sign-publish/README.md delete mode 100644 .github/actions/goreleaser-build-sign-publish/action.yml delete mode 100755 .github/actions/goreleaser-build-sign-publish/action_utils delete mode 100644 .github/actions/notify-slack-jobs-result/README.md delete mode 100644 .github/actions/notify-slack-jobs-result/action.yml delete mode 100644 .github/actions/notify-slack-jobs-result/image.png delete mode 100644 .github/actions/setup-create-base64-config-ccip/action.yml delete mode 100644 .github/actions/setup-create-base64-config-live-testnets/action.yml delete mode 100644 .github/actions/setup-create-base64-config/action.yml delete mode 100644 .github/actions/setup-create-base64-upgrade-config/action.yml delete mode 100644 .github/actions/setup-go/action.yml delete mode 100644 .github/actions/setup-hardhat/action.yaml delete mode 100644 .github/actions/setup-merge-base64-config/action.yml delete mode 100644 .github/actions/setup-parse-base64-config/action.yml delete mode 100644 .github/actions/setup-postgres/.env delete mode 100644 .github/actions/setup-postgres/action.yml delete mode 100644 .github/actions/setup-postgres/docker-compose.yml delete mode 100755 .github/actions/setup-postgres/wait-for-healthy-postgres.sh delete mode 100644 .github/actions/setup-solana/action.yml delete mode 100644 .github/actions/setup-wasmd/action.yml delete mode 100644 .github/actions/split-tests/.npmrc delete mode 100644 .github/actions/split-tests/action.yaml delete mode 100644 .github/actions/split-tests/jest.config.js delete mode 100644 .github/actions/split-tests/mjs-resolver.ts delete mode 100644 .github/actions/split-tests/package.json delete mode 100644 .github/actions/split-tests/pnpm-lock.yaml delete mode 100644 .github/actions/split-tests/src/index.mts delete mode 100644 .github/actions/split-tests/src/sieve.mts delete mode 100644 .github/actions/split-tests/src/splitter.mts delete mode 100644 .github/actions/split-tests/src/types.mts delete mode 100644 .github/actions/split-tests/test/__snapshots__/sieve.test.ts.snap delete mode 100644 .github/actions/split-tests/test/__snapshots__/splitter.test.ts.snap delete mode 100644 .github/actions/split-tests/test/fixtures.mts delete mode 100644 .github/actions/split-tests/test/sieve.test.ts delete mode 100644 .github/actions/split-tests/test/splitter.test.ts delete mode 100644 .github/actions/split-tests/tsconfig.json delete mode 100644 .github/actions/version-file-bump/action.yml delete mode 100644 .github/cr.yaml delete mode 100644 .github/pull_request_template.md delete mode 100644 .github/scripts/functions.sh delete mode 100644 .github/tracing/README.md delete mode 100644 .github/tracing/grafana-datasources.yaml delete mode 100644 .github/tracing/local-smoke-docker-compose.yaml delete mode 100644 .github/tracing/otel-collector-ci.yaml delete mode 100644 .github/tracing/otel-collector-dev.yaml delete mode 100644 .github/tracing/replay.sh delete mode 100644 .github/tracing/tempo.yaml delete mode 100644 .github/workflows/auto-update.yml delete mode 100644 .github/workflows/automation-benchmark-tests.yml delete mode 100644 .github/workflows/automation-load-tests.yml delete mode 100644 .github/workflows/automation-nightly-tests.yml delete mode 100644 
.github/workflows/automation-ondemand-tests.yml delete mode 100644 .github/workflows/bash-scripts.yml delete mode 100644 .github/workflows/build-publish-pr.yml delete mode 100644 .github/workflows/build.yml delete mode 100644 .github/workflows/ccip-chaos-tests.yml delete mode 100644 .github/workflows/ccip-live-testnet-tests.yml delete mode 100644 .github/workflows/ccip-load-tests.yml delete mode 100644 .github/workflows/changelog.yml delete mode 100644 .github/workflows/ci-core.yml delete mode 100644 .github/workflows/ci-scripts.yml delete mode 100644 .github/workflows/client-compatibility-tests.yml delete mode 100644 .github/workflows/codeql-analysis.yml delete mode 100644 .github/workflows/delete-deployments.yml delete mode 100644 .github/workflows/dependency-check.yml delete mode 100644 .github/workflows/goreleaser-build-publish-develop.yml delete mode 100644 .github/workflows/helm-chart-publish.yml delete mode 100644 .github/workflows/helm-chart.yml delete mode 100644 .github/workflows/integration-chaos-tests.yml delete mode 100644 .github/workflows/integration-staging-tests.yml delete mode 100644 .github/workflows/integration-tests-publish.yml delete mode 100644 .github/workflows/integration-tests.yml delete mode 100644 .github/workflows/lint-gh-workflows.yml delete mode 100644 .github/workflows/live-testnet-tests.yml delete mode 100644 .github/workflows/on-demand-log-poller.yml delete mode 100644 .github/workflows/on-demand-ocr-soak-test.yml delete mode 100644 .github/workflows/on-demand-vrfv2-eth2-clients-test.yml delete mode 100644 .github/workflows/on-demand-vrfv2-performance-test.yml delete mode 100644 .github/workflows/on-demand-vrfv2plus-eth2-clients-test.yml delete mode 100644 .github/workflows/on-demand-vrfv2plus-performance-test.yml delete mode 100644 .github/workflows/operator-ui-cd.yml delete mode 100644 .github/workflows/operator-ui-ci.yml delete mode 100644 .github/workflows/pr-labels.yml delete mode 100644 .github/workflows/publish.yml delete mode 100644 .github/workflows/readme.yml delete mode 100644 .github/workflows/sigscanner.yml delete mode 100644 .github/workflows/solidity-foundry.yml delete mode 100644 .github/workflows/solidity-hardhat.yml delete mode 100644 .github/workflows/solidity.yml delete mode 100644 .github/workflows/stale.yml delete mode 100644 .github/workflows/sync-develop-from-smartcontractkit-chainlink.yml create mode 100644 .github/workflows/tests.yml diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index c934b6b945..0000000000 --- a/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,5 +0,0 @@ -blank_issues_enabled: true -contact_links: - - name: Question - url: https://stackoverflow.com/questions/tagged/chainlink - about: Please ask and answer questions here. diff --git a/.github/ISSUE_TEMPLATE/development.md b/.github/ISSUE_TEMPLATE/development.md deleted file mode 100644 index 45baaccf24..0000000000 --- a/.github/ISSUE_TEMPLATE/development.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -name: Development Issue -about: Report an issue encountered while working on code found in this repository. -title: '[DEVEL] ' -labels: 'Development' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the development issue you are experiencing] - -**Your Environment** -[replace this line with basic information about your environment, such as your operating system and the versions of any relevant tools you are using (e.g. 
Go, Docker)] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to the names of the files you are working with and any relevant error messages] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/ISSUE_TEMPLATE/faucet.md b/.github/ISSUE_TEMPLATE/faucet.md deleted file mode 100644 index 47d82b0148..0000000000 --- a/.github/ISSUE_TEMPLATE/faucet.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Faucet Issue -about: Report an issue with a Chainlink LINK Faucet. -title: '[FAUC] ' -labels: 'Faucet' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the Chainlink LINK Faucet issue you are experiencing] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to your testnet address, the name and version of your web browser and wallet, and the link to the faucet transaction on Etherscan] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md deleted file mode 100644 index 0535501239..0000000000 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Feature Request -about: Request a feature. Help us make Chainlink better! -title: '[FEAT] ' -labels: 'Feature Request' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the feature you are requesting] - -**Motivation** -[replace this line with a clear and concise explanation of _why_ you are requesting this feature] - -**Justification** -[replace this line with a clear and concise explanation of _why_ the feature you are requesting is the best way to approach this issue and list other approaches you considered] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as examples or screenshots of similar features] diff --git a/.github/ISSUE_TEMPLATE/node-operator.md b/.github/ISSUE_TEMPLATE/node-operator.md deleted file mode 100644 index 5857679e5e..0000000000 --- a/.github/ISSUE_TEMPLATE/node-operator.md +++ /dev/null @@ -1,30 +0,0 @@ ---- -name: Node Operator Issue -about: Report an issue encountered while operating a Chainlink node. -title: '[NODE] ' -labels: 'Node Operator' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the issue you are experiencing] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to all relevant logs and any other relevant information, such as if you are using a Docker container to run the node, job specification, oracle contract address, transaction IDs, etc.] - -- Network: [e.g. Ethereum Mainnet, Ropsten] -- Blockchain Client: [name and version of blockchain client e.g. Geth v1.9.6] -- Go Version: [e.g. 
v1.12] -- Operating System: [name and version of operating system running Chainlink node] -- Commit: [log INFO line when starting node] -- Hosting Provider: [e.g. AWS, GCP, self-hosted] -- Startup Command: [e.g. `docker run smartcontract/chainlink local n`] - -**Environment Variables** -[replace this line with the output of the environment variables when running the node in debug mode] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/ISSUE_TEMPLATE/smart-contract.md b/.github/ISSUE_TEMPLATE/smart-contract.md deleted file mode 100644 index e4b9b97bf7..0000000000 --- a/.github/ISSUE_TEMPLATE/smart-contract.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Smart Contract Issue -about: Report an issue with smart contracts found in this repository. -title: '[SMRT] ' -labels: 'Smart Contract' -assignees: '' ---- - -**Description** -[replace this line with a clear and concise description of the smart contract issue you are experiencing] - -**Basic Information** -[replace this line with basic information about the issue you are experiencing, including but not limited to the names of the smart contract files and the version of the Chainlink software repository in which they are found, contract addresses, transaction IDs, etc.] - -**Steps to Reproduce** -[replace this line with detailed steps to reproduce the issue you are experiencing] - -**Additional Information** -[replace this line with any additional information you would like to provide, such as screenshots illustrating the issue] diff --git a/.github/actions/build-chainlink-image/action.yml b/.github/actions/build-chainlink-image/action.yml deleted file mode 100644 index d5839cc79b..0000000000 --- a/.github/actions/build-chainlink-image/action.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Build Chainlink Image -description: A composite action that allows building and publishing the Chainlink image for integration testing - -inputs: - tag_suffix: - description: The suffix to append to the image tag (usually blank or "-plugins") - default: "" - dockerfile: - description: The path to the Dockerfile to use (usually core/chainlink.Dockerfile or plugins/chainlink.Dockerfile) - default: core/chainlink.Dockerfile - git_commit_sha: - description: The git commit sha to use for the image tag - default: ${{ github.sha }} - GRAFANA_CLOUD_BASIC_AUTH: - description: "grafana cloud basic auth" - GRAFANA_CLOUD_HOST: - description: "grafana cloud hostname" - AWS_REGION: - description: "AWS region to use for ECR" - AWS_ROLE_TO_ASSUME: - description: "AWS role to assume for ECR" - -runs: - using: composite - steps: - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - repository: chainlink - tag: ${{ inputs.git_commit_sha }}${{ inputs.tag_suffix }} - AWS_REGION: ${{ inputs.AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - cl_repo: smartcontractkit/chainlink - cl_ref: ${{ inputs.git_commit_sha }} - cl_dockerfile: ${{ inputs.dockerfile }} - push_tag: ${{ env.CHAINLINK_IMAGE 
}}:${{ inputs.git_commit_sha }}${{ inputs.tag_suffix }} - QA_AWS_REGION: ${{ inputs.AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ inputs.AWS_ROLE_TO_ASSUME }} - - name: Print Chainlink Image Built - shell: sh - run: | - echo "### Chainlink node image tag used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY diff --git a/.github/actions/build-sign-publish-chainlink/action.yml b/.github/actions/build-sign-publish-chainlink/action.yml deleted file mode 100644 index 8c79f651af..0000000000 --- a/.github/actions/build-sign-publish-chainlink/action.yml +++ /dev/null @@ -1,300 +0,0 @@ -name: Build and Publish Chainlink - -description: A composite action that allows building and publishing signed chainlink images. - -inputs: - # Inputs for publishing - publish: - description: When set to the string boolean value of "true", the resulting built image will be published - default: "false" - required: false - - dockerfile: - description: Path to the Dockerfile (relative to the repo root) - default: core/chainlink.Dockerfile - required: false - dockerhub_username: - description: Username for Docker Hub to avoid rate limits when pulling public images - required: false - dockerhub_password: - description: Password for Docker Hub to avoid rate limits when pulling public images - required: false - ecr-hostname: - description: The ECR registry scope - default: public.ecr.aws - required: false - ecr-image-name: - description: | - The image name with path, in the format of `[registry]/repository`. For private ECR repos the registry name is optional, where for public repos, it is required. - Eg. Public ECR repo `chainlink` and registry alias `chainlinklabs` should be `chainlinklabs/chainlink`. For a private ECR repo `chainlink` the image name should be `chainlink` - default: chainlink/chainlink - required: false - ecr-tag-suffix: - description: Docker image tag suffix - required: false - git-commit-sha: - description: Git commit SHA used as metadata when building the application (appears in logs) - default: ${{ github.event.pull_request.head.sha || github.sha }} - required: false - aws-role-to-assume: - description: The AWS role to assume as the CD user, if any. 
Used in configuring the docker/login-action - required: false - aws-role-duration-seconds: - description: The duration of the role assumed - required: false - aws-region: - description: The AWS region the ECR repository is located in, should only be needed for public ECR repositories, used in configuring docker/login-action - required: false - - # Inputs for signing - sign-images: - description: When set to the string boolean value of "true", the resulting build image will be signed - default: "false" - required: false - cosign-private-key: - description: The private key to be used with cosign to sign the image - required: false - cosign-public-key: - description: The public key to be used with cosign for verification - required: false - cosign-password: - description: The password to decrypt the cosign private key needed to sign the image - required: false - sign-method: - description: Build image will be signed using keypair or keyless methods - default: "keypair" - required: true - verify-signature: - description: When set to the string boolean value of "true", the resulting build image signature will be verified - default: "false" - required: false - -runs: - using: composite - steps: - - name: Set shared variables - shell: bash - # See https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings - run: | - SHARED_IMAGES=${{ inputs.ecr-hostname }}/${{ inputs.ecr-image-name }} - - SHARED_TAG_LIST=$(cat << EOF - type=ref,event=branch,suffix=${{ inputs.ecr-tag-suffix }} - type=semver,pattern={{version}},suffix=${{ inputs.ecr-tag-suffix }} - type=sha,format=short,suffix=${{ inputs.ecr-tag-suffix }} - EOF - ) - - SHARED_BUILD_ARGS=$(cat << EOF - COMMIT_SHA=${{ inputs.git-commit-sha }} - EOF - ) - - echo "shared-images<<EOF" >> $GITHUB_ENV - echo "$SHARED_IMAGES" >> $GITHUB_ENV - echo "EOF" >> $GITHUB_ENV - - echo "shared-tag-list<<EOF" >> $GITHUB_ENV - echo "$SHARED_TAG_LIST" >> $GITHUB_ENV - echo "EOF" >> $GITHUB_ENV - - echo "shared-build-args<<EOF" >> $GITHUB_ENV - echo "$SHARED_BUILD_ARGS" >> $GITHUB_ENV - echo "EOF" >> $GITHUB_ENV - - - if: inputs.publish == 'true' - # Log in to AWS for publish to ECR - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 - with: - role-to-assume: ${{ inputs.aws-role-to-assume }} - role-duration-seconds: ${{ inputs.aws-role-duration-seconds }} - aws-region: ${{ inputs.aws-region }} - - - if: inputs.publish == 'true' - name: Login to ECR - uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0 - with: - registry: ${{ inputs.ecr-hostname }} - - - name: Setup Docker Buildx - uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 - - - name: Generate docker metadata for root image - id: meta-root - uses: docker/metadata-action@2c0bd771b40637d97bf205cbccdd294a32112176 # v4.5.0 - env: - DOCKER_METADATA_PR_HEAD_SHA: "true" - with: - # list of Docker images to use as base name for tags - images: ${{ env.shared-images }} - # XXX: DO NOT USE SHARED TAGS HERE - tags: | - type=ref,event=branch,suffix=${{ inputs.ecr-tag-suffix }}-root - type=semver,pattern={{version}},suffix=${{ inputs.ecr-tag-suffix }}-root - type=sha,format=short,suffix=${{ inputs.ecr-tag-suffix }}-root - - # To avoid rate limiting from Docker Hub, we login with a paid user account.
- - name: Login to Docker Hub - if: inputs.dockerhub_username && inputs.dockerhub_password - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 - with: - username: ${{ inputs.dockerhub_username }} - password: ${{ inputs.dockerhub_password }} - - - name: Build and push root docker image - id: buildpush-root - uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 - with: - push: ${{ inputs.publish }} - context: . - load: ${{ contains(inputs.publish, false) }} - tags: ${{ steps.meta-root.outputs.tags }} - labels: ${{ steps.meta-root.outputs.labels }} - file: ${{ inputs.dockerfile }} - build-args: | - CHAINLINK_USER=root - ${{ env.shared-build-args }} - - - name: Save root image name in GITHUB_ENV - id: save-root-image-name-env - shell: sh - run: | - IMAGES_NAME_RAW=${{ fromJSON(steps.buildpush-root.outputs.metadata)['image.name'] }} - IMAGE_NAME=$(echo "$IMAGES_NAME_RAW" | cut -d"," -f1) - echo "root_image_name=${IMAGE_NAME}" >> $GITHUB_ENV - - - name: Generate docker metadata for non-root image - id: meta-nonroot - uses: docker/metadata-action@2c0bd771b40637d97bf205cbccdd294a32112176 # v4.5.0 - env: - DOCKER_METADATA_PR_HEAD_SHA: "true" - with: - flavor: | - latest=auto - prefix= - suffix= - images: ${{ env.shared-images }} - tags: ${{ env.shared-tag-list }} - - # To avoid rate limiting from Docker Hub, we login with a paid user account. - - name: Login to Docker Hub - if: inputs.dockerhub_username && inputs.dockerhub_password - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 - with: - username: ${{ inputs.dockerhub_username }} - password: ${{ inputs.dockerhub_password }} - - - name: Build and push non-root docker image - id: buildpush-nonroot - uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 - with: - push: ${{ inputs.publish }} - context: . - load: ${{ contains(inputs.publish, false) }} - tags: ${{ steps.meta-nonroot.outputs.tags }} - labels: ${{ steps.meta-nonroot.outputs.labels }} - file: ${{ inputs.dockerfile }} - build-args: | - CHAINLINK_USER=chainlink - ${{ env.shared-build-args }} - - - name: Save non-root image name in GITHUB_ENV and GITHUB_STEP_SUMMARY - id: save-non-root-image-name-env - shell: sh - run: | - IMAGES_NAME_RAW=${{ fromJSON(steps.buildpush-nonroot.outputs.metadata)['image.name'] }} - IMAGE_DIGEST=${{ fromJSON(steps.buildpush-nonroot.outputs.metadata)['containerimage.digest'] }} - IMAGE_NAME=$(echo "$IMAGES_NAME_RAW" | cut -d"," -f1) - echo "nonroot_image_name=${IMAGE_NAME}" >> $GITHUB_ENV - echo '### Docker Image' >> $GITHUB_STEP_SUMMARY - echo "Image Name: ${IMAGE_NAME}" >> $GITHUB_STEP_SUMMARY - echo "Image Digest: ${IMAGE_DIGEST}" >> $GITHUB_STEP_SUMMARY - - - name: Check if non-root image runs as root - id: check-nonroot-runs-root - shell: sh - env: - PUBLISH: ${{ inputs.publish }} - run: | - echo "Fail build if non-root image runs as user: root" - # if we're publishing the image, it doesn't get loaded into the local docker daemon - # so we need to pull the image into our daemon - if [ $PUBLISH = "true" ]; then - docker pull "${nonroot_image_name}" - fi - docker inspect "${nonroot_image_name}" | jq -r '.[].Config.User' | ( ! 
grep "root" ) - - - if: inputs.sign-images == 'true' - name: Install cosign - uses: sigstore/cosign-installer@11086d25041f77fe8fe7b9ea4e48e3b9192b8f19 # v3.1.2 - with: - cosign-release: "v1.6.0" - - - if: inputs.sign-images == 'true' && inputs.sign-method == 'keypair' - name: Sign the published root Docker image using keypair method - shell: sh - env: - COSIGN_PASSWORD: "${{ inputs.cosign-password }}" - run: | - echo "${{ inputs.cosign-private-key }}" > cosign.key - cosign sign --key cosign.key "${{ env.root_image_name }}" - rm -f cosign.key - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keypair' - name: Verify the signature of the published root Docker image using keypair - shell: sh - run: | - echo "${{ inputs.cosign-public-key }}" > cosign.key - cosign verify --key cosign.key "${{ env.root_image_name }}" - rm -f cosign.key - - - if: inputs.sign-images == 'true' && inputs.sign-method == 'keyless' - name: Sign the published root Docker image using keyless method - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign sign "${{ env.root_image_name }}" - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keyless' - name: Verify the signature of the published root Docker image using keyless - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign verify "${{ env.root_image_name }}" - - - if: inputs.sign-images == 'true' && inputs.sign-method == 'keypair' - name: Sign the published non-root Docker image using keypair method - shell: sh - env: - COSIGN_PASSWORD: "${{ inputs.cosign-password }}" - run: | - echo "${{ inputs.cosign-private-key }}" > cosign.key - cosign sign --key cosign.key "${{ env.nonroot_image_name }}" - rm -f cosign.key - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keypair' - name: Verify the signature of the published non-root Docker image using keypair - shell: sh - run: | - echo "${{ inputs.cosign-public-key }}" > cosign.key - cosign verify --key cosign.key "${{ env.nonroot_image_name }}" - rm -f cosign.key - - - if: inputs.sign-images == 'true' && inputs.sign-method == 'keyless' - name: Sign the published non-root Docker image using keyless method - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign sign "${{ env.nonroot_image_name }}" - - - if: inputs.verify-signature == 'true' && inputs.sign-method == 'keyless' - name: Verify the signature of the published non-root Docker image using keyless - shell: sh - env: - COSIGN_EXPERIMENTAL: 1 - run: | - cosign verify "${{ env.nonroot_image_name }}" diff --git a/.github/actions/build-test-image/action.yml b/.github/actions/build-test-image/action.yml deleted file mode 100644 index b1305e4005..0000000000 --- a/.github/actions/build-test-image/action.yml +++ /dev/null @@ -1,123 +0,0 @@ -name: Build Test Image -description: A composite action that allows building and publishing the test remote runner image - -inputs: - repository: - description: The docker repository for the image - default: chainlink-ccip-tests - required: false - tag: - description: The tag to use by default and to use for checking image existance - default: ${{ github.sha }} - required: false - other_tags: - description: Other tags to push if needed - required: false - suites: - description: The test suites to build into the image - default: chaos migration reorg smoke soak benchmark load/automationv2_1 ccip-tests/load ccip-tests/smoke ccip-tests/chaos - required: false - QA_AWS_ROLE_TO_ASSUME: - description: The AWS role to assume as the CD user, if any. 
Used in configuring the docker/login-action - required: true - QA_AWS_REGION: - description: The AWS region the ECR repository is located in, should only be needed for public ECR repositories, used in configuring docker/login-action - required: true - QA_AWS_ACCOUNT_NUMBER: - description: The AWS region the ECR repository is located in, should only be needed for public ECR repositories, used in configuring docker/login-action - required: true - -runs: - using: composite - steps: - - # Base Test Image Logic - - name: Get CTF Version - id: version - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/mod-version@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - go-project-path: ./integration-tests - module-name: github.com/smartcontractkit/chainlink-testing-framework - enforce-semantic-tag: false - - name: Get CTF sha - if: steps.version.outputs.is_semantic == 'false' - id: short_sha - env: - VERSION: ${{ steps.version.outputs.version }} - shell: bash - run: | - short_sha="${VERSION##*-}" - echo "short sha is: ${short_sha}" - echo "short_sha=${short_sha}" >> "$GITHUB_OUTPUT" - - name: Checkout chainlink-testing-framework - if: steps.version.outputs.is_semantic == 'false' - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - repository: smartcontractkit/chainlink-testing-framework - ref: main - fetch-depth: 0 - path: ctf - - name: Get long sha - if: steps.version.outputs.is_semantic == 'false' - id: long_sha - env: - SHORT_SHA: ${{ steps.short_sha.outputs.short_sha }} - shell: bash - run: | - cd ctf - long_sha=$(git rev-parse ${SHORT_SHA}) - echo "sha is: ${long_sha}" - echo "long_sha=${long_sha}" >> "$GITHUB_OUTPUT" - - name: Check if test base image exists - if: steps.version.outputs.is_semantic == 'false' - id: check-base-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - repository: ${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/test-base-image - tag: ${{ steps.long_sha.outputs.long_sha }} - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Base Image - if: steps.version.outputs.is_semantic == 'false' && steps.check-base-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/docker/build-push@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - env: - BASE_IMAGE_NAME: ${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/test-base-image:${{ steps.long_sha.outputs.long_sha }} - with: - tags: ${{ env.BASE_IMAGE_NAME }} - file: ctf/k8s/Dockerfile.base - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - # End Base Image Logic - - # Test Runner Logic - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - repository: ${{ inputs.repository }} - tag: ${{ inputs.tag }} - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Build and Publish Test Runner - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/docker/build-push@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - tags: | - ${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/${{ inputs.repository }}:${{ inputs.tag 
}} - ${{ inputs.other_tags }} - file: ./integration-tests/test.Dockerfile - build-args: | - BASE_IMAGE=${{ inputs.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ inputs.QA_AWS_REGION }}.amazonaws.com/test-base-image - IMAGE_VERSION=${{ steps.long_sha.outputs.long_sha || steps.version.outputs.version }} - SUITES="${{ inputs.suites }}" - AWS_REGION: ${{ inputs.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ inputs.QA_AWS_ROLE_TO_ASSUME }} - - name: Print Image Built - shell: sh - env: - INPUTS_REPOSITORY: ${{ inputs.repository }} - INPUTS_TAG: ${{ inputs.tag }} - run: | - echo "### ${INPUTS_REPOSITORY} image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${INPUTS_TAG}\`" >>$GITHUB_STEP_SUMMARY - # End Test Runner Logic diff --git a/.github/actions/delete-deployments/action.yml b/.github/actions/delete-deployments/action.yml deleted file mode 100644 index 5fc7ef0287..0000000000 --- a/.github/actions/delete-deployments/action.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Delete Deployments -description: Delete deployments by env and ref -inputs: - environment: - required: true - description: The Github environment to filter deployments by - ref: - required: true - description: The ref to filter deployments by - dry-run: - required: false - description: Whether to actually delete deployments or not - github-token: - description: "The Github token to use for authentication" - required: true - default: ${{ github.token }} - num-of-pages: - required: false - description: The number of pages (of 100 per page) to fetch deployments from, set to 'all' to fetch all deployments - default: "all" - starting-page: - required: false - description: The page to start fetching deployments from, only valid if num-of-pages is set to a number - repository: - required: false - description: The owner and repository name to delete deployments from, defaults to the current repository, ex. 
'smartcontractkit/chainlink' - default: ${{ github.repository }} - -runs: - using: composite - steps: - - uses: pnpm/action-setup@c3b53f6a16e57305370b4ae5a540c2077a1d50dd # v2.2.4 - with: - version: ^8.0.0 - - - uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 - with: - node-version: "18" - cache: "pnpm" - cache-dependency-path: "./.github/actions/delete-deployments/pnpm-lock.yaml" - - - name: Install dependencies - shell: bash - run: pnpm i --prod - working-directory: "./.github/actions/delete-deployments" - - - name: Run deployment deleter - shell: bash - run: pnpm start - env: - NUM_OF_PAGES: ${{ inputs.num-of-pages }} - STARTING_PAGE: ${{ inputs.starting-page }} - GITHUB_TOKEN: ${{ inputs.github-token }} - ENVIRONMENT: ${{ inputs.environment }} - REF: ${{ inputs.ref }} - DRY_RUN: ${{ inputs.dry-run }} - OWNER: ${{ inputs.owner }} - REPOSITORY: ${{ inputs.repository }} - working-directory: "./.github/actions/delete-deployments" diff --git a/.github/actions/delete-deployments/index.ts b/.github/actions/delete-deployments/index.ts deleted file mode 100644 index e38f1957d2..0000000000 --- a/.github/actions/delete-deployments/index.ts +++ /dev/null @@ -1,232 +0,0 @@ -import { Octokit } from "@octokit/action"; -import { info, warning, isDebug } from "@actions/core"; -import { throttling } from "@octokit/plugin-throttling"; -import { retry } from "@octokit/plugin-retry"; - -async function main() { - const { - dryRun, - environment, - numOfPages, - owner, - ref, - repo, - debug, - startingPage, - } = getInputs(); - const octokit = getOctokit(debug); - - const deployments = await getDeployments({ - octokit, - owner, - repo, - environment, - ref, - paginateOptions: { - numOfPages, - startingPage, - }, - }); - const deploymentIds = deployments.map((d) => d.id); - if (dryRun) { - info(`Dry run: would delete deployments (${deploymentIds.length})`); - return; - } - - info(`Deleting deployments (${deploymentIds.length})`); - const deleteDeployments = deploymentIds.map(async (id) => { - const sharedArgs = { - owner, - repo, - deployment_id: id, - request: { - retries: 0, - }, - }; - - const setStatus = await octokit.repos - .createDeploymentStatus({ - ...sharedArgs, - state: "inactive", - }) - .then(() => true) - .catch((e) => { - warning( - `Marking deployment id ${id} to "inactive" failed: ${e.message}` - ); - return false; - }); - if (!setStatus) return false; - - return octokit.repos - .deleteDeployment({ - ...sharedArgs, - }) - .then(() => true) - .catch((e) => { - warning(`Deleting deployment id ${id} failed: ${e.message}`); - return false; - }); - }); - - const processed = await Promise.all(deleteDeployments); - const succeeded = processed.filter((p) => !!p); - info( - `Successfully deleted ${succeeded.length}/${processed.length} deployments` - ); -} -main(); - -function getInputs() { - const debug = !!(process.env.DEBUG || isDebug()); - - const dryRun = process.env.DRY_RUN === "true"; - - const environment = process.env.ENVIRONMENT; - if (!environment) throw new Error("ENVIRONMENT not set"); - - const ref = process.env.REF; - - const repository = process.env.REPOSITORY; - if (!repository) throw new Error("REPOSITORY not set"); - const [owner, repo] = repository.split("/"); - - const rawStartingPage = process.env.STARTING_PAGE; - - let startingPage: number | undefined; - if (rawStartingPage) { - startingPage = parseInt(rawStartingPage); - if (isNaN(startingPage)) { - throw new Error(`STARTING_PAGE is not a number: ${rawStartingPage}`); - } - if (startingPage < 0) { - 
throw new Error( - `STARTING_PAGE must be a positive integer or zero: ${rawStartingPage}` - ); - } - info(`Starting from page ${startingPage}`); - } - - const rawNumOfPages = process.env.NUM_OF_PAGES; - let numOfPages: "all" | number = "all"; - if (rawNumOfPages === "all") { - info("Fetching all pages of deployments"); - } else { - const parsedPages = parseInt(rawNumOfPages || ""); - if (isNaN(parsedPages)) { - throw new Error(`NUM_OF_PAGES is not a number: ${rawNumOfPages}`); - } - if (parsedPages < 1) { - throw new Error(`NUM_OF_PAGES must be greater than 0: ${rawNumOfPages}`); - } - numOfPages = parsedPages; - } - - if (numOfPages === "all" && startingPage) { - throw new Error(`Cannot use STARTING_PAGE with NUM_OF_PAGES=all`); - } - - const parsedInputs = { - environment, - ref, - owner, - repo, - numOfPages, - startingPage, - dryRun, - debug, - }; - info(`Configuration: ${JSON.stringify(parsedInputs)}`); - return parsedInputs; -} - -function getOctokit(debug: boolean) { - const OctokitAPI = Octokit.plugin(throttling, retry); - const octokit = new OctokitAPI({ - log: debug ? console : undefined, - throttle: { - onRateLimit: (retryAfter, options, octokit, retryCount) => { - octokit.log.warn( - // Types are busted from octokit - //@ts-expect-error - `Request quota exhausted for request ${options.method} ${options.url}` - ); - - octokit.log.info(`Retrying after ${retryAfter} seconds!`); - return true; - }, - onSecondaryRateLimit: (_retryAfter, options, octokit) => { - octokit.log.warn( - // Types are busted from octokit - //@ts-expect-error - `SecondaryRateLimit detected for request ${options.method} ${options.url}` - ); - return true; - }, - }, - }); - - return octokit; -} - -async function getDeployments({ - octokit, - owner, - repo, - environment, - ref, - paginateOptions, -}: { - octokit: ReturnType<typeof getOctokit>; - owner: string; - repo: string; - environment: string; - ref?: string; - paginateOptions: { - numOfPages: number | "all"; - startingPage?: number; - }; -}) { - const listDeploymentsSharedArgs: Parameters< - typeof octokit.repos.listDeployments - >[0] = { - owner, - repo, - environment, - ref, - per_page: 100, - request: { - retries: 20, - }, - }; - - if (paginateOptions.numOfPages === "all") { - info(`Fetching all deployments`); - const deployments = await octokit.paginate(octokit.repos.listDeployments, { - ...listDeploymentsSharedArgs, - }); - - return deployments; - } else { - info( - `Fetching ${ - paginateOptions.numOfPages * listDeploymentsSharedArgs.per_page!
- } deployments` - ); - const deployments: Awaited< - ReturnType<typeof octokit.repos.listDeployments> - >["data"] = []; - - const offset = paginateOptions.startingPage || 0; - for (let i = offset; i < paginateOptions.numOfPages + offset; i++) { - const deploymentPage = await octokit.repos.listDeployments({ - ...listDeploymentsSharedArgs, - page: i, - }); - - deployments.push(...deploymentPage.data); - } - - return deployments; - } -} diff --git a/.github/actions/delete-deployments/package.json b/.github/actions/delete-deployments/package.json deleted file mode 100644 index 7045cb3579..0000000000 --- a/.github/actions/delete-deployments/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "delete-deployments", - "version": "1.0.0", - "description": "", - "main": "index.ts", - "scripts": { - "start": "ts-node -T .", - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "@actions/core": "^1.10.1", - "@octokit/action": "^6.0.5", - "@octokit/plugin-retry": "^6.0.0", - "@octokit/plugin-throttling": "^7.0.0", - "ts-node": "^10.9.1" - }, - "devDependencies": { - "@octokit/types": "^11.1.0", - "@types/node": "^18", - "typescript": "^5.2.2" - } -} diff --git a/.github/actions/delete-deployments/pnpm-lock.yaml b/.github/actions/delete-deployments/pnpm-lock.yaml deleted file mode 100644 index a5553eb3de..0000000000 --- a/.github/actions/delete-deployments/pnpm-lock.yaml +++ /dev/null @@ -1,350 +0,0 @@ -lockfileVersion: '6.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -dependencies: - '@actions/core': - specifier: ^1.10.1 - version: 1.10.1 - '@octokit/action': - specifier: ^6.0.5 - version: 6.0.5 - '@octokit/plugin-retry': - specifier: ^6.0.0 - version: 6.0.0(@octokit/core@5.0.0) - '@octokit/plugin-throttling': - specifier: ^7.0.0 - version: 7.0.0(@octokit/core@5.0.0) - ts-node: - specifier: ^10.9.1 - version: 10.9.1(@types/node@18.17.15)(typescript@5.2.2) - -devDependencies: - '@octokit/types': - specifier: ^11.1.0 - version: 11.1.0 - '@types/node': - specifier: ^18 - version: 18.17.15 - typescript: - specifier: ^5.2.2 - version: 5.2.2 - -packages: - - /@actions/core@1.10.1: - resolution: {integrity: sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==} - dependencies: - '@actions/http-client': 2.1.1 - uuid: 8.3.2 - dev: false - - /@actions/http-client@2.1.1: - resolution: {integrity: sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==} - dependencies: - tunnel: 0.0.6 - dev: false - - /@cspotcode/source-map-support@0.8.1: - resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} - engines: {node: '>=12'} - dependencies: - '@jridgewell/trace-mapping': 0.3.9 - dev: false - - /@jridgewell/resolve-uri@3.1.1: - resolution: {integrity: sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==} - engines: {node: '>=6.0.0'} - dev: false - - /@jridgewell/sourcemap-codec@1.4.15: - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - dev: false - - /@jridgewell/trace-mapping@0.3.9: - resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - dependencies: - '@jridgewell/resolve-uri': 3.1.1 - '@jridgewell/sourcemap-codec': 1.4.15 - dev: false - - /@octokit/action@6.0.5: - resolution: {integrity:
sha512-jcCZb+jR4nzHgj86wlUvbTv92hiZ4OWpI9dIoWRilbtT4HuVVNFZvQih8X/YE2GMVrLCVbBD0xkjeq+1m8Rcpw==} - engines: {node: '>= 18'} - dependencies: - '@octokit/auth-action': 4.0.0 - '@octokit/core': 5.0.0 - '@octokit/plugin-paginate-rest': 8.0.0(@octokit/core@5.0.0) - '@octokit/plugin-rest-endpoint-methods': 9.0.0(@octokit/core@5.0.0) - '@octokit/types': 11.1.0 - undici: 5.24.0 - dev: false - - /@octokit/auth-action@4.0.0: - resolution: {integrity: sha512-sMm9lWZdiX6e89YFaLrgE9EFs94k58BwIkvjOtozNWUqyTmsrnWFr/M5LolaRzZ7Kmb5FbhF9hi7FEeE274SoQ==} - engines: {node: '>= 18'} - dependencies: - '@octokit/auth-token': 4.0.0 - '@octokit/types': 11.1.0 - dev: false - - /@octokit/auth-token@4.0.0: - resolution: {integrity: sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==} - engines: {node: '>= 18'} - dev: false - - /@octokit/core@5.0.0: - resolution: {integrity: sha512-YbAtMWIrbZ9FCXbLwT9wWB8TyLjq9mxpKdgB3dUNxQcIVTf9hJ70gRPwAcqGZdY6WdJPZ0I7jLaaNDCiloGN2A==} - engines: {node: '>= 18'} - dependencies: - '@octokit/auth-token': 4.0.0 - '@octokit/graphql': 7.0.1 - '@octokit/request': 8.1.1 - '@octokit/request-error': 5.0.0 - '@octokit/types': 11.1.0 - before-after-hook: 2.2.3 - universal-user-agent: 6.0.0 - dev: false - - /@octokit/endpoint@9.0.0: - resolution: {integrity: sha512-szrQhiqJ88gghWY2Htt8MqUDO6++E/EIXqJ2ZEp5ma3uGS46o7LZAzSLt49myB7rT+Hfw5Y6gO3LmOxGzHijAQ==} - engines: {node: '>= 18'} - dependencies: - '@octokit/types': 11.1.0 - is-plain-object: 5.0.0 - universal-user-agent: 6.0.0 - dev: false - - /@octokit/graphql@7.0.1: - resolution: {integrity: sha512-T5S3oZ1JOE58gom6MIcrgwZXzTaxRnxBso58xhozxHpOqSTgDS6YNeEUvZ/kRvXgPrRz/KHnZhtb7jUMRi9E6w==} - engines: {node: '>= 18'} - dependencies: - '@octokit/request': 8.1.1 - '@octokit/types': 11.1.0 - universal-user-agent: 6.0.0 - dev: false - - /@octokit/openapi-types@18.0.0: - resolution: {integrity: sha512-V8GImKs3TeQRxRtXFpG2wl19V7444NIOTDF24AWuIbmNaNYOQMWRbjcGDXV5B+0n887fgDcuMNOmlul+k+oJtw==} - - /@octokit/plugin-paginate-rest@8.0.0(@octokit/core@5.0.0): - resolution: {integrity: sha512-2xZ+baZWUg+qudVXnnvXz7qfrTmDeYPCzangBVq/1gXxii/OiS//4shJp9dnCCvj1x+JAm9ji1Egwm1BA47lPQ==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': '>=5' - dependencies: - '@octokit/core': 5.0.0 - '@octokit/types': 11.1.0 - dev: false - - /@octokit/plugin-rest-endpoint-methods@9.0.0(@octokit/core@5.0.0): - resolution: {integrity: sha512-KquMF/VB1IkKNiVnzJKspY5mFgGyLd7HzdJfVEGTJFzqu9BRFNWt+nwTCMuUiWc72gLQhRWYubTwOkQj+w/1PA==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': '>=5' - dependencies: - '@octokit/core': 5.0.0 - '@octokit/types': 11.1.0 - dev: false - - /@octokit/plugin-retry@6.0.0(@octokit/core@5.0.0): - resolution: {integrity: sha512-a1/A4A+PB1QoAHQfLJxGHhLfSAT03bR1jJz3GgQJZvty2ozawFWs93MiBQXO7SL2YbO7CIq0Goj4qLOBj8JeMQ==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': '>=5' - dependencies: - '@octokit/core': 5.0.0 - '@octokit/request-error': 5.0.0 - '@octokit/types': 11.1.0 - bottleneck: 2.19.5 - dev: false - - /@octokit/plugin-throttling@7.0.0(@octokit/core@5.0.0): - resolution: {integrity: sha512-KL2k/d0uANc8XqP5S64YcNFCudR3F5AaKO39XWdUtlJIjT9Ni79ekWJ6Kj5xvAw87udkOMEPcVf9xEge2+ahew==} - engines: {node: '>= 18'} - peerDependencies: - '@octokit/core': ^5.0.0 - dependencies: - '@octokit/core': 5.0.0 - '@octokit/types': 11.1.0 - bottleneck: 2.19.5 - dev: false - - /@octokit/request-error@5.0.0: - resolution: {integrity: 
sha512-1ue0DH0Lif5iEqT52+Rf/hf0RmGO9NWFjrzmrkArpG9trFfDM/efx00BJHdLGuro4BR/gECxCU2Twf5OKrRFsQ==} - engines: {node: '>= 18'} - dependencies: - '@octokit/types': 11.1.0 - deprecation: 2.3.1 - once: 1.4.0 - dev: false - - /@octokit/request@8.1.1: - resolution: {integrity: sha512-8N+tdUz4aCqQmXl8FpHYfKG9GelDFd7XGVzyN8rc6WxVlYcfpHECnuRkgquzz+WzvHTK62co5di8gSXnzASZPQ==} - engines: {node: '>= 18'} - dependencies: - '@octokit/endpoint': 9.0.0 - '@octokit/request-error': 5.0.0 - '@octokit/types': 11.1.0 - is-plain-object: 5.0.0 - universal-user-agent: 6.0.0 - dev: false - - /@octokit/types@11.1.0: - resolution: {integrity: sha512-Fz0+7GyLm/bHt8fwEqgvRBWwIV1S6wRRyq+V6exRKLVWaKGsuy6H9QFYeBVDV7rK6fO3XwHgQOPxv+cLj2zpXQ==} - dependencies: - '@octokit/openapi-types': 18.0.0 - - /@tsconfig/node10@1.0.9: - resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} - dev: false - - /@tsconfig/node12@1.0.11: - resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} - dev: false - - /@tsconfig/node14@1.0.3: - resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} - dev: false - - /@tsconfig/node16@1.0.4: - resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} - dev: false - - /@types/node@18.17.15: - resolution: {integrity: sha512-2yrWpBk32tvV/JAd3HNHWuZn/VDN1P+72hWirHnvsvTGSqbANi+kSeuQR9yAHnbvaBvHDsoTdXV0Fe+iRtHLKA==} - - /acorn-walk@8.2.0: - resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} - engines: {node: '>=0.4.0'} - dev: false - - /acorn@8.10.0: - resolution: {integrity: sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==} - engines: {node: '>=0.4.0'} - hasBin: true - dev: false - - /arg@4.1.3: - resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - dev: false - - /before-after-hook@2.2.3: - resolution: {integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==} - dev: false - - /bottleneck@2.19.5: - resolution: {integrity: sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==} - dev: false - - /busboy@1.6.0: - resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} - engines: {node: '>=10.16.0'} - dependencies: - streamsearch: 1.1.0 - dev: false - - /create-require@1.1.1: - resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - dev: false - - /deprecation@2.3.1: - resolution: {integrity: sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==} - dev: false - - /diff@4.0.2: - resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} - engines: {node: '>=0.3.1'} - dev: false - - /is-plain-object@5.0.0: - resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} - engines: {node: '>=0.10.0'} - dev: false - - /make-error@1.3.6: - resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - dev: false - - /once@1.4.0: - 
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - dependencies: - wrappy: 1.0.2 - dev: false - - /streamsearch@1.1.0: - resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} - engines: {node: '>=10.0.0'} - dev: false - - /ts-node@10.9.1(@types/node@18.17.15)(typescript@5.2.2): - resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} - hasBin: true - peerDependencies: - '@swc/core': '>=1.2.50' - '@swc/wasm': '>=1.2.50' - '@types/node': '*' - typescript: '>=2.7' - peerDependenciesMeta: - '@swc/core': - optional: true - '@swc/wasm': - optional: true - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.9 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.4 - '@types/node': 18.17.15 - acorn: 8.10.0 - acorn-walk: 8.2.0 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.2.2 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - dev: false - - /tunnel@0.0.6: - resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==} - engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'} - dev: false - - /typescript@5.2.2: - resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} - engines: {node: '>=14.17'} - hasBin: true - - /undici@5.24.0: - resolution: {integrity: sha512-OKlckxBjFl0oXxcj9FU6oB8fDAaiRUq+D8jrFWGmOfI/gIyjk/IeS75LMzgYKUaeHzLUcYvf9bbJGSrUwTfwwQ==} - engines: {node: '>=14.0'} - dependencies: - busboy: 1.6.0 - dev: false - - /universal-user-agent@6.0.0: - resolution: {integrity: sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==} - dev: false - - /uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - dev: false - - /v8-compile-cache-lib@3.0.1: - resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - dev: false - - /wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - dev: false - - /yn@3.1.1: - resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} - engines: {node: '>=6'} - dev: false diff --git a/.github/actions/delete-deployments/test.sh b/.github/actions/delete-deployments/test.sh deleted file mode 100755 index 18b7726088..0000000000 --- a/.github/actions/delete-deployments/test.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh -export NUM_OF_PAGES=all -export ENVIRONMENT=integration -export DRY_RUN=false -export REPOSITORY=smartcontractkit/chainlink -export REF=fix/golint -export GITHUB_ACTION=true - -pnpm start diff --git a/.github/actions/delete-deployments/tsconfig.json b/.github/actions/delete-deployments/tsconfig.json deleted file mode 100644 index 4b36d4a178..0000000000 --- a/.github/actions/delete-deployments/tsconfig.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. 
*/ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - - /* Modules */ - "module": "NodeNext" /* Specify what module code is generated. */, - // "rootDir": "./", /* Specify the root folder within your source files. */ - "moduleResolution": "NodeNext" /* Specify how TypeScript looks up a file from a given module specifier. */, - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ - - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. 
Only applicable with 'allowJs'. */ - - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - // "outDir": "./", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - "noEmit": true /* Disable emitting files from a compilation. */, - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ - - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, - - /* Type Checking */ - "strict": true /* Enable all strict type-checking options. */, - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. 
*/ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": false /* Skip type checking all .d.ts files. */ - }, - "include": ["src", "test"] -} diff --git a/.github/actions/golangci-lint/action.yml b/.github/actions/golangci-lint/action.yml deleted file mode 100644 index 0047c6a54b..0000000000 --- a/.github/actions/golangci-lint/action.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: CI lint for Golang -description: Runs CI lint for Golang -inputs: - # general inputs - name: - description: Name of the lint action - default: lint - go-directory: - description: Go directory to run commands from - default: "." - # setup-go inputs - only-modules: - description: Set to 'true' to only cache modules - default: "false" - cache-version: - description: Set this to cache bust - default: "1" - go-version-file: - description: Set where the go version file is located at - default: "go.mod" - go-module-file: - description: Set where the go module file is located at - default: "go.sum" - # grafana cloud inputs - gc-host: - description: "grafana cloud hostname" - gc-basic-auth: - description: "grafana cloud basic auth" - -runs: - using: composite - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Setup Go - uses: ./.github/actions/setup-go - with: - only-modules: ${{ inputs.only-modules }} - cache-version: ${{ inputs.cache-version }} - go-version-file: ${{ inputs.go-version-file }} - go-module-file: ${{ inputs.go-module-file }} - - name: Touching core/web/assets/index.html - shell: bash - run: mkdir -p core/web/assets && touch core/web/assets/index.html - - name: Build binary - working-directory: ${{ inputs.go-directory }} - shell: bash - run: go build ./... 
- - name: golangci-lint - uses: golangci/golangci-lint-action@3a919529898de77ec3da873e3063ca4b10e7f5cc # v3.7.0 - with: - version: v1.55.2 - # We already cache these directories in setup-go - skip-pkg-cache: true - skip-build-cache: true - # only-new-issues is only applicable to PRs, otherwise it is always set to false - only-new-issues: false # disabled for PRs due to unreliability - args: --out-format colored-line-number,checkstyle:golangci-lint-report.xml - working-directory: ${{ inputs.go-directory }} - - name: Store lint report artifact - if: always() - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0 - with: - name: golangci-lint-report - path: ${{ inputs.go-directory }}/golangci-lint-report.xml - - name: Collect Metrics - if: always() - uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 - with: - basic-auth: ${{ inputs.gc-basic-auth }} - hostname: ${{ inputs.gc-host }} - this-job-name: ${{ inputs.name }} - continue-on-error: true diff --git a/.github/actions/goreleaser-build-sign-publish/README.md b/.github/actions/goreleaser-build-sign-publish/README.md deleted file mode 100644 index d6bf7e6fd4..0000000000 --- a/.github/actions/goreleaser-build-sign-publish/README.md +++ /dev/null @@ -1,131 +0,0 @@ -# goreleaser-build-sign-publish - -> goreleaser wrapper action - -## workflows - -### build publish - -```yaml -name: goreleaser - -on: - push: - tags: - - "v*" - -jobs: - goreleaser: - runs-on: ubuntu-latest - environment: release - permissions: - id-token: write - contents: read - env: - MACOS_SDK_VERSION: 12.3 - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Configure aws credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 - with: - role-to-assume: ${{ secrets.aws-role-arn }} - role-duration-seconds: ${{ secrets.aws-role-dur-sec }} - aws-region: ${{ secrets.aws-region }} - - name: Cache macos sdk - id: sdk-cache - uses: actions/cache@v3 - with: - path: ${{ format('MacOSX{0}.sdk', env.MAC_SDK_VERSION) }} - key: ${{ runner.OS }}-${{ env.MAC_SDK_VERSION }}-macos-sdk-cache-${{ hashFiles('**/SDKSettings.json') }} - restore-keys: | - ${{ runner.OS }}-${{ env.MAC_SDK_VERSION }}-macos-sdk-cache- - - name: Get macos sdk - if: steps.sdk-cache.outputs.cache-hit != 'true' - run: | - curl -L https://github.com/joseluisq/macosx-sdks/releases/download/${MACOS_SDK_VERSION}/MacOSX${MACOS_SDK_VERSION}.sdk.tar.xz > MacOSX${MACOS_SDK_VERSION}.sdk.tar.xz - tar -xf MacOSX${MACOS_SDK_VERSION}.sdk.tar.xz - - name: Build, sign, and publish - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - enable-goreleaser-snapshot: "false" - docker-registry: ${{ secrets.aws-ecr-registry }} - goreleaser-exec: goreleaser - goreleaser-config: .goreleaser.yaml - macos-sdk-dir: ${{ format('MacOSX{0}.sdk', env.MAC_SDK_VERSION) }} - env: - GITHUB_TOKEN: ${{ secrets.gh-token }} -``` - -### snapshot release - -```yaml -- name: Build, sign, and publish image - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - enable-goreleaser-snapshot: "true" - docker-registry: ${{ secrets.aws-ecr-registry }} - goreleaser-exec: goreleaser - goreleaser-config: .goreleaser.yaml -``` - -### image signing - -```yaml -- name: Build, sign, and publish - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - 
-    enable-goreleaser-snapshot: "false"
-    enable-cosign: "true"
-    docker-registry: ${{ secrets.aws-ecr-registry }}
-    goreleaser-exec: goreleaser
-    goreleaser-config: .goreleaser.yaml
-    cosign-password: ${{ secrets.cosign-password }}
-    cosign-public-key: ${{ secrets.cosign-public-key }}
-    cosign-private-key: ${{ secrets.cosign-private-key }}
-    macos-sdk-dir: MacOSX12.3.sdk
-```
-
-## customizing
-
-### inputs
-
-The following inputs can be used as `step.with` keys:
-
-| Name                         | Type   | Default            | Description                                                              |
-| ---------------------------- | ------ | ------------------ | ------------------------------------------------------------------------ |
-| `goreleaser-version`         | String | `1.13.1`           | `goreleaser` version                                                     |
-| `zig-version`                | String | `0.10.0`           | `zig` version                                                            |
-| `cosign-version`             | String | `v1.13.1`          | `cosign` version                                                         |
-| `macos-sdk-dir`              | String | `MacOSX12.3.sdk`   | MacOSX sdk directory                                                     |
-| `enable-docker-publish`      | Bool   | `true`             | Enable publishing of Docker images / manifests                           |
-| `docker-registry`            | String | `localhost:5001`   | Docker registry                                                          |
-| `enable-goreleaser-snapshot` | Bool   | `false`            | Enable goreleaser build / release snapshot                               |
-| `goreleaser-exec`            | String | `goreleaser`       | The goreleaser executable, can invoke wrapper script                     |
-| `goreleaser-config`          | String | `.goreleaser.yaml` | The goreleaser configuration yaml                                        |
-| `enable-cosign`              | Bool   | `false`            | Enable signing of Docker images                                          |
-| `cosign-public-key`          | String | `""`               | The public key to be used with cosign for verification                   |
-| `cosign-private-key`         | String | `""`               | The private key to be used with cosign to sign the image                 |
-| `cosign-password`            | String | `""`               | The password to decrypt the cosign private key needed to sign the image  |
-
-## testing
-
-- bring up local docker registry
-
-```sh
-docker run -d --restart=always -p "127.0.0.1:5001:5000" --name registry registry:2
-```
-
-- run snapshot release, publish to local docker registry
-
-```sh
-GORELEASER_EXEC=" set
-_publish_snapshot_images() {
-  local full_sha=$(git rev-parse HEAD)
-  local images=$(docker images --filter "label=org.opencontainers.image.revision=$full_sha" --format "{{.Repository}}:{{.Tag}}")
-  for image in $images; do
-    docker push "$image"
-  done
-}
-
-# publish snapshot docker manifest lists
-# must have label=org.opencontainers.image.revision= set
-_publish_snapshot_manifests() {
-  local docker_manifest_extra_args=$DOCKER_MANIFEST_EXTRA_ARGS
-  local full_sha=$(git rev-parse HEAD)
-  local images=$(docker images --filter "label=org.opencontainers.image.revision=$full_sha" --format "{{.Repository}}:{{.Tag}}" | sort)
-  local arches=(amd64 arm64)
-  local raw_manifest_lists=""
-  for image in $images; do
-    for arch in "${arches[@]}"; do
-      image=${image%"-$arch"}
-    done
-    raw_manifest_lists+="$image"$'\n'
-  done
-  local manifest_lists=$(echo "$raw_manifest_lists" | sort | uniq)
-  for manifest_list in $manifest_lists; do
-    manifests=""
-    for arch in "${arches[@]}"; do
-      archExists=$(echo "$images" | grep -c "$manifest_list-$arch")
-      if [[ $archExists -ne 0 ]]; then
-        manifests+="$manifest_list-$arch "
-      fi
-    done
-    docker manifest create $manifest_list $manifests $docker_manifest_extra_args
-    docker manifest push "$manifest_list"
-  done
-}
-
-# wrapper function to invoke goreleaser release
-goreleaser_release() {
-  if [[ $ENABLE_COSIGN == "true" ]]; then
-    echo "$COSIGN_PUBLIC_KEY" > cosign.pub
-    echo "$COSIGN_PRIVATE_KEY" > cosign.key
-  fi
-  if [[ -n $MACOS_SDK_DIR ]]; then
-    MACOS_SDK_DIR=$(echo "$(cd "$(dirname "$MACOS_SDK_DIR")" || exit; pwd)/$(basename "$MACOS_SDK_DIR")")
-  fi
-  if [[ $ENABLE_GORELEASER_SNAPSHOT == "true" ]]; then
-    $GORELEASER_EXEC release --snapshot --clean --config "$GORELEASER_CONFIG" "$@"
-    if [[ $ENABLE_DOCKER_PUBLISH == "true" ]]; then
-      _publish_snapshot_images
-      _publish_snapshot_manifests
-    fi
-  else
-    $GORELEASER_EXEC release --clean --config "$GORELEASER_CONFIG" "$@"
-  fi
-  if [[ $ENABLE_COSIGN == "true" ]]; then
-    rm -rf cosign.pub
-    rm -rf cosign.key
-  fi
-
-  echo "metadata=$(cat dist/metadata.json)" >> "$GITHUB_OUTPUT"
-  echo "artifacts=$(cat dist/artifacts.json)" >> "$GITHUB_OUTPUT"
-}
-
-"$@"
diff --git a/.github/actions/notify-slack-jobs-result/README.md b/.github/actions/notify-slack-jobs-result/README.md
deleted file mode 100644
index 298930c0d9..0000000000
--- a/.github/actions/notify-slack-jobs-result/README.md
+++ /dev/null
@@ -1,37 +0,0 @@
-# Notify Slack Jobs Result
-
-Sends a Slack message to a specified channel detailing the results of one or more GHA jobs, matched using a regex. The job results are grouped by the `github_job_name_regex` and displayed underneath the `message_title`, with the regex's matching group shown as an individual result. This is primarily designed for when you have test groups running in a matrix and would like condensed reporting on their status by group. It's often preceded by posting a Slack message to start a thread, then attaching all the results to that thread, as we do in the reporting section of the [live-testnet-tests.yml workflow](../../workflows/live-testnet-tests.yml). Check out the example below, where we post an initial summary message, then use this action to thread together specific results:
-
-```yaml
-message_title: Optimism Goerli
-github_job_name_regex: ^Optimism Goerli (?<cap>.*?) Tests$ # Note that the regex MUST have a capturing group named "cap"
-```
-
-![example](image.png)
-
-## Inputs
-
-```yaml
-inputs:
-  github_token:
-    description: "The GitHub token to use for authentication (usually ${{ github.token }})"
-    required: true
-  github_repository:
-    description: "The GitHub owner/repository to use for authentication (usually ${{ github.repository }})"
-    required: true
-  workflow_run_id:
-    description: "The workflow run ID to get the results from (usually ${{ github.run_id }})"
-    required: true
-  github_job_name_regex:
-    description: "The regex to use to match 1..many job name(s) to collect results from. Should include a capture group named 'cap' for the part of the job name you want to display in the Slack message (e.g. ^Client Compatibility Test (?<cap>.*?)$)"
-    required: true
-  message_title:
-    description: "The title of the Slack message"
-    required: true
-  slack_channel_id:
-    description: "The Slack channel ID to post the message to"
-    required: true
-  slack_thread_ts:
-    description: "The Slack thread timestamp to post the message to, handy for keeping multiple related results in a single thread"
-    required: false
-```
diff --git a/.github/actions/notify-slack-jobs-result/action.yml b/.github/actions/notify-slack-jobs-result/action.yml
deleted file mode 100644
index 63840cfa39..0000000000
--- a/.github/actions/notify-slack-jobs-result/action.yml
+++ /dev/null
@@ -1,110 +0,0 @@
-name: Notify Slack Jobs Result
-description: Will send a notification in Slack for the result of a GitHub action run, typically for test results
-inputs:
-  github_token:
-    description: "The GitHub token to use for authentication (usually github.token)"
-    required: true
-  github_repository:
-    description: "The GitHub owner/repository to use for authentication (usually github.repository)"
-    required: true
-  workflow_run_id:
-    description: "The workflow run ID to get the results from (usually github.run_id)"
-    required: true
-  github_job_name_regex:
-    description: "The regex to use to match 1..many job name(s) to collect results from. Should include a capture group named 'cap' for the part of the job name you want to display in the Slack message (e.g. ^Client Compatibility Test (?<cap>.*?)$)"
-    required: true
-  message_title:
-    description: "The title of the Slack message"
-    required: true
-  slack_channel_id:
-    description: "The Slack channel ID to post the message to"
-    required: true
-  slack_bot_token:
-    description: "The Slack bot token to use for authentication which needs permission and an installed app in the channel"
-    required: true
-  slack_thread_ts:
-    description: "The Slack thread timestamp to post the message to, handy for keeping multiple related results in a single thread"
-    required: false
-
-runs:
-  using: composite
-  steps:
-    - name: Get Results
-      shell: bash
-      id: test-results
-      run: |
-        # I feel like there's some clever, fully jq way to do this, but I ain't got the motivation to figure it out
-        echo "Querying test results at https://api.github.com/repos/${{inputs.github_repository}}/actions/runs/${{ inputs.workflow_run_id }}/jobs"
-
-        PARSED_RESULTS=$(curl \
-          -H "Authorization: Bearer ${{ inputs.github_token }}" \
-          'https://api.github.com/repos/${{inputs.github_repository}}/actions/runs/${{ inputs.workflow_run_id }}/jobs' \
-          | jq -r --arg pattern "${{ inputs.github_job_name_regex }}" '.jobs[]
-          | select(.name | test($pattern)) as $job
-          | $job.steps[]
-          | select(.name == "Run Tests")
-          | { conclusion: (if .conclusion == "success" then ":white_check_mark:" else ":x:" end), cap: ("*" + ($job.name | capture($pattern).cap) + "*"), html_url: $job.html_url }')
-
-        echo "Parsed Results:"
-        echo $PARSED_RESULTS
-
-        ALL_SUCCESS=true
-        echo "Checking for failures"
-        echo "$PARSED_RESULTS" | jq -s | jq -r '.[] | select(.conclusion != ":white_check_mark:")'
-        for row in $(echo "$PARSED_RESULTS" | jq -s | jq -r '.[] | select(.conclusion != ":white_check_mark:")'); do
-          ALL_SUCCESS=false
-          break
-        done
-        echo "Success: $ALL_SUCCESS"
-
-        echo all_success=$ALL_SUCCESS >> $GITHUB_OUTPUT
-
-        FORMATTED_RESULTS=$(echo $PARSED_RESULTS | jq -s '[.[]
-        | {
-            conclusion: .conclusion,
-            cap: .cap,
-            html_url: .html_url
-          }
-        ]
-        | map("{\"type\": \"section\", \"text\": {\"type\": \"mrkdwn\", \"text\": \"<\(.html_url)|\(.cap)>: \(.conclusion)\"}}")
\(.conclusion)\"}}") - | join(",")') - - echo "Formatted Results:" - echo $FORMATTED_RESULTS - - # Cleans out backslashes and quotes from jq - CLEAN_RESULTS=$(echo "$FORMATTED_RESULTS" | sed 's/\\\"/"/g' | sed 's/^"//;s/"$//') - - echo "Clean Results" - echo $CLEAN_RESULTS - - echo results=$CLEAN_RESULTS >> $GITHUB_OUTPUT - - name: Post Results - uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0 - env: - SLACK_BOT_TOKEN: ${{ inputs.slack_bot_token }} - with: - channel-id: ${{ inputs.slack_channel_id }} - payload: | - { - "thread_ts": "${{ inputs.slack_thread_ts }}", - "attachments": [ - { - "color": "${{ steps.test-results.outputs.all_success == 'true' && '#2E7D32' || '#C62828' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "${{ inputs.message_title }} ${{ steps.test-results.outputs.all_success == 'true' && ':white_check_mark:' || ':x:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - ${{ steps.test-results.outputs.results }} - ] - } - ] - } diff --git a/.github/actions/notify-slack-jobs-result/image.png b/.github/actions/notify-slack-jobs-result/image.png deleted file mode 100644 index 3bd398101fbdc8e4a0eb4c5addd7332f130190ab..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 47298 zcmd3ORajh2*CiI*HCS*9?i$=05AN=6!QBboxQAdNKyY`baY=%EwTV_yl0iczLWY8ZLIcQ3szO1*&_F>!3n0Qn?ttD<@FhhWJ*uq)YmfURDF}i%elXDudyXtnbijvsOq?fiz1H(l0gL!*)vuo zfm(wgII+iJq&d?&q<%sB9P&-xE0c*?VssAfAt9|IeuR_ywhLX{Kh*!*$VV4H8!-@B%5L}KP^fS28) zs#t951~6YOT5OQcsh|wn+QVIu&xQNVG<@4h_T0<9^Gpr zi!DEkF`dM0U0`XJ3ioT3<)w>18!OYIF}w*jye~TuL5+&hN@6+Ez&>OFq^Y?#IiB1Bv#8c^~1i8RJMM zl4IET=@}Q6A`|Ud7wi>V7u@|?KU(W$xPO6aQn!5>KHQsT6EAtCI-C05dupvg)s7nA zZC(Cp3dz!$>L)vZ6#xvqBcV(Nvk_7hk^|{LYUZ)#(*knn+(T)UZswOQ(?7kTsZ{fM zW7=c(JjDiItjsgk>n8`~@_8}mC+8>aMpEY-4F&uiwPM<&HS^nkyY0JETM^kZyALn^ z%ypf7?KRq+t2Wx@)kS?l2!IVdT+(dfKI9tHMD}lW((9MG{GenmT+FOtZ&FwIRfk*i z0fuZ8%D=^{4bci-?+JVh5&_zQZh>U9fm^F!;f)EmeAj%JwV+{xbrx46S0fiPw|p7f zpWdfjb|+PZHOM~bgjI6f3Znvu5K@LawE2V`Uzd0U$bVR@3LbPj zw-SyG~C5$W5>l{kB7Ss9h==`Vae(fmwW~@zaeM8p4=ROd4%M`ha<2^p=cJ~+h z!`8hvlIarkhIH#~sA^vhzFsH5%QL}}cd>OJHd5eg2P2w3L5eeh9>^^wKOz66zt#oFi=pDHc)W? 
zwr3Q%3|>FPE&4($j?#%nTO)_VJ2)=K4@r?(EGQrze-z}2;uY_wJ(sQmC6LQrsKJbeNcFzAg*YcuK^$GDgWrvfP^g zUbVpLc`bF^OCu^dp|YceV0r!kKJIki`BW*^MwD4HN8#IsH;TFgu|%4GG9@4G2q2Nw zE;lPtD^vNr`Bn6%;aK`|BX|{hfqz^M`6^GG=|X4BjIG-X!4{{>lJm{MkG9Xw>-a3VM$JTJ~te)3Xx%f1`d+`&O zY4J-?TTC<l6+gaN$O?k+qUOk_pGo(i4{PfmDUB%aeTk=)RVW_p<;$8buU1m zNV*Nt>xULX@V9S*s^m;e80SBVwwAt5$@3P$3?==H>`rF2h<}35^$t!m+ANvdnGTpZ zTef>Md3?5W3q6{~UGRm~dddIGSoi8&_m%z{spU%Tzs9*gyV4Do6 zWB}8nyw;ehTB4?kdR({nr8^OV{p@o|rSYCx-GvYAzOK(++r@qag;FB};2kf7fp3UQ zmRd7ibh?^q4^6Bjcd~O;Qo@i|8B2>JmoIX~BDEBc>?wM0CQ+DdhY!?aVk#=}2wC7v zKiZyE$trr|pABLWyHXrhi_NmC8`h9l?S=rk=9_HpHD}*r!lv~Z@kRdkrU#(#wLP9B zt4rO98idNiP!uzJhUJC%fMm%mg5 z84d1Y+|8TET zmmB8Pm1Si=e@t|FUQ;nUXwOw91d4mO*AsJz0r&KQh>0B!ny@^fpjg;~;c?##o`P|< zCMv7+n~6!#v;1M)rIARPCP%g#)?F>lstIDUVsNw%30iB`Wy1?Oqz+6bCs5Jc&0$^T z!{hzp=B2Z#!07)G2F|2FQsVGdSV(C5o+bhu=r%8N{mwieQKv!9DxqW|wr8;PZAZxm zv~A^P0VCNeCv+*^3*6Gj!Zi-sRWubAx7;2K*3UMuKkLvWu>sn z)bGVxN_aKXqpm7tcZRc0{41M)<87>a81v^dBL503$LYLxYVvE{#CFza(eK=U=Z=l6 z@Df*n<6EnzwooT|6dN|H5^`RN*X!qdv_#=?>1bXA!}fu)KBi0&K$9jaO4WuY!L;ECvi z_&4*xC%e4yEWhwd{@pF1U=H0BAv&7ey-V&!}Zpu`jH*zD6HFS>}g#UQod|m-l+R6#=7Aw zoDnvKhE3V{%psllCU>yyLPN{Jd<(Ws)GN{DU|E;rfcD~si5p~mNfu;v!@;)pTRS+{ zh-7vP-|Zx`OI4}hLR%>fBMQYX1V15W(n;NgNv#mx!%wxZZ8^UaCS}2djR^Huz@sHn z+Wf%e_y@p%VbR84h6JyX_gYyQ==d=c{Jh2m#nLg)G#)?W_wstDaMj0$SG0A}D97bE zl7WABlAUFd$G&R9PQGp(oj!uGhOupgPc!R<5IsttvWbdWKhwV$W9qP3XJ z@9OE00XkOME)9D+ZVqfSsFjvNCgI}s3U5u`>0`PRSqST*+w65YXteD3T|ErpoYbex z)4Vzws#j}}L8GW>g=^cfM2<8!Jlg`vXdD+6b=0k{#l~O^tzzMMmxZkO-&k#@m8`%m zvb$D>|EoJ%Dg8Kp%c5KgOH$IgYsi}KSFB;e>hN&sq#BFn3y_#O?B7z)+V!!FxR!Rf1!exL^r-%Cl^)@CRWs&qT3|drOh;DsVV3GF2=w?0AOtw zr2Sdznk77CprtEgkL4rriD=s&;z#Hk+v{(=&Yt*bKCg=Z)K~zWEsoYmA`+4iM@jJi zw?HhRXOlWgP?ALIyz4~@e|-rQdCZ)Tu9&d{{V8-j*I{R diff --git a/.github/actions/setup-create-base64-config-ccip/action.yml b/.github/actions/setup-create-base64-config-ccip/action.yml deleted file mode 100644 index 72d5bdca45..0000000000 --- a/.github/actions/setup-create-base64-config-ccip/action.yml +++ /dev/null @@ -1,95 +0,0 @@ -name: Create Base64 Config for CCIP Tests -description: A composite action that creates a base64-encoded config to be used by ccip integration tests - -inputs: - runId: - description: The run id - testLogCollect: - description: Whether to always collect logs, even for passing tests - default: "false" - selectedNetworks: - description: The networks to run tests against - chainlinkImage: - description: The chainlink image to use - default: "public.ecr.aws/chainlink/chainlink" - chainlinkVersion: - description: The git commit sha to use for the image tag - lokiEndpoint: - description: Loki push endpoint - lokiTenantId: - description: Loki tenant id - lokiBasicAuth: - description: Loki basic auth - logstreamLogTargets: - description: Where to send logs (e.g. 
-  grafanaUrl:
-    description: Grafana URL
-  grafanaDashboardUrl:
-    description: Grafana dashboard URL
-
-runs:
-  using: composite
-  steps:
-    - name: Prepare Base64 TOML override
-      shell: bash
-      id: base64-config-override
-      env:
-        RUN_ID: ${{ inputs.runId }}
-        TEST_LOG_COLLECT: ${{ inputs.testLogCollect }}
-        CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }}
-        CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }}
-        LOKI_ENDPOINT: ${{ inputs.lokiEndpoint }}
-        LOKI_TENANT_ID: ${{ inputs.lokiTenantId }}
-        LOKI_BASIC_AUTH: ${{ inputs.lokiBasicAuth }}
-        LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }}
-        GRAFANA_URL: ${{ inputs.grafanaUrl }}
-        GRAFANA_DASHBOARD_URL: ${{ inputs.grafanaDashboardUrl }}
-      run: |
-        echo ::add-mask::$CHAINLINK_IMAGE
-        function convert_to_toml_array() {
-          local IFS=','
-          local input_array=($1)
-          local toml_array_format="["
-
-          for element in "${input_array[@]}"; do
-            toml_array_format+="\"$element\","
-          done
-
-          toml_array_format="${toml_array_format%,}]"
-          echo "$toml_array_format"
-        }
-
-        log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS")
-
-        # testLogCollect defaults to "false", so test for the literal value rather than non-emptiness
-        if [ "$TEST_LOG_COLLECT" = "true" ]; then
-          test_log_collect=true
-        else
-          test_log_collect=false
-        fi
-
-        cat << EOF > config.toml
-        [CCIP]
-        [CCIP.Env]
-        [CCIP.Env.Chainlink]
-        [CCIP.Env.Chainlink.Common]
-        [CCIP.Env.Chainlink.Common.ChainlinkImage]
-        image="$CHAINLINK_IMAGE"
-        version="$CHAINLINK_VERSION"
-
-        [CCIP.Env.Logging]
-        test_log_collect=$test_log_collect
-        run_id="$RUN_ID"
-
-        [CCIP.Env.Logging.LogStream]
-        log_targets=$log_targets
-
-        [CCIP.Env.Logging.Loki]
-        tenant_id="$LOKI_TENANT_ID"
-        endpoint="$LOKI_ENDPOINT"
-        basic_auth="$LOKI_BASIC_AUTH"
-
-        [CCIP.Env.Logging.Grafana]
-        base_url="$GRAFANA_URL"
-        dasboard_url="$GRAFANA_DASHBOARD_URL"
-        EOF
-
-        BASE64_CCIP_SECRETS_CONFIG=$(cat config.toml | base64 -w 0)
-        echo ::add-mask::$BASE64_CCIP_SECRETS_CONFIG
-        echo "BASE64_CCIP_SECRETS_CONFIG=$BASE64_CCIP_SECRETS_CONFIG" >> $GITHUB_ENV
-        echo "TEST_BASE64_CCIP_SECRETS_CONFIG=$BASE64_CCIP_SECRETS_CONFIG" >> $GITHUB_ENV
diff --git a/.github/actions/setup-create-base64-config-live-testnets/action.yml b/.github/actions/setup-create-base64-config-live-testnets/action.yml
deleted file mode 100644
index 5ba8150989..0000000000
--- a/.github/actions/setup-create-base64-config-live-testnets/action.yml
+++ /dev/null
@@ -1,130 +0,0 @@
-name: Create Base64 Config
-description: A composite action that creates a base64-encoded config to be used by integration tests
-
-inputs:
-  runId:
-    description: The run id
-  testLogCollect:
-    description: Whether to always collect logs, even for passing tests
-    default: "false"
-  chainlinkImage:
-    description: The chainlink image to use
-    default: "public.ecr.aws/chainlink/chainlink"
-  chainlinkVersion:
-    description: The git commit sha to use for the image tag
-  pyroscopeServer:
-    description: URL of Pyroscope server
-  pyroscopeEnvironment:
-    description: Name of Pyroscope environment
-  pyroscopeKey:
-    description: Pyroscope server key
-  lokiEndpoint:
-    description: Loki push endpoint
-  lokiTenantId:
-    description: Loki tenant id
-  lokiBasicAuth:
-    description: Loki basic auth
-  logstreamLogTargets:
-    description: Where to send logs (e.g. file, loki)
-  grafanaUrl:
-    description: Grafana URL
-  grafanaDashboardUrl:
-    description: Grafana dashboard URL
-  network:
-    description: Network to run tests on
-  httpEndpoints:
-    description: HTTP endpoints to use for network
-  wsEndpoints:
-    description: WS endpoints to use for network
-  fundingKeys:
-    description: Funding keys to use for network
-
-runs:
-  using: composite
-  steps:
-    - name: Prepare Base64 TOML override
-      shell: bash
-      id: base64-config-override
-      env:
-        RUN_ID: ${{ inputs.runId }}
-        PYROSCOPE_SERVER: ${{ inputs.pyroscopeServer }}
-        PYROSCOPE_ENVIRONMENT: ${{ inputs.pyroscopeEnvironment }}
-        PYROSCOPE_KEY: ${{ inputs.pyroscopeKey }}
-        CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }}
-        CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }}
-        LOKI_ENDPOINT: ${{ inputs.lokiEndpoint }}
-        LOKI_TENANT_ID: ${{ inputs.lokiTenantId }}
-        LOKI_BASIC_AUTH: ${{ inputs.lokiBasicAuth }}
-        LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }}
-        GRAFANA_URL: ${{ inputs.grafanaUrl }}
-        GRAFANA_DASHBOARD_URL: ${{ inputs.grafanaDashboardUrl }}
-        NETWORK: ${{ inputs.network }}
-        HTTP_ENDPOINTS: ${{ inputs.httpEndpoints }}
-        WS_ENDPOINTS: ${{ inputs.wsEndpoints }}
-        FUNDING_KEYS: ${{ inputs.fundingKeys }}
-      run: |
-        convert_to_toml_array() {
-          local IFS=','
-          local input_array=($1)
-          local toml_array_format="["
-
-          for element in "${input_array[@]}"; do
-            toml_array_format+="\"$element\","
-          done
-
-          toml_array_format="${toml_array_format%,}]"
-          echo "$toml_array_format"
-        }
-
-        if [ -n "$PYROSCOPE_SERVER" ]; then
-          pyroscope_enabled=true
-        else
-          pyroscope_enabled=false
-        fi
-
-        cat << EOF > config.toml
-        [Common]
-        chainlink_node_funding=0.5
-
-        [ChainlinkImage]
-        image="$CHAINLINK_IMAGE"
-        version="$CHAINLINK_VERSION"
-
-        [Pyroscope]
-        enabled=$pyroscope_enabled
-        server_url="$PYROSCOPE_SERVER"
-        environment="$PYROSCOPE_ENVIRONMENT"
-        key="$PYROSCOPE_KEY"
-
-        [Logging]
-        run_id="$RUN_ID"
-
-        [Logging.LogStream]
-        log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS")
-
-        [Logging.Loki]
-        tenant_id="$LOKI_TENANT_ID"
-        endpoint="$LOKI_ENDPOINT"
-        basic_auth="$LOKI_BASIC_AUTH"
-
-        [Logging.Grafana]
-        base_url="$GRAFANA_URL"
-        dasboard_url="$GRAFANA_DASHBOARD_URL"
-
-        [Network]
-        selected_networks=["$NETWORK"]
-
-        [Network.RpcHttpUrls]
-        "$NETWORK" = $(convert_to_toml_array "$HTTP_ENDPOINTS")
-
-        [Network.RpcWsUrls]
-        "$NETWORK" = $(convert_to_toml_array "$WS_ENDPOINTS")
-
-        [Network.WalletKeys]
-        "$NETWORK" = $(convert_to_toml_array "$FUNDING_KEYS")
-        EOF
-
-        BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0)
-        echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-        echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-        touch .root_dir
diff --git a/.github/actions/setup-create-base64-config/action.yml b/.github/actions/setup-create-base64-config/action.yml
deleted file mode 100644
index eafd670226..0000000000
--- a/.github/actions/setup-create-base64-config/action.yml
+++ /dev/null
@@ -1,122 +0,0 @@
-name: Create Base64 Config
-description: A composite action that creates a base64-encoded config to be used by integration tests
-
-inputs:
-  runId:
-    description: The run id
-  testLogCollect:
-    description: Whether to always collect logs, even for passing tests
-    default: "false"
-  selectedNetworks:
-    description: The networks to run tests against
-  chainlinkImage:
-    description: The chainlink image to use
-    default: "public.ecr.aws/chainlink/chainlink"
-  chainlinkVersion:
-    description: The git commit sha to use for the image tag
-  pyroscopeServer:
-    description: URL of Pyroscope server
-  pyroscopeEnvironment:
-    description: Name of Pyroscope environment
-  pyroscopeKey:
-    description: Pyroscope server key
-  lokiEndpoint:
-    description: Loki push endpoint
-  lokiTenantId:
-    description: Loki tenant id
-  lokiBasicAuth:
-    description: Loki basic auth
-  logstreamLogTargets:
-    description: Where to send logs (e.g. file, loki)
-  grafanaUrl:
-    description: Grafana URL
-  grafanaDashboardUrl:
-    description: Grafana dashboard URL
-
-runs:
-  using: composite
-  steps:
-    - name: Prepare Base64 TOML override
-      shell: bash
-      id: base64-config-override
-      env:
-        RUN_ID: ${{ inputs.runId }}
-        TEST_LOG_COLLECT: ${{ inputs.testLogCollect }}
-        SELECTED_NETWORKS: ${{ inputs.selectedNetworks }}
-        PYROSCOPE_SERVER: ${{ inputs.pyroscopeServer }}
-        PYROSCOPE_ENVIRONMENT: ${{ inputs.pyroscopeEnvironment }}
-        PYROSCOPE_KEY: ${{ inputs.pyroscopeKey }}
-        CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }}
-        CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }}
-        LOKI_ENDPOINT: ${{ inputs.lokiEndpoint }}
-        LOKI_TENANT_ID: ${{ inputs.lokiTenantId }}
-        LOKI_BASIC_AUTH: ${{ inputs.lokiBasicAuth }}
-        LOGSTREAM_LOG_TARGETS: ${{ inputs.logstreamLogTargets }}
-        GRAFANA_URL: ${{ inputs.grafanaUrl }}
-        GRAFANA_DASHBOARD_URL: ${{ inputs.grafanaDashboardUrl }}
-      run: |
-        echo ::add-mask::$CHAINLINK_IMAGE
-        function convert_to_toml_array() {
-          local IFS=','
-          local input_array=($1)
-          local toml_array_format="["
-
-          for element in "${input_array[@]}"; do
-            toml_array_format+="\"$element\","
-          done
-
-          toml_array_format="${toml_array_format%,}]"
-          echo "$toml_array_format"
-        }
-
-        selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS")
-        log_targets=$(convert_to_toml_array "$LOGSTREAM_LOG_TARGETS")
-
-        if [ -n "$PYROSCOPE_SERVER" ]; then
-          pyroscope_enabled=true
-        else
-          pyroscope_enabled=false
-        fi
-
-        # testLogCollect defaults to "false", so test for the literal value rather than non-emptiness
-        if [ "$TEST_LOG_COLLECT" = "true" ]; then
-          test_log_collect=true
-        else
-          test_log_collect=false
-        fi
-
-        cat << EOF > config.toml
-        [Network]
-        selected_networks=$selected_networks
-
-        [ChainlinkImage]
-        image="$CHAINLINK_IMAGE"
-        version="$CHAINLINK_VERSION"
-
-        [Pyroscope]
-        enabled=$pyroscope_enabled
-        server_url="$PYROSCOPE_SERVER"
-        environment="$PYROSCOPE_ENVIRONMENT"
-        key="$PYROSCOPE_KEY"
-
-        [Logging]
-        test_log_collect=$test_log_collect
-        run_id="$RUN_ID"
-
-        [Logging.LogStream]
-        log_targets=$log_targets
-
-        [Logging.Loki]
-        tenant_id="$LOKI_TENANT_ID"
-        endpoint="$LOKI_ENDPOINT"
-        basic_auth="$LOKI_BASIC_AUTH"
-        # legacy, you only need this to access the cloud version
-        # bearer_token="bearer_token"
-
-        [Logging.Grafana]
-        base_url="$GRAFANA_URL"
-        dasboard_url="$GRAFANA_DASHBOARD_URL"
-        EOF
-
-        BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0)
-        echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-        echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
diff --git a/.github/actions/setup-create-base64-upgrade-config/action.yml b/.github/actions/setup-create-base64-upgrade-config/action.yml
deleted file mode 100644
index f3acde2ea9..0000000000
--- a/.github/actions/setup-create-base64-upgrade-config/action.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-name: Create Base64 Upgrade Config
-description: A composite action that creates a base64-encoded config to be used by Chainlink version upgrade tests
-
-inputs:
-  selectedNetworks:
-    description: The networks to run tests against
-  chainlinkImage:
-    description: The chainlink image to upgrade from
-    default: "public.ecr.aws/chainlink/chainlink"
-  chainlinkVersion:
-    description: The git commit sha to use for the image tag
-  upgradeImage:
-    description: The chainlink image to upgrade to
-    default: "public.ecr.aws/chainlink/chainlink"
-  upgradeVersion:
-    description: The git commit sha to use for the image tag
-
-runs:
-  using: composite
-  steps:
-    - name: Prepare Base64 TOML override
-      shell: bash
-      id: base64-config-override
-      env:
-        SELECTED_NETWORKS: ${{ inputs.selectedNetworks }}
-        CHAINLINK_IMAGE: ${{ inputs.chainlinkImage }}
-        CHAINLINK_VERSION: ${{ inputs.chainlinkVersion }}
-        UPGRADE_IMAGE: ${{ inputs.upgradeImage }}
-        UPGRADE_VERSION: ${{ inputs.upgradeVersion }}
-      run: |
-        function convert_to_toml_array() {
-          local IFS=','
-          local input_array=($1)
-          local toml_array_format="["
-
-          for element in "${input_array[@]}"; do
-            toml_array_format+="\"$element\","
-          done
-
-          toml_array_format="${toml_array_format%,}]"
-          echo "$toml_array_format"
-        }
-
-        selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS")
-
-        cat << EOF > config.toml
-        [Network]
-        selected_networks=$selected_networks
-
-        [ChainlinkImage]
-        image="$CHAINLINK_IMAGE"
-        version="$CHAINLINK_VERSION"
-
-        [ChainlinkUpgradeImage]
-        image="$UPGRADE_IMAGE"
-        version="$UPGRADE_VERSION"
-        EOF
-
-        BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0)
-        echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-        echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
diff --git a/.github/actions/setup-go/action.yml b/.github/actions/setup-go/action.yml
deleted file mode 100644
index eba0152106..0000000000
--- a/.github/actions/setup-go/action.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-name: Setup Go
-description: Setup Golang with efficient caching
-inputs:
-  only-modules:
-    description: Set to 'true' to only cache modules
-    default: "false"
-  cache-version:
-    description: Set this to cache bust
-    default: "1"
-  go-version-file:
-    description: Set where the go version file is located at
-    default: "go.mod"
-  go-module-file:
-    description: Set where the go module file is located at
-    default: "go.sum"
-
-runs:
-  using: composite
-  steps:
-    - name: Set up Go
-      uses: actions/setup-go@v4
-      with:
-        go-version-file: ${{ inputs.go-version-file }}
-        cache: false
-
-    - name: Get branch name
-      if: ${{ inputs.only-modules == 'false' }}
-      id: branch-name
-      uses: tj-actions/branch-names@2e5354c6733793113f416314375826df030ada23 #v6.5
-
-    - name: Set go cache keys
-      shell: bash
-      id: go-cache-dir
-      run: |
-        echo "gomodcache=$(go env GOMODCACHE)" >> $GITHUB_OUTPUT
-        echo "gobuildcache=$(go env GOCACHE)" >> $GITHUB_OUTPUT
-
-    - name: Set go module path
-      id: go-module-path
-      shell: bash
-      run: echo "path=./${{ inputs.go-module-file }}" >> $GITHUB_OUTPUT
-
-    - uses: actions/cache@v3
-      name: Cache Go Modules
-      with:
-        path: |
-          ${{ steps.go-cache-dir.outputs.gomodcache }}
-        # The lifetime of go modules is much higher than the build outputs, so we increase cache efficiency
-        # here by not having the primary key contain the branch name
-        key: ${{ runner.os }}-gomod-${{ inputs.cache-version }}-${{ hashFiles(steps.go-module-path.outputs.path) }}
-        restore-keys: |
-          ${{ runner.os }}-gomod-${{ inputs.cache-version }}-
-
-    - uses: actions/cache@v3
-      if: ${{ inputs.only-modules == 'false' }}
-      name: Cache Go Build Outputs
-      with:
-        path: |
-          ${{ steps.go-cache-dir.outputs.gobuildcache }}
-        # The lifetime of go build outputs is pretty short, so we make our primary cache key be the branch name
-        key: ${{ runner.os }}-gobuild-${{ inputs.cache-version }}-${{ hashFiles(steps.go-module-path.outputs.path) }}-${{ steps.branch-name.outputs.current_branch }}
-        restore-keys: |
-          ${{ runner.os }}-gobuild-${{ inputs.cache-version }}-${{ hashFiles(steps.go-module-path.outputs.path) }}-
diff --git a/.github/actions/setup-hardhat/action.yaml b/.github/actions/setup-hardhat/action.yaml
deleted file mode 100644
index 3b52a4b8c5..0000000000
--- a/.github/actions/setup-hardhat/action.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: Setup NodeJS
-inputs:
-  namespace:
-    required: true
-    description: A cache namespace to add
-  cache-version:
-    default: "6"
-    description: Change to bust cache
-description: Setup pnpm for contracts
-runs:
-  using: composite
-  steps:
-    - name: Cache Compilers
-      uses: actions/cache@v3
-      with:
-        path: ~/.cache/hardhat-nodejs/
-        key: contracts-compilers-${{ runner.os }}-${{ inputs.cache-version }}-${{ hashFiles('contracts/pnpm-lock.yaml', 'contracts/hardhat.config.ts') }}
-
-    - name: Cache contracts build outputs
-      uses: actions/cache@v3
-      with:
-        path: |
-          contracts/cache/
-          contracts/artifacts/
-          contracts/typechain/
-        key: ${{ format('contracts-{0}-{1}-{2}-{3}', runner.os, inputs.cache-version, inputs.namespace, hashFiles('contracts/pnpm-lock.yaml', 'contracts/hardhat.config.ts', 'contracts/src/**/*.sol')) }}
-
-    - name: Compile contracts
-      shell: bash
-      run: pnpm compile
-      working-directory: contracts
diff --git a/.github/actions/setup-merge-base64-config/action.yml b/.github/actions/setup-merge-base64-config/action.yml
deleted file mode 100644
index e5bf2a7d27..0000000000
--- a/.github/actions/setup-merge-base64-config/action.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: Merge Base64 Config
-description: A composite action that merges user-provided Base64-encoded config with repository's secrets
-
-inputs:
-  base64Config:
-    description: Base64-encoded config to decode
-
-runs:
-  using: composite
-  steps:
-    - name: Add masks and export base64 config
-      shell: bash
-      run: |
-        BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH)
-        echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-        echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-
-        decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d)
-        CHAINLINK_IMAGE=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*image[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null)
-        echo ::add-mask::$CHAINLINK_IMAGE
-        CHAINLINK_VERSION=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*version[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null)
-        NETWORKS=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*selected_networks[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null)
-
-        if [ -n "$CHAINLINK_IMAGE" ]; then
-          echo "CHAINLINK_IMAGE=$CHAINLINK_IMAGE" >> $GITHUB_ENV
-        else
-          echo "No Chainlink Image found in base64-ed config. Exiting"
-          exit 1
-        fi
-        if [ -n "$CHAINLINK_VERSION" ]; then
-          echo "CHAINLINK_VERSION=$CHAINLINK_VERSION" >> $GITHUB_ENV
-        else
-          echo "No Chainlink Version found in base64-ed config. Exiting"
-          exit 1
-        fi
-        if [ -n "$NETWORKS" ]; then
-          echo "NETWORKS=$NETWORKS" >> $GITHUB_ENV
-        fi
-
-        # use Loki config from GH secrets and merge it with base64 input
-        cat << EOF > config.toml
-        [Logging.Loki]
-        tenant_id="$LOKI_TENANT_ID"
-        endpoint="$LOKI_URL"
-        basic_auth="$LOKI_BASIC_AUTH"
-        # legacy, you only need this to access the cloud version
-        # bearer_token="bearer_token"
-        EOF
-
-        echo "$decoded_toml" >> final_config.toml
-        cat config.toml >> final_config.toml
-        BASE64_CONFIG_OVERRIDE=$(cat final_config.toml | base64 -w 0)
-        echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-        echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
\ No newline at end of file
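The awk one-liners above grab the first image=/version=/selected_networks= assignment out of the decoded TOML, quotes included, without involving a TOML parser. A minimal offline reproduction of that extraction; the version string here is made up:

    decoded_toml=$(printf '[ChainlinkImage]\nimage="public.ecr.aws/chainlink/chainlink"\nversion="2.9.0"\n')
    image=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*image[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}')
    echo "$image"   # "public.ecr.aws/chainlink/chainlink" (the surrounding quotes are kept)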
Exiting" - exit 1 - fi - if [ -n "$NETWORKS" ]; then - echo "NETWORKS=$NETWORKS" >> $GITHUB_ENV - fi - - # use Loki config from GH secrets and merge it with base64 input - cat << EOF > config.toml - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_URL" - basic_auth="$LOKI_BASIC_AUTH" - # legacy, you only need this to access the cloud version - # bearer_token="bearer_token" - EOF - - echo "$decoded_toml" >> final_config.toml - cat config.toml >> final_config.toml - BASE64_CONFIG_OVERRIDE=$(cat final_config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV \ No newline at end of file diff --git a/.github/actions/setup-nodejs/action.yaml b/.github/actions/setup-nodejs/action.yaml index 1bb529b421..da605ac769 100644 --- a/.github/actions/setup-nodejs/action.yaml +++ b/.github/actions/setup-nodejs/action.yaml @@ -27,4 +27,4 @@ runs: name: Install prod dependencies shell: bash run: pnpm i --prod - working-directory: contracts + working-directory: contracts \ No newline at end of file diff --git a/.github/actions/setup-parse-base64-config/action.yml b/.github/actions/setup-parse-base64-config/action.yml deleted file mode 100644 index a744abae9e..0000000000 --- a/.github/actions/setup-parse-base64-config/action.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Parse Base64 Config -description: A composite action that extracts the chainlink image, version and network from a base64-encoded config - -inputs: - base64Config: - description: Base64-encoded config to decode - -runs: - using: composite - steps: - - name: Add masks and export base64 config - shell: bash - run: | - decoded_toml=$(echo $BASE64_CONFIG_OVERRIDE | base64 -d) - CHAINLINK_IMAGE=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*image[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - echo ::add-mask::$CHAINLINK_IMAGE - CHAINLINK_VERSION=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*version[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - NETWORKS=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*selected_networks[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - ETH2_EL_CLIENT=$(echo "$decoded_toml" | awk -F'=' '/^[[:space:]]*execution_layer[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); print $2}' 2>/dev/null) - - if [ -n "$CHAINLINK_IMAGE" ]; then - echo "CHAINLINK_IMAGE=$CHAINLINK_IMAGE" >> $GITHUB_ENV - else - echo "No Chainlink Image found in base64-ed config. Exiting" - exit 1 - fi - if [ -n "$CHAINLINK_VERSION" ]; then - echo "CHAINLINK_VERSION=$CHAINLINK_VERSION" >> $GITHUB_ENV - else - echo "No Chainlink Version found in base64-ed config. 
Exiting" - exit 1 - fi - if [ -n "$NETWORKS" ]; then - echo "NETWORKS=$NETWORKS" >> $GITHUB_ENV - fi - if [ -n "$ETH2_EL_CLIENT" ]; then - echo "ETH2_EL_CLIENT=$ETH2_EL_CLIENT" >> $GITHUB_ENV - fi \ No newline at end of file diff --git a/.github/actions/setup-postgres/.env b/.github/actions/setup-postgres/.env deleted file mode 100644 index 47ed8d9bcd..0000000000 --- a/.github/actions/setup-postgres/.env +++ /dev/null @@ -1,5 +0,0 @@ -POSTGRES_USER=postgres -POSTGRES_OPTIONS="-c max_connections=1000 -c shared_buffers=2GB -c log_lock_waits=true" -POSTGRES_PASSWORD=postgres -POSTGRES_DB=chainlink_test -POSTGRES_HOST_AUTH_METHOD=trust diff --git a/.github/actions/setup-postgres/action.yml b/.github/actions/setup-postgres/action.yml deleted file mode 100644 index f683934d78..0000000000 --- a/.github/actions/setup-postgres/action.yml +++ /dev/null @@ -1,13 +0,0 @@ -name: Setup Postgresql -description: Setup postgres docker container via docker-compose, allowing usage of a custom command, see https://github.com/orgs/community/discussions/26688 -runs: - using: composite - steps: - - name: Start postgres service - run: docker compose up -d - shell: bash - working-directory: ./.github/actions/setup-postgres - - name: Wait for postgres service to be healthy - run: ./wait-for-healthy-postgres.sh - shell: bash - working-directory: ./.github/actions/setup-postgres diff --git a/.github/actions/setup-postgres/docker-compose.yml b/.github/actions/setup-postgres/docker-compose.yml deleted file mode 100644 index 3acaa1ecd1..0000000000 --- a/.github/actions/setup-postgres/docker-compose.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: gha_postgres -version: "3.8" -services: - postgres: - ports: - - "5432:5432" - container_name: cl_pg - image: postgres:14-alpine - command: postgres ${POSTGRES_OPTIONS} - env_file: - - .env - healthcheck: - test: "pg_isready -d ${POSTGRES_DB} -U ${POSTGRES_USER}" - interval: 2s - timeout: 5s - retries: 5 diff --git a/.github/actions/setup-postgres/wait-for-healthy-postgres.sh b/.github/actions/setup-postgres/wait-for-healthy-postgres.sh deleted file mode 100755 index 438cfbaff3..0000000000 --- a/.github/actions/setup-postgres/wait-for-healthy-postgres.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -RETRIES=10 - -until [ $RETRIES -eq 0 ]; do - DOCKER_OUTPUT=$(docker compose ps postgres --status running --format json) - JSON_TYPE=$(echo "$DOCKER_OUTPUT" | jq -r 'type') - - if [ "$JSON_TYPE" == "array" ]; then - HEALTH_STATUS=$(echo "$DOCKER_OUTPUT" | jq -r '.[0].Health') - elif [ "$JSON_TYPE" == "object" ]; then - HEALTH_STATUS=$(echo "$DOCKER_OUTPUT" | jq -r '.Health') - else - HEALTH_STATUS="Unknown JSON type: $JSON_TYPE" - fi - - echo "postgres health status: $HEALTH_STATUS" - if [ "$HEALTH_STATUS" == "healthy" ]; then - exit 0 - fi - - echo "Waiting for postgres server, $((RETRIES--)) remaining attempts..." 
diff --git a/.github/actions/setup-solana/action.yml b/.github/actions/setup-solana/action.yml
deleted file mode 100644
index c50ccd5835..0000000000
--- a/.github/actions/setup-solana/action.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-name: Setup Solana CLI
-description: Setup solana CLI
-runs:
-  using: composite
-  steps:
-    - uses: actions/cache@v3
-      id: cache
-      name: Cache solana CLI
-      with:
-        path: |
-          ~/.local/share/solana/install/active_release/bin
-        key: ${{ runner.os }}-solana-cli-${{ hashFiles('./tools/ci/install_solana') }}
-
-    - if: ${{ steps.cache.outputs.cache-hit != 'true' }}
-      name: Install solana cli
-      shell: bash
-      run: ./tools/ci/install_solana
-
-    - name: Export solana path to env
-      shell: bash
-      run: echo "PATH=$HOME/.local/share/solana/install/active_release/bin:$PATH" >> $GITHUB_ENV
diff --git a/.github/actions/setup-wasmd/action.yml b/.github/actions/setup-wasmd/action.yml
deleted file mode 100644
index 46fb84ba3e..0000000000
--- a/.github/actions/setup-wasmd/action.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: Setup Cosmos wasmd
-description: Setup Cosmos wasmd, used for integration tests
-runs:
-  using: composite
-  steps:
-    - uses: actions/cache@v3
-      id: cache
-      name: Cache wasmd-build
-      with:
-        path: ~/wasmd-build
-        # this caching works without cloning the repo because the install_wasmd contains
-        # the commit hash.
-        key: ${{ runner.os }}-wasmd-cli-${{ hashFiles('./tools/ci/install_wasmd') }}
-
-    - if: ${{ steps.cache.outputs.cache-hit != 'true' }}
-      name: Install wasmd
-      shell: bash
-      run: ./tools/ci/install_wasmd
-
-    - name: Export wasmd path to env
-      shell: bash
-      run: echo "PATH=$HOME/wasmd-build/bin:$PATH" >> $GITHUB_ENV
diff --git a/.github/actions/split-tests/.npmrc b/.github/actions/split-tests/.npmrc
deleted file mode 100644
index 4c2f52b3be..0000000000
--- a/.github/actions/split-tests/.npmrc
+++ /dev/null
@@ -1,2 +0,0 @@
-auto-install-peers=true
-strict-peer-dependencies=false
diff --git a/.github/actions/split-tests/action.yaml b/.github/actions/split-tests/action.yaml
deleted file mode 100644
index 684fd6a2bd..0000000000
--- a/.github/actions/split-tests/action.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: Test Spliting
-description: Split tests
-inputs:
-  config:
-    required: true
-    description: The path to the splitting config
-outputs:
-  splits:
-    description: The generated test splits
-    value: ${{ steps.split.outputs.splits }}
-runs:
-  using: composite
-  steps:
-    - uses: pnpm/action-setup@c3b53f6a16e57305370b4ae5a540c2077a1d50dd #v2.2.4
-      with:
-        version: ^7.0.0
-
-    - uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
-      with:
-        node-version: "16"
-        cache: "pnpm"
-        cache-dependency-path: "./.github/actions/split-tests/pnpm-lock.yaml"
-
-    - name: Install dependencies
-      shell: bash
-      run: pnpm i --prod
-      working-directory: "./.github/actions/split-tests"
-
-    - name: Run test spliter
-      id: split
-      shell: bash
-      run: pnpm start
-      env:
-        CONFIG: ${{ inputs.config }}
-      working-directory: "./.github/actions/split-tests"
diff --git a/.github/actions/split-tests/jest.config.js b/.github/actions/split-tests/jest.config.js
deleted file mode 100644
index 7b3dcf296f..0000000000
--- a/.github/actions/split-tests/jest.config.js
+++ /dev/null
@@ -1,15 +0,0 @@
-/** @type {import('ts-jest').JestConfigWithTsJest} */
-const jestConfig = {
-  preset: "ts-jest/presets/default-esm",
-  resolver: "<rootDir>/mjs-resolver.ts",
-  transform: {
-    "^.+\\.mts?$": [
-      "ts-jest",
-      {
-        useESM: true,
-      },
-    ],
-  },
-  testEnvironment: "node",
-};
-export default jestConfig;
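The solana and wasmd setup actions above key their caches on a hash of the corresponding install script, so editing the script is what busts the cache. A rough local approximation of such a key; sha256sum stands in for the runner's hashFiles, which is not byte-for-byte identical:

    # uname -s approximates runner.os; run from the repo root
    KEY="$(uname -s)-wasmd-cli-$(sha256sum ./tools/ci/install_wasmd | cut -d' ' -f1)"
    echo "$KEY"   # e.g. Linux-wasmd-cli-3a7bd3e2360a...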
diff --git a/.github/actions/split-tests/mjs-resolver.ts b/.github/actions/split-tests/mjs-resolver.ts
deleted file mode 100644
index 92c66f7b6c..0000000000
--- a/.github/actions/split-tests/mjs-resolver.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-const mjsResolver = (path, options) => {
-  const mjsExtRegex = /\.mjs$/i;
-  const resolver = options.defaultResolver;
-  if (mjsExtRegex.test(path)) {
-    try {
-      return resolver(path.replace(mjsExtRegex, ".mts"), options);
-    } catch {
-      // use default resolver
-    }
-  }
-
-  return resolver(path, options);
-};
-
-module.exports = mjsResolver;
diff --git a/.github/actions/split-tests/package.json b/.github/actions/split-tests/package.json
deleted file mode 100644
index 1624bda7b3..0000000000
--- a/.github/actions/split-tests/package.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "name": "shard-packages",
-  "version": "1.0.0",
-  "description": "",
-  "type": "module",
-  "main": "index.js",
-  "scripts": {
-    "start": "ts-node -T --esm ./src/index.mts",
-    "test": "node --experimental-vm-modules --no-warnings node_modules/jest/bin/jest.js"
-  },
-  "keywords": [],
-  "author": "",
-  "license": "MIT",
-  "dependencies": {
-    "@actions/core": "^1.10.0",
-    "ts-node": "^10.9.1",
-    "zx": "^7.0.8"
-  },
-  "devDependencies": {
-    "@types/jest": "^29.1.2",
-    "@types/node": "^18.8.2",
-    "jest": "^29.1.2",
-    "ts-jest": "^29.0.3",
-    "typescript": "^5.2.2"
-  }
-}
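The deleted split-tests package ran its Jest suite under native ESM, which is why the test script invokes node with the VM-modules flag instead of the jest binary wrapper. The equivalent shell invocation, taken from the package.json above:

    cd .github/actions/split-tests
    node --experimental-vm-modules --no-warnings node_modules/jest/bin/jest.js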
diff --git a/.github/actions/split-tests/pnpm-lock.yaml b/.github/actions/split-tests/pnpm-lock.yaml
deleted file mode 100644
index 9b5deb258d..0000000000
--- a/.github/actions/split-tests/pnpm-lock.yaml
+++ /dev/null
@@ -1,2675 +0,0 @@
istanbul-lib-coverage: 3.2.0 - make-dir: 3.1.0 - supports-color: 7.2.0 - dev: true - - /istanbul-lib-source-maps@4.0.1: - resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==} - engines: {node: '>=10'} - dependencies: - debug: 4.3.4 - istanbul-lib-coverage: 3.2.0 - source-map: 0.6.1 - transitivePeerDependencies: - - supports-color - dev: true - - /istanbul-reports@3.1.5: - resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} - engines: {node: '>=8'} - dependencies: - html-escaper: 2.0.2 - istanbul-lib-report: 3.0.0 - dev: true - - /jest-changed-files@29.0.0: - resolution: {integrity: sha512-28/iDMDrUpGoCitTURuDqUzWQoWmOmOKOFST1mi2lwh62X4BFf6khgH3uSuo1e49X/UDjuApAj3w0wLOex4VPQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - execa: 5.1.1 - p-limit: 3.1.0 - dev: true - - /jest-circus@29.1.2: - resolution: {integrity: sha512-ajQOdxY6mT9GtnfJRZBRYS7toNIJayiiyjDyoZcnvPRUPwJ58JX0ci0PKAKUo2C1RyzlHw0jabjLGKksO42JGA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.1.2 - '@jest/expect': 29.1.2 - '@jest/test-result': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - chalk: 4.1.2 - co: 4.6.0 - dedent: 0.7.0 - is-generator-fn: 2.1.0 - jest-each: 29.1.2 - jest-matcher-utils: 29.1.2 - jest-message-util: 29.1.2 - jest-runtime: 29.1.2 - jest-snapshot: 29.1.2 - jest-util: 29.1.2 - p-limit: 3.1.0 - pretty-format: 29.1.2 - slash: 3.0.0 - stack-utils: 2.0.5 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-cli@29.1.2(@types/node@18.8.2)(ts-node@10.9.1): - resolution: {integrity: sha512-vsvBfQ7oS2o4MJdAH+4u9z76Vw5Q8WBQF5MchDbkylNknZdrPTX1Ix7YRJyTlOWqRaS7ue/cEAn+E4V1MWyMzw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@jest/core': 29.1.2(ts-node@10.9.1) - '@jest/test-result': 29.1.2 - '@jest/types': 29.1.2 - chalk: 4.1.2 - exit: 0.1.2 - graceful-fs: 4.2.10 - import-local: 3.1.0 - jest-config: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) - jest-util: 29.1.2 - jest-validate: 29.1.2 - prompts: 2.4.2 - yargs: 17.6.0 - transitivePeerDependencies: - - '@types/node' - - supports-color - - ts-node - dev: true - - /jest-config@29.1.2(@types/node@18.8.2)(ts-node@10.9.1): - resolution: {integrity: sha512-EC3Zi86HJUOz+2YWQcJYQXlf0zuBhJoeyxLM6vb6qJsVmpP7KcCP1JnyF0iaqTaXdBP8Rlwsvs7hnKWQWWLwwA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - '@types/node': '*' - ts-node: '>=9.0.0' - peerDependenciesMeta: - '@types/node': - optional: true - ts-node: - optional: true - dependencies: - '@babel/core': 7.19.3 - '@jest/test-sequencer': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - babel-jest: 29.1.2(@babel/core@7.19.3) - chalk: 4.1.2 - ci-info: 3.4.0 - deepmerge: 4.2.2 - glob: 7.2.3 - graceful-fs: 4.2.10 - jest-circus: 29.1.2 - jest-environment-node: 29.1.2 - jest-get-type: 29.0.0 - jest-regex-util: 29.0.0 - jest-resolve: 29.1.2 - jest-runner: 29.1.2 - jest-util: 29.1.2 - jest-validate: 29.1.2 - micromatch: 4.0.5 - parse-json: 5.2.0 - pretty-format: 29.1.2 - slash: 3.0.0 - strip-json-comments: 3.1.1 - ts-node: 10.9.1(@types/node@18.8.2)(typescript@5.2.2) - transitivePeerDependencies: - - supports-color - dev: true - - /jest-diff@29.1.2: - resolution: {integrity: 
sha512-4GQts0aUopVvecIT4IwD/7xsBaMhKTYoM4/njE/aVw9wpw+pIUVp8Vab/KnSzSilr84GnLBkaP3JLDnQYCKqVQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - diff-sequences: 29.0.0 - jest-get-type: 29.0.0 - pretty-format: 29.1.2 - dev: true - - /jest-docblock@29.0.0: - resolution: {integrity: sha512-s5Kpra/kLzbqu9dEjov30kj1n4tfu3e7Pl8v+f8jOkeWNqM6Ds8jRaJfZow3ducoQUrf2Z4rs2N5S3zXnb83gw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - detect-newline: 3.1.0 - dev: true - - /jest-each@29.1.2: - resolution: {integrity: sha512-AmTQp9b2etNeEwMyr4jc0Ql/LIX/dhbgP21gHAizya2X6rUspHn2gysMXaj6iwWuOJ2sYRgP8c1P4cXswgvS1A==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.1.2 - chalk: 4.1.2 - jest-get-type: 29.0.0 - jest-util: 29.1.2 - pretty-format: 29.1.2 - dev: true - - /jest-environment-node@29.1.2: - resolution: {integrity: sha512-C59yVbdpY8682u6k/lh8SUMDJPbOyCHOTgLVVi1USWFxtNV+J8fyIwzkg+RJIVI30EKhKiAGNxYaFr3z6eyNhQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.1.2 - '@jest/fake-timers': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - jest-mock: 29.1.2 - jest-util: 29.1.2 - dev: true - - /jest-get-type@29.0.0: - resolution: {integrity: sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /jest-haste-map@29.1.2: - resolution: {integrity: sha512-xSjbY8/BF11Jh3hGSPfYTa/qBFrm3TPM7WU8pU93m2gqzORVLkHFWvuZmFsTEBPRKndfewXhMOuzJNHyJIZGsw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.1.2 - '@types/graceful-fs': 4.1.5 - '@types/node': 18.8.2 - anymatch: 3.1.2 - fb-watchman: 2.0.2 - graceful-fs: 4.2.10 - jest-regex-util: 29.0.0 - jest-util: 29.1.2 - jest-worker: 29.1.2 - micromatch: 4.0.5 - walker: 1.0.8 - optionalDependencies: - fsevents: 2.3.2 - dev: true - - /jest-leak-detector@29.1.2: - resolution: {integrity: sha512-TG5gAZJpgmZtjb6oWxBLf2N6CfQ73iwCe6cofu/Uqv9iiAm6g502CAnGtxQaTfpHECBdVEMRBhomSXeLnoKjiQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-get-type: 29.0.0 - pretty-format: 29.1.2 - dev: true - - /jest-matcher-utils@29.1.2: - resolution: {integrity: sha512-MV5XrD3qYSW2zZSHRRceFzqJ39B2z11Qv0KPyZYxnzDHFeYZGJlgGi0SW+IXSJfOewgJp/Km/7lpcFT+cgZypw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - jest-diff: 29.1.2 - jest-get-type: 29.0.0 - pretty-format: 29.1.2 - dev: true - - /jest-message-util@29.1.2: - resolution: {integrity: sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/code-frame': 7.18.6 - '@jest/types': 29.1.2 - '@types/stack-utils': 2.0.1 - chalk: 4.1.2 - graceful-fs: 4.2.10 - micromatch: 4.0.5 - pretty-format: 29.1.2 - slash: 3.0.0 - stack-utils: 2.0.5 - dev: true - - /jest-mock@29.1.2: - resolution: {integrity: sha512-PFDAdjjWbjPUtQPkQufvniXIS3N9Tv7tbibePEjIIprzjgo0qQlyUiVMrT4vL8FaSJo1QXifQUOuPH3HQC/aMA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - jest-util: 29.1.2 - dev: true - - /jest-pnp-resolver@1.2.2(jest-resolve@29.1.2): - resolution: {integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==} - engines: {node: '>=6'} - peerDependencies: - jest-resolve: '*' - 
peerDependenciesMeta: - jest-resolve: - optional: true - dependencies: - jest-resolve: 29.1.2 - dev: true - - /jest-regex-util@29.0.0: - resolution: {integrity: sha512-BV7VW7Sy0fInHWN93MMPtlClweYv2qrSCwfeFWmpribGZtQPWNvRSq9XOVgOEjU1iBGRKXUZil0o2AH7Iy9Lug==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /jest-resolve-dependencies@29.1.2: - resolution: {integrity: sha512-44yYi+yHqNmH3OoWZvPgmeeiwKxhKV/0CfrzaKLSkZG9gT973PX8i+m8j6pDrTYhhHoiKfF3YUFg/6AeuHw4HQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-regex-util: 29.0.0 - jest-snapshot: 29.1.2 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-resolve@29.1.2: - resolution: {integrity: sha512-7fcOr+k7UYSVRJYhSmJHIid3AnDBcLQX3VmT9OSbPWsWz1MfT7bcoerMhADKGvKCoMpOHUQaDHtQoNp/P9JMGg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - graceful-fs: 4.2.10 - jest-haste-map: 29.1.2 - jest-pnp-resolver: 1.2.2(jest-resolve@29.1.2) - jest-util: 29.1.2 - jest-validate: 29.1.2 - resolve: 1.22.1 - resolve.exports: 1.1.0 - slash: 3.0.0 - dev: true - - /jest-runner@29.1.2: - resolution: {integrity: sha512-yy3LEWw8KuBCmg7sCGDIqKwJlULBuNIQa2eFSVgVASWdXbMYZ9H/X0tnXt70XFoGf92W2sOQDOIFAA6f2BG04Q==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/console': 29.1.2 - '@jest/environment': 29.1.2 - '@jest/test-result': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - chalk: 4.1.2 - emittery: 0.10.2 - graceful-fs: 4.2.10 - jest-docblock: 29.0.0 - jest-environment-node: 29.1.2 - jest-haste-map: 29.1.2 - jest-leak-detector: 29.1.2 - jest-message-util: 29.1.2 - jest-resolve: 29.1.2 - jest-runtime: 29.1.2 - jest-util: 29.1.2 - jest-watcher: 29.1.2 - jest-worker: 29.1.2 - p-limit: 3.1.0 - source-map-support: 0.5.13 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-runtime@29.1.2: - resolution: {integrity: sha512-jr8VJLIf+cYc+8hbrpt412n5jX3tiXmpPSYTGnwcvNemY+EOuLNiYnHJ3Kp25rkaAcTWOEI4ZdOIQcwYcXIAZw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.1.2 - '@jest/fake-timers': 29.1.2 - '@jest/globals': 29.1.2 - '@jest/source-map': 29.0.0 - '@jest/test-result': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - chalk: 4.1.2 - cjs-module-lexer: 1.2.2 - collect-v8-coverage: 1.0.1 - glob: 7.2.3 - graceful-fs: 4.2.10 - jest-haste-map: 29.1.2 - jest-message-util: 29.1.2 - jest-mock: 29.1.2 - jest-regex-util: 29.0.0 - jest-resolve: 29.1.2 - jest-snapshot: 29.1.2 - jest-util: 29.1.2 - slash: 3.0.0 - strip-bom: 4.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-snapshot@29.1.2: - resolution: {integrity: sha512-rYFomGpVMdBlfwTYxkUp3sjD6usptvZcONFYNqVlaz4EpHPnDvlWjvmOQ9OCSNKqYZqLM2aS3wq01tWujLg7gg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/core': 7.19.3 - '@babel/generator': 7.19.3 - '@babel/plugin-syntax-jsx': 7.18.6(@babel/core@7.19.3) - '@babel/plugin-syntax-typescript': 7.18.6(@babel/core@7.19.3) - '@babel/traverse': 7.19.3 - '@babel/types': 7.19.3 - '@jest/expect-utils': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@types/babel__traverse': 7.18.2 - '@types/prettier': 2.7.1 - babel-preset-current-node-syntax: 1.0.1(@babel/core@7.19.3) - chalk: 4.1.2 - expect: 29.1.2 - graceful-fs: 4.2.10 - jest-diff: 29.1.2 - jest-get-type: 29.0.0 - jest-haste-map: 29.1.2 - jest-matcher-utils: 29.1.2 - jest-message-util: 29.1.2 - jest-util: 29.1.2 - 
natural-compare: 1.4.0 - pretty-format: 29.1.2 - semver: 7.3.8 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-util@29.1.2: - resolution: {integrity: sha512-vPCk9F353i0Ymx3WQq3+a4lZ07NXu9Ca8wya6o4Fe4/aO1e1awMMprZ3woPFpKwghEOW+UXgd15vVotuNN9ONQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - chalk: 4.1.2 - ci-info: 3.4.0 - graceful-fs: 4.2.10 - picomatch: 2.3.1 - dev: true - - /jest-validate@29.1.2: - resolution: {integrity: sha512-k71pOslNlV8fVyI+mEySy2pq9KdXdgZtm7NHrBX8LghJayc3wWZH0Yr0mtYNGaCU4F1OLPXRkwZR0dBm/ClshA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.1.2 - camelcase: 6.3.0 - chalk: 4.1.2 - jest-get-type: 29.0.0 - leven: 3.1.0 - pretty-format: 29.1.2 - dev: true - - /jest-watcher@29.1.2: - resolution: {integrity: sha512-6JUIUKVdAvcxC6bM8/dMgqY2N4lbT+jZVsxh0hCJRbwkIEnbr/aPjMQ28fNDI5lB51Klh00MWZZeVf27KBUj5w==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/test-result': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - emittery: 0.10.2 - jest-util: 29.1.2 - string-length: 4.0.2 - dev: true - - /jest-worker@29.1.2: - resolution: {integrity: sha512-AdTZJxKjTSPHbXT/AIOjQVmoFx0LHFcVabWu0sxI7PAy7rFf8c0upyvgBKgguVXdM4vY74JdwkyD4hSmpTW8jA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@types/node': 18.8.2 - jest-util: 29.1.2 - merge-stream: 2.0.0 - supports-color: 8.1.1 - dev: true - - /jest@29.1.2(@types/node@18.8.2)(ts-node@10.9.1): - resolution: {integrity: sha512-5wEIPpCezgORnqf+rCaYD1SK+mNN7NsstWzIsuvsnrhR/hSxXWd82oI7DkrbJ+XTD28/eG8SmxdGvukrGGK6Tw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@jest/core': 29.1.2(ts-node@10.9.1) - '@jest/types': 29.1.2 - import-local: 3.1.0 - jest-cli: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) - transitivePeerDependencies: - - '@types/node' - - supports-color - - ts-node - dev: true - - /js-tokens@4.0.0: - resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - dev: true - - /js-yaml@3.14.1: - resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} - hasBin: true - dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - dev: true - - /jsesc@2.5.2: - resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} - engines: {node: '>=4'} - hasBin: true - dev: true - - /json-parse-even-better-errors@2.3.1: - resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - dev: true - - /json5@2.2.1: - resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} - engines: {node: '>=6'} - hasBin: true - dev: true - - /jsonfile@6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - dependencies: - universalify: 2.0.0 - optionalDependencies: - graceful-fs: 4.2.10 - dev: false - - /kleur@3.0.3: - resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} - engines: {node: '>=6'} - dev: true - - /leven@3.1.0: - resolution: 
{integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} - engines: {node: '>=6'} - dev: true - - /lines-and-columns@1.2.4: - resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - dev: true - - /locate-path@5.0.0: - resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} - engines: {node: '>=8'} - dependencies: - p-locate: 4.1.0 - dev: true - - /lodash.memoize@4.1.2: - resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} - dev: true - - /lru-cache@6.0.0: - resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} - engines: {node: '>=10'} - dependencies: - yallist: 4.0.0 - dev: true - - /make-dir@3.1.0: - resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} - engines: {node: '>=8'} - dependencies: - semver: 6.3.0 - dev: true - - /make-error@1.3.6: - resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - - /makeerror@1.0.12: - resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} - dependencies: - tmpl: 1.0.5 - dev: true - - /map-stream@0.1.0: - resolution: {integrity: sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==} - dev: false - - /merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true - - /merge2@1.4.1: - resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} - engines: {node: '>= 8'} - dev: false - - /micromatch@4.0.5: - resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} - engines: {node: '>=8.6'} - dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - - /mimic-fn@2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} - dev: true - - /minimatch@3.1.2: - resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} - dependencies: - brace-expansion: 1.1.11 - dev: true - - /minimist@1.2.6: - resolution: {integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==} - dev: false - - /ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - dev: true - - /natural-compare@1.4.0: - resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} - dev: true - - /node-domexception@1.0.0: - resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} - engines: {node: '>=10.5.0'} - dev: false - - /node-fetch@3.2.8: - resolution: {integrity: sha512-KtpD1YhGszhntMpBDyp5lyagk8KIMopC1LEb7cQUAh7zcosaX5uK8HnbNb2i3NTQK3sIawCItS0uFC3QzcLHdg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - data-uri-to-buffer: 4.0.0 - fetch-blob: 3.2.0 - formdata-polyfill: 4.0.10 - dev: false - - /node-int64@0.4.0: - 
resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} - dev: true - - /node-releases@2.0.6: - resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} - dev: true - - /normalize-path@3.0.0: - resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} - engines: {node: '>=0.10.0'} - dev: true - - /npm-run-path@4.0.1: - resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} - engines: {node: '>=8'} - dependencies: - path-key: 3.1.1 - dev: true - - /once@1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - dependencies: - wrappy: 1.0.2 - dev: true - - /onetime@5.1.2: - resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} - dependencies: - mimic-fn: 2.1.0 - dev: true - - /p-limit@2.3.0: - resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} - engines: {node: '>=6'} - dependencies: - p-try: 2.2.0 - dev: true - - /p-limit@3.1.0: - resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} - engines: {node: '>=10'} - dependencies: - yocto-queue: 0.1.0 - dev: true - - /p-locate@4.1.0: - resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} - engines: {node: '>=8'} - dependencies: - p-limit: 2.3.0 - dev: true - - /p-try@2.2.0: - resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} - engines: {node: '>=6'} - dev: true - - /parse-json@5.2.0: - resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} - engines: {node: '>=8'} - dependencies: - '@babel/code-frame': 7.18.6 - error-ex: 1.3.2 - json-parse-even-better-errors: 2.3.1 - lines-and-columns: 1.2.4 - dev: true - - /path-exists@4.0.0: - resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} - engines: {node: '>=8'} - dev: true - - /path-is-absolute@1.0.1: - resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} - engines: {node: '>=0.10.0'} - dev: true - - /path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - dev: true - - /path-parse@1.0.7: - resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - dev: true - - /path-type@4.0.0: - resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} - engines: {node: '>=8'} - dev: false - - /pause-stream@0.0.11: - resolution: {integrity: sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==} - dependencies: - through: 2.3.8 - dev: false - - /picocolors@1.0.0: - resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - dev: true - - /picomatch@2.3.1: - resolution: {integrity: 
sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} - engines: {node: '>=8.6'} - - /pirates@4.0.5: - resolution: {integrity: sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==} - engines: {node: '>= 6'} - dev: true - - /pkg-dir@4.2.0: - resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} - engines: {node: '>=8'} - dependencies: - find-up: 4.1.0 - dev: true - - /pretty-format@29.1.2: - resolution: {integrity: sha512-CGJ6VVGXVRP2o2Dorl4mAwwvDWT25luIsYhkyVQW32E4nL+TgW939J7LlKT/npq5Cpq6j3s+sy+13yk7xYpBmg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/schemas': 29.0.0 - ansi-styles: 5.2.0 - react-is: 18.2.0 - dev: true - - /prompts@2.4.2: - resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} - engines: {node: '>= 6'} - dependencies: - kleur: 3.0.3 - sisteransi: 1.0.5 - dev: true - - /ps-tree@1.2.0: - resolution: {integrity: sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA==} - engines: {node: '>= 0.10'} - hasBin: true - dependencies: - event-stream: 3.3.4 - dev: false - - /queue-microtask@1.2.3: - resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: false - - /react-is@18.2.0: - resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} - dev: true - - /require-directory@2.1.1: - resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} - engines: {node: '>=0.10.0'} - dev: true - - /resolve-cwd@3.0.0: - resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} - engines: {node: '>=8'} - dependencies: - resolve-from: 5.0.0 - dev: true - - /resolve-from@5.0.0: - resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} - engines: {node: '>=8'} - dev: true - - /resolve.exports@1.1.0: - resolution: {integrity: sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==} - engines: {node: '>=10'} - dev: true - - /resolve@1.22.1: - resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} - hasBin: true - dependencies: - is-core-module: 2.10.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - dev: true - - /reusify@1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: false - - /run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - dependencies: - queue-microtask: 1.2.3 - dev: false - - /safe-buffer@5.1.2: - resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} - dev: true - - /semver@6.3.0: - resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} - hasBin: true - dev: true - - /semver@7.3.8: - resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==} - engines: 
{node: '>=10'} - hasBin: true - dependencies: - lru-cache: 6.0.0 - dev: true - - /shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} - dependencies: - shebang-regex: 3.0.0 - dev: true - - /shebang-regex@3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - dev: true - - /signal-exit@3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true - - /sisteransi@1.0.5: - resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} - dev: true - - /slash@3.0.0: - resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} - engines: {node: '>=8'} - dev: true - - /slash@4.0.0: - resolution: {integrity: sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==} - engines: {node: '>=12'} - dev: false - - /source-map-support@0.5.13: - resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} - dependencies: - buffer-from: 1.1.2 - source-map: 0.6.1 - dev: true - - /source-map@0.6.1: - resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} - engines: {node: '>=0.10.0'} - dev: true - - /split@0.3.3: - resolution: {integrity: sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA==} - dependencies: - through: 2.3.8 - dev: false - - /sprintf-js@1.0.3: - resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - dev: true - - /stack-utils@2.0.5: - resolution: {integrity: sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==} - engines: {node: '>=10'} - dependencies: - escape-string-regexp: 2.0.0 - dev: true - - /stream-combiner@0.0.4: - resolution: {integrity: sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw==} - dependencies: - duplexer: 0.1.2 - dev: false - - /string-length@4.0.2: - resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} - engines: {node: '>=10'} - dependencies: - char-regex: 1.0.2 - strip-ansi: 6.0.1 - dev: true - - /string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - dev: true - - /strip-ansi@6.0.1: - resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} - engines: {node: '>=8'} - dependencies: - ansi-regex: 5.0.1 - dev: true - - /strip-bom@4.0.0: - resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} - engines: {node: '>=8'} - dev: true - - /strip-final-newline@2.0.0: - resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} - engines: {node: '>=6'} - dev: true - - /strip-json-comments@3.1.1: - resolution: {integrity: 
sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} - engines: {node: '>=8'} - dev: true - - /supports-color@5.5.0: - resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} - engines: {node: '>=4'} - dependencies: - has-flag: 3.0.0 - dev: true - - /supports-color@7.2.0: - resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} - engines: {node: '>=8'} - dependencies: - has-flag: 4.0.0 - dev: true - - /supports-color@8.1.1: - resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} - engines: {node: '>=10'} - dependencies: - has-flag: 4.0.0 - dev: true - - /supports-hyperlinks@2.3.0: - resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} - engines: {node: '>=8'} - dependencies: - has-flag: 4.0.0 - supports-color: 7.2.0 - dev: true - - /supports-preserve-symlinks-flag@1.0.0: - resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} - engines: {node: '>= 0.4'} - dev: true - - /terminal-link@2.1.1: - resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==} - engines: {node: '>=8'} - dependencies: - ansi-escapes: 4.3.2 - supports-hyperlinks: 2.3.0 - dev: true - - /test-exclude@6.0.0: - resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} - engines: {node: '>=8'} - dependencies: - '@istanbuljs/schema': 0.1.3 - glob: 7.2.3 - minimatch: 3.1.2 - dev: true - - /through@2.3.8: - resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} - dev: false - - /tmpl@1.0.5: - resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} - dev: true - - /to-fast-properties@2.0.0: - resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} - engines: {node: '>=4'} - dev: true - - /to-regex-range@5.0.1: - resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} - engines: {node: '>=8.0'} - dependencies: - is-number: 7.0.0 - - /ts-jest@29.0.3(@babel/core@7.19.3)(jest@29.1.2)(typescript@5.2.2): - resolution: {integrity: sha512-Ibygvmuyq1qp/z3yTh9QTwVVAbFdDy/+4BtIQR2sp6baF2SJU/8CKK/hhnGIDY2L90Az2jIqTwZPnN2p+BweiQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - '@babel/core': '>=7.0.0-beta.0 <8' - '@jest/types': ^29.0.0 - babel-jest: ^29.0.0 - esbuild: '*' - jest: ^29.0.0 - typescript: '>=4.3' - peerDependenciesMeta: - '@babel/core': - optional: true - '@jest/types': - optional: true - babel-jest: - optional: true - esbuild: - optional: true - dependencies: - '@babel/core': 7.19.3 - bs-logger: 0.2.6 - fast-json-stable-stringify: 2.1.0 - jest: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) - jest-util: 29.1.2 - json5: 2.2.1 - lodash.memoize: 4.1.2 - make-error: 1.3.6 - semver: 7.3.8 - typescript: 5.2.2 - yargs-parser: 21.1.1 - dev: true - - /ts-node@10.9.1(@types/node@18.8.2)(typescript@5.2.2): - resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} - hasBin: true - 
peerDependencies: - '@swc/core': '>=1.2.50' - '@swc/wasm': '>=1.2.50' - '@types/node': '*' - typescript: '>=2.7' - peerDependenciesMeta: - '@swc/core': - optional: true - '@swc/wasm': - optional: true - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.9 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.3 - '@types/node': 18.8.2 - acorn: 8.8.0 - acorn-walk: 8.2.0 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.2.2 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - - /tunnel@0.0.6: - resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==} - engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'} - dev: false - - /type-detect@4.0.8: - resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} - engines: {node: '>=4'} - dev: true - - /type-fest@0.21.3: - resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} - engines: {node: '>=10'} - dev: true - - /typescript@5.2.2: - resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} - engines: {node: '>=14.17'} - hasBin: true - - /universalify@2.0.0: - resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} - engines: {node: '>= 10.0.0'} - dev: false - - /update-browserslist-db@1.0.10(browserslist@4.21.4): - resolution: {integrity: sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' - dependencies: - browserslist: 4.21.4 - escalade: 3.1.1 - picocolors: 1.0.0 - dev: true - - /uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - dev: false - - /v8-compile-cache-lib@3.0.1: - resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - - /v8-to-istanbul@9.0.1: - resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==} - engines: {node: '>=10.12.0'} - dependencies: - '@jridgewell/trace-mapping': 0.3.15 - '@types/istanbul-lib-coverage': 2.0.4 - convert-source-map: 1.8.0 - dev: true - - /walker@1.0.8: - resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} - dependencies: - makeerror: 1.0.12 - dev: true - - /web-streams-polyfill@3.2.1: - resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} - engines: {node: '>= 8'} - dev: false - - /which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true - dependencies: - isexe: 2.0.0 - - /wrap-ansi@7.0.0: - resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} - engines: {node: '>=10'} - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - dev: true - - /wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - dev: true - - /write-file-atomic@4.0.2: - resolution: 
diff --git a/.github/actions/split-tests/src/index.mts b/.github/actions/split-tests/src/index.mts
deleted file mode 100644
index 43f9eb1316..0000000000
--- a/.github/actions/split-tests/src/index.mts
+++ /dev/null
@@ -1,74 +0,0 @@
-import { $, cd, glob, fs } from "zx";
-import path from "node:path";
-import { setOutput } from "@actions/core";
-import { SolidityConfig, SoliditySplit } from "./types.mjs";
-import { sieveSlowTests } from "./sieve.mjs";
-import { simpleSplit } from "./splitter.mjs";
-
-/**
- * Get a JSON formatted config file
- *
- * @param path The path to the config relative to the git root
- */
-function getConfigFrom(path?: string): SolidityConfig {
-  if (!path) {
-    throw Error("No config path given, specify a path via $CONFIG");
-  }
-  try {
-    const config = fs.readJsonSync(path);
-    return config;
-  } catch (e: unknown) {
-    throw Error(
-      `Could not find config file at path: ${path}. ${(e as Error).message}`
-    );
-  }
-}
-
-async function main() {
-  $.verbose = false;
-  await runAtGitRoot();
-  const configPath = process.env.CONFIG;
-  const config = getConfigFrom(configPath);
-  if (config.type === "solidity") {
-    await handleSolidity(config);
-  } else {
-    throw Error(`Invalid config given`);
-  }
-}
-main();
-
-async function handleSolidity(config: SolidityConfig) {
-  const { basePath, splits: configBySplit } = config;
-  const splits = await Promise.all(
-    configBySplit.map(
-      async ({ dir, numOfSplits, slowTests: slowTestMatchers }) => {
-        const globPath = path.join(basePath, dir, "/**/*.test.ts");
-        const rawTests = await glob(globPath);
-        const pathMappedTests = rawTests.map((r) =>
-          r.replace("contracts/", "")
-        );
-        const { filteredTests, slowTests } = sieveSlowTests(
-          pathMappedTests,
-          slowTestMatchers
-        );
-        const testsBySplit = simpleSplit(filteredTests, slowTests, numOfSplits);
-        const splits: SoliditySplit[] = testsBySplit.map((tests, i) => ({
-          idx: `${dir}_${i + 1}`,
-          id: `${dir} ${i + 1}/${numOfSplits}`,
-          tests: tests.join(" "),
-          coverageTests:
-            tests.length === 1 ? tests.join(",") : `{${tests.join(",")}}`,
-        }));
-        return splits;
-      }
-    )
-  );
-
-  const serializedSplits = JSON.stringify(splits.flat());
-  setOutput("splits", serializedSplits);
-}
-
-async function runAtGitRoot() {
-  const gitRoot = await $`git rev-parse --show-toplevel`;
-  cd(gitRoot.stdout.trimEnd());
-}
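For orientation, the shape handleSolidity emits can be traced by hand. A minimal TypeScript sketch follows; the directory name "v0.8" and the test paths are invented for illustration, and the snippet mirrors the mapping above rather than importing the deleted module:

// Sketch only: reproduces the SoliditySplit mapping from handleSolidity
// for hand-written inputs; "v0.8" and the test paths are hypothetical.
const dir = "v0.8";
const numOfSplits = 2;
const testsBySplit = [
  ["v0.8/A.test.ts", "v0.8/B.test.ts"],
  ["v0.8/C.test.ts"],
];
const splits = testsBySplit.map((tests, i) => ({
  idx: `${dir}_${i + 1}`, // e.g. "v0.8_1"
  id: `${dir} ${i + 1}/${numOfSplits}`, // e.g. "v0.8 1/2"
  tests: tests.join(" "), // whitespace-delimited list for `hardhat test`
  coverageTests:
    tests.length === 1 ? tests.join(",") : `{${tests.join(",")}}`, // --testfiles glob for solidity-coverage
}));
console.log(JSON.stringify(splits));
// [{"idx":"v0.8_1","id":"v0.8 1/2","tests":"v0.8/A.test.ts v0.8/B.test.ts",
//   "coverageTests":"{v0.8/A.test.ts,v0.8/B.test.ts}"},
//  {"idx":"v0.8_2","id":"v0.8 2/2","tests":"v0.8/C.test.ts",
//   "coverageTests":"v0.8/C.test.ts"}]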
diff --git a/.github/actions/split-tests/src/sieve.mts b/.github/actions/split-tests/src/sieve.mts
deleted file mode 100644
index 93573669a7..0000000000
--- a/.github/actions/split-tests/src/sieve.mts
+++ /dev/null
@@ -1,27 +0,0 @@
-import {Tests} from "./types.mjs";
-
-export function sieveSlowTests(tests: Tests, slowTestMatchers?: string[]) {
-  const slowTests: Tests = [];
-  const filteredTests: Tests = [];
-
-  if (!slowTestMatchers) {
-    return {slowTests, filteredTests: tests};
-  }
-
-  // If the user supplies slow test matchers
-  // then we go through each test to see if we get a case sensitive match
-
-  tests.forEach((t) => {
-    const isSlow = slowTestMatchers.reduce(
-      (isSlow, matcher) => t.includes(matcher) || isSlow,
-      false
-    );
-    if (isSlow) {
-      slowTests.push(t);
-    } else {
-      filteredTests.push(t);
-    }
-  });
-
-  return {slowTests, filteredTests};
-}
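A quick illustration of the sieve with hypothetical file names. Array.prototype.some is used here in place of the reduce above, which is behaviourally identical; matching is a case-sensitive substring check, so "bo" catches both "bonk.test" and "bop.test":

// Sketch only: same filtering as the deleted sieveSlowTests, with invented inputs.
const tests = ["foo.test", "bonk.test", "bop.test"];
const slowTestMatchers = ["bo"];
const slowTests: string[] = [];
const filteredTests: string[] = [];
for (const t of tests) {
  (slowTestMatchers.some((m) => t.includes(m)) ? slowTests : filteredTests).push(t);
}
console.log({ slowTests, filteredTests });
// { slowTests: ["bonk.test", "bop.test"], filteredTests: ["foo.test"] }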
diff --git a/.github/actions/split-tests/src/splitter.mts b/.github/actions/split-tests/src/splitter.mts
deleted file mode 100644
index f924df5508..0000000000
--- a/.github/actions/split-tests/src/splitter.mts
+++ /dev/null
@@ -1,43 +0,0 @@
-import {Tests, TestsBySplit} from "./types.mjs";
-
-/**
- * Split tests by first prioritizing slow tests being spread over each split, then filling each split by test list order.
- *
- * @example
- * Given the following arguments:
- * tests: ['foo.test', 'bar.test', 'baz.test', 'yup.test', 'nope.test']
- * slowTests: ['bonk.test', 'bop.test', 'ouch.test.ts']
- * numOfSplits: 2
- *
- * We get the following output:
- * 1. Spread slow tests across splits: [['bonk.test', 'ouch.test.ts'], ['bop.test']]
- * 2. Insert list of tests: [['bonk.test', 'ouch.test.ts', 'foo.test', 'bar.test'], ['bop.test', 'baz.test', 'yup.test', 'nope.test']]
- *
- * @param tests A list of tests to distribute across splits by the test list order
- * @param slowTests A list of slow tests, where the list of tests is evenly distributed across all splits before inserting regular tests
- * @param numOfSplits The number of splits to spread tests across
- */
-export function simpleSplit(
-  tests: Tests,
-  slowTests: Tests,
-  numOfSplits: number
-): TestsBySplit {
-  const maxTestsPerSplit = Math.max(tests.length / numOfSplits);
-
-  const testsBySplit: TestsBySplit = new Array(numOfSplits)
-    .fill(null)
-    .map(() => []);
-
-  // Evenly distribute slow tests over each split
-  slowTests.forEach((test, i) => {
-    const splitIndex = i % numOfSplits;
-    testsBySplit[splitIndex].push(test);
-  });
-
-  tests.forEach((test, i) => {
-    const splitIndex = Math.floor(i / maxTestsPerSplit);
-    testsBySplit[splitIndex].push(test);
-  });
-
-  return testsBySplit;
-}
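Two details worth calling out. Math.max with a single argument is a no-op, so maxTestsPerSplit is simply tests.length / numOfSplits, possibly fractional. And the docstring's @example shows a 4/4 fill, while the actual fill is 5/3, as recorded in the "handles slow test splitting 2" snapshot below. A hand trace, using the same fixture values as the tests further down:

// Sketch only: traces the deleted simpleSplit for numOfSplits = 2 with the
// fixture arrays defined below. maxTestsPerSplit = 5 / 2 = 2.5.
const slowTests = ["bonk.test", "bop.test", "ouch.test.ts"];
const tests = ["foo.test", "bar.test", "baz.test", "yup.test", "nope.test"];
const numOfSplits = 2;
const maxTestsPerSplit = tests.length / numOfSplits; // Math.max(x) === x
const testsBySplit: string[][] = Array.from({ length: numOfSplits }, () => []);
slowTests.forEach((t, i) => testsBySplit[i % numOfSplits].push(t)); // round-robin: 0, 1, 0
tests.forEach((t, i) => testsBySplit[Math.floor(i / maxTestsPerSplit)].push(t)); // indices floor to 0,0,0,1,1
console.log(testsBySplit);
// [["bonk.test", "ouch.test.ts", "foo.test", "bar.test", "baz.test"],
//  ["bop.test", "yup.test", "nope.test"]]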
diff --git a/.github/actions/split-tests/src/types.mts b/.github/actions/split-tests/src/types.mts
deleted file mode 100644
index 3eae2f0eb9..0000000000
--- a/.github/actions/split-tests/src/types.mts
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * An array of all tests
- */
-export type Tests = string[];
-
-/**
- * An array of tests, indexed by split
- */
-export type TestsBySplit = string[][];
-
-export interface Split {
-  /**
-   * The split index
-   * @example "4"
-   */
-  idx: string;
-
-  /**
-   * The split index in the context of all splits
-   * @example "4/10"
-   */
-  id: string;
-}
-
-export interface SoliditySplit extends Split {
-  /**
-   * A string that contains a whitespace delimited list of tests to run
-   *
-   * This format is to support the `hardhat test` command.
-   * @example test/foo.test.ts test/bar.test.ts
-   */
-  tests: string;
-
-  /**
-   * A string that contains a glob that expresses the list of tests to run.
-   *
-   * This format is used to conform to the --testfiles flag of solidity-coverage
-   * @example {test/foo.test.ts,test/bar.test.ts}
-   */
-  coverageTests: string;
-}
-
-/**
- * Configuration file for solidity tests
- */
-export interface SolidityConfig {
-  type: "solidity";
-  /**
-   * The path to the contracts tests directory, relative to the git root
-   */
-  basePath: string;
-  splits: {
-    /**
-     * The number of sub-splits to run across
-     */
-    numOfSplits: number;
-    /**
-     * The directory of the tests to create sub-splits across, relative to the basePath
-     */
-    dir: string;
-    /**
-     * An array of known slow tests, to better distribute across sub-splits
-     *
-     * Each string is a case-sensitive matcher that will match against any substring within the list of test file paths within the `dir` configuration.
-     *
-     * @example
-     * Given the dir `v0.8`, we get the following tests: ['v0.8/Foo1.test.ts','v0.8/bar.test.ts','v0.8/dev/eolpe/Foo.test.ts']
-     *
-     * If we supply the following `slowTests` argument: ['Foo']
-     *
-     * Then it'll match against both 'v0.8/Foo1.test.ts' and 'v0.8/dev/eolpe/Foo.test.ts'.
-     */
-    slowTests?: string[];
-  }[];
-}
diff --git a/.github/actions/split-tests/test/__snapshots__/sieve.test.ts.snap b/.github/actions/split-tests/test/__snapshots__/sieve.test.ts.snap
deleted file mode 100644
index d55bc175b8..0000000000
--- a/.github/actions/split-tests/test/__snapshots__/sieve.test.ts.snap
+++ /dev/null
@@ -1,99 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`sieveSlowTests works 1`] = `
-{
-  "filteredTests": [],
-  "slowTests": [],
-}
-`;
-
-exports[`sieveSlowTests works 2`] = `
-{
-  "filteredTests": [],
-  "slowTests": [],
-}
-`;
-
-exports[`sieveSlowTests works 3`] = `
-{
-  "filteredTests": [
-    "keepme",
-  ],
-  "slowTests": [],
-}
-`;
-
-exports[`sieveSlowTests works 4`] = `
-{
-  "filteredTests": [
-    "keepme",
-  ],
-  "slowTests": [],
-}
-`;
-
-exports[`sieveSlowTests works 5`] = `
-{
-  "filteredTests": [
-    "foo.test",
-    "bar.test",
-    "baz.test",
-    "yup.test",
-    "nope.test",
-    "bonk.test",
-    "bop.test",
-    "ouch.test.ts",
-  ],
-  "slowTests": [],
-}
-`;
-
-exports[`sieveSlowTests works 6`] = `
-{
-  "filteredTests": [
-    "foo.test",
-    "bar.test",
-    "baz.test",
-    "yup.test",
-    "nope.test",
-    "bonk.test",
-    "bop.test",
-    "ouch.test.ts",
-  ],
-  "slowTests": [],
-}
-`;
-
-exports[`sieveSlowTests works 7`] = `
-{
-  "filteredTests": [
-    "foo.test",
-    "bar.test",
-    "baz.test",
-    "yup.test",
-    "nope.test",
-    "bonk.test",
-    "bop.test",
-  ],
-  "slowTests": [
-    "ouch.test.ts",
-  ],
-}
-`;
-
-exports[`sieveSlowTests works 8`] = `
-{
-  "filteredTests": [
-    "foo.test",
-    "bar.test",
-    "baz.test",
-    "yup.test",
-    "nope.test",
-  ],
-  "slowTests": [
-    "bonk.test",
-    "bop.test",
-    "ouch.test.ts",
-  ],
-}
-`;
diff --git a/.github/actions/split-tests/test/__snapshots__/splitter.test.ts.snap b/.github/actions/split-tests/test/__snapshots__/splitter.test.ts.snap
deleted file mode 100644
index 70dfe70ec1..0000000000
--- a/.github/actions/split-tests/test/__snapshots__/splitter.test.ts.snap
+++ /dev/null
@@ -1,119 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`simpleSplit doesn't error on empty arrays 1`] = `
-[
-  [],
-]
-`;
-
-exports[`simpleSplit doesn't error on empty arrays 2`] = `
-[
-  [],
-  [],
-  [],
-  [],
-  [],
-]
-`;
-
-exports[`simpleSplit handles no slow test splitting 1`] = `
-[
-  [
-    "foo.test",
-    "bar.test",
-    "baz.test",
-    "yup.test",
-    "nope.test",
-    "bonk.test",
-    "bop.test",
-    "ouch.test.ts",
-  ],
-]
-`;
-
-exports[`simpleSplit handles no slow test splitting 2`] = `
-[
-  [
-    "foo.test",
-    "bar.test",
-    "baz.test",
-    "yup.test",
-  ],
-  [
-    "nope.test",
-    "bonk.test",
-    "bop.test",
-    "ouch.test.ts",
-  ],
-]
-`;
-
-exports[`simpleSplit handles no slow test splitting 3`] = `
-[
-  [
-    "foo.test",
-    "bar.test",
-    "baz.test",
-  ],
-  [
-    "yup.test",
-    "nope.test",
-    "bonk.test",
-  ],
-  [
-    "bop.test",
-    "ouch.test.ts",
-  ],
-]
-`;
-
-exports[`simpleSplit handles slow test splitting 1`] = `
-[
-  [
-    "bonk.test",
-    "bop.test",
-    "ouch.test.ts",
-    "foo.test",
-    "bar.test",
-    "baz.test",
-    "yup.test",
-    "nope.test",
-  ],
-]
-`;
-
-exports[`simpleSplit handles slow test splitting 2`] = `
-[
-  [
-    "bonk.test",
-    "ouch.test.ts",
-    "foo.test",
-    "bar.test",
-    "baz.test",
-  ],
-  [
-    "bop.test",
-    "yup.test",
-    "nope.test",
-  ],
-]
-`;
-
-exports[`simpleSplit handles slow test splitting 3`] = `
-[
-  [
-    "bonk.test",
-    "foo.test",
-    "bar.test",
-  ],
-  [
-    "bop.test",
-    "baz.test",
-    "yup.test",
-  ],
-  [
-    "ouch.test.ts",
-    "nope.test",
-  ],
-]
-`;
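To make the schema concrete, here is a hypothetical config instance matching the SolidityConfig shape above. Every path, directory name, and matcher is invented for illustration; nothing here comes from the repository's real configuration:

// Sketch only: a literal matching the SolidityConfig shape documented above.
const config = {
  type: "solidity",
  basePath: "contracts/test",
  splits: [
    {
      dir: "v0.8",
      numOfSplits: 4,
      // Case-sensitive substring matchers: "Foo" would tag both
      // v0.8/Foo1.test.ts and v0.8/dev/eolpe/Foo.test.ts as slow.
      slowTests: ["Foo"],
    },
    { dir: "v0.7", numOfSplits: 1 },
  ],
};
console.log(JSON.stringify(config, null, 2));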
diff --git a/.github/actions/split-tests/test/fixtures.mts b/.github/actions/split-tests/test/fixtures.mts
deleted file mode 100644
index aa87ba4c35..0000000000
--- a/.github/actions/split-tests/test/fixtures.mts
+++ /dev/null
@@ -1,20 +0,0 @@
-export const testArr: string[] = [
-  "foo.test",
-  "bar.test",
-  "baz.test",
-  "yup.test",
-  "nope.test",
-  "bonk.test",
-  "bop.test",
-  "ouch.test.ts",
-];
-
-export const testSievedArr: string[] = [
-  "foo.test",
-  "bar.test",
-  "baz.test",
-  "yup.test",
-  "nope.test",
-];
-
-export const testSlowArr: string[] = ["bonk.test", "bop.test", "ouch.test.ts"];
diff --git a/.github/actions/split-tests/test/sieve.test.ts b/.github/actions/split-tests/test/sieve.test.ts
deleted file mode 100644
index bc296a435f..0000000000
--- a/.github/actions/split-tests/test/sieve.test.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import {sieveSlowTests} from "../src/sieve.mjs";
-import {testArr} from "./fixtures.mjs";
-
-describe("sieveSlowTests", () => {
-  it("works", () => {
-    expect(sieveSlowTests([])).toMatchSnapshot();
-    expect(sieveSlowTests([], [])).toMatchSnapshot();
-    expect(sieveSlowTests(["keepme"], [])).toMatchSnapshot();
-    expect(sieveSlowTests(["keepme"])).toMatchSnapshot();
-    expect(sieveSlowTests(testArr, [])).toMatchSnapshot();
-    expect(sieveSlowTests(testArr, ["noself"])).toMatchSnapshot();
-    expect(sieveSlowTests(testArr, ["ouch.test.ts"])).toMatchSnapshot();
-    expect(sieveSlowTests(testArr, ["bo", "ouch.test.ts"])).toMatchSnapshot();
-  });
-});
diff --git a/.github/actions/split-tests/test/splitter.test.ts b/.github/actions/split-tests/test/splitter.test.ts
deleted file mode 100644
index 85ae7726fe..0000000000
--- a/.github/actions/split-tests/test/splitter.test.ts
+++ /dev/null
@@ -1,21 +0,0 @@
-import {simpleSplit} from "../src/splitter.mjs";
-import {testArr, testSievedArr, testSlowArr} from "./fixtures.mjs";
-
-describe("simpleSplit", () => {
-  it("doesn't error on empty arrays", () => {
-    expect(simpleSplit([], [], 1)).toMatchSnapshot();
-    expect(simpleSplit([], [], 5)).toMatchSnapshot();
-  });
-
-  it("handles no slow test splitting", () => {
-    expect(simpleSplit(testArr, [], 1)).toMatchSnapshot();
-    expect(simpleSplit(testArr, [], 2)).toMatchSnapshot();
-    expect(simpleSplit(testArr, [], 3)).toMatchSnapshot();
-  });
-
-  it("handles slow test splitting", () => {
-    expect(simpleSplit(testSievedArr, testSlowArr, 1)).toMatchSnapshot();
-    expect(simpleSplit(testSievedArr, testSlowArr, 2)).toMatchSnapshot();
-    expect(simpleSplit(testSievedArr, testSlowArr, 3)).toMatchSnapshot();
-  });
-});
*/ - - /* Language and Environment */ - "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - - /* Modules */ - "module": "NodeNext" /* Specify what module code is generated. */, - // "rootDir": "./", /* Specify the root folder within your source files. */ - "moduleResolution": "NodeNext" /* Specify how TypeScript looks up a file from a given module specifier. */, - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */ - - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ - - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. 
*/ - // "outDir": "./", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - "noEmit": true /* Disable emitting files from a compilation. */, - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ - - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, - - /* Type Checking */ - "strict": true /* Enable all strict type-checking options. */, - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. 
*/ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": false /* Skip type checking all .d.ts files. */ - }, - "include": ["src", "test"] -} diff --git a/.github/actions/version-file-bump/action.yml b/.github/actions/version-file-bump/action.yml deleted file mode 100644 index 2083217400..0000000000 --- a/.github/actions/version-file-bump/action.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: version-file-bump -description: "Ensure that the VERSION file has been bumped since the last release." -inputs: - github-token: - description: "Github access token" - default: ${{ github.token }} - required: true -outputs: - result: - value: ${{ steps.compare.outputs.result }} - description: 'Result of the comparison' -runs: - using: composite - steps: - - name: Get latest release version - id: get-latest-version - shell: bash - run: | - untrimmed_ver=$( - curl --header "Authorization: token ${{ inputs.github-token }}" \ - --request GET \ - "https://api.github.com/repos/${{ github.repository }}/releases/latest?draft=false&prerelease=false" \ - | jq -r .name - ) - latest_version="${untrimmed_ver:1}" - echo "latest_version=${latest_version}" | tee -a "$GITHUB_OUTPUT" - - name: Get current version - id: get-current-version - shell: bash - run: | - current_version=$(head -n1 ./VERSION) - echo "current_version=${current_version}" | tee -a "$GITHUB_OUTPUT" - - name: Compare semantic versions - uses: smartcontractkit/chainlink-github-actions/semver-compare@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - id: compare - with: - version1: ${{ steps.get-current-version.outputs.current_version }} - operator: eq - version2: ${{ steps.get-latest-version.outputs.latest_version }} - - name: Fail if version not bumped - # XXX: The reason we are not checking if the current is greater than the - # latest release is to account for hot fixes which may have been branched - # from a previous tag. - shell: bash - env: - VERSION_NOT_BUMPED: ${{ steps.compare.outputs.result }} - run: | - if [[ "${VERSION_NOT_BUMPED:-}" = "true" ]]; then - echo "Version file not bumped since last release. Please bump the ./VERSION file in the root of the repo and commit the change." 
- exit 1 - fi diff --git a/.github/cr.yaml b/.github/cr.yaml deleted file mode 100644 index b526aa963f..0000000000 --- a/.github/cr.yaml +++ /dev/null @@ -1,2 +0,0 @@ -pages_branch: helm-release -packages_with_index: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index f9b33ef456..0000000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,4 +0,0 @@ -## Motivation - - -## Solution \ No newline at end of file diff --git a/.github/scripts/functions.sh b/.github/scripts/functions.sh deleted file mode 100644 index 53b5339226..0000000000 --- a/.github/scripts/functions.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -# Function to convert a comma-separated list into a TOML array format. -# Usage: convert_to_toml_array "elem1,elem2,elem3" -# Effect: "a,b,c" -> ["a","b","c"] -function convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" -} \ No newline at end of file diff --git a/.github/tracing/README.md b/.github/tracing/README.md deleted file mode 100644 index 06b2eef665..0000000000 --- a/.github/tracing/README.md +++ /dev/null @@ -1,112 +0,0 @@ -# Distributed Tracing - -As part of the LOOP plugin effort, we've added distributed tracing to the core node. This is helpful for initial development and maintenance of LOOPs, but will also empower product teams building on top of core. - -## Dev environment - -One way to generate traces locally today is with the OCR2 basic smoke test. - -1. navigate to `.github/tracing/` and then run `docker compose --file local-smoke-docker-compose.yaml up` -2. set up a local docker registry at `127.0.0.1:5000` (https://www.docker.com/blog/how-to-use-your-own-registry-2/) -3. run `make build_push_plugin_docker_image` in `chainlink/integration-tests/Makefile` -4. prepare your `overrides.toml` file with the selected network and CL image name and version, and place it anywhere -inside the `integration-tests` directory. Sample `overrides.toml` file: -```toml -[ChainlinkImage] -image="127.0.0.1:5000/chainlink" -version="develop" - -[Network] -selected_networks=["simulated"] -``` -5. run `go test -run TestOCRv2Basic ./smoke/ocr2_test.go` -6. navigate to `localhost:3000/explore` in a web browser to query for traces - -Core and the median plugins are instrumented with OpenTelemetry traces, which are sent to the OTEL collector and forwarded to the Tempo backend. The Grafana UI can then read the trace data from the Tempo backend. - - - -## CI environment - -Another way to generate traces is by enabling traces for PRs. This will instrument traces for `TestOCRv2Basic` in the CI run. - -1. Cut a PR in the core repo -2. Add the `enable tracing` label to the PR -3. Navigate to `Integration Tests / ETH Smoke Tests ocr2-plugins (pull_request)` details -4. Navigate to the summary of the integration tests -5. After the test completes, the generated trace data will be saved as an artifact, currently called `trace-data` -6. Download the artifact to this directory (`chainlink/.github/tracing`) -7. `docker compose --file local-smoke-docker-compose.yaml up` -8. Run `sh replay.sh` to replay those traces to the otel-collector container that was spun up in the last step. -9. 
navigate to `localhost:3000/explore` in a web browser to query for traces - -The artifact is not a single JSON document: each individual line is a well-formed and complete JSON object. - - -## Production and NOPs environments - -In a production environment, we suggest coupling the lifecycle of nodes and otel-collectors. A best practice is to deploy the otel-collector alongside your node, using infrastructure as code (IaC) to automate deployments and certificate lifecycles. While there are valid use cases for using `Tracing.Mode = unencrypted`, we have set the default encryption setting to `Tracing.Mode = tls`. Externally deployed otel-collectors cannot be used with `Tracing.Mode = unencrypted`. That is, if `Tracing.Mode = unencrypted` and an external URI is detected for `Tracing.CollectorTarget`, node configuration will fail to validate and the node will not boot. The node requires a valid encryption mode and collector target to send traces. - -Once traces reach the otel-collector, the rest of the observability pipeline is flexible. We recommend deploying (through automation) centrally managed Grafana Tempo and Grafana UI instances to receive from one or many otel-collector instances. Always use networking best practices and encrypt trace data, especially at network boundaries. - -## Configuration -This folder contains the following config files: -* otel-collector-ci.yaml -* otel-collector-dev.yaml -* tempo.yaml -* grafana-datasources.yaml - -These config files are for an OTEL collector, Grafana Tempo, and a Grafana UI instance to run as containers on the same network. -`otel-collector-dev.yaml` is the configuration for dev (i.e. your local machine) environments, and forwards traces from the otel-collector to the Grafana Tempo instance on the same network. -`otel-collector-ci.yaml` is the configuration for the CI runs, and exports the trace data to the artifact from the GitHub run. - -## Adding Traces to Plugins and to core - -Adding traces requires identifying an observability gap in a related group of code executions or a critical path in your application. This is intuitive for the developer: - -- - "What's the flow of component interaction in this distributed system?" - "What's the behavior of the JobProcessorOne component when jobs with [x, y, z] attributes are processed?" - "Is this critical path workflow behaving the way we expect?" - -The developer will measure a flow of execution from end to end in one trace. Each logically separate measure of this flow is called a span. A span has at most one parent span and may have multiple child spans. The relationship between parent and child spans in aggregate forms a directed acyclic graph. The trace begins at the root of this graph. - -The most trivial application of a span is measuring top-level performance in one critical path. There is much more you can do, including creating human-readable and timestamped events within a span (useful for monitoring concurrent access to resources), recording errors, linking parent and child spans through large parts of an application, and even extending a span beyond a single process. - -Spans are created by `tracers` and passed through Go applications by `Context`s. A tracer must be initialized first. 
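-
-For example, once a span has been started, events and errors can be recorded on it before `End` is called. The sketch below is illustrative rather than code from this repo (the function, event name, attribute, and error are invented), but `Start`, `AddEvent`, `RecordError`, and `SetStatus` are standard OpenTelemetry Go span APIs:
-
-```
-import (
-	"context"
-	"errors"
-
-	"go.opentelemetry.io/otel/attribute"
-	"go.opentelemetry.io/otel/codes"
-	"go.opentelemetry.io/otel/trace"
-)
-
-func doTrackedWork(ctx context.Context, tracer trace.Tracer) {
-	// Start a child span of whatever span is already on ctx.
-	_, span := tracer.Start(ctx, "do-tracked-work")
-	defer span.End()
-
-	// A human-readable, timestamped event within the span.
-	span.AddEvent("acquired-resource",
-		trace.WithAttributes(attribute.String("resource.name", "db-conn")))
-
-	if err := errors.New("example failure"); err != nil {
-		// Record the error and mark the span as failed.
-		span.RecordError(err)
-		span.SetStatus(codes.Error, err.Error())
-	}
-}
-```
-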
Both core and plugin developers will initialize a tracer from the globally registered trace provider: - -``` -tracer := otel.GetTracerProvider().Tracer("example.com/foo") -``` - -The globally registered tracer provider is available for plugins after they are initialized, and available in core after configuration is processed (`initGlobals`). - -Add spans by: -``` - func interestingFunc() { - // Assuming there is an appropriate parentContext - ctx, span := tracer.Start(parentContext, "hello-span") - defer span.End() - - // do some work to track with hello-span - } -``` -As implied by the example, `span` is a child of its parent span captured by `parentContext`. - - -Note that in certain situations, there are third-party libraries that will set up spans. For instance: - -``` -import ( - "github.com/gin-gonic/gin" - "go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin" -) - -router := gin.Default() -router.Use(otelgin.Middleware("service-name")) -``` - -The developer aligns with best practices when they: -- Start with critical paths -- Measure paths from end to end (Context is wired all the way through) -- Emphasize broadness of measurement over depth -- Use automatic instrumentation if possible \ No newline at end of file diff --git a/.github/tracing/grafana-datasources.yaml b/.github/tracing/grafana-datasources.yaml deleted file mode 100644 index 098b06ec76..0000000000 --- a/.github/tracing/grafana-datasources.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: 1 - -datasources: -- name: Tempo - type: tempo - access: proxy - orgId: 1 - url: http://tempo:3200 - basicAuth: false - isDefault: true - version: 1 - editable: false - apiVersion: 1 - uid: tempo - jsonData: - httpMethod: GET - serviceMap: - datasourceUid: prometheus \ No newline at end of file diff --git a/.github/tracing/local-smoke-docker-compose.yaml b/.github/tracing/local-smoke-docker-compose.yaml deleted file mode 100644 index 744ba88ef6..0000000000 --- a/.github/tracing/local-smoke-docker-compose.yaml +++ /dev/null @@ -1,48 +0,0 @@ -version: "3" -services: - - # ... the OpenTelemetry Collector configured to receive traces and export to Tempo ... - otel-collector: - image: otel/opentelemetry-collector:0.61.0 - command: [ "--config=/etc/otel-collector.yaml" ] - volumes: - - ./otel-collector-dev.yaml:/etc/otel-collector.yaml - - ../../integration-tests/smoke/traces/trace-data.json:/etc/trace-data.json # local trace data stored consistent with smoke/logs - ports: - - "4317:4317" # otlp grpc - - "3100:3100" - depends_on: - - tempo - networks: - - tracing-network - - # ... which accepts requests from Grafana ... 
- tempo: - image: grafana/tempo:latest - command: [ "-config.file=/etc/tempo.yaml" ] - volumes: - - ./tempo.yaml:/etc/tempo.yaml - - ./tempo-data:/tmp/tempo - ports: - - "4317" # otlp grpc - networks: - - tracing-network - - grafana: - image: grafana/grafana:9.4.3 - volumes: - - ./grafana-datasources.yaml:/etc/grafana/provisioning/datasources/datasources.yaml - environment: - - GF_AUTH_ANONYMOUS_ENABLED=true - - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin - - GF_AUTH_DISABLE_LOGIN_FORM=true - - GF_FEATURE_TOGGLES_ENABLE=traceqlEditor - ports: - - "3000:3000" - networks: - - tracing-network - -networks: - tracing-network: - name: tracing - driver: bridge \ No newline at end of file diff --git a/.github/tracing/otel-collector-ci.yaml b/.github/tracing/otel-collector-ci.yaml deleted file mode 100644 index 0bf123d29b..0000000000 --- a/.github/tracing/otel-collector-ci.yaml +++ /dev/null @@ -1,22 +0,0 @@ -receivers: - otlp: - protocols: - grpc: - endpoint: "0.0.0.0:4317" - http: - endpoint: "0.0.0.0:3100" -exporters: - file: - path: /tracing/trace-data.json - otlp: - endpoint: tempo:4317 - tls: - insecure: true -service: - telemetry: - logs: - level: "debug" # Set log level to debug - pipelines: - traces: - receivers: [otlp] - exporters: [file,otlp] \ No newline at end of file diff --git a/.github/tracing/otel-collector-dev.yaml b/.github/tracing/otel-collector-dev.yaml deleted file mode 100644 index dd059127b8..0000000000 --- a/.github/tracing/otel-collector-dev.yaml +++ /dev/null @@ -1,20 +0,0 @@ -receivers: - otlp: - protocols: - grpc: - endpoint: "0.0.0.0:4317" - http: - endpoint: "0.0.0.0:3100" -exporters: - otlp: - endpoint: tempo:4317 - tls: - insecure: true -service: - telemetry: - logs: - level: "debug" # Set log level to debug - pipelines: - traces: - receivers: [otlp] - exporters: [otlp] \ No newline at end of file diff --git a/.github/tracing/replay.sh b/.github/tracing/replay.sh deleted file mode 100644 index b2e564567c..0000000000 --- a/.github/tracing/replay.sh +++ /dev/null @@ -1,6 +0,0 @@ -# Read JSON file and loop through each trace -while IFS= read -r trace; do - curl -X POST http://localhost:3100/v1/traces \ - -H "Content-Type: application/json" \ - -d "$trace" -done < "trace-data" diff --git a/.github/tracing/tempo.yaml b/.github/tracing/tempo.yaml deleted file mode 100644 index e61f744f78..0000000000 --- a/.github/tracing/tempo.yaml +++ /dev/null @@ -1,24 +0,0 @@ -server: - http_listen_port: 3200 - -distributor: - receivers: - otlp: - protocols: - http: - grpc: - -ingester: - max_block_duration: 5m # cut the headblock when this much time passes. this is being set for demo purposes and should probably be left alone normally - -compactor: - compaction: - block_retention: 1h # overall Tempo trace retention. set for demo purposes - -storage: - trace: - backend: local # backend configuration to use - wal: - path: /tmp/tempo/wal # where to store the wal locally - local: - path: /tmp/tempo/blocks \ No newline at end of file diff --git a/.github/workflows/auto-update.yml b/.github/workflows/auto-update.yml deleted file mode 100644 index 963145c404..0000000000 --- a/.github/workflows/auto-update.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Auto Update -on: - push: - branches: - - develop -jobs: - autoupdate: - name: Auto Update - runs-on: ubuntu-latest - steps: - - uses: docker://chinthakagodawita/autoupdate-action:v1 - env: - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - PR_FILTER: "labelled" - PR_LABELS: "auto-update" - MERGE_MSG: "Branch was auto-updated." 
- MERGE_CONFLICT_ACTION: "ignore" diff --git a/.github/workflows/automation-benchmark-tests.yml b/.github/workflows/automation-benchmark-tests.yml deleted file mode 100644 index bc807ed807..0000000000 --- a/.github/workflows/automation-benchmark-tests.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: Automation Benchmark Test -on: - workflow_dispatch: - inputs: - testType: - description: Type of test to run (benchmark, soak) - required: true - default: benchmark - type: string - base64Config: - description: base64-ed config - required: true - type: string - slackMemberID: - description: Notifies test results (Not your @) - required: true - default: U02Q14G80TY - type: string -jobs: - automation_benchmark: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - name: Automation Benchmark Test - runs-on: ubuntu20.04-16cores-64GB - env: - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: C03KJ5S7KEK - CHAINLINK_ENV_USER: ${{ github.actor }} - REF_NAME: ${{ github.head_ref || github.ref_name }} - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ env.REF_NAME }} - - name: Get Slack config and mask base64 config - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV - - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - suites: benchmark load/automationv2_1 chaos reorg - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - env: - DETACH_RUNNER: true - TEST_SUITE: benchmark - TEST_ARGS: -test.timeout 720h - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - TEST_TYPE: ${{ github.event.inputs.testType }} - TEST_TEST_TYPE: ${{ github.event.inputs.testType }} - with: - test_command_to_run: cd integration-tests && go test -timeout 30m -v -run ^TestAutomationBenchmark$ ./benchmark -count=1 - test_download_vendor_packages_command: make gomod - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - 
QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation Benchmark Test - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true diff --git a/.github/workflows/automation-load-tests.yml b/.github/workflows/automation-load-tests.yml deleted file mode 100644 index 6638f6e062..0000000000 --- a/.github/workflows/automation-load-tests.yml +++ /dev/null @@ -1,97 +0,0 @@ -name: Automation Load Test -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - slackMemberID: - description: Notifies test results (Not your @) - required: true - default: U02Q14G80TY - type: string - -jobs: - automation_load: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - name: Automation Load Test - runs-on: ubuntu20.04-16cores-64GB - env: - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: C03KJ5S7KEK - CHAINLINK_ENV_USER: ${{ github.actor }} - REF_NAME: ${{ github.head_ref || github.ref_name }} - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ env.REF_NAME }} - - name: Get Slack config and mask base64 config - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV - - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Send details to Step Summary - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - suites: benchmark load/automationv2_1 chaos reorg - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - env: - RR_CPU: 4000m - RR_MEM: 4Gi - DETACH_RUNNER: true - TEST_SUITE: automationv2_1 - TEST_ARGS: -test.timeout 720h - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - 
with: - test_command_to_run: cd integration-tests && go test -timeout 1h -v -run TestLogTrigger ./load/automationv2_1 -count=1 - test_download_vendor_packages_command: make gomod - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation Load Test - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true diff --git a/.github/workflows/automation-nightly-tests.yml b/.github/workflows/automation-nightly-tests.yml deleted file mode 100644 index 3e1f66abcd..0000000000 --- a/.github/workflows/automation-nightly-tests.yml +++ /dev/null @@ -1,262 +0,0 @@ -name: Automation Nightly Tests -on: - schedule: - - cron: "0 0 * * *" # Run nightly - push: - tags: - - "*" - workflow_dispatch: - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - GRAFANA_CLOUD_BASIC_AUTH: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} - GRAFANA_CLOUD_HOST: ${{ secrets.GRAFANA_CLOUD_HOST }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - automation-upgrade-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink] - env: - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - matrix: - tests: - - name: Upgrade - suite: smoke - nodes: 6 - os: ubuntu20.04-8cores-32GB - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade$ ./smoke - runs-on: ${{ matrix.tests.os }} - name: Automation ${{ matrix.tests.name }} Test - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-upgrade-config - with: - selectedNetworks: ${{ env.SELECTED_NETWORKS }} - 
chainlinkImage: "public.ecr.aws/chainlink/chainlink" - chainlinkVersion: "latest" - upgradeImage: ${{ env.UPGRADE_IMAGE }} - upgradeVersion: ${{ env.UPGRADE_VERSION }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - env: - TEST_SUITE: ${{ matrix.tests.suite }} - with: - test_command_to_run: cd ./integration-tests && go test -timeout 60m -count=1 -json -test.parallel=${{ matrix.tests.nodes }} ${{ matrix.tests.command }} 2>&1 | tee /tmp/gotest.log | gotestfmt - test_download_vendor_packages_command: cd ./integration-tests && go mod download - cl_repo: 'public.ecr.aws/chainlink/chainlink' - cl_image_tag: 'latest' - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_location: ./integration-tests/${{ matrix.tests.suite }}/logs - publish_check_name: Automation Results ${{ matrix.tests.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Upload test log - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 - if: failure() - with: - name: test-log-${{ matrix.tests.name }} - path: /tmp/gotest.log - retention-days: 7 - continue-on-error: true - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation ${{ matrix.tests.name }} Test - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - test-notify: - name: Start Slack Thread - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [ automation-upgrade-tests ] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0 - id: slack - with: - channel-id: C03KJ5S7KEK - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "Automation Nightly Tests ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - test-results: - name: Post Test Results for ${{ matrix.name }} - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - permissions: - checks: write - 
pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: test-notify - strategy: - fail-fast: false - matrix: - name: [ Upgrade ] - steps: - - name: Get Results - id: test-results - run: | - # I feel like there's some clever, fully jq way to do this, but I ain't got the motivation to figure it out - echo "Querying test results" - - PARSED_RESULTS=$(curl \ - -H "Authorization: Bearer ${{ github.token }}" \ - 'https://api.github.com/repos/${{github.repository}}/actions/runs/${{ github.run_id }}/jobs' \ - | jq -r --arg pattern "${{ matrix.name }} Test" '.jobs[] - | select(.name | test($pattern)) as $job - | $job.steps[] - | select(.name == "Run Tests") - | { conclusion: (if .conclusion == "success" then ":white_check_mark:" else ":x:" end), product: ("*" + ($job.name | capture($pattern).product) + "*") }') - - echo "Parsed Results:" - echo $PARSED_RESULTS - - ALL_SUCCESS=true - for row in $(echo "$PARSED_RESULTS" | jq -s | jq -r '.[] | select(.conclusion != ":white_check_mark:")'); do - # Any non-success conclusion flips the flag - ALL_SUCCESS=false - break - done - - echo all_success=$ALL_SUCCESS >> $GITHUB_OUTPUT - - FORMATTED_RESULTS=$(echo $PARSED_RESULTS | jq -s '[.[] - | { - conclusion: .conclusion, - product: .product - } - ] - | map("{\"type\": \"section\", \"text\": {\"type\": \"mrkdwn\", \"text\": \"\(.product): \(.conclusion)\"}}") - | join(",")') - - echo "Formatted Results:" - echo $FORMATTED_RESULTS - - # Cleans out backslashes and quotes from jq - CLEAN_RESULTS=$(echo "$FORMATTED_RESULTS" | sed 's/\\\"/"/g' | sed 's/^"//;s/"$//') - - echo "Clean Results" - echo $CLEAN_RESULTS - - echo results=$CLEAN_RESULTS >> $GITHUB_OUTPUT - - - name: Test Details - uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0 - with: - channel-id: C03KJ5S7KEK - payload: | - { - "thread_ts": "${{ needs.test-notify.outputs.thread_ts }}", - "attachments": [ - { - "color": "${{ steps.test-results.outputs.all_success && '#2E7D32' || '#C62828' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "${{ matrix.name }} ${{ steps.test-results.outputs.all_success && ':white_check_mark:' || ':x: Notifying <@U02Q14G80TY>'}}", - "emoji": true - } - }, - { - "type": "divider" - }, - ${{ steps.test-results.outputs.results }} - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} \ No newline at end of file diff --git a/.github/workflows/automation-ondemand-tests.yml b/.github/workflows/automation-ondemand-tests.yml deleted file mode 100644 index 62c74243a1..0000000000 --- a/.github/workflows/automation-ondemand-tests.yml +++ /dev/null @@ -1,262 +0,0 @@ -name: Automation On Demand Tests -on: - workflow_dispatch: - inputs: - chainlinkVersion: - description: Chainlink image version to use - required: false - type: string - chainlinkImage: - description: Chainlink image repo to use (Leave empty to build from head/ref) - required: false - type: string - chainlinkVersionUpdate: - description: Chainlink image version to use initially for upgrade test - default: latest - required: true - type: string - chainlinkImageUpdate: - description: Chainlink image repo to use initially for upgrade test - required: true - default: public.ecr.aws/chainlink/chainlink - type: string - -env: - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - -jobs: - build-chainlink: 
- environment: integration - permissions: - id-token: write - contents: read - strategy: - matrix: - image: - - name: "" - dockerfile: core/chainlink.Dockerfile - tag-suffix: "" - - name: (plugins) - dockerfile: plugins/chainlink.Dockerfile - tag-suffix: -plugins - name: Build Chainlink Image ${{ matrix.image.name }} - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Collect Metrics - if: inputs.chainlinkImage == '' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Check if image exists - if: inputs.chainlinkImage == '' - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - repository: chainlink - tag: ${{ github.sha }}${{ matrix.image.tag-suffix }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' && inputs.chainlinkImage == '' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - with: - cl_repo: smartcontractkit/chainlink - cl_ref: ${{ github.sha }} - cl_dockerfile: ${{ matrix.image.dockerfile }} - push_tag: ${{ env.CHAINLINK_IMAGE }}:${{ github.sha }}${{ matrix.image.tag-suffix }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Print Chainlink Image Built - if: inputs.chainlinkImage == '' - run: | - echo "### chainlink node image tag used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - - build-test-image: - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Build Test Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - automation-on-demand-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-test-image] - env: - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - matrix: - tests: - - name: chaos - suite: chaos - nodes: 5 - os: ubuntu-latest - pyroscope_env: 
ci-automation-on-demand-chaos - network: SIMULATED - command: -run ^TestAutomationChaos$ ./chaos - - name: reorg - suite: reorg - nodes: 1 - os: ubuntu-latest - pyroscope_env: ci-automation-on-demand-reorg - network: SIMULATED_NONDEV - command: -run ^TestAutomationReorg$ ./reorg - - name: upgrade - suite: smoke - nodes: 6 - os: ubuntu20.04-8cores-32GB - pyroscope_env: ci-automation-on-demand-upgrade - network: SIMULATED - command: -run ^TestAutomationNodeUpgrade$ ./smoke - runs-on: ${{ matrix.tests.os }} - name: Automation On Demand ${{ matrix.tests.name }} Test - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.head_ref || github.ref_name }} - - name: Determine build to use - id: determine-build - shell: bash - run: | - if [[ "${{ inputs.chainlinkImage }}" == "" ]]; then - echo "image=${{ env.CHAINLINK_IMAGE }}" >>$GITHUB_OUTPUT - echo "version=${{ github.sha }}" >>$GITHUB_OUTPUT - echo "upgrade_version=${{ github.sha }}" >>$GITHUB_OUTPUT - echo "upgrade_image=${{ env.CHAINLINK_IMAGE }}" >>$GITHUB_OUTPUT - else - READ_CL_IMAGE=$(jq -r '.inputs.chainlinkImage' $GITHUB_EVENT_PATH) - echo ::add-mask::$READ_CL_IMAGE - echo "image=$READ_CL_IMAGE" >>$GITHUB_OUTPUT - echo "version=${{ inputs.chainlinkVersion }}" >>$GITHUB_OUTPUT - echo "upgrade_version=${{ inputs.chainlinkVersion }}" >>$GITHUB_OUTPUT - echo "upgrade_image=$READ_CL_IMAGE" >>$GITHUB_OUTPUT - fi - if [[ "${{ matrix.tests.name }}" == "upgrade" ]]; then - READ_CL_UPGR_IMAGE=$(jq -r '.inputs.chainlinkImageUpdate' $GITHUB_EVENT_PATH) - echo ::add-mask::$READ_CL_UPGR_IMAGE - echo "image=$READ_CL_UPGR_IMAGE" >>$GITHUB_OUTPUT - echo "version=${{ inputs.chainlinkVersionUpdate }}" >>$GITHUB_OUTPUT - fi - - name: Prepare Base64 TOML config - env: - SELECTED_NETWORKS: ${{ matrix.tests.network }} - OLD_IMAGE: ${{ steps.determine-build.outputs.image }} - OLD_VERSION: ${{ steps.determine-build.outputs.version }} - UPGRADE_VERSION: ${{ steps.determine-build.outputs.upgrade_version }} - UPGRADE_IMAGE: ${{ steps.determine-build.outputs.upgrade_image }} - PYROSCOPE_SERVER: ${{ matrix.tests.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }} - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - run: | - echo ::add-mask::$UPGRADE_IMAGE - echo ::add-mask::$OLD_IMAGE - - # load reusable functions - source ./.github/scripts/functions.sh - - selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS") - - if [ -n "$PYROSCOPE_SERVER" ]; then - pyroscope_enabled=true - else - pyroscope_enabled=false - fi - - cat << EOF > config.toml - [Network] - selected_networks=$selected_networks - - [ChainlinkImage] - image="$OLD_IMAGE" - version="$OLD_VERSION" - - [ChainlinkUpgradeImage] - image="$UPGRADE_IMAGE" - version="$UPGRADE_VERSION" - - [Pyroscope] - enabled=$pyroscope_enabled - server_url="$PYROSCOPE_SERVER" - environment="$PYROSCOPE_ENVIRONMENT" - key="$PYROSCOPE_KEY" - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - env: - TEST_SUITE: ${{ matrix.tests.suite }} - with: - test_command_to_run: cd 
./integration-tests && go test -timeout 60m -count=1 -json -test.parallel=${{ matrix.tests.nodes }} ${{ matrix.tests.command }} 2>&1 | tee /tmp/gotest.log | gotestfmt - test_download_vendor_packages_command: cd ./integration-tests && go mod download - cl_repo: ${{ steps.determine-build.outputs.image }} - cl_image_tag: ${{ steps.determine-build.outputs.version }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_location: ./integration-tests/${{ matrix.tests.suite }}/logs - publish_check_name: Automation On Demand Results ${{ matrix.tests.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Upload test log - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 - if: failure() - with: - name: test-log-${{ matrix.tests.name }} - path: /tmp/gotest.log - retention-days: 7 - continue-on-error: true - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Automation On Demand ${{ matrix.tests.name }} Test - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true diff --git a/.github/workflows/bash-scripts.yml b/.github/workflows/bash-scripts.yml deleted file mode 100644 index 55950a0a9f..0000000000 --- a/.github/workflows/bash-scripts.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Bash Scripts - -on: - push: - branches: - - this-workflow-is-disabled-for-ccip - -jobs: - changes: - name: detect changes - runs-on: ubuntu-latest - outputs: - bash-scripts-src: ${{ steps.bash-scripts.outputs.src }} - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2.11.1 - id: bash-scripts - with: - filters: | - src: - - 'tools/bin/**' - - '.github/workflows/bash-scripts.yml' - shellcheck: - name: ShellCheck Lint - runs-on: ubuntu-latest - needs: [changes] - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Run ShellCheck - if: needs.changes.outputs.bash-scripts-src == 'true' - uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # v2.0.0 - with: - scandir: "./tools/bin" - # Consider changing this to check for warnings once all warnings are fixed. - severity: error diff --git a/.github/workflows/build-publish-pr.yml b/.github/workflows/build-publish-pr.yml deleted file mode 100644 index 57b22a565e..0000000000 --- a/.github/workflows/build-publish-pr.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: "Build and Publish from PR" - -## -# This workflow builds and publishes a Docker image for Chainlink from a PR. -# It has its own special IAM role, does not sign the image, and publishes to -# a special ECR repo. -## - -on: - push: - branches: - - this-workflow-is-disabled-for-ccip - -jobs: - build-publish-untrusted: - if: ${{ ! 
startsWith(github.ref_name, 'release/') }} - runs-on: ubuntu-20.04 - environment: sdlc - permissions: - id-token: write - contents: read - env: - ECR_IMAGE_NAME: crib-chainlink-untrusted - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - - name: Git Short SHA - shell: bash - env: - GIT_PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} - run: | - echo "GIT_SHORT_SHA=${GIT_PR_HEAD_SHA:0:7}" | tee -a "$GITHUB_ENV" - - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@912bed7e07a1df4d06ea53a031e9773bb65dc7bd # v2.3.0 - with: - repository: ${{ env.ECR_IMAGE_NAME}} - tag: sha-${{ env.GIT_SHORT_SHA }} - AWS_REGION: ${{ secrets.AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.AWS_OIDC_IAM_ROLE_PUBLISH_PR_ARN }} - - - name: Build and publish chainlink image - if: steps.check-image.outputs.exists == 'false' - uses: ./.github/actions/build-sign-publish-chainlink - with: - publish: true - aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_PUBLISH_PR_ARN }} - aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS_DEFAULT }} - aws-region: ${{ secrets.AWS_REGION }} - sign-images: false - ecr-hostname: ${{ secrets.AWS_SDLC_ECR_HOSTNAME }} - ecr-image-name: ${{ env.ECR_IMAGE_NAME }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - - - name: Get PR labels - id: pr-labels - env: - GH_TOKEN: ${{ github.token }} - PR_NUMBER: ${{ github.event.number }} - run: | - RESPONSE=$(gh pr view ${PR_NUMBER} --json labels) - # Check if the labels command was successful - if [[ $? -ne 0 ]]; then - echo "Error fetching labels" - exit 1 - fi - echo "RESPONSE=${RESPONSE}" - LABELS=$(echo "$RESPONSE" | jq -r '.labels | map(.name) | join(", ")') - # Check if any labels were found - if [[ -z "${LABELS:-}" ]]; then - echo "No labels found" - else - echo "labels=${LABELS}" | tee -a "${GITHUB_OUTPUT}" - fi - - - name: Setup GAP - if: contains(steps.pr-labels.outputs.labels, 'crib') - uses: smartcontractkit/.github/actions/setup-gap@main - with: - aws-region: ${{ secrets.AWS_REGION }} - aws-role-arn: ${{ secrets.AWS_OIDC_IAM_ROLE_PUBLISH_PR_ARN }} - api-gateway-host: ${{ secrets.AWS_API_GW_HOST_ARGO_SAND }} - use-argocd: "true" - argocd-user: ${{ secrets.ARGOCD_USER_SAND }} - argocd-pass: ${{ secrets.ARGOCD_PASS_SAND }} - - # Run an Argo CD sync after the image is built. 
- - name: Argo CD App Sync - if: contains(steps.pr-labels.outputs.labels, 'crib') - shell: bash - env: - PR_NUMBER: ${{ github.event.number }} - run: | - argocd app sync \ - --plaintext \ - --grpc-web \ - --async \ - "crib-chainlink-${PR_NUMBER}" - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: build-publish-untrusted - continue-on-error: true diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index c4983bfac0..0000000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: "Build Chainlink" - -on: - pull_request: - push: - branches: - - master - -jobs: - build-chainlink: - runs-on: ubuntu-20.04 - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - - name: Build chainlink image - uses: ./.github/actions/build-sign-publish-chainlink - with: - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - publish: false - sign-images: false - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: build-chainlink - continue-on-error: true diff --git a/.github/workflows/ccip-chaos-tests.yml b/.github/workflows/ccip-chaos-tests.yml deleted file mode 100644 index 489e69f3a4..0000000000 --- a/.github/workflows/ccip-chaos-tests.yml +++ /dev/null @@ -1,231 +0,0 @@ -name: CCIP Chaos Tests -on: - workflow_run: - workflows: [ CCIP Load Test ] - types: [ completed ] - branches: [ ccip-develop ] - workflow_dispatch: - - - -# Only run 1 of this workflow at a time per PR -concurrency: - group: chaos-ccip-tests-chainlink-${{ github.ref }} - cancel-in-progress: true - -env: - CL_ECR: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ github.sha }} - -jobs: - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Checkout the repo - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - - name: Check if image exists - id: check-image - uses: smartcontractkit/chainlink-github-actions/docker/image-exists@eccde1970eca69f079d3efb3409938a72ade8497 # v2.2.13 - with: - repository: chainlink - tag: ${{ github.sha }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - name: Build Image - if: steps.check-image.outputs.exists == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@eccde1970eca69f079d3efb3409938a72ade8497 # v2.2.13 - env: - GH_TOKEN: ${{ github.token }} - with: - cl_repo: smartcontractkit/chainlink-ccip - cl_ref: ${{ github.sha }} - push_tag: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION 
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Chainlink Image
-        continue-on-error: true
-
-  build-test-image:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    name: Build Test Image
-    runs-on: ubuntu20.04-16cores-64GB
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Test Image
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Build Test Image
-        uses: ./.github/actions/build-test-image
-        with:
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-
-  ccip-chaos-tests:
-    environment: integration
-    permissions:
-      issues: read
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    name: ccip-chaos-tests
-    runs-on: ubuntu-latest
-    needs: [ build-chainlink, build-test-image ]
-    env:
-      TEST_SUITE: chaos
-      TEST_ARGS: -test.timeout 30m
-      CHAINLINK_COMMIT_SHA: ${{ github.sha }}
-      CHAINLINK_ENV_USER: ${{ github.actor }}
-      TEST_TRIGGERED_BY: ccip-cron-chaos-eth
-      TEST_LOG_LEVEL: debug
-      DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable
-      GH_TOKEN: ${{ github.token }}
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: CCIP chaos Test
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Prepare Base64 TOML override for CCIP secrets
-        uses: ./.github/actions/setup-create-base64-config-ccip
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-          chainlinkVersion: ${{ github.sha }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: ${{ vars.GRAFANA_URL }}
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-      - name: Run Chaos Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@eccde1970eca69f079d3efb3409938a72ade8497 # v2.2.13
-        with:
-          test_command_to_run: cd ./integration-tests && go test -timeout 1h -count=1 -json -test.parallel 11 -run 'TestChaosCCIP' ./chaos 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: make gomod
-          cl_repo: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-          cl_image_tag: ${{ github.sha }}
-          artifacts_location: ./integration-tests/chaos/logs
-          publish_check_name: CCIP Chaos Test Results
-          publish_report_paths: ./tests-chaos-report.xml
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-          CGO_ENABLED: "1"
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-
-      ## Run cleanup regardless of job outcome
-      - name: cleanup
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/cleanup@eccde1970eca69f079d3efb3409938a72ade8497 # v2.2.13
-        with:
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
-
-  ccip-chaos-with-load-tests:
-    environment: integration
-    permissions:
-      issues: read
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    name: ccip-load-with-chaos-tests
-    runs-on: ubuntu-latest
-    needs: [ build-chainlink, build-test-image ]
-    env:
-      TEST_SUITE: load
-      TEST_ARGS: -test.timeout 1h
-      CHAINLINK_COMMIT_SHA: ${{ github.sha }}
-      CHAINLINK_ENV_USER: ${{ github.actor }}
-      TEST_TRIGGERED_BY: ccip-cron-chaos-and-load-eth
-      TEST_LOG_LEVEL: debug
-      DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable
-      GH_TOKEN: ${{ github.token }}
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: CCIP load with chaos test
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Prepare Base64 TOML override for CCIP secrets
-        uses: ./.github/actions/setup-create-base64-config-ccip
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-          chainlinkVersion: ${{ github.sha }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: ${{ vars.GRAFANA_URL }}
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-      - name: Run Load With Chaos Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@eccde1970eca69f079d3efb3409938a72ade8497 # v2.2.13
-        with:
-          test_command_to_run: cd ./integration-tests/ccip-tests && go test -timeout 2h -count=1 -json -test.parallel 4 -run '^TestLoadCCIPStableWithPodChaosDiffCommitAndExec' ./load 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: make gomod
-          cl_repo: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-          cl_image_tag: ${{ github.sha }}
-          artifacts_location: ./integration-tests/load/logs
-          publish_check_name: CCIP Chaos With Load Test Results
-          publish_report_paths: ./tests-chaos-with-load-report.xml
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-          CGO_ENABLED: "1"
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-
-      ## Run cleanup regardless of job outcome
-      - name: cleanup
-        if: always()
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/cleanup@eccde1970eca69f079d3efb3409938a72ade8497 # v2.2.13
-        with:
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
diff --git a/.github/workflows/ccip-live-testnet-tests.yml b/.github/workflows/ccip-live-testnet-tests.yml
deleted file mode 100644
index 5b86b1e1a1..0000000000
--- a/.github/workflows/ccip-live-testnet-tests.yml
+++ /dev/null
@@ -1,302 +0,0 @@
-name: CCIP On-Demand Live Testnet Tests
-on:
-  schedule:
-    - cron: '0 */12 * * *'
-  workflow_dispatch:
-    inputs:
-      base64_test_input: # base64 encoded toml for test input
-        description: 'Base64 encoded toml test input'
-        required: false
-      slackMemberID:
-        description: 'Slack member ID to notify'
-        required: false
-      test_type:
-        description: 'Type of test to run'
-        required: false
-        type: choice
-        options:
-          - 'load'
-          - 'smoke'
-
-# Only run 1 of this workflow at a time
-concurrency:
-  group: live-testnet-tests
-  cancel-in-progress: true
-
-env:
-  CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-  CHAINLINK_VERSION: ${{ github.sha }}
-  CHAINLINK_TEST_VERSION: ${{ github.sha }}
-  ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ github.sha }}
-  INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com
-  AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws
-
-jobs:
-  build-chainlink:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    name: Build Chainlink Image
-    runs-on: ubuntu20.04-16cores-64GB
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Check if image exists
-        id: check-image
-        uses: smartcontractkit/chainlink-github-actions/docker/image-exists@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        with:
-          repository: chainlink
-          tag: ${{ env.CHAINLINK_VERSION }}
-          AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-      - name: Build Image
-        if: steps.check-image.outputs.exists == 'false'
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        env:
-          GH_TOKEN: ${{ github.token }}
-        with:
-          cl_repo: smartcontractkit/chainlink-ccip
-          cl_ref: ${{ env.CHAINLINK_VERSION }}
-          push_tag: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink:${{ env.CHAINLINK_VERSION }}
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Chainlink Image
-        continue-on-error: true
-
-  build-test-image:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    name: Build Test Image
-    runs-on: ubuntu20.04-16cores-64GB
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Test Image
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Build Test Image
-        uses: ./.github/actions/build-test-image
-        with:
-          tag: ${{ env.CHAINLINK_TEST_VERSION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-
-  ccip-load-test:
-    name: CCIP Load Test
-    environment: integration
-    runs-on: ubuntu-latest
-    needs: [ build-chainlink, build-test-image ]
-    # if the event is a scheduled event or the test type is load and no previous job failed
-    if: ${{ (github.event_name == 'schedule' || inputs.test_type == 'load') && !contains(needs.*.result, 'failure') }}
-    permissions:
-      issues: read
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      CHAINLINK_ENV_USER: ${{ github.actor }}
-      SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
-      SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }}
-      TEST_LOG_LEVEL: info
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-      ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests
-      BASE64_NETWORK_CONFIG: ${{ secrets.BASE64_NETWORK_CONFIG }}
-
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: CCIP Load Test
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-        with:
-          ref: ${{ env.REF_NAME }}
-      - name: Prepare Base64 TOML override
-        shell: bash
-        run: |
-          SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$SLACK_USER
-          if [[ -z "$SLACK_USER" ]]; then
-            SLACK_USER="${{ secrets.QA_SLACK_USER }}"
-          fi
-          echo "SLACK_USER=$SLACK_USER" >> "$GITHUB_ENV"
-          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
-            BASE64_CCIP_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH)
-            echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE
-            echo "BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-            echo "TEST_BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-          fi
-          if [[ "${{ github.event_name }}" == "schedule" ]]; then
-            BASE64_CCIP_CONFIG_OVERRIDE=$(base64 -w 0 -i ./integration-tests/ccip-tests/testconfig/override/mainnet.toml)
-            echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE
-            echo "BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-            echo "TEST_BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-          fi
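The override plumbing above is a plain base64 round-trip over a TOML file. A minimal sketch, with the assumption that the test framework consuming TEST_BASE64_CCIP_CONFIG_OVERRIDE decodes it back into TOML (the decode side is not shown anywhere in this workflow):

    # Encode, exactly as the schedule branch does (path taken from the workflow)
    BASE64_CCIP_CONFIG_OVERRIDE=$(base64 -w 0 -i ./integration-tests/ccip-tests/testconfig/override/mainnet.toml)
    # Assumed decode on the consuming side (illustrative only)
    echo "$BASE64_CCIP_CONFIG_OVERRIDE" | base64 -d > /tmp/override.toml
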
-      - name: step summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_TEST_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-      - name: Prepare Base64 TOML override for CCIP secrets
-        uses: ./.github/actions/setup-create-base64-config-ccip
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: ${{ vars.GRAFANA_URL }}
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        env:
-          TEST_SUITE: load
-          TEST_ARGS: -test.timeout 900h
-          DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable
-          RR_MEM: 8Gi
-          RR_CPU: 4
-          DETACH_RUNNER: true
-          TEST_TRIGGERED_BY: ccip-load-test-ci
-        with:
-          test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 70m -count=1 -json -run ^TestLoadCCIPStableRPS$ ./load 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
-          artifacts_location: ./integration-tests/load/logs/payload_ccip.json
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          should_cleanup: false
-
-  ccip-smoke-test:
-    name: CCIP Smoke Test
-    environment: integration
-    runs-on: ubuntu-latest
-    needs: [ build-chainlink, build-test-image ]
-    # only run when manually dispatched with test type 'smoke' and no previous job failed
-    if: ${{ github.event_name == 'workflow_dispatch' && inputs.test_type == 'smoke' && !contains(needs.*.result, 'failure') }}
-    permissions:
-      issues: read
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      CHAINLINK_ENV_USER: ${{ github.actor }}
-      SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
-      SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }}
-      TEST_LOG_LEVEL: info
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-      ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests
-      BASE64_NETWORK_CONFIG: ${{ secrets.BASE64_NETWORK_CONFIG }}
-
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: CCIP Smoke Test
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-        with:
-          ref: ${{ env.REF_NAME }}
-      - name: Prepare Base64 TOML override
-        shell: bash
-        run: |
-          SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$SLACK_USER
-          if [[ -z "$SLACK_USER" ]]; then
-            SLACK_USER="${{ secrets.QA_SLACK_USER }}"
-          fi
-          echo "SLACK_USER=$SLACK_USER" >> "$GITHUB_ENV"
-          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
-            BASE64_CCIP_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH)
-            echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE
-            echo "BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-            echo "TEST_BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-          fi
-      - name: step summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_TEST_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-      - name: Prepare Base64 TOML override for CCIP secrets
-        uses: ./.github/actions/setup-create-base64-config-ccip
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: ${{ vars.GRAFANA_URL }}
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        env:
-          TEST_SUITE: smoke
-          TEST_ARGS: -test.timeout 900h
-          DETACH_RUNNER: true
-          DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable
-          RR_MEM: 8Gi
-          RR_CPU: 4
-          TEST_TRIGGERED_BY: ccip-smoke-test-ci
-        with:
-          test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 70m -count=1 -p 30 -json -run ^TestSmokeCCIPForBidirectionalLane$ ./smoke 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
-          artifacts_location: ./integration-tests/smoke/logs/payload_ccip.json
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          cache_key_id: ccip-smoke-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          should_cleanup: false
\ No newline at end of file
diff --git a/.github/workflows/ccip-load-tests.yml b/.github/workflows/ccip-load-tests.yml
deleted file mode 100644
index 14c8b3543d..0000000000
--- a/.github/workflows/ccip-load-tests.yml
+++ /dev/null
@@ -1,280 +0,0 @@
-name: CCIP Load Test
-on:
-  push:
-    branches:
-      - ccip-develop
-  workflow_dispatch:
-    inputs:
-      base64_test_input: # base64 encoded toml for test input
-        description: 'Base64 encoded toml test input'
-        required: false
-
-# Only run 1 of this workflow at a time per PR
-concurrency:
-  group: load-ccip-tests-chainlink-${{ github.ref }}
-  cancel-in-progress: true
-
-env:
-  CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-  CHAINLINK_VERSION: ${{ github.sha }}
-  INPUT_CHAINLINK_TEST_VERSION: ${{ github.sha }}
-  ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ github.sha }}
-  INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com
-  AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws
-
-jobs:
-  build-chainlink:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    name: Build Chainlink Image
-    runs-on: ubuntu20.04-16cores-64GB
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Check if image exists
-        id: check-image
-        uses: smartcontractkit/chainlink-github-actions/docker/image-exists@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        with:
-          repository: chainlink
-          tag: ${{ env.CHAINLINK_VERSION }}
-          AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-      - name: Build Image
-        if: steps.check-image.outputs.exists == 'false'
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        env:
-          GH_TOKEN: ${{ github.token }}
-        with:
-          cl_repo: smartcontractkit/chainlink-ccip
-          cl_ref: ${{ env.CHAINLINK_VERSION }}
-          push_tag: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink:${{ env.CHAINLINK_VERSION }}
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Chainlink Image
-        continue-on-error: true
-
-  build-test-image:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    name: Build Test Image
-    runs-on: ubuntu20.04-16cores-64GB
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Test Image
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Build Test Image
-        uses: ./.github/actions/build-test-image
-        with:
-          tag: ${{ env.INPUT_CHAINLINK_TEST_VERSION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-
-  ccip-load-test:
-    name: CCIP Load Test
-    environment: integration
-    runs-on: ubuntu-latest
-    needs: [ build-chainlink, build-test-image ]
-    if: ${{ always() && !contains(needs.*.result, 'failure') }}
-    permissions:
-      issues: read
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      CHAINLINK_ENV_USER: ${{ github.actor }}
-      SLACK_USER: ${{ inputs.slackMemberID }}
-      SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
-      SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }}
-      TEST_LOG_LEVEL: info
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-      ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests
-      BASE64_NETWORK_CONFIG: ${{ secrets.BASE64_NETWORK_CONFIG }}
-
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: CCIP Load Test
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-        with:
-          ref: ${{ env.REF_NAME }}
-      - name: Sets env vars
-        shell: bash
-        run: |
-          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
-            BASE64_CCIP_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH)
-            echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE
-            echo "BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-            echo "TEST_BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-          fi
-      - name: step summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.INPUT_CHAINLINK_TEST_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-      - name: Prepare Base64 TOML override for CCIP secrets
-        uses: ./.github/actions/setup-create-base64-config-ccip
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: ${{ vars.GRAFANA_URL }}
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        env:
-          TEST_SUITE: load
-          TEST_ARGS: -test.timeout 900h
-          DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable
-          RR_MEM: 8Gi
-          RR_CPU: 4
-          TEST_TRIGGERED_BY: ccip-load-test-ci
-        with:
-          test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 70m -count=1 -json -run ^TestLoadCCIPStableRPS$ ./load 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
-          artifacts_location: ./integration-tests/load/logs/payload_ccip.json
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          should_cleanup: "true"
-
-
-  ccip-version-compatibility-test:
-    name: CCIP Load With Version Compatibility Test
-    environment: integration
-    runs-on: ubuntu-latest
-    needs: [ build-chainlink, build-test-image ]
-    if: ${{ always() && !contains(needs.*.result, 'failure') }}
-    permissions:
-      issues: read
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      SLACK_USER: ${{ inputs.slackMemberID }}
-      SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
-      SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }}
-      TEST_LOG_LEVEL: info
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-      ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests
-
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: CCIP Load With Version Compatibility Test
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-        with:
-          ref: ${{ env.REF_NAME }}
-      - name: Sets env vars
-        id: set_env_vars
-        shell: bash
-        run: |
-          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
-            BASE64_CCIP_CONFIG_OVERRIDE=$(jq -r '.inputs.base64_test_input' $GITHUB_EVENT_PATH)
-            echo ::add-mask::$BASE64_CCIP_CONFIG_OVERRIDE
-            echo "BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-            echo "TEST_BASE64_CCIP_CONFIG_OVERRIDE=$BASE64_CCIP_CONFIG_OVERRIDE" >> $GITHUB_ENV
-          fi
-          echo "UPGRADE_VERSION=${{ env.CHAINLINK_VERSION }}" >> $GITHUB_ENV
-          echo "UPGRADE_IMAGE=${{ env.CHAINLINK_IMAGE }}" >> $GITHUB_ENV
-          untrimmed_ver=$(curl --header "Authorization: token ${{ secrets.GITHUB_TOKEN }}" --request GET https://api.github.com/repos/${{ github.repository }}/releases | jq -r --arg SUFFIX "release" '.[] | select(.tag_name | endswith("release")) | .tag_name' | sort -V | tail -n 1)
-          last_release="${untrimmed_ver:1}"
-          echo "last_release=${last_release}" >> $GITHUB_OUTPUT
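The release-discovery pipeline above keeps only tags ending in "release", version-sorts them, takes the newest, and strips the leading "v". An illustrative dry run with made-up tag names (the real list comes from the GitHub releases API):

    # Hypothetical stand-in for the jq-filtered tag list
    printf 'v2.7.0-ccip1.2.1-release\nv2.8.0-ccip1.4.0-release\n' \
      | sort -V | tail -n 1            # -> v2.8.0-ccip1.4.0-release
    # "${untrimmed_ver:1}" then drops the leading "v": 2.8.0-ccip1.4.0-release
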
-      - name: step summary
-        shell: bash
-        run: |
-          echo "### chainlink upgrade image used for this test run :link:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ env.INPUT_CHAINLINK_TEST_VERSION }}\`" >> $GITHUB_STEP_SUMMARY
-          echo "### testing upgrade against release :link:" >> $GITHUB_STEP_SUMMARY
-          echo "\`${{ steps.set_env_vars.outputs.last_release }}\`" >> $GITHUB_STEP_SUMMARY
-      - name: Prepare Base64 TOML override for CCIP secrets
-        uses: ./.github/actions/setup-create-base64-config-ccip
-        with:
-          runId: ${{ github.run_id }}
-          testLogCollect: ${{ vars.TEST_LOG_COLLECT }}
-          chainlinkImage: ${{ env.CHAINLINK_IMAGE }}
-          chainlinkVersion: ${{ github.sha }}
-          lokiEndpoint: ${{ secrets.LOKI_URL }}
-          lokiTenantId: ${{ vars.LOKI_TENANT_ID }}
-          lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }}
-          logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }}
-          grafanaUrl: ${{ vars.GRAFANA_URL }}
-          grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs"
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        env:
-          TEST_SUITE: load
-          TEST_ARGS: -test.timeout 900h
-          DATABASE_URL: postgresql://postgres:node@localhost:5432/chainlink_test?sslmode=disable
-          RR_MEM: 8Gi
-          RR_CPU: 4
-          TEST_TRIGGERED_BY: ccip-load-upgrade-test-ci
-        with:
-          test_command_to_run: cd ./integration-tests/ccip-tests && go test -v -timeout 70m -count=1 -json -run ^TestLoadCCIPWithUpgradeNodeVersion$ ./load 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.AWS_ECR_REPO_PUBLIC_REGISTRY }}/w0i8p0z9/chainlink-ccip # releases are published to public registry
-          cl_image_tag: ${{ steps.set_env_vars.outputs.last_release }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-          triggered_by: ${{ env.TEST_TRIGGERED_BY }}
-          artifacts_location: ./integration-tests/load/logs/payload_ccip.json
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          cache_key_id: ccip-load-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          should_cleanup: "true"
\ No newline at end of file
diff --git a/.github/workflows/certora.yml b/.github/workflows/certora.yml
index e3a89ebf27..7a2c63b81a 100644
--- a/.github/workflows/certora.yml
+++ b/.github/workflows/certora.yml
@@ -1,18 +1,6 @@
 name: certora
 
-on:
-  push:
-    branches:
-      - main
-      - certora
-      - ccip-gho
-  pull_request:
-    branches:
-      - main
-      - certora
-      - ccip-gho
-
-  workflow_dispatch:
+on: push
 
 jobs:
   verify:
@@ -29,7 +17,7 @@ jobs:
       - name: Install java
         uses: actions/setup-java@v1
-        with: { java-version: '11', java-package: jre }
+        with: { java-version: "11", java-package: jre }
       - name: Install certora cli
         run: pip install certora-cli==7.6.3
diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml
deleted file mode 100644
index c9f1b3626b..0000000000
--- a/.github/workflows/changelog.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# This action checks PRs to see if any CHANGELOG* files were updated.
-# If none were, it will add a message to the PR asking if it would make sense to do so.
-#
-name: Changelog
-
-on: pull_request
-
-jobs:
-  changelog:
-    # For security reasons, GITHUB_TOKEN is read-only on forks, so we cannot leave comments on PRs.
-    # This check skips the job if it is detected we are running on a fork.
-    if: ${{ github.event.pull_request.head.repo.full_name == 'smartcontractkit/chainlink' }}
-    name: Changelog checker
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check for changed files
-        id: changedfiles
-        uses: umani/changed-files@d7f842d11479940a6036e3aacc6d35523e6ba978 # Version 4.1.0
-        with:
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          pattern: '^docs/CHANGELOG.*$'
-      - name: Make a comment
-        uses: unsplash/comment-on-pr@ffe8f97ccc63ce12c3c23c6885b169db67958d3b # Version 1.3.0
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        if: contains(steps.changedfiles.outputs.files_updated, 'CHANGELOG') != true && contains(steps.changedfiles.outputs.files_created, 'CHANGELOG') != true
-        with:
-          msg: "I see that you haven't updated any CHANGELOG files. Would it make sense to do so?"
-          check_for_duplicate_msg: true
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Changelog checker
-        continue-on-error: true
diff --git a/.github/workflows/ci-core.yml b/.github/workflows/ci-core.yml
deleted file mode 100644
index 5ef5083062..0000000000
--- a/.github/workflows/ci-core.yml
+++ /dev/null
@@ -1,278 +0,0 @@
-name: CI Core
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# Run on key branches to make sure integration is good, otherwise run on all PRs
-on:
-  push:
-    branches:
-      - develop
-      - ccip-develop
-      - "release/*"
-      - "ccip-release/*"
-  merge_group:
-  pull_request:
-  schedule:
-    - cron: "0 0 * * *"
-
-jobs:
-  golangci:
-    if: ${{ github.event_name == 'pull_request' || github.event_name == 'schedule' }}
-    name: lint
-    runs-on: ubuntu20.04-8cores-32GB
-    steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Golang Lint
-        uses: ./.github/actions/golangci-lint
-        with:
-          gc-basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }}
-          gc-host: ${{ secrets.GRAFANA_CLOUD_HOST }}
-      - name: Notify Slack
-        if: ${{ failure() && (github.event_name == 'merge_group' || github.event.branch == 'develop') }}
-        uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
-        env:
-          SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }}
-        with:
-          channel-id: "#team-core"
-          slack-message: "golangci-lint failed: ${{ job.html_url }}\n${{ format('https://github.com/smartcontractkit/chainlink/actions/runs/{0}', github.run_id) }}"
-
-  core:
-    strategy:
-      fail-fast: false
-      matrix:
-        cmd: ["go_core_tests", "go_core_race_tests", "go_core_fuzz"]
-    name: Core Tests (${{ matrix.cmd }})
-    runs-on: ubuntu20.04-64cores-256GB
-    env:
-      CL_DATABASE_URL: postgresql://postgres:postgres@localhost:5432/chainlink_test?sslmode=disable
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup node
-        uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-        with:
-          prod: "true"
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-      - name: Setup Solana
-        uses: ./.github/actions/setup-solana
-      - name: Setup wasmd
-        uses: ./.github/actions/setup-wasmd
-      - name: Setup Postgres
-        uses: ./.github/actions/setup-postgres
-      - name: Touching core/web/assets/index.html
-        run: mkdir -p core/web/assets && touch core/web/assets/index.html
-      - name: Download Go vendor packages
-        run: go mod download
-      - name: Build binary
-        run: go build -o chainlink.test .
-      - name: Setup DB
-        run: ./chainlink.test local db preparetest
-      - name: Install LOOP Plugins
-        run: |
-          pushd $(go list -m -f "{{.Dir}}" github.com/smartcontractkit/chainlink-feeds)
-          go install ./cmd/chainlink-feeds
-          popd
-          pushd $(go list -m -f "{{.Dir}}" github.com/smartcontractkit/chainlink-solana)
-          go install ./pkg/solana/cmd/chainlink-solana
-          popd
-          pushd $(go list -m -f "{{.Dir}}" github.com/smartcontractkit/chainlink-starknet/relayer)
-          go install ./pkg/chainlink/cmd/chainlink-starknet
-          popd
-      - name: Increase Race Timeout
-        if: github.event.schedule != ''
-        run: |
-          echo "TIMEOUT=10m" >> $GITHUB_ENV
-          echo "COUNT=50" >> $GITHUB_ENV
-      - name: Run tests
-        id: run-tests
-        env:
-          OUTPUT_FILE: ./output.txt
-          USE_TEE: false
-        run: ./tools/bin/${{ matrix.cmd }} ./...
-      - name: Print Filtered Test Results
-        if: ${{ failure() && matrix.cmd == 'go_core_tests' }}
-        uses: smartcontractkit/chainlink-github-actions/go/go-test-results-parsing@a052942591aaa12716eb9835b490d812a77d0831 # v2.3.1
-        with:
-          results-file: ./output.txt
-          output-file: ./output-short.txt
-      - name: Print Races
-        if: ${{ failure() && matrix.cmd == 'go_core_race_tests' }}
-        run: find race.* | xargs cat
-      - name: Print postgres logs
-        if: always()
-        run: docker compose logs postgres | tee ../../../postgres_logs.txt
-        working-directory: ./.github/actions/setup-postgres
-      - name: Store logs artifacts
-        if: always()
-        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
-        with:
-          name: ${{ matrix.cmd }}_logs
-          path: |
-            ./output.txt
-            ./output-short.txt
-            ./race.*
-            ./coverage.txt
-            ./postgres_logs.txt
-      - name: Notify Slack
-        if: ${{ failure() && matrix.cmd == 'go_core_race_tests' && (github.event_name == 'merge_group' || github.event.branch == 'develop') }}
-        uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
-        env:
-          SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }}
-        with:
-          channel-id: "#topic-data-races"
-          slack-message: "Race tests failed: ${{ job.html_url }}\n${{ format('https://github.com/smartcontractkit/chainlink/actions/runs/{0}', github.run_id) }}"
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Core Tests (${{ matrix.cmd }})
-          test-results-file: '{"testType":"go","filePath":"./output.txt"}'
-        continue-on-error: true
-
-  detect-flakey-tests:
-    needs: [core]
-    name: Flakey Test Detection
-    runs-on: ubuntu-latest
-    if: always()
-    env:
-      CL_DATABASE_URL: postgresql://postgres:postgres@localhost:5432/chainlink_test?sslmode=disable
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup node
-        uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-        with:
-          prod: "true"
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-      - name: Setup Postgres
-        uses: ./.github/actions/setup-postgres
-      - name: Touching core/web/assets/index.html
-        run: mkdir -p core/web/assets && touch core/web/assets/index.html
-      - name: Download Go vendor packages
-        run: go mod download
-      - name: Build binary
-        run: go build -o chainlink.test .
-      - name: Setup DB
-        run: ./chainlink.test local db preparetest
-      - name: Load test outputs
-        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2
-        with:
-          path: ./artifacts
-      - name: Build flakey test runner
-        run: go build ./tools/flakeytests/cmd/runner
-      - name: Re-run tests
-        env:
-          GRAFANA_CLOUD_BASIC_AUTH: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }}
-          GRAFANA_CLOUD_HOST: ${{ secrets.GRAFANA_CLOUD_HOST }}
-          GITHUB_EVENT_PATH: ${{ github.event_path }}
-          GITHUB_EVENT_NAME: ${{ github.event_name }}
-          GITHUB_REPO: ${{ github.repository }}
-          GITHUB_RUN_ID: ${{ github.run_id }}
-        run: |
-          ./runner \
-            -grafana_auth=$GRAFANA_CLOUD_BASIC_AUTH \
-            -grafana_host=$GRAFANA_CLOUD_HOST \
-            -gh_sha=$GITHUB_SHA \
-            -gh_event_path=$GITHUB_EVENT_PATH \
-            -gh_event_name=$GITHUB_EVENT_NAME \
-            -gh_run_id=$GITHUB_RUN_ID \
-            -gh_repo=$GITHUB_REPO \
-            -command=./tools/bin/go_core_tests \
-            `ls -R ./artifacts/go_core_tests*/output.txt`
-      - name: Store logs artifacts
-        if: always()
-        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
-        with:
-          name: flakey_test_runner_logs
-          path: |
-            ./output.txt
-
-  scan:
-    name: SonarQube Scan
-    needs: [core]
-    if: ${{ always() }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          fetch-depth: 0 # fetches all history for all tags and branches to provide more metadata for sonar reports
-      - name: Download all workflow run artifacts
-        uses: actions/download-artifact@9782bd6a9848b53b110e712e20e42d89988822b7 # v3.0.1
-      - name: Set SonarQube Report Paths
-        id: sonarqube_report_paths
-        shell: bash
-        run: |
-          echo "sonarqube_tests_report_paths=$(find go_core_tests_logs -name output.txt | paste -sd "," -)" >> $GITHUB_OUTPUT
-          echo "sonarqube_coverage_report_paths=$(find go_core_tests_logs -name coverage.txt | paste -sd "," -)" >> $GITHUB_OUTPUT
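The report-path assembly above is just find piped into paste. A small illustration, assuming two matrix runs left artifacts behind (the directory names are made up):

    # Given go_core_tests_logs/a/output.txt and go_core_tests_logs/b/output.txt:
    find go_core_tests_logs -name output.txt | paste -sd "," -
    # -> go_core_tests_logs/a/output.txt,go_core_tests_logs/b/output.txt
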
-      - name: SonarQube Scan
-        uses: sonarsource/sonarqube-scan-action@69c1a75940dec6249b86dace6b630d3a2ae9d2a7 # v2.0.1
-        with:
-          args: >
-            -Dsonar.go.tests.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_tests_report_paths }}
-            -Dsonar.go.coverage.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_coverage_report_paths }}
-            -Dsonar.go.golangci-lint.reportPaths=golangci-lint-report/golangci-lint-report.xml
-        env:
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-          SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: SonarQube Scan
-        continue-on-error: true
-
-  clean:
-    name: Clean Go Tidy & Generate
-    if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') }}
-    runs-on: ubuntu20.04-8cores-32GB
-    defaults:
-      run:
-        shell: bash
-    steps:
-      - name: Check for Skip Tests Label
-        if: contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests')
-        run: |
-          echo "## \`skip-smoke-tests\` label is active, skipping E2E smoke tests" >> $GITHUB_STEP_SUMMARY
-          exit 0
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          fetch-depth: 0
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          only-modules: "true"
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-      - run: make generate # generate code and install go deps
-      - name: Ensure clean after generate
-        run: git diff --stat --exit-code
-      - run: make gomodtidy
-      - name: Ensure clean after tidy
-        run: git diff --minimal --exit-code
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Clean Go Tidy & Generate
-        continue-on-error: true
diff --git a/.github/workflows/ci-scripts.yml b/.github/workflows/ci-scripts.yml
deleted file mode 100644
index 2e2e031c92..0000000000
--- a/.github/workflows/ci-scripts.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-name: CI Scripts
-
-on:
-  push:
-  pull_request:
-
-jobs:
-  lint-scripts:
-    if: ${{ github.event_name == 'pull_request' }}
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Golang Lint
-        uses: ./.github/actions/golangci-lint
-        with:
-          name: lint-scripts
-          go-directory: core/scripts/ccip
-          go-version-file: core/scripts/go.mod
-          go-module-file: core/scripts/go.sum
-          gc-basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }}
-          gc-host: ${{ secrets.GRAFANA_CLOUD_HOST }}
-
-  test-scripts:
-    if: ${{ github.event_name == 'pull_request' }}
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-        with:
-          go-version-file: core/scripts/go.mod
-          go-module-file: core/scripts/go.sum
-      - name: Run Tests
-        shell: bash
-        working-directory: core/scripts/ccip
-        run: go test ./...
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: test-scripts
-        continue-on-error: true
diff --git a/.github/workflows/client-compatibility-tests.yml b/.github/workflows/client-compatibility-tests.yml
deleted file mode 100644
index 6feb61d78f..0000000000
--- a/.github/workflows/client-compatibility-tests.yml
+++ /dev/null
@@ -1,333 +0,0 @@
-name: Client Compatibility Tests
-on:
-  schedule:
-    - cron: "30 5 * * *" # Run every night at midnight + 30min EST
-  push:
-    tags:
-      - "*"
-  workflow_dispatch:
-
-env:
-  CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink
-  INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com
-  MOD_CACHE_VERSION: 2
-
-jobs:
-  # Build Test Dependencies
-
-  build-chainlink:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    name: Build Chainlink Image
-    runs-on: ubuntu-latest
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Chainlink Image
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }}
-      - name: Build Chainlink Image
-        uses: ./.github/actions/build-chainlink-image
-        with:
-          tag_suffix: ""
-          dockerfile: core/chainlink.Dockerfile
-          git_commit_sha: ${{ github.sha }}
-          GRAFANA_CLOUD_BASIC_AUTH: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }}
-          GRAFANA_CLOUD_HOST: ${{ secrets.GRAFANA_CLOUD_HOST }}
-          AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-
-  build-tests:
-    environment: integration
-    permissions:
-      id-token: write
-      contents: read
-    name: Build Tests Binary
-    runs-on: ubuntu-latest
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Build Tests Binary
-        continue-on-error: true
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }}
-      - name: Build Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-tests@bce4caa154b1e0e652d042788e14c8870832acd2 # v2.3.0
-        with:
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          go_tags: embed
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          binary_name: tests
-
-  # End Build Test Dependencies
-
-  client-compatibility-matrix:
-    environment: integration
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    needs: [build-chainlink, build-tests]
-    env:
-      SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2
-      CHAINLINK_COMMIT_SHA: ${{ github.sha }}
-      CHAINLINK_ENV_USER: ${{ github.actor }}
-      TEST_LOG_LEVEL: debug
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - name: ocr-geth
-            os: ubuntu-latest
-            test: TestOCRBasic
-            file: ocr
-            client: geth
-            timeout: 30m
-            pyroscope_env: ci-smoke-ocr-geth-simulated
-          - name: ocr-nethermind
-            test: TestOCRBasic
-            file: ocr
-            client: nethermind
-            timeout: 30m
-            pyroscope_env: ci-smoke-ocr-nethermind-simulated
-          - name: ocr-besu
-            test: TestOCRBasic
-            file: ocr
-            client: besu
-            timeout: 30m
-            pyroscope_env: ci-smoke-ocr-besu-simulated
-          - name: ocr-erigon
-            test: TestOCRBasic
-            file: ocr
-            client: erigon
-            timeout: 30m
-            pyroscope_env: ci-smoke-ocr-erigon-simulated
-          - name: ocr2-geth
-            test: "^TestOCRv2Basic/plugins$"
-            file: ocr2
-            client: geth
-            timeout: 30m
-            pyroscope_env: ci-smoke-ocr2-geth-simulated
-          - name: ocr2-nethermind
-            test: "^TestOCRv2Basic/plugins$"
-            file: ocr2
-            client: nethermind
-            timeout: 30m
-            pyroscope_env: ci-smoke-nethermind-evm-simulated
-          - name: ocr2-besu
-            test: "^TestOCRv2Basic/plugins$"
-            file: ocr2
-            client: besu
-            timeout: 30m
-            pyroscope_env: ci-smoke-ocr2-besu-simulated
-          - name: ocr2-erigon
-            test: "^TestOCRv2Basic/plugins$"
-            file: ocr2
-            client: erigon
-            timeout: 60m
-            pyroscope_env: ci-smoke-ocr2-erigon-simulated
-    runs-on: ubuntu-latest
-    name: Client Compatibility Test ${{ matrix.name }}
-    steps:
-      - name: Download Tests Binary
-        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2
-        with:
-          name: tests
-      - name: Prepare Base64 TOML config
-        env:
-          SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2
-          PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }}
-          PYROSCOPE_ENVIRONMENT: ci-client-compatability-${{ matrix.client }}-testnet
-          PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }}
-          ETH2_EL_CLIENT: ${{ matrix.client }}
-          CHAINLINK_VERSION: ${{ github.sha }}
-        run: |
-          convert_to_toml_array() {
-            local IFS=','
-            local input_array=($1)
-            local toml_array_format="["
-
-            for element in "${input_array[@]}"; do
-              toml_array_format+="\"$element\","
-            done
-
-            toml_array_format="${toml_array_format%,}]"
-            echo "$toml_array_format"
-          }
-
-          selected_networks=$(convert_to_toml_array "$SELECTED_NETWORKS")
-
-          if [ -n "$ETH2_EL_CLIENT" ]; then
-            execution_layer="$ETH2_EL_CLIENT"
-          else
-            execution_layer="geth"
-          fi
-
-          if [ -n "$PYROSCOPE_SERVER" ]; then
-            pyroscope_enabled=true
-          else
-            pyroscope_enabled=false
-          fi
-
-          cat << EOF > config.toml
-          [Network]
-          selected_networks=$selected_networks
-
-          [ChainlinkImage]
-          image="$CHAINLINK_IMAGE"
-          version="$CHAINLINK_VERSION"
-
-          [Pyroscope]
-          enabled=$pyroscope_enabled
-          server_url="$PYROSCOPE_SERVER"
-          environment="$PYROSCOPE_ENVIRONMENT"
-          key="$PYROSCOPE_KEY"
-
-          [PrivateEthereumNetwork]
-          consensus_type="pos"
-          consensus_layer="prysm"
-          execution_layer="$execution_layer"
-          wait_for_finalization=false
-
-          [PrivateEthereumNetwork.EthereumChainConfig]
-          chain_id=1337
-          genesis_delay=15
-          seconds_per_slot=3
-          validator_count=8
-          slots_per_epoch=2
-          addresses_to_fund=["0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"]
-          EOF
-
-          BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-          touch .root_dir
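convert_to_toml_array in the step above splits its argument on commas and re-quotes each element as a TOML string array. A worked example using the workflow's own SELECTED_NETWORKS value (function body condensed, same logic):

    # Same splitting/quoting logic as the step above, for illustration
    convert_to_toml_array() {
      local IFS=','
      local input_array=($1)
      local out="["
      for element in "${input_array[@]}"; do out+="\"$element\","; done
      echo "${out%,}]"
    }
    convert_to_toml_array "SIMULATED,SIMULATED_1,SIMULATED_2"
    # prints: ["SIMULATED","SIMULATED_1","SIMULATED_2"]
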
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@912bed7e07a1df4d06ea53a031e9773bb65dc7bd # v2.3.0
-        with:
-          test_command_to_run: ./tests -test.timeout ${{ matrix.timeout }} -test.run ${{ matrix.test }}
-          binary_name: tests
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ github.sha }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }}
-          dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }}
-          artifacts_location: ./logs
-          token: ${{ secrets.GITHUB_TOKEN }}
-          cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }}
-          cache_restore_only: "true"
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
-
-  start-slack-thread:
-    name: Start Slack Thread
-    if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }}
-    environment: integration
-    outputs:
-      thread_ts: ${{ steps.slack.outputs.thread_ts }}
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    runs-on: ubuntu-latest
-    needs: [client-compatibility-matrix]
-    steps:
-      - name: Debug Result
-        run: echo ${{ join(needs.*.result, ',') }}
-      - name: Main Slack Notification
-        uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
-        id: slack
-        with:
-          channel-id: ${{ secrets.QA_SLACK_CHANNEL }}
-          payload: |
-            {
-              "attachments": [
-                {
-                  "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}",
-                  "blocks": [
-                    {
-                      "type": "header",
-                      "text": {
-                        "type": "plain_text",
-                        "text": "Client Compatibility Test Results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:' }}",
-                        "emoji": true
-                      }
-                    },
-                    {
-                      "type": "section",
-                      "text": {
-                        "type": "mrkdwn",
-                        "text": "${{ contains(join(needs.*.result, ','), 'failure') && 'Some tests failed, notifying <@U060CGGPY8H>' || 'All Good!' }}"
-                      }
-                    },
}}" - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - post-test-results-to-slack: - name: Post Test Results for ${{matrix.product}} - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: start-slack-thread - strategy: - fail-fast: false - matrix: - product: [ocr, ocr2] - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Post Test Results to Slack - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^Client Compatibility Test ${{ matrix.product }}-(?.*?)$ - message_title: ${{ matrix.product }} - slack_channel_id: ${{ secrets.QA_SLACK_CHANNEL }} - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index a14b776d6e..0000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: 'CodeQL' - -on: - push: - branches: - - develop - pull_request: - # The branches below must be a subset of the branches above - branches: [develop] - schedule: - - cron: '23 19 * * 4' - -jobs: - analyze: - name: Analyze ${{ matrix.language }} - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - language: ['go', 'javascript'] - - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - - name: Set up Go - if: ${{ matrix.language == 'go' }} - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: 'go.mod' - - - name: Touching core/web/assets/index.html - if: ${{ matrix.language == 'go' }} - run: mkdir -p core/web/assets && touch core/web/assets/index.html - - - name: Initialize CodeQL - uses: github/codeql-action/init@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 - with: - languages: ${{ matrix.language }} - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@cdcdbb579706841c47f7063dda365e292e5cad7a # v2.13.4 - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Analyze ${{ matrix.language }} - continue-on-error: true diff --git a/.github/workflows/delete-deployments.yml b/.github/workflows/delete-deployments.yml deleted file mode 100644 index 6c2aa8482f..0000000000 --- a/.github/workflows/delete-deployments.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: 
Cleanup integration deployments -on: - workflow_dispatch: - schedule: - # every 10 mins - - cron: "*/10 * * * *" - -jobs: - cleanup: - name: Clean up integration environment deployments - runs-on: ubuntu-latest - steps: - - name: Checkout repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - - name: Clean up integration environment - uses: ./.github/actions/delete-deployments - with: - environment: integration - # Delete 300 deployments at a time - num-of-pages: 3 - # We start with page 2 because usually the first 200 deployments are still active, so we cannot delete them - starting-page: 2 - - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Clean up integration environment deployments - continue-on-error: true diff --git a/.github/workflows/dependency-check.yml b/.github/workflows/dependency-check.yml deleted file mode 100644 index 1ad3c50b35..0000000000 --- a/.github/workflows/dependency-check.yml +++ /dev/null @@ -1,56 +0,0 @@ -name: Dependency Vulnerability Check - -on: - push: - -jobs: - changes: - name: Detect changes - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.changes.outputs.src }} - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2.11.1 - id: changes - with: - filters: | - src: - - '**/*go.sum' - - '**/*go.mod' - - '.github/workflows/dependency-check.yml' - Go: - runs-on: ubuntu-latest - needs: [changes] - steps: - - name: Check out code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - - name: Set up Go - if: needs.changes.outputs.src == 'true' - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: 'go.mod' - id: go - - - name: Write Go Modules list - if: needs.changes.outputs.src == 'true' - run: go list -json -m all > go.list - - - name: Check vulnerabilities - if: needs.changes.outputs.src == 'true' - uses: sonatype-nexus-community/nancy-github-action@main - with: - nancyVersion: "v1.0.39" - - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Go - continue-on-error: true diff --git a/.github/workflows/goreleaser-build-publish-develop.yml b/.github/workflows/goreleaser-build-publish-develop.yml deleted file mode 100644 index 942f7d2243..0000000000 --- a/.github/workflows/goreleaser-build-publish-develop.yml +++ /dev/null @@ -1,89 +0,0 @@ -name: "Build publish Chainlink develop on private ECR" - -on: - push: - branches: - - develop - -jobs: - push-chainlink-develop-goreleaser: - runs-on: - labels: ubuntu20.04-16cores-64GB - outputs: - goreleaser-metadata: ${{ steps.build-sign-publish.outputs.goreleaser-metadata }} - goreleaser-artifacts: ${{ steps.build-sign-publish.outputs.goreleaser-artifacts }} - environment: build-develop - permissions: - id-token: write - contents: read - steps: - - name: Checkout repository - uses: 
actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Configure aws credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 - with: - role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_ARN }} - role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} - aws-region: ${{ secrets.AWS_REGION }} - - name: Build, sign, and publish image - id: build-sign-publish - uses: ./.github/actions/goreleaser-build-sign-publish - with: - enable-docker-publish: "true" - docker-registry: ${{ secrets.AWS_DEVELOP_ECR_HOSTNAME }} - enable-goreleaser-snapshot: "true" - goreleaser-exec: ./tools/bin/goreleaser_wrapper - goreleaser-config: .goreleaser.develop.yaml - # ISSUE: https://github.com/golang/go/issues/52690 - zig-version: 0.11.0-dev.3380+7e0a02ee2 # TODO: update action to v0.11.x once released - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: push-chainlink-develop-goreleaser - continue-on-error: true - mercury-e2e-tests: - needs: [push-chainlink-develop-goreleaser] - runs-on: - labels: ubuntu-latest - environment: build-develop - permissions: - id-token: write - contents: read - steps: - - name: Checkout repository - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Configure aws credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 - with: - role-to-assume: ${{ secrets.AWS_ROLE_ARN_GATI }} - role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} - aws-region: ${{ secrets.AWS_REGION }} - - name: Get Github Token - id: get-gh-token - uses: smartcontractkit/chainlink-github-actions/github-app-token-issuer@main - with: - url: ${{ secrets.GATI_LAMBDA_FUNCTION_URL }} - - name: 'Dispatch Workflow: E2E Functional Tests' - id: dispatch-workflow-e2e-functional-tests - shell: bash - run: | - image_build_metadata=$(jq -n \ - --arg commit_sha "$GITHUB_SHA" \ - --arg run_url "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \ - '{ - commit_sha: $commit_sha, - originating_run_url: $run_url - }') - gh workflow run "e2e-functional-tests.yml" \ - --repo ${{ secrets.MERCURY_SERVER_REPO }} \ - --ref "main" \ - --field chainlink-ecr-repo-account="sdlc" \ - --field chainlink-image-build-metadata="${image_build_metadata}" \ - --field chainlink-image-tag="develop" - env: - GH_TOKEN: ${{ steps.get-gh-token.outputs.access-token }} diff --git a/.github/workflows/helm-chart-publish.yml b/.github/workflows/helm-chart-publish.yml deleted file mode 100644 index 156268d66b..0000000000 --- a/.github/workflows/helm-chart-publish.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Helm Publish - -on: - workflow_dispatch: - -jobs: - helm_release: - runs-on: ubuntu-latest - environment: build-develop - permissions: - id-token: write - contents: read - steps: - - name: Checkout repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - - name: Configure aws credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 - with: - role-to-assume: ${{ secrets.AWS_ROLE_ARN_GATI }} - role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }} - aws-region: ${{ secrets.AWS_REGION }} - - - name: 
Get Github Token - id: get-gh-token - uses: smartcontractkit/chainlink-github-actions/github-app-token-issuer@main - with: - url: ${{ secrets.GATI_LAMBDA_FUNCTION_URL }} - - - name: Install Helm - uses: azure/setup-helm@5119fcb9089d432beecbf79bb2c7915207344b78 # v3.5 - - - name: Run chart-releaser - uses: helm/chart-releaser-action@a917fd15b20e8b64b94d9158ad54cd6345335584 # v1.6.0 - with: - charts_dir: charts - config: .github/cr.yaml - env: - CR_TOKEN: "${{ steps.get-gh-token.outputs.access-token }}" diff --git a/.github/workflows/helm-chart.yml b/.github/workflows/helm-chart.yml deleted file mode 100644 index c988d14f30..0000000000 --- a/.github/workflows/helm-chart.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Helm Chart - -on: - pull_request: - paths: - - "charts/**" - - ".github/workflows/helm-chart.yml" - -jobs: - ci-lint-helm-charts: - runs-on: ubuntu-latest - permissions: - id-token: write - contents: read - actions: read - steps: - - name: ci-lint-helm-charts - uses: smartcontractkit/.github/actions/ci-lint-charts@9fd15fe8e698a5e28bfd06b3a91471c56568dcb3 # ci-lint-charts@0.1.1 - with: - # chart testing inputs - chart-testing-extra-args: "--lint-conf=lintconf.yaml" - # grafana inputs - metrics-job-name: ci-lint-helm-charts - gc-basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} - gc-host: ${{ secrets.GRAFANA_CLOUD_HOST }} diff --git a/.github/workflows/integration-chaos-tests.yml b/.github/workflows/integration-chaos-tests.yml deleted file mode 100644 index 8fa0aba971..0000000000 --- a/.github/workflows/integration-chaos-tests.yml +++ /dev/null @@ -1,148 +0,0 @@ -#name: Integration Chaos Test -#on: -# schedule: -# - cron: "0 0 * * *" -# push: -# tags: -# - "*" -# workflow_dispatch: -# -#env: -# CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink -# ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} -# TEST_SUITE: chaos -# TEST_ARGS: -test.timeout 1h -# CHAINLINK_COMMIT_SHA: ${{ github.sha }} -# CHAINLINK_ENV_USER: ${{ github.actor }} -# TEST_LOG_LEVEL: debug -# -#jobs: -# build-chainlink: -# environment: integration -# permissions: -# id-token: write -# contents: read -# name: Build Chainlink Image -# runs-on: ubuntu-latest -# steps: -# - name: Checkout the repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# - name: Check if image exists -# id: check-image -# uses: smartcontractkit/chainlink-github-actions/docker/image-exists@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 -# with: -# repository: chainlink -# tag: ${{ github.sha }} -# AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# - name: Build Image -# if: steps.check-image.outputs.exists == 'false' -# uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-image@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 -# with: -# cl_repo: smartcontractkit/chainlink -# cl_ref: ${{ github.sha }} -# push_tag: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink:${{ github.sha }} -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# - name: Print Chainlink Image Built -# id: push -# run: | -# echo "### chainlink node image tag used for this test run :link:" >>$GITHUB_STEP_SUMMARY -# echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY -# - name: Collect Metrics -# if: always() -# id: 
collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: Build Chainlink Image -# continue-on-error: true -# -# build-test-runner: -# environment: integration -# permissions: -# id-token: write -# contents: read -# name: Build Test Runner Image -# runs-on: ubuntu-latest -# steps: -# - name: Checkout the repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# - name: Build Test Image -# uses: ./.github/actions/build-test-image -# with: -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} -# - name: Collect Metrics -# if: always() -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: Build Test Runner Image -# continue-on-error: true -# -# chaos-tests: -# environment: integration -# permissions: -# checks: write -# pull-requests: write -# id-token: write -# contents: read -# name: EVM Pods Chaos Tests -# runs-on: ubuntu-latest -# needs: [build-test-runner, build-chainlink] -# steps: -# - name: Collect Metrics -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: EVM Pods Chaos Tests -# test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' -# continue-on-error: true -# - name: Checkout the repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# - name: Prepare Base64 TOML config -# env: -# CHAINLINK_VERSION: ${{ github.sha }} -# run: | -# echo ::add-mask::$CHAINLINK_IMAGE -# -# cat << EOF > config.toml -# [Network] -# selected_networks=["SIMULATED"] -# -# [ChainlinkImage] -# image="$CHAINLINK_IMAGE" -# version="$CHAINLINK_VERSION" -# EOF -# -# BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) -# echo ::add-mask::$BASE64_CONFIG_OVERRIDE -# echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV -# - name: Run Tests -# uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 -# with: -# test_command_to_run: cd integration-tests && go test -timeout 1h -count=1 -json -test.parallel 11 ./chaos 2>&1 | tee /tmp/gotest.log | gotestfmt -# test_download_vendor_packages_command: cd ./integration-tests && go mod download -# cl_repo: ${{ env.CHAINLINK_IMAGE }} -# cl_image_tag: ${{ github.sha }} -# artifacts_location: ./integration-tests/chaos/logs -# publish_check_name: EVM Pods Chaos Test Results -# token: ${{ secrets.GITHUB_TOKEN }} -# go_mod_path: ./integration-tests/go.mod -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} -# - name: Upload test log -# uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 -# if: failure() -# with: -# name: Test Results Log -# path: /tmp/gotest.log -# retention-days: 7 diff --git a/.github/workflows/integration-staging-tests.yml 
b/.github/workflows/integration-staging-tests.yml deleted file mode 100644 index 24e4acfe0f..0000000000 --- a/.github/workflows/integration-staging-tests.yml +++ /dev/null @@ -1,132 +0,0 @@ -# NEEDS ADJUSTING TO TOML CONFIG BEFORE USING!! -name: E2E Functions staging tests - -on: -# TODO: enable when env will be stable -# schedule: -# - cron: "0 0 * * *" - workflow_dispatch: - inputs: - network: - description: Blockchain network (testnet) - type: choice - default: "MUMBAI" - options: - - "MUMBAI" - test_type: - description: Test type - type: choice - default: "mumbai_functions_soak_test_real" - options: - - "mumbai_functions_soak_test_http" - - "mumbai_functions_stress_test_http" - - "mumbai_functions_soak_test_only_secrets" - - "mumbai_functions_stress_test_only_secrets" - - "mumbai_functions_soak_test_real" - - "mumbai_functions_stress_test_real" -# TODO: disabled, need GATI access -# - "gateway_secrets_set_soak_test" -# - "gateway_secrets_list_soak_test" - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - e2e-soak-test: - environment: sdlc - runs-on: ubuntu20.04-8cores-32GB - permissions: - contents: read - id-token: write - env: - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_TOKEN: ${{ secrets.LOKI_TOKEN }} - SELECTED_NETWORKS: ${{ inputs.network }} - SELECTED_TEST: ${{ inputs.test_type }} - MUMBAI_URLS: ${{ secrets.FUNCTIONS_STAGING_MUMBAI_URLS }} - MUMBAI_KEYS: ${{ secrets.FUNCTIONS_STAGING_MUMBAI_KEYS }} - WASP_LOG_LEVEL: info - steps: - - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - env: - PYROSCOPE_SERVER: ${{ secrets.QA_PYROSCOPE_INSTANCE }} - PYROSCOPE_ENVIRONMENT: ci-smoke-${{ matrix.product }}-sepolia - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - run: | - convert_to_toml_array() { - local IFS=',' - local input_array=($1) - local toml_array_format="[" - - for element in "${input_array[@]}"; do - toml_array_format+="\"$element\"," - done - - toml_array_format="${toml_array_format%,}]" - echo "$toml_array_format" - } - - if [ -n "$PYROSCOPE_SERVER" ]; then - pyroscope_enabled=true - else - pyroscope_enabled=false - fi - - cat << EOF > config.toml - [Common] - chainlink_node_funding=0.5 - - [ChainlinkImage] - image="$CHAINLINK_IMAGE" - version="${{ github.sha }}" - - [Pyroscope] - enabled=$pyroscope_enabled - server_url="$PYROSCOPE_SERVER" - environment="$PYROSCOPE_ENVIRONMENT" - key="$PYROSCOPE_KEY" - - [Logging] - run_id="$RUN_ID" - - [Logging.LogStream] - log_targets=$log_targets - - [Logging.Loki] - tenant_id="$LOKI_TENANT_ID" - endpoint="$LOKI_URL" - basic_auth="$LOKI_BASIC_AUTH" - - [Logging.Grafana] - base_url="$GRAFANA_URL" - dashboard_url="/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - - [Network] - selected_networks=["sepolia"] - - [Network.RpcHttpUrls] - sepolia = $(convert_to_toml_array "$SEPOLIA_HTTP_URLS") - - [Network.RpcWsUrls] - sepolia = $(convert_to_toml_array "$SEPOLIA_URLS") - - [Network.WalletKeys] - sepolia = $(convert_to_toml_array "$EVM_KEYS") - EOF - - BASE64_CONFIG_OVERRIDE=$(cat config.toml | base64 -w 0) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Run E2E soak tests - run: | - cd integration-tests/load/functions - if [[ $SELECTED_TEST == mumbai_functions* ]]; then - go test -v -timeout 6h -run TestFunctionsLoad/$SELECTED_TEST - elif [[ $SELECTED_TEST == gateway* ]]; then - go test -v -timeout 6h
-run TestGatewayLoad/$SELECTED_TEST - fi \ No newline at end of file diff --git a/.github/workflows/integration-tests-publish.yml b/.github/workflows/integration-tests-publish.yml deleted file mode 100644 index 5972725ec1..0000000000 --- a/.github/workflows/integration-tests-publish.yml +++ /dev/null @@ -1,101 +0,0 @@ -name: Integration Tests Publish -# Publish the compiled integration tests -on: - push: - tags: - - "v*" - branches: - - ccip-develop - workflow_dispatch: - -env: - ECR_TAG: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:develop - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - -jobs: - publish-integration-test-image: - environment: integration - permissions: - id-token: write - contents: read - name: Publish Integration Test Image - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Publish Integration Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.sha }} - - name: Setup Other Tags If Not Workflow Dispatch - id: tags - if: github.event_name != 'workflow_dispatch' - run: | - echo "other_tags=${ECR_TAG}" >> $GITHUB_OUTPUT - echo 'release_tag="${{ format('{0}.dkr.ecr.{1}.amazonaws.com/chainlink-ccip-tests:{2}', secrets.QA_AWS_ACCOUNT_NUMBER, secrets.QA_AWS_REGION, github.ref_name) }}"' >> $GITHUB_OUTPUT - - name: Build Image - uses: ./.github/actions/build-test-image - with: - other_tags: ${{ steps.tags.outputs.other_tags }},${{steps.tags.outputs.release_tag}} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - name: Notify Slack - # Only run this notification for merge to develop failures - if: failure() && github.event_name != 'workflow_dispatch' - uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0 - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - with: - channel-id: "#team-test-tooling-internal" - slack-message: ":x: :mild-panic-intensifies: Publish Integration Test Image failed: ${{ job.html_url }}\n${{ format('https://github.com/smartcontractkit/chainlink/actions/runs/{0}', github.run_id) }}" - build-chainlink-image: - environment: integration - # Only run this build for workflow_dispatch - if: github.event_name == 'workflow_dispatch' - permissions: - id-token: write - contents: read - strategy: - matrix: - image: - - name: "" - dockerfile: core/chainlink.Dockerfile - tag-suffix: "" - # uncomment in the future if we end up needing to soak test the plugins image - # - name: (plugins) - # dockerfile: plugins/chainlink.Dockerfile - # tag-suffix: -plugins - name: Build Chainlink Image ${{ matrix.image.name }} - runs-on: ubuntu20.04-8cores-32GB - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ 
secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: ${{ matrix.image.tag-suffix }} - dockerfile: ${{ matrix.image.dockerfile }} - git_commit_sha: ${{ github.sha }} - GRAFANA_CLOUD_BASIC_AUTH: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} - GRAFANA_CLOUD_HOST: ${{ secrets.GRAFANA_CLOUD_HOST }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml deleted file mode 100644 index 2b6be446fb..0000000000 --- a/.github/workflows/integration-tests.yml +++ /dev/null @@ -1,1171 +0,0 @@ -name: Integration Tests -on: - merge_group: - pull_request: - push: - tags: - - "*" - workflow_dispatch: - -# Only run 1 of this workflow at a time per PR -concurrency: - group: integration-tests-chainlink-${{ github.ref }} - cancel-in-progress: true - -env: - # for run-test variables and environment - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-ccip-tests:${{ github.sha }} - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - TEST_SUITE: smoke - TEST_ARGS: -test.timeout 12m - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - -jobs: - enforce-ctf-version: - name: Enforce CTF Version - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Check Merge Group Condition - id: condition-check - run: | - echo "Checking event condition..." 
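          # Note: for merge_group events the payload file at $GITHUB_EVENT_PATH carries the queued
          # target branch under .merge_group.base_ref (e.g. "refs/heads/develop"); the jq call below
          # reads that field so the CTF version check is enforced only for merge queues targeting develop.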
- SHOULD_ENFORCE="false" - if [[ "$GITHUB_EVENT_NAME" == "merge_group" ]]; then - echo "We are in a merge_group event, now check if we are on the develop branch" - target_branch=$(cat $GITHUB_EVENT_PATH | jq -r .merge_group.base_ref) - if [[ "$target_branch" == "refs/heads/develop" ]]; then - echo "We are on the develop branch, we should enforce ctf version" - SHOULD_ENFORCE="true" - fi - fi - echo "should we enforce ctf version = $SHOULD_ENFORCE" - echo "should-enforce=$SHOULD_ENFORCE" >> $GITHUB_OUTPUT - - name: Enforce CTF Version - if: steps.condition-check.outputs.should-enforce == 'true' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/mod-version@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - go-project-path: ./integration-tests - module-name: github.com/smartcontractkit/chainlink-testing-framework - enforce-semantic-tag: "true" - changes: - environment: integration - name: Check Paths That Require Tests To Run - runs-on: ubuntu-latest - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2.11.1 - id: changes - with: - filters: | - src: - - '**/*.go' - - '**/*go.sum' - - '**/*go.mod' - - '.github/workflows/integration-tests.yml' - - '**/*Dockerfile' - - 'core/**/config/**/*.toml' - - 'integration-tests/**/*.toml' - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Check Paths That Require Tests To Run - continue-on-error: true - outputs: - src: ${{ steps.changes.outputs.src }} - - build-lint-integration-tests: - name: Build and Lint integration-tests - runs-on: ubuntu20.04-16cores-64GB - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Setup Go - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-go@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_download_vendor_packages_command: cd ./integration-tests && go mod download - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - - name: Build Go - run: | - cd ./integration-tests - go build ./... - go test -run=^# ./... 
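The 'go test -run=^# ./...' in the Build Go step above is a compile-only trick rather than a test run: -run filters tests with a regex over their names, and since '#' cannot occur in a Go identifier, '^#' matches nothing, so each package's test binary is still compiled and linked (surfacing compile errors in _test.go files) while zero tests execute. A minimal local sketch of the same pre-flight check, assuming a checkout containing the integration-tests module:

    cd ./integration-tests
    go build ./...          # compile every non-test package
    go test -run='^#' ./... # build and link the test binaries; '^#' matches no test name, so nothing runs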
- - name: Lint Go - uses: golangci/golangci-lint-action@3a919529898de77ec3da873e3063ca4b10e7f5cc # v3.7.0 - with: - version: v1.55.2 - # We already cache these directories in setup-go - skip-pkg-cache: true - skip-build-cache: true - # only-new-issues is only applicable to PRs, otherwise it is always set to false - only-new-issues: false # disabled for PRs due to unreliability - args: --out-format colored-line-number,checkstyle:golangci-lint-report.xml - working-directory: ./integration-tests - - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - strategy: - matrix: - image: - - name: "" - dockerfile: core/chainlink.Dockerfile - tag-suffix: "" - # - name: (plugins) - # dockerfile: plugins/chainlink.Dockerfile - # tag-suffix: -plugins - name: Build Chainlink Image ${{ matrix.image.name }} - runs-on: ubuntu20.04-16cores-64GB - needs: [changes, enforce-ctf-version] - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image ${{ matrix.image.name }} - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: ${{ matrix.image.tag-suffix }} - dockerfile: ${{ matrix.image.dockerfile }} - git_commit_sha: ${{ github.sha }} - GRAFANA_CLOUD_BASIC_AUTH: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} - GRAFANA_CLOUD_HOST: ${{ secrets.GRAFANA_CLOUD_HOST }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - build-test-image: - if: startsWith(github.ref, 'refs/tags/') || github.event_name == 'schedule' || contains(join(github.event.pull_request.labels.*.name, ' '), 'build-test-image') - environment: integration - permissions: - id-token: write - contents: read - name: Build Test Image - runs-on: ubuntu20.04-16cores-64GB - needs: [changes] - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Test Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Test Image - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - -# compare-tests: -# needs: [changes] -# runs-on: ubuntu-latest 
-# name: Compare/Build Automation Test List -# outputs: -# automation-matrix: ${{ env.AUTOMATION_JOB_MATRIX_JSON }} -# lp-matrix: ${{ env.LP_JOB_MATRIX_JSON }} -# steps: -# - name: Check for Skip Tests Label -# if: contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') -# run: | -# echo "## \`skip-smoke-tests\` label is active, skipping E2E smoke tests" >>$GITHUB_STEP_SUMMARY -# exit 0 -# - name: Checkout the repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# - name: Compare Test Lists -# run: | -# cd ./integration-tests -# ./scripts/compareTestList.sh ./smoke/automation_test.go -# ./scripts/compareTestList.sh ./smoke/keeper_test.go -# ./scripts/compareTestList.sh ./smoke/log_poller_test.go -# - name: Build Test Matrix Lists -# id: build-test-matrix-list -# run: | -# cd ./integration-tests -# MATRIX_JSON_AUTOMATION=$(./scripts/buildTestMatrixList.sh ./smoke/automation_test.go automation ubuntu-latest 1) -# MATRIX_JSON_KEEPER=$(./scripts/buildTestMatrixList.sh ./smoke/keeper_test.go keeper ubuntu-latest 1) -# COMBINED_ARRAY=$(jq -c -n "$MATRIX_JSON_AUTOMATION + $MATRIX_JSON_KEEPER") -# -# LOG_POLLER_MATRIX_JSON=$(./scripts/buildTestMatrixList.sh ./smoke/log_poller_test.go log_poller ubuntu-latest 1) -# echo "LP_JOB_MATRIX_JSON=${LOG_POLLER_MATRIX_JSON}" >> $GITHUB_ENV -# -# # if we running a PR against the develop branch we should only run the automation tests unless we are in the merge group event -# if [[ "$GITHUB_EVENT_NAME" == "merge_group" ]]; then -# echo "We are in a merge_group event, run both automation and keepers tests" -# echo "AUTOMATION_JOB_MATRIX_JSON=${COMBINED_ARRAY}" >> $GITHUB_ENV -# else -# echo "we are not in a merge_group event, if this is a PR to develop run only automation tests, otherwise run everything because we could be running against a release branch" -# target_branch=$(cat $GITHUB_EVENT_PATH | jq -r .pull_request.base.ref) -# if [[ "$target_branch" == "develop" ]]; then -# echo "only run automation tests" -# echo "AUTOMATION_JOB_MATRIX_JSON=${MATRIX_JSON_AUTOMATION}" >> $GITHUB_ENV -# else -# echo "run both automation and keepers tests" -# echo "AUTOMATION_JOB_MATRIX_JSON=${COMBINED_ARRAY}" >> $GITHUB_ENV -# fi -# fi -# -# eth-smoke-tests-matrix-automation: -# if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') }} -# environment: integration -# permissions: -# checks: write -# pull-requests: write -# id-token: write -# contents: read -# needs: -# [build-chainlink, changes, compare-tests, build-lint-integration-tests] -# env: -# SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2 -# CHAINLINK_COMMIT_SHA: ${{ github.sha }} -# CHAINLINK_ENV_USER: ${{ github.actor }} -# TEST_LOG_LEVEL: debug -# strategy: -# fail-fast: false -# matrix: -# product: ${{fromJson(needs.compare-tests.outputs.automation-matrix)}} -# runs-on: ${{ matrix.product.os }} -# name: ETH Smoke Tests ${{ matrix.product.name }} -# steps: -# - name: Collect Metrics -# if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: ETH Smoke Tests ${{ matrix.product.name }} -# test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' -# continue-on-error: true -# - name: Checkout the repo -# uses: 
actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} -# - name: Build Go Test Command -# id: build-go-test-command -# run: | -# # if the matrix.product.run is set, use it for a different command -# if [ "${{ matrix.product.run }}" != "" ]; then -# echo "run_command=${{ matrix.product.run }} ./smoke/${{ matrix.product.file }}_test.go" >> "$GITHUB_OUTPUT" -# else -# echo "run_command=./smoke/${{ matrix.product.name }}_test.go" >> "$GITHUB_OUTPUT" -# fi -# - name: Prepare Base64 TOML override -# uses: ./.github/actions/setup-create-base64-config -# with: -# runId: ${{ github.run_id }} -# testLogCollect: ${{ vars.TEST_LOG_COLLECT }} -# selectedNetworks: ${{ env.SELECTED_NETWORKS }} -# chainlinkImage: ${{ env.CHAINLINK_IMAGE }} -# chainlinkVersion: ${{ github.sha }} -# pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 -# pyroscopeEnvironment: ${{ matrix.product.pyroscope_env }} -# pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} -# lokiEndpoint: ${{ secrets.LOKI_URL }} -# lokiTenantId: ${{ vars.LOKI_TENANT_ID }} -# lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} -# logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} -# grafanaUrl: ${{ vars.GRAFANA_URL }} -# grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" -# -# ## Run this step when changes that require tests to be run are made -# - name: Run Tests -# if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' -# uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 -# with: -# test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -test.parallel=${{ matrix.product.nodes }} ${{ steps.build-go-test-command.outputs.run_command }} 2>&1 | tee /tmp/gotest.log | gotestfmt -# test_download_vendor_packages_command: cd ./integration-tests && go mod download -# cl_repo: ${{ env.CHAINLINK_IMAGE }} -# cl_image_tag: ${{ github.sha }} -# aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} -# artifacts_location: ./integration-tests/smoke/logs/ -# publish_check_name: ${{ matrix.product.name }} -# token: ${{ secrets.GITHUB_TOKEN }} -# go_mod_path: ./integration-tests/go.mod -# cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} -# cache_restore_only: "true" -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# QA_KUBECONFIG: "" -# - name: Print failed test summary -# if: always() -# uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 -# -# eth-smoke-tests-matrix-log-poller: -# if: ${{ !(contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') || github.event_name == 'workflow_dispatch') }} -# environment: integration -# permissions: -# checks: write -# pull-requests: write -# id-token: write -# contents: read -# needs: -# [build-chainlink, changes, compare-tests, build-lint-integration-tests] -# env: -# SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2 -# CHAINLINK_COMMIT_SHA: ${{ github.sha }} -# CHAINLINK_ENV_USER: ${{ github.actor }} -# TEST_LOG_LEVEL: debug -# strategy: -# fail-fast: false -# matrix: -# product: 
${{fromJson(needs.compare-tests.outputs.lp-matrix)}} -# runs-on: ${{ matrix.product.os }} -# name: ETH Smoke Tests ${{ matrix.product.name }} -# steps: -# - name: Collect Metrics -# if: needs.changes.outputs.src == 'true' -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: ETH Smoke Tests ${{ matrix.product.name }} -# test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' -# continue-on-error: true -# - name: Checkout the repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} -# - name: Build Go Test Command -# id: build-go-test-command -# run: | -# # if the matrix.product.run is set, use it for a different command -# if [ "${{ matrix.product.run }}" != "" ]; then -# echo "run_command=${{ matrix.product.run }} ./smoke/${{ matrix.product.file }}_test.go" >> "$GITHUB_OUTPUT" -# else -# echo "run_command=./smoke/${{ matrix.product.name }}_test.go" >> "$GITHUB_OUTPUT" -# fi -# - name: Prepare Base64 TOML override -# uses: ./.github/actions/setup-create-base64-config -# with: -# runId: ${{ github.run_id }} -# testLogCollect: ${{ vars.TEST_LOG_COLLECT }} -# selectedNetworks: ${{ env.SELECTED_NETWORKS }} -# chainlinkImage: ${{ env.CHAINLINK_IMAGE }} -# chainlinkVersion: ${{ github.sha }} -# pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 -# pyroscopeEnvironment: ${{ matrix.product.pyroscope_env }} -# pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} -# lokiEndpoint: ${{ secrets.LOKI_URL }} -# lokiTenantId: ${{ vars.LOKI_TENANT_ID }} -# lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} -# logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} -# grafanaUrl: ${{ vars.GRAFANA_URL }} -# grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" -# ## Run this step when changes that require tests to be run are made -# - name: Run Tests -# if: needs.changes.outputs.src == 'true' -# uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 -# with: -# test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -test.parallel=${{ matrix.product.nodes }} ${{ steps.build-go-test-command.outputs.run_command }} 2>&1 | tee /tmp/gotest.log | gotestfmt -# test_download_vendor_packages_command: cd ./integration-tests && go mod download -# cl_repo: ${{ env.CHAINLINK_IMAGE }} -# cl_image_tag: ${{ github.sha }} -# aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} -# artifacts_location: ./integration-tests/smoke/logs/ -# publish_check_name: ${{ matrix.product.name }} -# token: ${{ secrets.GITHUB_TOKEN }} -# go_mod_path: ./integration-tests/go.mod -# cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} -# cache_restore_only: "true" -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# QA_KUBECONFIG: "" - - eth-smoke-tests-matrix: - if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - 
needs: [build-chainlink, changes, build-lint-integration-tests] - env: - SELECTED_NETWORKS: SIMULATED - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - strategy: - fail-fast: false - matrix: - product: - - name: ccip-smoke - nodes: 1 - os: ubuntu-latest - file: ccip - dir: ccip-tests/smoke - run: -run ^TestSmokeCCIPForBidirectionalLane$ - - name: ccip-smoke-usdc - nodes: 1 - os: ubuntu-latest - file: ccip - dir: ccip-tests/smoke - run: -run ^TestSmokeCCIPForBidirectionalLane$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/usdc_mock_deployment.toml - - name: ccip-smoke-db-compatibility - nodes: 1 - os: ubuntu-latest - file: ccip - dir: ccip-tests/smoke - run: -run ^TestSmokeCCIPForBidirectionalLane$ - config_path: ./integration-tests/ccip-tests/testconfig/tomls/db-compatibility.toml - - name: ccip-smoke-rate-limit - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu20.04-16cores-64GB - file: ccip - run: -run ^TestSmokeCCIPRateLimit$ - - name: ccip-smoke-multicall - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu20.04-16cores-64GB - file: ccip - run: -run ^TestSmokeCCIPMulticall$ - - name: ccip-smoke-manual-exec - nodes: 1 - dir: ccip-tests/smoke - os: ubuntu20.04-16cores-64GB - file: ccip - run: -run ^TestSmokeCCIPManuallyExecuteAfterExecutionFailingDueToInsufficientGas$ - - name: runlog - nodes: 2 - os: ubuntu-latest - pyroscope_env: "ci-smoke-runlog-evm-simulated" - - name: cron - nodes: 2 - os: ubuntu-latest - pyroscope_env: "ci-smoke-cron-evm-simulated" - - name: flux - nodes: 1 - os: ubuntu-latest - pyroscope_env: "ci-smoke-flux-evm-simulated" - - name: ocr - nodes: 2 - os: ubuntu-latest - file: ocr - pyroscope_env: ci-smoke-ocr-evm-simulated - - name: ocr2 - nodes: 6 - os: ubuntu-latest - run: -run TestOCRBasic - file: ocr - client: geth - pyroscope_env: ci-smoke-ocr-evm-simulated - # Uncomment, when https://smartcontract-it.atlassian.net/browse/TT-753 is DONE - # - name: ocr-nethermind - # nodes: 1 - # os: ubuntu20.04-8cores-32GB - # run: -run TestOCRBasic - # file: ocr - # client: nethermind - # pyroscope_env: ci-smoke-ocr-evm-simulated - - name: ocr-besu - nodes: 1 - os: ubuntu20.04-8cores-32GB - run: -run TestOCRBasic - file: ocr - client: besu - pyroscope_env: ci-smoke-ocr-evm-simulated - - name: ocr-erigon - nodes: 1 - os: ubuntu20.04-8cores-32GB - run: -run TestOCRBasic - file: ocr - client: erigon - pyroscope_env: ci-smoke-ocr-evm-simulated - - name: ocr2 - nodes: 1 - os: ubuntu20.04-8cores-32GB - run: -run TestOCRv2JobReplacement - file: ocr2 - pyroscope_env: ci-smoke-ocr2-evm-simulated - - name: ocr2-geth - nodes: 1 - os: ubuntu20.04-8cores-32GB - run: -run TestOCRv2Basic - file: ocr2 - client: geth - pyroscope_env: ci-smoke-ocr2-evm-simulated - # Uncomment, when https://smartcontract-it.atlassian.net/browse/TT-753 is DONE - # - name: ocr2-nethermind - # nodes: 1 - # os: ubuntu20.04-8cores-32GB - # run: -run TestOCRv2Basic - # file: ocr2 - # client: nethermind - # pyroscope_env: ci-smoke-ocr2-evm-simulated - - name: ocr2-besu - nodes: 1 - os: ubuntu20.04-8cores-32GB - run: -run TestOCRv2Basic - file: ocr2 - client: besu - pyroscope_env: ci-smoke-ocr2-evm-simulated - - name: ocr2-erigon - nodes: 1 - os: ubuntu20.04-8cores-32GB - run: -run TestOCRv2Basic - file: ocr2 - client: erigon - pyroscope_env: ci-smoke-ocr2-evm-simulated - - name: ocr2 - nodes: 6 - os: ubuntu-latest - pyroscope_env: ci-smoke-ocr2-plugins-evm-simulated - tag_suffix: "-plugins" - - name: vrf - nodes: 2 - os: ubuntu-latest - 
pyroscope_env: ci-smoke-vrf-evm-simulated - - name: vrfv2 - nodes: 2 - os: ubuntu-latest - pyroscope_env: ci-smoke-vrf2-evm-simulated - - name: vrfv2plus - nodes: 3 - os: ubuntu-latest - pyroscope_env: ci-smoke-vrf2plus-evm-simulated - - name: forwarder_ocr - nodes: 2 - os: ubuntu-latest - pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated - - name: forwarders_ocr2 - nodes: 2 - os: ubuntu-latest - pyroscope_env: ci-smoke-forwarder-ocr-evm-simulated - runs-on: ${{ matrix.product.os }} - name: ETH Smoke Tests ${{ matrix.product.name }}${{ matrix.product.tag_suffix }} - steps: - - name: Collect Metrics - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: ETH Smoke Tests ${{ matrix.product.name }}${{ matrix.product.tag_suffix }} - test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Go Test Command - id: build-go-test-command - run: | - # if dir is provided use it, otherwise use the smoke dir - if [ "${{ matrix.product.dir }}" != "" ]; then - dir=${{ matrix.product.dir }} - else - dir=smoke - fi - # if the matrix.product.run is set, use it for a different command - if [ "${{ matrix.product.run }}" != "" ]; then - echo "run_command=${{ matrix.product.run }} ./${dir}/${{ matrix.product.file }}_test.go" >> "$GITHUB_OUTPUT" - else - echo "run_command=./${dir}/${{ matrix.product.name }}_test.go" >> "$GITHUB_OUTPUT" - fi - - name: Check for "enable tracing" label - id: check-label - run: | - label=$(jq -r '.pull_request.labels[]?.name // empty' "$GITHUB_EVENT_PATH") - - if [[ -n "$label" ]]; then - if [[ "$label" == "enable tracing" ]]; then - echo "Enable tracing label found." - echo "trace=true" >> $GITHUB_OUTPUT - else - echo "Enable tracing label not found." - echo "trace=false" >> $GITHUB_OUTPUT - fi - else - echo "No labels present or labels are null." 
- echo "trace=false" >> $GITHUB_OUTPUT - fi - - - name: Setup Grafana and OpenTelemetry - id: docker-setup - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - # Create network - docker network create --driver bridge tracing - - # Make trace directory - cd integration-tests/smoke/ - mkdir ./traces - chmod -R 777 ./traces - - # Switch directory - cd ../../.github/tracing - - # Create a Docker volume for traces - # docker volume create otel-traces - - # Start OpenTelemetry Collector - # Note the user must be set to the same user as the runner for the trace data to be accessible - docker run -d --network=tracing --name=otel-collector \ - -v $PWD/otel-collector-ci.yaml:/etc/otel-collector.yaml \ - -v $PWD/../../integration-tests/smoke/traces:/tracing \ - --user "$(id -u):$(id -g)" \ - -p 4317:4317 otel/opentelemetry-collector:0.88.0 --config=/etc/otel-collector.yaml - - name: Locate Docker Volume - id: locate-volume - if: false - run: | - echo "VOLUME_PATH=$(docker volume inspect --format '{{ .Mountpoint }}' otel-traces)" >> $GITHUB_OUTPUT - - name: Show Otel-Collector Logs - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - docker logs otel-collector - - name: Set Override Config - id: set_override_config - run: | - # if the matrix.product.config_path is set, use it as the override config - if [ "${{ matrix.product.config_path }}" != "" ]; then - echo "base_64_override=$(base64 -w 0 -i ${{ matrix.product.config_path }})" >> "$GITHUB_OUTPUT" - fi - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - selectedNetworks: ${{ env.SELECTED_NETWORKS }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ${{ matrix.product.pyroscope_env }} - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - - name: Prepare Base64 TOML override for CCIP secrets - uses: ./.github/actions/setup-create-base64-config-ccip - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - selectedNetworks: SIMULATED_1,SIMULATED_2 - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - ## Run this step when changes that require tests to be run are made - - name: Run Tests - if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - env: - 
BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }} - TEST_BASE64_CCIP_CONFIG_OVERRIDE: ${{ steps.set_override_config.outputs.base_64_override }} - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -test.parallel=${{ matrix.product.nodes }} ${{ steps.build-go-test-command.outputs.run_command }} 2>&1 | tee /tmp/gotest.log | gotestfmt - test_download_vendor_packages_command: cd ./integration-tests && go mod download - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }}${{ matrix.product.tag_suffix }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - artifacts_name: ${{ matrix.product.name }}${{ matrix.product.tag_suffix }}-test-logs - artifacts_location: ./integration-tests/smoke/logs/ - publish_check_name: ${{ matrix.product.name }} - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: "" - # Run this step when changes that do not need the test to run are made - - name: Run Setup - if: needs.changes.outputs.src == 'false' - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-run-tests-environment@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_download_vendor_packages_command: cd ./integration-tests && go mod download - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Show Otel-Collector Logs - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - docker logs otel-collector - - name: Permissions on traces - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - run: | - ls -l ./integration-tests/smoke/traces - - name: Upload Trace Data - if: steps.check-label.outputs.trace == 'true' && matrix.product.name == 'ocr2' && matrix.product.tag_suffix == '-plugins' - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 - with: - name: trace-data - path: ./integration-tests/smoke/traces/trace-data.json - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_directory: ./integration-tests/smoke/ - - ### Used to check the required checks box when the matrix completes - eth-smoke-tests: - if: always() - runs-on: ubuntu-latest - name: ETH Smoke Tests - needs: [eth-smoke-tests-matrix] - steps: - - name: Check smoke test matrix status - if: needs.eth-smoke-tests-matrix.result != 'success' - run: | - echo "${{ needs.eth-smoke-tests-matrix.result }}" - exit 1 - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: ETH Smoke Tests - matrix-aggregator-status: ${{ 
needs.eth-smoke-tests-matrix.result }} - continue-on-error: true - - cleanup: - name: Clean up integration environment deployments - if: always() - needs: [eth-smoke-tests] - runs-on: ubuntu-latest - steps: - - name: Checkout repo - if: ${{ github.event_name == 'pull_request' }} - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - - name: 🧼 Clean up Environment - if: ${{ github.event_name == 'pull_request' }} - uses: ./.github/actions/delete-deployments - with: - environment: integration - ref: ${{ github.head_ref }} # See https://github.com/github/docs/issues/15319#issuecomment-1476705663 - - - name: Collect Metrics - if: ${{ github.event_name == 'pull_request' }} - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Clean up integration environment deployments - continue-on-error: true - - # Run the setup again once the matrix finishes, but this time save the cache if we had a cache miss - # this will also only run if both of the matrix jobs pass - eth-smoke-go-mod-cache: - environment: integration - needs: [eth-smoke-tests] - runs-on: ubuntu20.04-16cores-64GB - name: ETH Smoke Tests Go Mod Cache - continue-on-error: true - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Run Setup - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-go@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_download_vendor_packages_command: | - cd ./integration-tests - go mod download - # force download of test dependencies - go test -run=NonExistentTest ./smoke/...
|| echo "ignore expected test failure" - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "false" - - ### Migration tests - node-migration-tests: - name: Version Migration Tests - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: [build-chainlink, changes, build-test-image] - # Only run migration tests on new tags - if: startsWith(github.ref, 'refs/tags/') - env: - SELECTED_NETWORKS: SIMULATED,SIMULATED_1,SIMULATED_2 - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - CHAINLINK_IMAGE: public.ecr.aws/chainlink/chainlink - UPGRADE_VERSION: ${{ github.sha }} - UPGRADE_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - TEST_LOG_LEVEL: debug - TEST_SUITE: migration - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Get Latest Version - id: get_latest_version - run: | - untrimmed_ver=$(curl --header "Authorization: token ${{ secrets.GITHUB_TOKEN }}" --request GET https://api.github.com/repos/${{ github.repository }}/releases/latest | jq -r .name) - latest_version="${untrimmed_ver:1}" - echo "latest_version=${latest_version} | tee -a $GITHUB_OUTPUT" - - name: Name Versions - run: | - echo "Running migration tests from version '${{ steps.get_latest_version.outputs.latest_version }}' to: '${{ github.sha }}'" - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-upgrade-config - with: - selectedNetworks: ${{ env.SELECTED_NETWORKS }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ steps.get_latest_version.outputs.latest_version }} - upgradeImage: ${{ env.UPGRADE_IMAGE }} - upgradeVersion: ${{ env.UPGRADE_VERSION }} - - name: Run Migration Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json ./migration 2>&1 | tee /tmp/gotest.log | gotestfmt - test_download_vendor_packages_command: cd ./integration-tests && go mod download - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ steps.get_latest_version.outputs.latest_version }} - artifacts_location: ./integration-tests/migration/logs - publish_check_name: Node Migration Test Results - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Upload test log - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 - if: failure() - with: - name: test-log-${{ matrix.product.name }} - path: /tmp/gotest.log - retention-days: 7 - continue-on-error: true - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Version Migration Tests - 
test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' - continue-on-error: true - - ## Solana Section -# get_solana_sha: -# name: Get Solana Sha From Go Mod -# environment: Integration -# runs-on: ubuntu-latest -# outputs: -# sha: ${{ steps.getsha.outputs.sha }} -# steps: -# - name: Checkout the repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} -# - name: Setup Go -# uses: ./.github/actions/setup-go -# with: -# only-modules: "true" -# - name: Get the sha from go mod -# id: getshortsha -# run: | -# sol_ver=$(go list -m -json github.com/smartcontractkit/chainlink-solana | jq -r .Version) -# if [ -z "${sol_ver}" ]; then -# echo "Error: could not get the solana version from the go.mod file, look above for error(s)" -# exit 1 -# fi -# short_sha="${sol_ver##*-}" -# echo "short sha is: ${short_sha}" -# echo "short_sha=${short_sha}" >> "$GITHUB_OUTPUT" -# - name: Checkout solana -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# with: -# repository: smartcontractkit/chainlink-solana -# ref: develop -# fetch-depth: 0 -# path: solanapath -# - name: Get long sha -# id: getsha -# run: | -# cd solanapath -# full_sha=$(git rev-parse ${{steps.getshortsha.outputs.short_sha}}) -# if [ -z "${full_sha}" ]; then -# echo "Error: could not get the full sha from the short sha using git, look above for error(s)" -# exit 1 -# fi -# echo "sha is: ${full_sha}" -# echo "sha=${full_sha}" >> "$GITHUB_OUTPUT" -# -# get_projectserum_version: -# name: Get ProjectSerum Version -# environment: integration -# runs-on: ubuntu-latest -# needs: [get_solana_sha] -# outputs: -# projectserum_version: ${{ steps.psversion.outputs.projectserum_version }} -# steps: -# - name: Checkout the solana repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# with: -# repository: smartcontractkit/chainlink-solana -# ref: ${{ needs.get_solana_sha.outputs.sha }} -# - name: Get ProjectSerum Version -# id: psversion -# uses: smartcontractkit/chainlink-solana/.github/actions/projectserum_version@4b971869e26b79c7ce3fb7c98005cc2e3f350915 # stable action on Oct 12 2022 -# -# solana-test-image-exists: -# environment: integration -# permissions: -# checks: write -# pull-requests: write -# id-token: write -# contents: read -# name: Check If Solana Test Image Exists -# runs-on: ubuntu-latest -# needs: [get_solana_sha] -# outputs: -# exists: ${{ steps.check-image.outputs.exists }} -# steps: -# - name: Check if image exists -# id: check-image -# uses: smartcontractkit/chainlink-github-actions/docker/image-exists@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 -# with: -# repository: chainlink-solana-tests -# tag: ${{ needs.get_solana_sha.outputs.sha }} -# AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# -# solana-build-contracts: -# environment: integration -# permissions: -# checks: write -# pull-requests: write -# id-token: write -# contents: read -# name: Solana Build Artifacts -# runs-on: ubuntu20.04-16cores-64GB -# needs: -# [ -# changes, -# get_projectserum_version, -# solana-test-image-exists, -# get_solana_sha, -# ] -# container: -# image: projectserum/build:${{ needs.get_projectserum_version.outputs.projectserum_version }} -# env: -# RUSTUP_HOME: "/root/.rustup" -# FORCE_COLOR: 1 -# steps: -# - name: Collect Metrics -# if: needs.changes.outputs.src == 'true' || github.event_name == 
'workflow_dispatch' -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: Solana Build Artifacts -# continue-on-error: true -# - name: Checkout the solana repo -# # Use v3.6.0 because the custom runner (container configured above) -# # doesn't have node20 installed which is required for versions >=4 -# uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 -# with: -# repository: smartcontractkit/chainlink-solana -# ref: ${{ needs.get_solana_sha.outputs.sha }} -# - name: Build contracts -# if: (needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch') && needs.solana-test-image-exists.outputs.exists == 'false' -# uses: smartcontractkit/chainlink-solana/.github/actions/build_contract_artifacts@21675b3a7dcdff8e790391708d4763020cace21e # stable action on December 18 2023 -# with: -# ref: ${{ needs.get_solana_sha.outputs.sha }} -# -# solana-build-test-image: -# environment: integration -# permissions: -# checks: write -# pull-requests: write -# id-token: write -# contents: read -# name: Solana Build Test Image -# runs-on: ubuntu20.04-16cores-64GB -# needs: -# [ -# solana-build-contracts, -# solana-test-image-exists, -# changes, -# get_solana_sha, -# ] -# env: -# CONTRACT_ARTIFACTS_PATH: contracts/target/deploy -# steps: -# - name: Collect Metrics -# if: (needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch') && needs.solana-test-image-exists.outputs.exists == 'false' -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: Solana Build Test Image -# continue-on-error: true -# - name: Checkout the repo -# if: (needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch') && needs.solana-test-image-exists.outputs.exists == 'false' -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# with: -# repository: smartcontractkit/chainlink-solana -# ref: ${{ needs.get_solana_sha.outputs.sha }} -# - name: Build Test Image -# if: (needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch') && needs.solana-test-image-exists.outputs.exists == 'false' -# uses: ./.github/actions/build-test-image -# with: -# tag: ${{ needs.get_solana_sha.outputs.sha }} -# artifacts_path: ${{ env.CONTRACT_ARTIFACTS_PATH }} -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} -# - run: echo "this exists so we don't have to run anything else if the build is skipped" -# if: needs.changes.outputs.src == 'false' || needs.solana-test-image-exists.outputs.exists == 'true' -# -# solana-smoke-tests: -# if: ${{ !contains(join(github.event.pull_request.labels.*.name, ' '), 'skip-smoke-tests') }} -# environment: integration -# permissions: -# checks: write -# pull-requests: write -# id-token: write -# contents: read -# name: Solana Smoke Tests -# runs-on: ubuntu20.04-16cores-64GB -# needs: -# [ -# build-chainlink, -# solana-build-contracts, -# solana-build-test-image, -# changes, -# get_solana_sha, -# ] -# env: -# CHAINLINK_COMMIT_SHA: ${{ github.sha }} -# CHAINLINK_ENV_USER: ${{ github.actor }} -# 
TEST_LOG_LEVEL: debug -# CONTRACT_ARTIFACTS_PATH: contracts/target/deploy -# steps: -# - name: Collect Metrics -# if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' -# id: collect-gha-metrics -# uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2 -# with: -# basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} -# hostname: ${{ secrets.GRAFANA_CLOUD_HOST }} -# this-job-name: Solana Smoke Tests -# test-results-file: '{"testType":"go","filePath":"/tmp/gotest.log"}' -# continue-on-error: true -# - name: Checkout the repo -# uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 -# with: -# repository: smartcontractkit/chainlink-solana -# ref: ${{ needs.get_solana_sha.outputs.sha }} -# - name: Run Setup -# if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' -# uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/setup-run-tests-environment@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 -# with: -# go_mod_path: ./integration-tests/go.mod -# cache_restore_only: true -# cache_key_id: core-solana-e2e-${{ env.MOD_CACHE_VERSION }} -# aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} -# dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} -# dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} -# - name: Pull Artifacts -# if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' -# run: | -# IMAGE_NAME=${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-solana-tests:${{ needs.get_solana_sha.outputs.sha }} -# # Pull the Docker image -# docker pull "$IMAGE_NAME" -# -# # Create a container without starting it -# CONTAINER_ID=$(docker create "$IMAGE_NAME") -# -# # Copy the artifacts from the container -# mkdir -p ./${{env.CONTRACT_ARTIFACTS_PATH}}/ -# docker cp "$CONTAINER_ID:/go/testdir/${{env.CONTRACT_ARTIFACTS_PATH}}/" "./${{env.CONTRACT_ARTIFACTS_PATH}}/../" -# -# # Remove the created container -# docker rm "$CONTAINER_ID" -# - name: Run Tests -# if: needs.changes.outputs.src == 'true' || github.event_name == 'workflow_dispatch' -# uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 -# with: -# test_command_to_run: export ENV_JOB_IMAGE=${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-solana-tests:${{ needs.get_solana_sha.outputs.sha }} && make test_smoke -# cl_repo: ${{ env.CHAINLINK_IMAGE }} -# cl_image_tag: ${{ github.sha }} -# artifacts_location: /home/runner/work/chainlink-solana/chainlink-solana/integration-tests/logs -# publish_check_name: Solana Smoke Test Results -# go_mod_path: ./integration-tests/go.mod -# cache_key_id: core-solana-e2e-${{ env.MOD_CACHE_VERSION }} -# token: ${{ secrets.GITHUB_TOKEN }} -# aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} -# QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} -# QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} -# QA_KUBECONFIG: "" -# run_setup: false -# - name: Upload test log -# uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 -# if: failure() -# with: -# name: test-log-solana -# path: /tmp/gotest.log -# retention-days: 7 -# continue-on-error: true diff --git
a/.github/workflows/lint-gh-workflows.yml b/.github/workflows/lint-gh-workflows.yml deleted file mode 100644 index 1d546905a7..0000000000 --- a/.github/workflows/lint-gh-workflows.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Lint GH Workflows -on: - push: -jobs: - lint_workflows: - name: Validate Github Action Workflows - runs-on: ubuntu-latest - steps: - - name: Check out Code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - name: Run actionlint - uses: reviewdog/action-actionlint@82693e9e3b239f213108d6e412506f8b54003586 # v1.39.1 - - name: Collect Metrics - if: always() - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Validate Github Action Workflows - continue-on-error: true diff --git a/.github/workflows/live-testnet-tests.yml b/.github/workflows/live-testnet-tests.yml deleted file mode 100644 index 028d65dff7..0000000000 --- a/.github/workflows/live-testnet-tests.yml +++ /dev/null @@ -1,957 +0,0 @@ -# *** -# This workflow is a monstrosity of copy-paste, and that's to increase legibility in reporting and running, so the code be damned. -# I suspect this can be cleaned up significantly with some clever trickery of the GitHub actions matrices, but I am not that clever. -# We want each chain to run in parallel, but each test within the chain needs to be able to run sequentially -# (we're trying to eliminate this as a requirement, should make it a lot easier). -# Each chain can have a variety of tests to run. -# We also want reporting to be clear in the start-slack-thread and post-test-results-to-slack jobs. 
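# --- Editorial note (sketch, not part of the original workflow) ---
# The "clever trickery" the comment above alludes to would most likely be a
# job-level matrix keyed on network, so the near-identical per-chain jobs
# below collapse into one definition. A rough shape (job name, network list,
# and product entries here are illustrative assumptions, not the repo's
# actual config):
#
#   live-smoke-tests:
#     strategy:
#       fail-fast: false
#       matrix:
#         network: [sepolia, optimism_sepolia, arbitrum_sepolia, base_sepolia]
#         product:
#           - { name: OCR, test: TestOCRBasic }
#           - { name: Automation Conditional, test: TestAutomationBasic/registry_2_1_conditional }
#     name: ${{ matrix.network }} ${{ matrix.product.name }} Tests
#     runs-on: ubuntu-latest
#     steps:
#       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
#       - run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.product.test }}
#
# The sticking points: per-network RPC secrets would need dynamic indexing
# of the secrets context (e.g. secrets[format('QA_{0}_URLS', matrix.network)]),
# and max-parallel applies to the whole matrix, so "parallel across chains,
# serial within a chain" does not fall out of a single combined matrix.
# -------------------------------------------------------------------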
-# *** - -name: Live Testnet Tests -on: - schedule: - - cron: "0 5 * * *" # Run every night at midnight EST - push: - tags: - - "*" - workflow_dispatch: - -env: - CHAINLINK_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink - INTERNAL_DOCKER_REPO: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com - MOD_CACHE_VERSION: 2 - CHAINLINK_NODE_FUNDING: .5 - PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} - LOKI_TENANT_ID: ${{ vars.LOKI_TENANT_ID }} - LOKI_URL: ${{ secrets.LOKI_URL }} - LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }} - LOGSTREAM_LOG_TARGETS: loki - GRAFANA_URL: ${{ vars.GRAFANA_URL }} - RUN_ID: ${{ github.run_id }} - - CHAINLINK_COMMIT_SHA: ${{ github.sha }} - CHAINLINK_ENV_USER: ${{ github.actor }} - TEST_LOG_LEVEL: debug - -jobs: - - # Build Test Dependencies - - build-chainlink: - environment: integration - permissions: - id-token: write - contents: read - name: Build Chainlink Image - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Chainlink Image - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Chainlink Image - uses: ./.github/actions/build-chainlink-image - with: - tag_suffix: "" - dockerfile: core/chainlink.Dockerfile - git_commit_sha: ${{ github.sha }} - GRAFANA_CLOUD_BASIC_AUTH: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }} - GRAFANA_CLOUD_HOST: ${{ secrets.GRAFANA_CLOUD_HOST }} - AWS_REGION: ${{ secrets.QA_AWS_REGION }} - AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - - build-tests: - environment: integration - permissions: - id-token: write - contents: read - name: Build Tests Binary - runs-on: ubuntu-latest - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Build Tests Binary - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Build Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/build-tests@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_download_vendor_packages_command: cd ./integration-tests && go mod download - token: ${{ secrets.GITHUB_TOKEN }} - go_mod_path: ./integration-tests/go.mod - go_tags: embed - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - binary_name: tests - - # End Build Test Dependencies - - # Reporting Jobs - - start-slack-thread: - name: Start Slack Thread - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - outputs: - thread_ts: ${{ steps.slack.outputs.thread_ts }} - permissions: - checks: write - pull-requests: write - id-token: write 
- contents: read - runs-on: ubuntu-latest - needs: [sepolia-smoke-tests, optimism-sepolia-smoke-tests, arbitrum-sepolia-smoke-tests, base-sepolia-smoke-tests, polygon-mumbai-smoke-tests, avalanche-fuji-smoke-tests, fantom-testnet-smoke-tests, celo-alfajores-smoke-tests, linea-goerli-smoke-tests] - steps: - - name: Debug Result - run: echo ${{ join(needs.*.result, ',') }} - - name: Main Slack Notification - uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0 - id: slack - with: - channel-id: ${{ secrets.QA_SLACK_CHANNEL }} - payload: | - { - "attachments": [ - { - "color": "${{ contains(join(needs.*.result, ','), 'failure') && '#C62828' || '#2E7D32' }}", - "blocks": [ - { - "type": "header", - "text": { - "type": "plain_text", - "text": "Live Smoke Test Results ${{ contains(join(needs.*.result, ','), 'failure') && ':x:' || ':white_check_mark:'}}", - "emoji": true - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "${{ contains(join(needs.*.result, ','), 'failure') && 'Some tests failed, notifying <@U01Q4N37KFG>' || 'All Good!' }}" - } - }, - { - "type": "divider" - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ github.ref_name }}|${{ github.ref_name }}> | <${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}> | <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Run>" - } - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} - - post-test-results-to-slack: - name: Post Test Results for ${{ matrix.network }} - if: ${{ always() && needs.*.result != 'skipped' && needs.*.result != 'cancelled' }} - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - runs-on: ubuntu-latest - needs: start-slack-thread - strategy: - fail-fast: false - matrix: - network: [Sepolia, Optimism Sepolia, Arbitrum Sepolia, Base Sepolia, Polygon Mumbai, Avalanche Fuji, Fantom Testnet, Celo Alfajores, Linea Goerli] - steps: - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - - name: Post Test Results - uses: ./.github/actions/notify-slack-jobs-result - with: - github_token: ${{ github.token }} - github_repository: ${{ github.repository }} - workflow_run_id: ${{ github.run_id }} - github_job_name_regex: ^${{ matrix.network }} (?.*?) 
Tests$ - message_title: ${{ matrix.network }} - slack_channel_id: ${{ secrets.QA_SLACK_CHANNEL }} - slack_bot_token: ${{ secrets.QA_SLACK_API_KEY }} - slack_thread_ts: ${{ needs.start-slack-thread.outputs.thread_ts }} - - # End Reporting Jobs - - sepolia-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "sepolia" - httpEndpoints: ${{ secrets.QA_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - bsc-testnet-tests: - # TODO: BSC RPCs are all in a bad state right now, so we're skipping these tests until they're fixed - if: false - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: 
[build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: BSC Testnet ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-bsc-testnet - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "bsc_testnet" - httpEndpoints: ${{ secrets.QA_BSC_TESTNET_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_BSC_TESTNET_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - optimism-sepolia-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Optimism Sepolia ${{ matrix.product }} Tests - runs-on: 
ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-optimism-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "optimism_sepolia" - httpEndpoints: ${{ secrets.QA_OPTIMISM_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_OPTIMISM_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - arbitrum-sepolia-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Arbitrum Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || 
!startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-arbitrum-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "arbitrum_sepolia" - httpEndpoints: ${{ secrets.QA_ARBITRUM_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_ARBITRUM_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - base-sepolia-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Base Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-base-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "base_sepolia" - httpEndpoints: ${{ 
secrets.QA_BASE_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_BASE_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - polygon-mumbai-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Polygon Mumbai ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-polygon-mumbai - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "polygon_mumbai" - httpEndpoints: ${{ secrets.QA_POLYGON_MUMBAI_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_POLYGON_MUMBAI_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests 
-test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - avalanche-fuji-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Avalanche Fuji ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-avalanche-fuji - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "avalanche_fuji" - httpEndpoints: ${{ secrets.QA_AVALANCHE_FUJI_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_AVALANCHE_FUJI_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - 
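# --- Editorial note (sketch, not part of the original diff) ---
# The Run Tests steps in these per-network jobs execute a precompiled Go
# test binary (produced by the build-tests job with `go_tags: embed` and
# `binary_name: tests`), so the usual `go test` flags take the `-test.`
# prefix. The invocation used throughout,
#
#   ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run TestOCRBasic
#
# is roughly what `go test -timeout 30m -count=1 -parallel 1 -run TestOCRBasic`
# would do from the integration-tests module, except the tests were compiled
# once up front and the binary is simply downloaded and run in each job.
# ---------------------------------------------------------------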
cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - fantom-testnet-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - - product: Automation Conditional - test: TestAutomationBasic/registry_2_1_conditional - - product: Automation Log Trigger - test: TestAutomationBasic/registry_2_1_logtrigger - name: Fantom Testnet ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-fantom-testnet - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "fantom_testnet" - httpEndpoints: ${{ secrets.QA_FANTOM_TESTNET_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_FANTOM_TESTNET_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - celo-alfajores-smoke-tests: - environment: integration - permissions: - checks: write - 
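# --- Editorial note (sketch, not part of the original diff) ---
# Every per-network job in this file uses the same strategy block, and that
# block is what implements the header comment's "each chain in parallel,
# each test within the chain sequentially": networks run as separate jobs,
# while max-parallel: 1 serializes the product matrix inside each job.
#
#   strategy:
#     max-parallel: 1   # one product test at a time against this chain
#     fail-fast: false  # a failing product doesn't cancel the others
#     matrix:
#       include:
#         - product: OCR
#           test: TestOCRBasic
# ---------------------------------------------------------------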
pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Celo Alfajores ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-celo-alfajores - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "celo_alfajores" - httpEndpoints: ${{ secrets.QA_CELO_ALFAJORES_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_CELO_ALFAJORES_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - scroll-sepolia-smoke-tests: - # TODO: Disabled until bug TT-767 is fixed - if: false - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Scroll Sepolia ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: 
./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-scroll-sepolia - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "scroll_sepolia" - httpEndpoints: ${{ secrets.QA_SCROLL_SEPOLIA_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_SCROLL_SEPOLIA_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - - linea-goerli-smoke-tests: - environment: integration - permissions: - checks: write - pull-requests: write - id-token: write - contents: read - needs: [build-chainlink, build-tests] - strategy: - max-parallel: 1 - fail-fast: false - matrix: - include: # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#example-adding-configurations - - product: OCR - test: TestOCRBasic - name: Linea Goerli ${{ matrix.product }} Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - name: Prepare Base64 TOML override - uses: ./.github/actions/setup-create-base64-config-live-testnets - with: - runId: ${{ github.run_id }} - testLogCollect: ${{ vars.TEST_LOG_COLLECT }} - chainlinkImage: ${{ env.CHAINLINK_IMAGE }} - chainlinkVersion: ${{ github.sha }} - pyroscopeServer: ${{ matrix.product.pyroscope_env == '' && '' || !startsWith(github.ref, 'refs/tags/') && '' || secrets.QA_PYROSCOPE_INSTANCE }} # Avoid sending blank envs https://github.com/orgs/community/discussions/25725 - pyroscopeEnvironment: ci-smoke-${{ matrix.product }}-linea-goerli - pyroscopeKey: ${{ secrets.QA_PYROSCOPE_KEY }} - lokiEndpoint: ${{ secrets.LOKI_URL }} - lokiTenantId: ${{ vars.LOKI_TENANT_ID }} - 
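# --- Editorial note (sketch, not part of the original diff) ---
# The on-demand workflows deleted further down all share one input-handling
# pattern: read the base64-encoded TOML override out of the event payload
# with jq, mask it so it never shows in logs, then export it for later
# steps. The core of that pattern, as it appears in those files:
#
#   - name: Mask base64 config
#     run: |
#       BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' "$GITHUB_EVENT_PATH")
#       echo "::add-mask::$BASE64_CONFIG_OVERRIDE"
#       echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> "$GITHUB_ENV"
#
# A caller would produce the input with something like
# `base64 -w 0 override.toml` (GNU base64; the exact encoding command is an
# assumption about the caller, not something these workflows prescribe).
# ---------------------------------------------------------------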
lokiBasicAuth: ${{ secrets.LOKI_BASIC_AUTH }} - logstreamLogTargets: ${{ vars.LOGSTREAM_LOG_TARGETS }} - grafanaUrl: ${{ vars.GRAFANA_URL }} - grafanaDashboardUrl: "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - network: "linea_goerli" - httpEndpoints: ${{ secrets.QA_LINEA_GOERLI_HTTP_URLS }} - wsEndpoints: ${{ secrets.QA_LINEA_GOERLI_URLS }} - fundingKeys: ${{ secrets.QA_EVM_KEYS }} - - name: Download Tests Binary - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 - with: - name: tests - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests-binary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 - with: - test_command_to_run: ./tests -test.timeout 30m -test.count=1 -test.parallel=1 -test.run ${{ matrix.test }} - binary_name: tests - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ github.sha }} - aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - dockerhub_username: ${{ secrets.DOCKERHUB_READONLY_USERNAME }} - dockerhub_password: ${{ secrets.DOCKERHUB_READONLY_PASSWORD }} - artifacts_location: ./logs - token: ${{ secrets.GITHUB_TOKEN }} - cache_key_id: core-e2e-${{ env.MOD_CACHE_VERSION }} - cache_restore_only: "true" - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} - - name: Print failed test summary - if: always() - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/show-test-summary@ea889b3133bd7f16ab19ba4ba130de5d9162c669 # v2.3.4 \ No newline at end of file diff --git a/.github/workflows/on-demand-log-poller.yml b/.github/workflows/on-demand-log-poller.yml deleted file mode 100644 index ad3617841d..0000000000 --- a/.github/workflows/on-demand-log-poller.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: On Demand Log Poller Consistency Test -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - -jobs: - test: - env: - REF_NAME: ${{ github.head_ref || github.ref_name }} - runs-on: ubuntu20.04-8cores-32GB - steps: - - name: Add masks and export base64 config - run: | - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ env.REF_NAME }} - - name: Setup Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: "integration-tests/go.mod" - cache: true - - name: Run tests - run: | - cd integration-tests - go mod download - go test -v -timeout 5h -v -count=1 -run ^TestLogPollerFewFiltersFixedDepth$ ./smoke/log_poller_test.go diff --git a/.github/workflows/on-demand-ocr-soak-test.yml b/.github/workflows/on-demand-ocr-soak-test.yml deleted file mode 100644 index 952c51f9fc..0000000000 --- a/.github/workflows/on-demand-ocr-soak-test.yml +++ /dev/null @@ -1,94 +0,0 @@ -name: On Demand OCR Soak Test -on: - workflow_dispatch: - inputs: - base64Config: - description: base64-ed config - required: true - type: string - slackMemberID: - description: Slack Member ID (Not your @) - required: true - default: U01A2B2C3D4 - type: string - -jobs: - ocr_soak_test: - name: OCR Soak Test - environment: integration - runs-on: ubuntu-latest - permissions: - checks: write - pull-requests: write - id-token: write - contents: 
read - env: - CHAINLINK_ENV_USER: ${{ github.actor }} - SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }} - SLACK_CHANNEL: ${{ secrets.QA_SLACK_CHANNEL }} - TEST_LOG_LEVEL: debug - REF_NAME: ${{ github.head_ref || github.ref_name }} - ENV_JOB_IMAGE_BASE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests - steps: - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: ${{ inputs.network }} OCR Soak Test - continue-on-error: true - - name: Checkout the repo - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: ${{ env.REF_NAME }} - - name: Get Slack config and mask base64 config - run: | - SLACK_USER=$(jq -r '.inputs.slackMemberID' $GITHUB_EVENT_PATH) - echo ::add-mask::$SLACK_USER - echo SLACK_USER=$SLACK_USER >> $GITHUB_ENV - - BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH) - echo ::add-mask::$BASE64_CONFIG_OVERRIDE - echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV - - name: Parse base64 config - uses: ./.github/actions/setup-parse-base64-config - with: - base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }} - - name: Setup Push Tag - shell: bash - run: | - echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY - echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY - echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY - echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY - echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY - - name: Build Image - uses: ./.github/actions/build-test-image - with: - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ACCOUNT_NUMBER: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }} - - name: Run Tests - uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16 - env: - DETACH_RUNNER: true - TEST_SUITE: soak - TEST_ARGS: -test.timeout 900h -test.memprofile memprofile.out -test.cpuprofile profile.out - ENV_JOB_IMAGE: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}.dkr.ecr.${{ secrets.QA_AWS_REGION }}.amazonaws.com/chainlink-tests:${{ github.sha }} - # We can comment these out when we have a stable soak test and aren't worried about resource consumption - TEST_UPLOAD_CPU_PROFILE: true - TEST_UPLOAD_MEM_PROFILE: true - with: - test_command_to_run: cd ./integration-tests && go test -v -count=1 -run ^TestOCRSoak$ ./soak - test_download_vendor_packages_command: make gomod - cl_repo: ${{ env.CHAINLINK_IMAGE }} - cl_image_tag: ${{ env.CHAINLINK_VERSION }} - token: ${{ secrets.GITHUB_TOKEN }} - should_cleanup: false - go_mod_path: ./integration-tests/go.mod - QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }} - QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }} - QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }} diff --git a/.github/workflows/on-demand-vrfv2-eth2-clients-test.yml b/.github/workflows/on-demand-vrfv2-eth2-clients-test.yml deleted file mode 100644 index de53b493a5..0000000000 --- a/.github/workflows/on-demand-vrfv2-eth2-clients-test.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: On Demand VRFV2 Smoke Test (Ethereum 
-on:
-  workflow_dispatch:
-    inputs:
-      base64Config:
-        description: base64-ed config
-        required: true
-        type: string
-
-jobs:
-  vrfv2_smoke_test:
-    name: VRFV2 Smoke Test with custom EL client
-    environment: integration
-    runs-on: ubuntu20.04-8cores-32GB
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      TEST_LOG_LEVEL: debug
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          fetch-depth: 0
-      - name: Mask base64 config
-        run: |
-          BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-      - name: Parse base64 config
-        uses: ./.github/actions/setup-parse-base64-config
-        with:
-          base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }}
-      - name: Send details to Step Summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Execution client used" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.ETH2_EL_CLIENT }}\`" >>$GITHUB_STEP_SUMMARY
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@7d541cbbca52d45b8a718257af86d9cf49774d1f # v2.2.15
-        with:
-          test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -run TestVRFv2Basic ./smoke/vrfv2_test.go 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          artifacts_name: vrf-test-logs
-          artifacts_location: ./integration-tests/smoke/logs/
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          should_cleanup: false
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ""
diff --git a/.github/workflows/on-demand-vrfv2-performance-test.yml b/.github/workflows/on-demand-vrfv2-performance-test.yml
deleted file mode 100644
index 097f1b56f4..0000000000
--- a/.github/workflows/on-demand-vrfv2-performance-test.yml
+++ /dev/null
@@ -1,84 +0,0 @@
-name: On Demand VRFV2 Performance Test
-on:
-  workflow_dispatch:
-    inputs:
-      base64Config:
-        description: base64-ed config
-        required: true
-        type: string
-      performanceTestType:
-        description: Type of performance test to run
-        type: choice
-        options:
-          - "Soak"
-          - "Load"
-          - "Stress"
-          - "Spike"
-jobs:
-  vrfv2_performance_test:
-    name: VRFV2 Performance Test
-    environment: integration
-    runs-on: ubuntu20.04-8cores-32GB
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      LOKI_URL: ${{ secrets.LOKI_URL }}
-      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
-      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
-      TEST_TYPE: ${{ inputs.performanceTestType }}
-      TEST_LOG_LEVEL: debug
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-      SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
-      SLACK_CHANNEL: ${{ secrets.QA_VRF_SLACK_CHANNEL }}
-      WASP_LOG_LEVEL: info
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: ${{ inputs.network }} VRFV2 Performance Test
-        continue-on-error: true
-      - name: Checkout code
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          fetch-depth: 0
-      - name: Mask base64 config
-        run: |
-          BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-      - name: Merge and export base64 config
-        uses: ./.github/actions/setup-merge-base64-config
-        with:
-          base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }}
-      - name: Send details to Step Summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@7d541cbbca52d45b8a718257af86d9cf49774d1f # v2.2.15
-        with:
-          test_command_to_run: cd ./integration-tests && go test -v -count=1 -timeout 24h -run TestVRFV2Performance/vrfv2_performance_test ./load/vrfv2
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          artifacts_name: vrf-test-logs
-          artifacts_location: ./integration-tests/load/vrfv2/logs/
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          should_cleanup: false
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
diff --git a/.github/workflows/on-demand-vrfv2plus-eth2-clients-test.yml b/.github/workflows/on-demand-vrfv2plus-eth2-clients-test.yml
deleted file mode 100644
index 1772730075..0000000000
--- a/.github/workflows/on-demand-vrfv2plus-eth2-clients-test.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: On Demand VRFV2Plus Smoke Test (Ethereum clients)
-on:
-  workflow_dispatch:
-    inputs:
-      base64Config:
-        description: base64-ed config
-        required: true
-        type: string
-
-jobs:
-  vrfv2plus_smoke_test:
-    name: VRFV2Plus Smoke Test with custom EL client
-    environment: integration
-    runs-on: ubuntu20.04-8cores-32GB
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      TEST_LOG_LEVEL: debug
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          fetch-depth: 0
-      - name: Mask base64 config
-        run: |
-          BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-      - name: Parse base64 config
-        uses: ./.github/actions/setup-parse-base64-config
-        with:
-          base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }}
-      - name: Send details to Step Summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Execution client used" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.ETH2_EL_CLIENT }}\`" >>$GITHUB_STEP_SUMMARY
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@7d541cbbca52d45b8a718257af86d9cf49774d1f # v2.2.15
-        with:
-          test_command_to_run: cd ./integration-tests && go test -timeout 30m -count=1 -json -run ^TestVRFv2Plus$/^Link_Billing$ ./smoke/vrfv2plus_test.go 2>&1 | tee /tmp/gotest.log | gotestfmt
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          artifacts_name: vrfplus-test-logs
-          artifacts_location: ./integration-tests/smoke/logs/
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          should_cleanup: false
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ""
diff --git a/.github/workflows/on-demand-vrfv2plus-performance-test.yml b/.github/workflows/on-demand-vrfv2plus-performance-test.yml
deleted file mode 100644
index b1f9ee5b5f..0000000000
--- a/.github/workflows/on-demand-vrfv2plus-performance-test.yml
+++ /dev/null
@@ -1,85 +0,0 @@
-name: On Demand VRFV2 Plus Performance Test
-on:
-  workflow_dispatch:
-    inputs:
-      base64Config:
-        description: base64-ed config
-        required: true
-        type: string
-      performanceTestType:
-        description: Type of performance test to run
-        type: choice
-        options:
-          - "Soak"
-          - "Load"
-          - "Stress"
-          - "Spike"
-
-jobs:
-  vrfv2plus_performance_test:
-    name: VRFV2 Plus Performance Test
-    environment: integration
-    runs-on: ubuntu20.04-8cores-32GB
-    permissions:
-      checks: write
-      pull-requests: write
-      id-token: write
-      contents: read
-    env:
-      LOKI_URL: ${{ secrets.LOKI_URL }}
-      LOKI_TENANT_ID: ${{ secrets.LOKI_TENANT_ID }}
-      LOKI_BASIC_AUTH: ${{ secrets.LOKI_BASIC_AUTH }}
-      TEST_TYPE: ${{ inputs.performanceTestType }}
-      TEST_LOG_LEVEL: debug
-      REF_NAME: ${{ github.head_ref || github.ref_name }}
-      SLACK_API_KEY: ${{ secrets.QA_SLACK_API_KEY }}
-      SLACK_CHANNEL: ${{ secrets.QA_VRF_SLACK_CHANNEL }}
-      WASP_LOG_LEVEL: info
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: ${{ inputs.network }} VRFV2 Plus Performance Test
-        continue-on-error: true
-      - name: Checkout code
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          fetch-depth: 0
-      - name: Mask base64 config
-        run: |
-          BASE64_CONFIG_OVERRIDE=$(jq -r '.inputs.base64Config' $GITHUB_EVENT_PATH)
-          echo ::add-mask::$BASE64_CONFIG_OVERRIDE
-          echo "BASE64_CONFIG_OVERRIDE=$BASE64_CONFIG_OVERRIDE" >> $GITHUB_ENV
-      - name: Merge and export base64 config
-        uses: ./.github/actions/setup-merge-base64-config
-        with:
-          base64Config: ${{ env.BASE64_CONFIG_OVERRIDE }}
-      - name: Send details to Step Summary
-        shell: bash
-        run: |
-          echo "### chainlink image used for this test run :link:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.CHAINLINK_IMAGE }}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### chainlink-tests image tag for this test run :ship:" >>$GITHUB_STEP_SUMMARY
-          echo "\`${GITHUB_SHA}\`" >>$GITHUB_STEP_SUMMARY
-          echo "### Networks on which test was run" >>$GITHUB_STEP_SUMMARY
-          echo "\`${{ env.NETWORKS }}\`" >>$GITHUB_STEP_SUMMARY
-      - name: Run Tests
-        uses: smartcontractkit/chainlink-github-actions/chainlink-testing-framework/run-tests@e865e376b8c2d594028c8d645dd6c47169b72974 # v2.2.16
-        with:
-          test_command_to_run: cd ./integration-tests && go test -v -count=1 -timeout 24h -run TestVRFV2PlusPerformance/vrfv2plus_performance_test ./load/vrfv2plus
-          test_download_vendor_packages_command: cd ./integration-tests && go mod download
-          cl_repo: ${{ env.CHAINLINK_IMAGE }}
-          cl_image_tag: ${{ env.CHAINLINK_VERSION }}
-          aws_registries: ${{ secrets.QA_AWS_ACCOUNT_NUMBER }}
-          artifacts_name: vrf-test-logs
-          artifacts_location: ./integration-tests/load/vrfv2plus/logs/
-          token: ${{ secrets.GITHUB_TOKEN }}
-          go_mod_path: ./integration-tests/go.mod
-          should_cleanup: false
-          QA_AWS_REGION: ${{ secrets.QA_AWS_REGION }}
-          QA_AWS_ROLE_TO_ASSUME: ${{ secrets.QA_AWS_ROLE_TO_ASSUME }}
-          QA_KUBECONFIG: ${{ secrets.QA_KUBECONFIG }}
diff --git a/.github/workflows/operator-ui-cd.yml b/.github/workflows/operator-ui-cd.yml
deleted file mode 100644
index e674686100..0000000000
--- a/.github/workflows/operator-ui-cd.yml
+++ /dev/null
@@ -1,59 +0,0 @@
-name: Operator UI CD
-
-on:
-  push:
-    branches:
-      - develop
-  workflow_dispatch:
-  schedule:
-    - cron: "0 */1 * * *" # Run every hour
-
-jobs:
-  update-version:
-    permissions:
-      id-token: write
-    name: Update Version
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-
-      - name: Update version
-        id: update
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: ./operator_ui/check.sh
-
-#      - name: Assume role capable of dispatching action
-#        uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
-#        with:
-#          role-to-assume: ${{ secrets.AWS_OIDC_CHAINLINK_CI_AUTO_PR_TOKEN_ISSUER_ROLE_ARN }}
-#          role-duration-seconds: ${{ secrets.aws-role-duration-seconds }}
-#          role-session-name: operator-ui-cd.update-version
-#          aws-region: ${{ secrets.AWS_REGION }}
-#
-#      - name: Get Github Token
-#        id: get-gh-token
-#        uses: smartcontractkit/chainlink-github-actions/github-app-token-issuer@chore/update-github-app-token-issuer
-#        with:
-#          url: ${{ secrets.AWS_INFRA_RELENG_TOKEN_ISSUER_LAMBDA_URL }}
-#
-#      - name: Open PR
-#        uses: peter-evans/create-pull-request@153407881ec5c347639a548ade7d8ad1d6740e38 # v5.0.2
-#        with:
-#          title: Update Operator UI from ${{ steps.update.outputs.current_tag }} to ${{ steps.update.outputs.latest_tag }}
-#          token: ${{ steps.get-gh-token.outputs.access-token }}
-#          branch: chore/update-operator-ui
-#          commit-message: Update Operator UI from ${{ steps.update.outputs.current_tag }} to ${{ steps.update.outputs.latest_tag }}
-#          body: ${{ steps.update.outputs.body }}
-
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Update Version
-        continue-on-error: true
diff --git a/.github/workflows/operator-ui-ci.yml b/.github/workflows/operator-ui-ci.yml
deleted file mode 100644
index 44dd03a817..0000000000
--- a/.github/workflows/operator-ui-ci.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-#name: Operator UI CI
-#on:
-#  pull_request:
-#
-#jobs:
-#  check-gql:
-#    permissions:
-#      id-token: write
-#      contents: read
-#      # To allow writing comments to the current PR
-#      pull-requests: write
-#
-#    name: Breaking Changes GQL Check
-#    runs-on: ubuntu-latest
-#    steps:
-#      - name: Collect Metrics
-#        id: collect-gha-metrics
-#        uses: smartcontractkit/push-gha-metrics-action@v1
-#        with:
-#          basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }}
-#          hostname: ${{ secrets.GRAFANA_CLOUD_HOST }}
-#          this-job-name: Breaking Changes GQL Check
-#        continue-on-error: true
-#
-#      - name: Assume role capable of dispatching action
-#        uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
-#        with:
-#          role-to-assume: ${{ secrets.AWS_OIDC_CHAINLINK_CI_OPERATOR_UI_ACCESS_TOKEN_ISSUER_ROLE_ARN }}
-#          role-duration-seconds: 3600
-#          role-session-name: operator-ui-ci.check-gql
-#          aws-region: ${{ secrets.AWS_REGION }}
-#
-#      - name: Get Github Token
-#        id: get-gh-token
-#        uses: smartcontractkit/chainlink-github-actions/github-app-token-issuer@main
-#        with:
-#          url: ${{ secrets.AWS_INFRA_RELENG_TOKEN_ISSUER_LAMBDA_URL }}
-#
-#      - uses: convictional/trigger-workflow-and-wait@f69fa9eedd3c62a599220f4d5745230e237904be #v1.6.5
-#        with:
-#          owner: smartcontractkit
-#          repo: operator-ui
-#          github_token: ${{ steps.get-gh-token.outputs.access-token }}
-#          workflow_file_name: chainlink-ci.yml
-#          client_payload: '{"ref": "${{ github.event.pull_request.head.sha }}"}'
-
diff --git a/.github/workflows/pr-labels.yml b/.github/workflows/pr-labels.yml
deleted file mode 100644
index 66ef95434c..0000000000
--- a/.github/workflows/pr-labels.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: PR Labels
-
-on:
-  pull_request:
-    types: [labeled]
-
-jobs:
-  crib:
-    runs-on: ubuntu-latest
-    permissions:
-      issues: write
-      pull-requests: write
-    steps:
-      - name: Comment on PR
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        with:
-          script: |
-            const labelsToCheck = ["crib"];
-            const { owner, repo, number: prNumber } = context.issue;
-            const { data: labels } = await github.rest.issues.listLabelsOnIssue({ owner, repo, issue_number: prNumber });
-            const labelMatches = labels.some(label => labelsToCheck.includes(label.name));
-
-            if (!labelMatches) {
-              core.info("No 'crib' PR label found. Proceeding.");
-              return;
-            }
-
-            const comment = `## CRIB Environment Details :information_source:
-
-            CRIB activated via the 'crib' label. To destroy the environment, remove the 'crib' PR label or close the PR.
-
-            Please review the following details:
-
-            ### Subdomains
-
-            _Use these subdomains to access the CRIB environment. They are prefixes to the internal base domain._
-
-            - crib-chain-${prNumber}-node1.
-            - crib-chain-${prNumber}-node2.
-            - crib-chain-${prNumber}-node3.
-            - crib-chain-${prNumber}-node4.
-            - crib-chain-${prNumber}-node5.
-            - crib-chain-${prNumber}-node6.
-            - crib-chain-${prNumber}-geth-http.
-            - crib-chain-${prNumber}-geth-ws.
-            - crib-chain-${prNumber}-mockserver.
-            `;
-
-            await github.rest.issues.createComment({
-              owner,
-              repo,
-              issue_number: prNumber,
-              body: comment
-            });
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
deleted file mode 100644
index f0136cfbd0..0000000000
--- a/.github/workflows/publish.yml
+++ /dev/null
@@ -1,81 +0,0 @@
-name: publish
-
-on:
-  push:
-    tags:
-      - "v*"
-    branches:
-      - ccip-develop
-      - "release/**"
-
-jobs:
-  build-and-publish:
-    # Do not trigger from versioned tags.
-    if: ${{ ! startsWith(github.ref, 'refs/tags/v') }}
-    environment: publish
-    permissions:
-      id-token: write
-      contents: read
-    runs-on: ubuntu-latest
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: build-and-publish
-        continue-on-error: true
-
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Build and publish chainlink image
-        uses: ./.github/actions/build-sign-publish-chainlink
-        with:
-          publish: true
-          aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_PROD_PUBLISH_ARN }}
-          aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }}
-          aws-region: ${{ secrets.AWS_REGION }}
-          ecr-hostname: ${{ secrets.AWS_ECR_REPO_URL }}
-          ecr-image-name: chainlink-ccip
-          sign-images: false
-          dockerfile: ./core/chainlink.Dockerfile
-
-  build-and-publish-release:
-    # Trigger only from versioned tags.
-    if: ${{ startsWith(github.ref, 'refs/tags/v') }}
-    environment: publish
-    env:
-      # Public ECR is only available in us-east-1; not a secret.
-      AWS_REGION: us-east-1
-      AWS_ECR_REPO_PUBLIC_REGISTRY: public.ecr.aws
-    permissions:
-      id-token: write
-      contents: read
-    runs-on: ubuntu-latest
-    steps:
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: build-and-publish-release
-        continue-on-error: true
-
-      - name: Checkout the repo
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # v3.4.0
-      - name: Build and publish chainlink image
-        uses: ./.github/actions/build-sign-publish-chainlink
-        with:
-          publish: true
-          aws-role-to-assume: ${{ secrets.AWS_OIDC_IAM_ROLE_PROD_PUBLISH_ARN }}
-          aws-role-duration-seconds: ${{ secrets.AWS_ROLE_DURATION_SECONDS }}
-          aws-region: ${{ env.AWS_REGION }}
-          ecr-hostname: ${{ env.AWS_ECR_REPO_PUBLIC_REGISTRY }}
-          ecr-image-name: w0i8p0z9/chainlink-ccip
-          sign-images: false
-          dockerfile: ./core/chainlink.Dockerfile
-
diff --git a/.github/workflows/readme.yml b/.github/workflows/readme.yml
deleted file mode 100644
index 54fa3d7e43..0000000000
--- a/.github/workflows/readme.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# This action checks PRs to see if any README* files were updated.
-# If none were, it will add a message to the PR asking if it would make sense to do so.
-#
-name: Readme
-
-on: pull_request
-
-jobs:
-  readme:
-    # For security reasons, GITHUB_TOKEN is read-only on forks, so we cannot leave comments on PRs.
-    # This check skips the job if it is detected we are running on a fork.
-    if: ${{ github.event.pull_request.head.repo.full_name == 'smartcontractkit/chainlink' }}
-    name: Readme checker
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check for changed files
-        id: changedfiles
-        uses: umani/changed-files@d7f842d11479940a6036e3aacc6d35523e6ba978 # Version 4.1.0
-        with:
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          pattern: '^(?!.*node_modules).*README\.md$'
-      - name: Make a comment
-        uses: unsplash/comment-on-pr@ffe8f97ccc63ce12c3c23c6885b169db67958d3b # Version 1.3.0
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        if: contains(steps.changedfiles.outputs.files_updated, 'README') != true && contains(steps.changedfiles.outputs.files_created, 'README') != true
-        with:
-          msg: "I see that you haven't updated any README files. Would it make sense to do so?"
-          check_for_duplicate_msg: true
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Readme checker
-        continue-on-error: true
\ No newline at end of file
diff --git a/.github/workflows/sigscanner.yml b/.github/workflows/sigscanner.yml
deleted file mode 100644
index c245380c23..0000000000
--- a/.github/workflows/sigscanner.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: 'SigScanner Check'
-
-on:
-  merge_group:
-  push:
-
-jobs:
-  sigscanner-check:
-    runs-on: ubuntu-latest
-    steps:
-      - name: "SigScanner checking ${{ github.sha }} by ${{ github.actor }}"
-        env:
-          API_TOKEN: ${{ secrets.SIGSCANNER_API_TOKEN }}
-          API_URL: ${{ secrets.SIGSCANNER_API_URL }}
-        run: |
-          echo "🔎 Checking commit ${{ github.sha }} by ${{ github.actor }} in ${{ github.repository }} - ${{ github.event_name }}"
-          CODE=`curl --write-out '%{http_code}' -X POST -H "Content-Type: application/json" -H "Authorization: $API_TOKEN" --silent --output /dev/null --url "$API_URL" --data '{"commit":"${{ github.sha }}","repository":"${{ github.repository }}","author":"${{ github.actor }}"}'`
-          echo "Received $CODE"
-          if [[ "$CODE" == "200" ]]; then
-            echo "✅ Commit is verified"
-            exit 0
-          else
-            echo "❌ Commit is NOT verified"
-            exit 1
-          fi
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: sigscanner-check
-        continue-on-error: true
diff --git a/.github/workflows/solidity-foundry.yml b/.github/workflows/solidity-foundry.yml
deleted file mode 100644
index e19b12fc73..0000000000
--- a/.github/workflows/solidity-foundry.yml
+++ /dev/null
@@ -1,164 +0,0 @@
-name: Solidity Foundry
-on: [pull_request]
-
-env:
-  FOUNDRY_PROFILE: ci
-
-jobs:
-  changes:
-    name: Detect changes
-    runs-on: ubuntu-latest
-    outputs:
-      changes: ${{ steps.changes.outputs.src }}
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2.11.1
-        id: changes
-        with:
-          # Foundry is only used for Solidity v0.8 contracts, therefore we can ignore
-          # changes to older contracts.
-          filters: |
-            src:
-              - 'contracts/src/v0.8/**/*'
-              - 'contracts/test/v0.8/foundry/**/*'
-              - '.github/workflows/solidity-foundry.yml'
-              - 'contracts/foundry.toml'
-              - 'contracts/gas-snapshots/*.gas-snapshot'
-              - '.gitmodules'
-              - 'contracts/foundry-lib'
-
-  coverage:
-    needs: [changes]
-    name: Coverage
-    runs-on: ubuntu-latest
-    env:
-      FOUNDRY_PROFILE: ccip
-
-    steps:
-      - name: Collect Metrics
-        if: ${{ needs.changes.outputs.changes == 'true' }}
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Coverage
-        continue-on-error: true
-
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # v3.4.0
-        with:
-          submodules: recursive
-
-      # Only needed because we use the NPM versions of packages
-      # and not native Foundry. This is to make sure the dependencies
-      # stay in sync.
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-
-      - name: Install Foundry
-        uses: foundry-rs/foundry-toolchain@v1
-        with:
-          version: nightly
-
-      - name: Run Forge build
-        if: ${{ needs.changes.outputs.changes == 'true' }}
-        working-directory: contracts
-        run: |
-          forge --version
-          forge build
-        id: build
-
-      - name: Run coverage
-        if: ${{ needs.changes.outputs.changes == 'true' }}
-        working-directory: contracts
-        run: forge coverage --report lcov
-
-      - name: Prune report
-        if: ${{ needs.changes.outputs.changes == 'true' }}
-        run: |
-          sudo apt-get install lcov
-          ./tools/ci/ccip_lcov_prune ./contracts/lcov.info ./lcov.info.pruned
-
-      - name: Report code coverage
-        if: ${{ needs.changes.outputs.changes == 'true' }}
-        uses: zgosalvez/github-actions-report-lcov@v1
-        with:
-          update-comment: true
-          coverage-files: lcov.info.pruned
-          minimum-coverage: 98.5
-          artifact-name: code-coverage-report
-          working-directory: ./contracts
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-  tests:
-    strategy:
-      fail-fast: false
-      matrix:
-        product: [vrf, automation, llo-feeds, l2ep, functions, shared, ccip, rebalancer]
-    needs: [changes]
-    name: Foundry Tests ${{ matrix.product }}
-    # See https://github.com/foundry-rs/foundry/issues/3827
-    runs-on: ubuntu-22.04
-
-    # The if statements on steps after the repo checkout are a workaround for
-    # passing the required check for PRs that don't have filtered changes.
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          submodules: recursive
-
-      # Only needed because we use the NPM versions of packages
-      # and not native Foundry. This is to make sure the dependencies
-      # stay in sync.
-      - name: Setup NodeJS
-        if: needs.changes.outputs.changes == 'true'
-        uses: ./.github/actions/setup-nodejs
-
-      - name: Install Foundry
-        if: needs.changes.outputs.changes == 'true'
-        uses: foundry-rs/foundry-toolchain@v1
-        with:
-          # Has to match the `make foundry` version.
-          version: nightly-2cb875799419c907cc3709e586ece2559e6b340e
-
-      - name: Run Forge build
-        if: needs.changes.outputs.changes == 'true'
-        run: |
-          forge --version
-          forge build
-        id: build
-        working-directory: contracts
-        env:
-          FOUNDRY_PROFILE: ${{ matrix.product }}
-
-      - name: Run Forge tests
-        if: needs.changes.outputs.changes == 'true'
-        run: |
-          forge test -vvv
-        id: test
-        working-directory: contracts
-        env:
-          FOUNDRY_PROFILE: ${{ matrix.product }}
-
-      - name: Run Forge snapshot
-        if: ${{ !contains(fromJson('["vrf"]'), matrix.product) && !contains(fromJson('["automation"]'), matrix.product) && needs.changes.outputs.changes == 'true' }}
-        run: |
-          forge snapshot --nmt "testFuzz_\w{1,}?" --check gas-snapshots/${{ matrix.product }}.gas-snapshot
-        id: snapshot
-        working-directory: contracts
-        env:
-          FOUNDRY_PROFILE: ${{ matrix.product }}
-
-      - name: Collect Metrics
-        if: needs.changes.outputs.changes == 'true'
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Foundry Tests ${{ matrix.product }}
-        continue-on-error: true
\ No newline at end of file
diff --git a/.github/workflows/solidity-hardhat.yml b/.github/workflows/solidity-hardhat.yml
deleted file mode 100644
index 0781840b19..0000000000
--- a/.github/workflows/solidity-hardhat.yml
+++ /dev/null
@@ -1,184 +0,0 @@
-name: Solidity-Hardhat
-
-on:
-  merge_group:
-  push:
-    branches:
-      - develop
-
-env:
-  NODE_OPTIONS: --max_old_space_size=8192
-
-defaults:
-  run:
-    shell: bash
-
-jobs:
-  changes:
-    name: Detect changes
-    runs-on: ubuntu-latest
-    outputs:
-      changes: ${{ steps.changes.outputs.src }}
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2.11.1
-        id: changes
-        with:
-          filters: |
-            src:
-              - 'contracts/src/!(v0.8/(llo-feeds|ccip|rebalancer)/**)/**/*'
-              - 'contracts/test/**/*'
-              - 'contracts/package.json'
-              - 'contracts/pnpm-lock.yaml'
-              - 'contracts/hardhat.config.ts'
-              - 'contracts/ci.json'
-              - '.github/workflows/solidity-hardhat.yml'
-
-  split-tests:
-    name: Split Solidity Tests
-    runs-on: ubuntu-latest
-    outputs:
-      splits: ${{ steps.split.outputs.splits }}
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Generate splits
-        id: split
-        uses: ./.github/actions/split-tests
-        with:
-          config: ./contracts/ci.json
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Split Solidity Tests
-        continue-on-error: true
-
-  solidity-coverage-splits:
-    needs: [changes, split-tests]
-    if: needs.changes.outputs.changes == 'true'
-    name: Solidity Coverage ${{ matrix.split.id }} ${{ fromJSON('["(skipped)", ""]')[needs.changes.outputs.changes == 'true'] }}
-    strategy:
-      fail-fast: false
-      matrix:
-        split: ${{ fromJson(needs.split-tests.outputs.splits) }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-      - name: Setup Hardhat
-        uses: ./.github/actions/setup-hardhat
-        with:
-          namespace: coverage
-      - name: Run coverage
-        env:
-          SPLIT: ${{ matrix.split.coverageTests }}
-        shell: bash
-        run: pnpm coverage --testfiles "$SPLIT"
-        working-directory: contracts
-      - name: Push coverage
-        run: ./tools/bin/codecov -f ./contracts/coverage.json
-      - name: Rename coverage
-        run: mv ./contracts/coverage.json ./contracts/coverage-${{ matrix.split.idx }}.json
-      - name: Upload coverage
-        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
-        with:
-          name: solidity-coverage-${{ matrix.split.idx }}
-          path: ./contracts/coverage-${{ matrix.split.idx }}.json
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Solidity Coverage ${{ matrix.split.id }}
-        continue-on-error: true
-
-  solidity-coverage:
-    needs: [changes, solidity-coverage-splits]
-    if: needs.changes.outputs.changes == 'true'
-    name: Solidity Coverage ${{ fromJSON('["(skipped)", ""]')[needs.changes.outputs.changes == 'true'] }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-      - name: Make coverage directory
-        run: mkdir ./contracts/coverage-reports
-      - name: Download coverage
-        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2
-        with:
-          path: ./contracts/coverage-reports
-      - name: Display structure of downloaded files
-        run: ls -R coverage-reports
-        working-directory: contracts
-      - name: Generate merged report
-        run: pnpm istanbul report text text-summary
-        working-directory: contracts
-
-  solidity-splits:
-    needs: [changes, split-tests]
-    if: needs.changes.outputs.changes == 'true'
-    name: Solidity ${{ matrix.split.id }} ${{ fromJSON('["(skipped)", ""]')[needs.changes.outputs.changes == 'true'] }}
-    strategy:
-      fail-fast: false
-      matrix:
-        split: ${{ fromJson(needs.split-tests.outputs.splits) }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-      - name: Setup Hardhat
-        uses: ./.github/actions/setup-hardhat
-        with:
-          namespace: coverage
-      - name: Run tests
-        env:
-          SPLIT: ${{ matrix.split.tests }}
-        working-directory: contracts
-        run: pnpm test -- $SPLIT
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Solidity ${{ matrix.split.id }}
-        continue-on-error: true
-
-  solidity:
-    needs: [changes, solidity-splits]
-    name: Solidity
-    runs-on: ubuntu-latest
-    if: always()
-    steps:
-      - run: echo 'Solidity tests finished!'
-      - name: Check test results
-        run: |
-          if [[ "${{ needs.changes.result }}" = "failure" || "${{ needs.solidity-splits.result }}" = "failure" ]]; then
-            echo "One or more changes / solidity-splits jobs failed"
-            exit 1
-          else
-            echo "All test jobs passed successfully"
-          fi
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Solidity
-        continue-on-error: true
\ No newline at end of file
diff --git a/.github/workflows/solidity.yml b/.github/workflows/solidity.yml
deleted file mode 100644
index 50f4c9c56e..0000000000
--- a/.github/workflows/solidity.yml
+++ /dev/null
@@ -1,247 +0,0 @@
-name: Solidity
-
-on:
-  merge_group:
-  push:
-
-defaults:
-  run:
-    shell: bash
-
-jobs:
-  initialize:
-    name: Initialize
-    runs-on: ubuntu-latest
-    outputs:
-      is-release: ${{ steps.release-tag-check.outputs.is-release }}
-      is-pre-release: ${{ steps.release-tag-check.outputs.is-pre-release }}
-    steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
-      - name: Check release tag
-        id: release-tag-check
-        uses: smartcontractkit/chainlink-github-actions/release/release-tag-check@main
-        env:
-          # Match semver git tags with a "contracts-ccip/" prefix.
-          RELEASE_REGEX: '^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+$'
-          PRE_RELEASE_REGEX: '^contracts-ccip/v[0-9]+\.[0-9]+\.[0-9]+-(.+)$'
-
-  changes:
-    name: Detect changes
-    runs-on: ubuntu-latest
-    outputs:
-      changes: ${{ steps.changes.outputs.src }}
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2.11.1
-        id: changes
-        with:
-          list-files: "csv"
-          filters: |
-            src:
-              - 'contracts/**/*'
-              - '.github/workflows/solidity.yml'
-              - '.github/workflows/solidity-foundry.yml'
-            old_sol:
-              - 'contracts/src/v0.4/**/*'
-              - 'contracts/src/v0.5/**/*'
-              - 'contracts/src/v0.6/**/*'
-              - 'contracts/src/v0.7/**/*'
-
-      - name: Fail if read-only files have changed
-        if: ${{ steps.changes.outputs.old_sol == 'true' }}
-        run: |
-          echo "One or more read-only Solidity file(s) has changed."
-          for file in ${{ steps.changes.outputs.old_sol_files }}; do
-            echo "$file was changed"
-          done
-          exit 1
-
-#  prepublish-test:
-#    needs: [changes]
-#    if: needs.changes.outputs.changes == 'true'
-#    name: Prepublish Test ${{ fromJSON('["(skipped)", ""]')[needs.changes.outputs.changes == 'true'] }}
-#    runs-on: ubuntu-latest
-#    steps:
-#      - name: Checkout the repo
-#        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-#      - name: Setup NodeJS
-#        uses: ./.github/actions/setup-nodejs
-#      - name: Run Prepublish test
-#        working-directory: contracts
-#        run: pnpm prepublishOnly
-#      - name: Collect Metrics
-#        id: collect-gha-metrics
-#        uses: smartcontractkit/push-gha-metrics-action@d1618b772a97fd87e6505de97b872ee0b1f1729a # v2.0.2
-#        with:
-#          basic-auth: ${{ secrets.GRAFANA_CLOUD_BASIC_AUTH }}
-#          hostname: ${{ secrets.GRAFANA_CLOUD_HOST }}
-#          this-job-name: Prepublish Test
-#        continue-on-error: true
-
-  native-compile:
-    needs: [changes]
-    if: needs.changes.outputs.changes == 'true'
-    name: Native Compilation ${{ fromJSON('["(skipped)", ""]')[needs.changes.outputs.changes == 'true'] }}
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Checkout diff-so-fancy
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-        with:
-          repository: so-fancy/diff-so-fancy
-          ref: a673cb4d2707f64d92b86498a2f5f71c8e2643d5 # v1.4.3
-          path: diff-so-fancy
-      - name: Install diff-so-fancy
-        run: echo "$GITHUB_WORKSPACE/diff-so-fancy" >> $GITHUB_PATH
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-        with:
-          prod: "true"
-      - name: Setup Go
-        uses: ./.github/actions/setup-go
-      - name: Run native compile and generate wrappers
-        run: make wrappers-all
-        working-directory: ./contracts
-      - name: Verify local solc binaries
-        run: ./tools/ci/check_solc_hashes
-      - name: Display git diff
-        if: ${{ needs.changes.outputs.changes == 'true' }}
-        run: git diff --minimal --color --exit-code | diff-so-fancy
-      - name: Comment on fix instructions
-        env:
-          GITHUB_TOKEN: ${{ github.token }}
-        if: ${{ failure() }}
-        run: gh pr comment -b 'Go solidity wrappers are out-of-date, regenerate them via the `make wrappers-all` command'
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Native Compilation
-        continue-on-error: true
-
-  # The if statements on steps after the repo checkout are a workaround for
-  # passing the required check for PRs that don't have filtered changes.
-  lint:
-    defaults:
-      run:
-        working-directory: contracts
-    needs: [changes]
-    name: Solidity Lint
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup NodeJS
-        if: needs.changes.outputs.changes == 'true'
-        uses: ./.github/actions/setup-nodejs
-      - name: Run pnpm lint
-        if: needs.changes.outputs.changes == 'true'
-        run: pnpm lint
-      - name: Run solhint
-        if: needs.changes.outputs.changes == 'true'
-        run: pnpm solhint
-      - name: Collect Metrics
-        if: needs.changes.outputs.changes == 'true'
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Lint
-        continue-on-error: true
-
-  prettier:
-    defaults:
-      run:
-        working-directory: contracts
-    needs: [changes]
-    name: Prettier Formatting
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-      - name: Setup NodeJS
-        if: needs.changes.outputs.changes == 'true'
-        uses: ./.github/actions/setup-nodejs
-      - name: Run prettier check
-        if: needs.changes.outputs.changes == 'true'
-        run: pnpm prettier:check
-      - name: Collect Metrics
-        if: needs.changes.outputs.changes == 'true'
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Prettier Formatting
-        continue-on-error: true
-
-  publish-beta:
-    needs: [initialize, changes, native-compile, lint, prettier]
-    name: Publish Beta NPM
-    runs-on: ubuntu-latest
-    if: needs.initialize.outputs.is-pre-release == 'true'
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-      - name: Configure npmrc
-        env:
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-        run: |
-          echo "//registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}" >> ~/.npmrc
-          echo "registry=https://registry.npmjs.org/" >> ~/.npmrc
-      - name: Publish Beta
-        run: pnpm publish-beta --publish-branch "${GITHUB_REF_NAME}" --no-git-checks
-        working-directory: contracts
-      - name: Collect Metrics
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Publish-Beta
-        continue-on-error: true
-
-  publish-prod:
-    needs: [initialize, changes, native-compile, lint, prettier]
-    name: Publish Prod NPM
-    runs-on: ubuntu-latest
-    if: needs.initialize.outputs.is-release == 'true'
-    steps:
-      - name: Checkout the repo
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
-      - name: Setup NodeJS
-        uses: ./.github/actions/setup-nodejs
-      - name: Create GitHub Release
-        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
-        with:
-          tag_name: ${{ github.ref_name }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-      - name: Configure npmrc
-        env:
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-        run: |
"//registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}" >> ~/.npmrc - echo "registry=https://registry.npmjs.org/" >> ~/.npmrc - - name: Publish Prod - run: pnpm publish-prod --publish-branch "${GITHUB_REF_NAME}" --no-git-checks - working-directory: contracts - - name: Collect Metrics - id: collect-gha-metrics - uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0 - with: - org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }} - basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }} - hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }} - this-job-name: Publish-Prod - continue-on-error: true diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml deleted file mode 100644 index 8eb95f4147..0000000000 --- a/.github/workflows/stale.yml +++ /dev/null @@ -1,26 +0,0 @@ -# Workflow is triggered daily midnight UTC -# A PR with more than 60 days of inactivity will be marked as stale -# A PR that's stale for more than 7 days will be automatically closed -# Issues are exempt from auto marking as stale but issues with manually added 'stale' label are eligible for auto closure after 7 days. -# PRs with assignees are exempt from auto stale marking, it's the responsibility of the assignee to get the PR progressed either with review/merge or closure. -name: Manage stale Issues and PRs - -on: - schedule: - - cron: "0 0 * * *" # Will be triggered every day at midnight UTC - -jobs: - stale: - - runs-on: ubuntu-latest - permissions: - issues: write - pull-requests: write - - steps: - - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - exempt-all-pr-assignees: true - stale-pr-message: 'This PR is stale because it has been open 60 days with no activity. Remove stale label or comment or this will be closed in 7 days.' - days-before-issue-stale: -1 # disables marking issues as stale automatically. Issues can still be marked as stale manually, in which the closure policy applies. diff --git a/.github/workflows/sync-develop-from-smartcontractkit-chainlink.yml b/.github/workflows/sync-develop-from-smartcontractkit-chainlink.yml deleted file mode 100644 index afdcfa156c..0000000000 --- a/.github/workflows/sync-develop-from-smartcontractkit-chainlink.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Sync develop from smartcontractkit/chainlink - -on: - schedule: - # * is a special character in YAML so you have to quote this string - - cron: '*/30 * * * *' - -jobs: - sync: - name: Sync - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - ref: develop - if: env.GITHUB_REPOSITORY != 'smartcontractkit/chainlink' - - name: Sync - run: | - git remote add upstream "https://github.com/smartcontractkit/chainlink.git" - COMMIT_HASH_UPSTREAM=$(git ls-remote upstream develop | grep -P '^[0-9a-f]{40}\trefs/heads/develop$' | cut -f 1) - COMMIT_HASH_ORIGIN=$(git ls-remote origin develop | grep -P '^[0-9a-f]{40}\trefs/heads/develop$' | cut -f 1) - if [ "$COMMIT_HASH_UPSTREAM" = "$COMMIT_HASH_ORIGIN" ]; then - echo "Both remotes have develop at $COMMIT_HASH_UPSTREAM. No need to sync." - else - echo "upstream has develop at $COMMIT_HASH_UPSTREAM. origin has develop at $COMMIT_HASH_ORIGIN. Syncing..." 
-            git fetch upstream
-            git push origin upstream/develop:develop
-          fi
-        if: env.GITHUB_REPOSITORY != 'smartcontractkit/chainlink'
-      - name: Collect Metrics
-        if: always()
-        id: collect-gha-metrics
-        uses: smartcontractkit/push-gha-metrics-action@0281b09807758be1dcc41651e44e62b353808c47 # v2.1.0
-        with:
-          org-id: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
-          basic-auth: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
-          hostname: ${{ secrets.GRAFANA_INTERNAL_HOST }}
-          this-job-name: Sync
-        continue-on-error: true
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 0000000000..5f535d2ef2
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,62 @@
+name: solidity
+
+on: push
+
+env:
+  FOUNDRY_PROFILE: ci
+
+jobs:
+  tests:
+    strategy:
+      fail-fast: false
+      matrix:
+        product: [ccip]
+    name: Foundry Tests ${{ matrix.product }}
+    # See https://github.com/foundry-rs/foundry/issues/3827
+    runs-on: ubuntu-22.04
+
+    # The `if` on the snapshot step skips products whose gas snapshots
+    # are intentionally not checked (currently vrf and automation).
+    steps:
+      - name: Checkout the repo
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+        with:
+          submodules: recursive
+
+      # Only needed because we use the NPM versions of packages
+      # and not native Foundry. This is to make sure the dependencies
+      # stay in sync.
+      - name: Setup NodeJS
+        uses: ./.github/actions/setup-nodejs
+
+      - name: Install Foundry
+        uses: foundry-rs/foundry-toolchain@v1
+        with:
+          # Has to match the `make foundry` version.
+          version: nightly-2cb875799419c907cc3709e586ece2559e6b340e
+
+      - name: Run Forge build
+        run: |
+          forge --version
+          forge build
+        id: build
+        working-directory: contracts
+        env:
+          FOUNDRY_PROFILE: ${{ matrix.product }}
+
+      - name: Run Forge tests
+        run: |
+          forge test -vvv
+        id: test
+        working-directory: contracts
+        env:
+          FOUNDRY_PROFILE: ${{ matrix.product }}
+
+      - name: Run Forge snapshot
+        if: ${{ !contains(fromJson('["vrf"]'), matrix.product) && !contains(fromJson('["automation"]'), matrix.product) }}
+        run: |
+          forge snapshot --nmt "testFuzz_\w{1,}?" --check gas-snapshots/${{ matrix.product }}.gas-snapshot
+        id: snapshot
+        working-directory: contracts
+        env:
+          FOUNDRY_PROFILE: ${{ matrix.product }}
diff --git a/certora/confs/ccip.conf b/certora/confs/ccip.conf
index 0bd201b8d4..02245a1353 100644
--- a/certora/confs/ccip.conf
+++ b/certora/confs/ccip.conf
@@ -8,7 +8,7 @@
   ],
   "link": [
     "UpgradeableLockReleaseTokenPool:i_token=SimpleERC20"
-  ],
+  ],
  "optimistic_loop": true,
  "process": "emv",
  "prover_args": ["-depth 10","-mediumTimeout 700"],
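Note for reviewers: the following is a minimal sketch of how the new tests.yml job could be reproduced locally, assuming a checkout with submodules and a foundryup installation that supports pinning a version via --version (the nightly pin and profile name are taken from the workflow above).

  # Sketch: reproduce the Foundry CI job locally (assumes foundryup is installed).
  git submodule update --init --recursive
  cd contracts
  # Pin the same nightly the workflow installs.
  foundryup --version nightly-2cb875799419c907cc3709e586ece2559e6b340e
  export FOUNDRY_PROFILE=ccip   # matches the matrix product
  forge --version
  forge build
  forge test -vvv
  forge snapshot --nmt "testFuzz_\w{1,}?" --check gas-snapshots/ccip.gas-snapshot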