From 6892f7e26fc08061903be7169e819a7e134c0bc5 Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Tue, 29 Oct 2024 11:23:35 -0400 Subject: [PATCH 01/44] feat(InventoryClient): Wrap ETH on Mainnet (#1882) * feat(InventoryClient): Wrap ETH on Mainnet Adds lightweight Ethereum adapter that inventory client can use to wrap ETH. Has no other side effects * lint * Update src/adapter/BaseChainAdapter.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> --------- Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> --- src/adapter/BaseChainAdapter.ts | 7 ++++++- src/clients/bridges/AdapterManager.ts | 12 ++++++++++-- src/clients/bridges/EthereumAdapter.ts | 25 +++++++++++++++++++++++++ 3 files changed, 41 insertions(+), 3 deletions(-) create mode 100644 src/clients/bridges/EthereumAdapter.ts diff --git a/src/adapter/BaseChainAdapter.ts b/src/adapter/BaseChainAdapter.ts index 4ef899f3b..d72b83fd6 100644 --- a/src/adapter/BaseChainAdapter.ts +++ b/src/adapter/BaseChainAdapter.ts @@ -208,7 +208,12 @@ export class BaseChainAdapter { } const value = ethBalance.sub(target); - this.log("Wrapping ETH", { threshold, target, value, ethBalance }, "debug", "wrapEthIfAboveThreshold"); + this.log( + `Wrapping ETH on chain ${getNetworkName(this.chainId)}`, + { threshold, target, value, ethBalance }, + "debug", + "wrapEthIfAboveThreshold" + ); const method = "deposit"; const formatFunc = createFormatFunction(2, 4, false, 18); const mrkdwn = diff --git a/src/clients/bridges/AdapterManager.ts b/src/clients/bridges/AdapterManager.ts index e232d5089..3e9866064 100644 --- a/src/clients/bridges/AdapterManager.ts +++ b/src/clients/bridges/AdapterManager.ts @@ -13,6 +13,7 @@ import { ArbitrumAdapter, PolygonAdapter, ZKSyncAdapter, LineaAdapter, ScrollAda import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; import { BaseChainAdapter } from "../../adapter"; +import { EthereumAdapter } from "./EthereumAdapter"; export class AdapterManager { public adapters: { [chainId: number]: BaseChainAdapter } = {}; @@ -21,7 +22,7 @@ export class AdapterManager { // receiving ETH that needs to be wrapped on the L2. This array contains the chainIds of the chains that this // manager will attempt to wrap ETH on into WETH. This list also includes chains like Arbitrum where the relayer is // expected to receive ETH as a gas refund from an L1 to L2 deposit that was intended to rebalance inventory. - public chainsToWrapEtherOn = [...spokesThatHoldEthAndWeth, CHAIN_IDs.ARBITRUM]; + private chainsToWrapEtherOn = [...spokesThatHoldEthAndWeth, CHAIN_IDs.ARBITRUM, CHAIN_IDs.MAINNET]; constructor( readonly logger: winston.Logger, @@ -232,7 +233,14 @@ export class AdapterManager { wrapThreshold.gte(wrapTarget), `wrapEtherThreshold ${wrapThreshold.toString()} must be >= wrapEtherTarget ${wrapTarget.toString()}` ); - await this.adapters[chainId].wrapEthIfAboveThreshold(wrapThreshold, wrapTarget, simMode); + if (chainId === CHAIN_IDs.MAINNET) { + // For mainnet, construct one-off adapter to wrap ETH, because Ethereum is typically not a chain + // that we have an adapter for. 
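+        // Hypothetical example (actual values come from the inventory config): with
+        // wrapEtherThreshold = 10 ETH and wrapEtherTarget = 5 ETH, a 12 ETH balance is
+        // above the threshold, so wrapEthIfAboveThreshold() wraps balance - target = 7 ETH
+        // into WETH and leaves the 5 ETH target as native ETH.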
+ const ethAdapter = new EthereumAdapter(this.logger, this.spokePoolClients); + await ethAdapter.wrapEthIfAboveThreshold(wrapThreshold, wrapTarget, simMode); + } else { + await this.adapters[chainId].wrapEthIfAboveThreshold(wrapThreshold, wrapTarget, simMode); + } } ); } diff --git a/src/clients/bridges/EthereumAdapter.ts b/src/clients/bridges/EthereumAdapter.ts new file mode 100644 index 000000000..03f4787a3 --- /dev/null +++ b/src/clients/bridges/EthereumAdapter.ts @@ -0,0 +1,25 @@ +import { DEFAULT_GAS_MULTIPLIER } from "../../common"; +import { CHAIN_IDs, winston } from "../../utils"; +import { SpokePoolClient } from "../SpokePoolClient"; +import { BaseChainAdapter } from "../../adapter/BaseChainAdapter"; + +// This adapter is only used by the AdapterManager to wrap ETH on Mainnet, so we don't pass in any supported +// tokens or bridges. +export class EthereumAdapter extends BaseChainAdapter { + constructor(logger: winston.Logger, readonly spokePoolClients: { [chainId: number]: SpokePoolClient }) { + const { MAINNET } = CHAIN_IDs; + const bridges = {}; + const supportedTokens = []; + const monitoredAddresses = []; + super( + spokePoolClients, + MAINNET, + MAINNET, + monitoredAddresses, + logger, + supportedTokens, + bridges, + DEFAULT_GAS_MULTIPLIER[MAINNET] ?? 1 + ); + } +} From 6395d1782c3d99d7779a15410d517427c736b72d Mon Sep 17 00:00:00 2001 From: bmzig <57361391+bmzig@users.noreply.github.com> Date: Wed, 30 Oct 2024 12:35:25 -0500 Subject: [PATCH 02/44] fix: define OptimismPortal2 and DisputeGameFactory for Base (#1883) Signed-off-by: bennett --- src/common/Constants.ts | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 05d0a447e..654fcad38 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -540,6 +540,22 @@ export const EXPECTED_L1_TO_L2_MESSAGE_TIME = { }; export const OPSTACK_CONTRACT_OVERRIDES = { + [CHAIN_IDs.BASE]: { + // https://github.com/ethereum-optimism/ecosystem/blob/8df6ab1afcf49312dc7e89ed079f910843d74427/packages/sdk/src/utils/chain-constants.ts#L252 + l1: { + AddressManager: "0x8EfB6B5c4767B09Dc9AA6Af4eAA89F749522BaE2", + L1CrossDomainMessenger: "0x866E82a600A1414e583f7F13623F1aC5d58b0Afa", + L1StandardBridge: CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET].ovmStandardBridge_8453.address, + StateCommitmentChain: ZERO_ADDRESS, + CanonicalTransactionChain: ZERO_ADDRESS, + BondManager: ZERO_ADDRESS, + OptimismPortal: "0x49048044D57e1C92A77f79988d21Fa8fAF74E97e", + L2OutputOracle: "0x56315b90c40730925ec5485cf004d835058518A0", + OptimismPortal2: "0x49048044D57e1C92A77f79988d21Fa8fAF74E97e", + DisputeGameFactory: "0x43edB88C4B80fDD2AdFF2412A7BebF9dF42cB40e", + }, + l2: DEFAULT_L2_CONTRACT_ADDRESSES, + }, [CHAIN_IDs.BLAST]: { l1: { AddressManager: "0xE064B565Cf2A312a3e66Fe4118890583727380C0", From 6bc6acbf5156c4a6d6ce3441e42804011b346740 Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Thu, 31 Oct 2024 17:09:09 -0400 Subject: [PATCH 03/44] improve: Remove FundsDeposited and FilledRelay mentions (#1884) Same idea as across-protocol/sdk#762 --- src/dataworker/README.md | 41 +++++++++++++++------------- test/Dataworker.loadData.fill.ts | 16 +++++------ test/Dataworker.loadData.slowFill.ts | 2 +- 3 files changed, 31 insertions(+), 28 deletions(-) diff --git a/src/dataworker/README.md b/src/dataworker/README.md index 37eb36e12..265ca7671 100644 --- a/src/dataworker/README.md +++ b/src/dataworker/README.md @@ -123,41 +123,44 @@ flowchart TD ### 
Validating fills -A fill must match a deposit on every shared parameter that they have in common. The matched deposit does not have to be in the same bundle as the fill. A fill contains the following [event parameter](https://github.com/across-protocol/contracts/blob/master/contracts/SpokePool.sol#L139)'s: +A fill must match a deposit on every shared parameter that they have in common. The matched deposit does not have to be in the same bundle as the fill. A fill contains the following [event parameter](https://github.com/across-protocol/contracts/blob/a663586e8619bc74cb1da2375107bd5eef0f3144/contracts/interfaces/V3SpokePoolInterface.sol#L124)'s: ```solidity -event FilledRelay( - uint256 amount, - uint256 totalFilledAmount, - uint256 fillAmount, +event FilledV3Relay( + address inputToken, + address outputToken, + uint256 inputAmount, + uint256 outputAmount, uint256 repaymentChainId, uint256 indexed originChainId, - uint256 destinationChainId, - int64 relayerFeePct, - int64 realizedLpFeePct, uint32 indexed depositId, - address destinationToken, - address relayer, - address indexed depositor, + uint32 fillDeadline, + uint32 exclusivityDeadline, + address exclusiveRelayer, + address indexed relayer, + address depositor, address recipient, bytes message, - RelayExecutionInfo updatableRelayData + V3RelayExecutionEventInfo relayExecutionInfo ); ``` -A [deposit](https://github.com/across-protocol/contracts/blob/master/contracts/SpokePool.sol#L119) contains: +A [deposit](https://github.com/across-protocol/contracts/blob/a663586e8619bc74cb1da2375107bd5eef0f3144/contracts/interfaces/V3SpokePoolInterface.sol#L99) contains: ```solidity -event FundsDeposited( - uint256 amount, - uint256 originChainId, +event V3FundsDeposited( + address inputToken, + address outputToken, + uint256 inputAmount, + uint256 outputAmount, uint256 indexed destinationChainId, - int64 relayerFeePct, uint32 indexed depositId, uint32 quoteTimestamp, - address originToken, - address recipient, + uint32 fillDeadline, + uint32 exclusivityDeadline, address indexed depositor, + address recipient, + address exclusiveRelayer, bytes message ); ``` diff --git a/test/Dataworker.loadData.fill.ts b/test/Dataworker.loadData.fill.ts index 6d0d4c66d..d5e2a0a30 100644 --- a/test/Dataworker.loadData.fill.ts +++ b/test/Dataworker.loadData.fill.ts @@ -231,7 +231,7 @@ describe("Dataworker: Load data used in all functions", async function () { // Send expired deposit const expiredDeposits = [generateV3Deposit({ fillDeadline: bundleBlockTimestamps[destinationChainId][1] - 1 })]; const depositEvents = [...unexpiredDeposits, ...expiredDeposits]; - await mockOriginSpokePoolClient.update(["FundsDeposited", "V3FundsDeposited"]); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); const data1 = await dataworkerInstance.clients.bundleDataClient.loadData( getDefaultBlockRange(5), spokePoolClients @@ -258,7 +258,7 @@ describe("Dataworker: Load data used in all functions", async function () { const originChainBlockRange = [deposits[1].blockNumber - 1, deposits[1].blockNumber + 1]; // Substitute origin chain bundle block range. 
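       // Illustrative intent, per the assertions below: the narrowed range straddles only
       // deposits[1], so exactly one deposit should land in the origin chain's bundle while
       // every other chain keeps its default block range.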
const bundleBlockRanges = [originChainBlockRange].concat(getDefaultBlockRange(5).slice(1)); - await mockOriginSpokePoolClient.update(["FundsDeposited", "V3FundsDeposited"]); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); expect(mockOriginSpokePoolClient.getDeposits().length).to.equal(deposits.length); const data1 = await dataworkerInstance.clients.bundleDataClient.loadData(bundleBlockRanges, spokePoolClients); expect(data1.bundleDepositsV3[originChainId][erc20_1.address].length).to.equal(1); @@ -299,7 +299,7 @@ describe("Dataworker: Load data used in all functions", async function () { depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); - await mockOriginSpokePoolClient.update(["FundsDeposited", "V3FundsDeposited"]); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); const deposits = mockOriginSpokePoolClient.getDeposits(); // Fill deposits from different relayers @@ -307,7 +307,7 @@ describe("Dataworker: Load data used in all functions", async function () { fillV3Events.push(generateV3FillFromDeposit(deposits[0])); fillV3Events.push(generateV3FillFromDeposit(deposits[1])); fillV3Events.push(generateV3FillFromDeposit(deposits[2], {}, relayer2)); - await mockDestinationSpokePoolClient.update(["FilledV3Relay", "FilledRelay"]); + await mockDestinationSpokePoolClient.update(["FilledV3Relay"]); const data1 = await dataworkerInstance.clients.bundleDataClient.loadData( getDefaultBlockRange(5), spokePoolClients @@ -361,7 +361,7 @@ describe("Dataworker: Load data used in all functions", async function () { depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); - await mockOriginSpokePoolClient.update(["FundsDeposited", "V3FundsDeposited"]); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); const deposits = mockOriginSpokePoolClient.getDeposits(); // Fill deposits from different relayers @@ -369,7 +369,7 @@ describe("Dataworker: Load data used in all functions", async function () { fillV3Events.push(generateV3FillFromDeposit(deposits[0])); fillV3Events.push(generateV3FillFromDeposit(deposits[1])); fillV3Events.push(generateV3FillFromDeposit(deposits[2], {}, relayer2)); - await mockDestinationSpokePoolClient.update(["FilledV3Relay", "FilledRelay"]); + await mockDestinationSpokePoolClient.update(["FilledV3Relay"]); const data1 = await dataworkerInstance.clients.bundleDataClient.loadData( getDefaultBlockRange(5), spokePoolClients @@ -548,7 +548,7 @@ describe("Dataworker: Load data used in all functions", async function () { generateV3Deposit({ outputToken: randomAddress() }); generateV3Deposit({ outputToken: randomAddress() }); generateV3Deposit({ outputToken: randomAddress() }); - await mockOriginSpokePoolClient.update(["FundsDeposited", "V3FundsDeposited"]); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); const deposits = mockOriginSpokePoolClient.getDeposits(); const fills = [ @@ -569,7 +569,7 @@ describe("Dataworker: Load data used in all functions", async function () { const destinationChainIndex = dataworkerInstance.chainIdListForBundleEvaluationBlockNumbers.indexOf(destinationChainId); bundleBlockRanges[destinationChainIndex] = destinationChainBlockRange; - await 
mockDestinationSpokePoolClient.update(["FilledV3Relay", "FilledRelay"]); + await mockDestinationSpokePoolClient.update(["FilledV3Relay"]); expect(mockDestinationSpokePoolClient.getFills().length).to.equal(fills.length); const data1 = await dataworkerInstance.clients.bundleDataClient.loadData(bundleBlockRanges, spokePoolClients); expect(data1.bundleFillsV3[repaymentChainId][l1Token_1.address].fills.length).to.equal(1); diff --git a/test/Dataworker.loadData.slowFill.ts b/test/Dataworker.loadData.slowFill.ts index 8ae55d4f0..1fb54e715 100644 --- a/test/Dataworker.loadData.slowFill.ts +++ b/test/Dataworker.loadData.slowFill.ts @@ -428,7 +428,7 @@ describe("BundleDataClient: Slow fill handling & validation", async function () generateV3Deposit({ outputToken: erc20_2.address }); generateV3Deposit({ outputToken: erc20_2.address }); generateV3Deposit({ outputToken: erc20_2.address }); - await mockOriginSpokePoolClient.update(["FundsDeposited", "V3FundsDeposited"]); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); const deposits = mockOriginSpokePoolClient.getDeposits(); const events = [ From 5f37034039dc323b986e2970c806194ece5fa948 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Fri, 1 Nov 2024 22:26:33 +0100 Subject: [PATCH 04/44] chore: Bump SDK (#1889) Relayer performance should improve with an optimisation in the SDK provider layer. --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 6d074695c..278bd9fe8 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "dependencies": { "@across-protocol/constants": "^3.1.16", "@across-protocol/contracts": "^3.0.11", - "@across-protocol/sdk": "^3.2.7", + "@across-protocol/sdk": "^3.2.11", "@arbitrum/sdk": "^3.1.3", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", diff --git a/yarn.lock b/yarn.lock index effb7bb56..7f13b4ab5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -45,10 +45,10 @@ axios "^1.7.4" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.2.7": - version "3.2.7" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.2.7.tgz#c0d2ad49065a33d05f01f846e328989c4e6585c2" - integrity sha512-oUZ8uIw10/y+ZsRjlZTivJnK3AGpwCJbt3VLMHhv6TUYGNcQW/Vr71LFhyeieKfoY4+lGZq/UV4QIqsNSWiSZA== +"@across-protocol/sdk@^3.2.11": + version "3.2.11" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.2.11.tgz#8d5190863240e9a5062a38a03406f8e3b0390d84" + integrity sha512-eNUKYlV1ClzVHFHwDDRKaK2u/eFCTrZrCTglATtuQVJCUlpLlIR369qx/kquVbH6E5rRjSZm2Lo9hiuUJ3oH2Q== dependencies: "@across-protocol/across-token" "^1.0.0" "@across-protocol/constants" "^3.1.16" From f1ce2097d7dd96cd2a9f914dfb853479253d80ee Mon Sep 17 00:00:00 2001 From: Dong-Ha Kim Date: Mon, 4 Nov 2024 10:54:55 +0100 Subject: [PATCH 05/44] fix: error log stringify (#1880) --- index.ts | 17 +++++++++++++---- src/clients/TransactionClient.ts | 3 ++- src/utils/LogUtils.ts | 14 ++++++++++++++ src/utils/TransactionUtils.ts | 5 +++-- 4 files changed, 32 insertions(+), 7 deletions(-) diff --git a/index.ts b/index.ts index 1fa60dae5..01dd29a63 100644 --- a/index.ts +++ b/index.ts @@ -1,5 +1,15 @@ import minimist from "minimist"; -import { config, delay, exit, retrieveSignerFromCLIArgs, help, Logger, usage, waitForLogger } from "./src/utils"; +import { + config, + delay, + exit, + retrieveSignerFromCLIArgs, + help, + Logger, + usage, + waitForLogger, + stringifyThrownValue, +} from "./src/utils"; import { runRelayer } from 
"./src/relayer"; import { runDataworker } from "./src/dataworker"; import { runMonitor } from "./src/monitor"; @@ -61,12 +71,11 @@ if (require.main === module) { run(args) .catch(async (error) => { exitCode = 1; + const stringifiedError = stringifyThrownValue(error); logger.error({ at: cmd ?? "unknown process", message: "There was an execution error!", - reason: error, - e: error, - error, + error: stringifiedError, args, notificationPath: "across-error", }); diff --git a/src/clients/TransactionClient.ts b/src/clients/TransactionClient.ts index ffb38b1f5..3c8aacc86 100644 --- a/src/clients/TransactionClient.ts +++ b/src/clients/TransactionClient.ts @@ -11,6 +11,7 @@ import { TransactionResponse, TransactionSimulationResult, willSucceed, + stringifyThrownValue, } from "../utils"; export interface AugmentedTransaction { @@ -100,7 +101,7 @@ export class TransactionClient { mrkdwn, // @dev `error` _sometimes_ doesn't decode correctly (especially on Polygon), so fish for the reason. errorMessage: isError(error) ? (error as Error).message : undefined, - error, + error: stringifyThrownValue(error), notificationPath: "across-error", }); return txnResponses; diff --git a/src/utils/LogUtils.ts b/src/utils/LogUtils.ts index b84973e95..0161fbbc9 100644 --- a/src/utils/LogUtils.ts +++ b/src/utils/LogUtils.ts @@ -1 +1,15 @@ export type DefaultLogLevels = "debug" | "info" | "warn" | "error"; + +export function stringifyThrownValue(value: unknown): string { + if (value instanceof Error) { + const errToString = value.toString(); + return value.stack || value.message || errToString !== "[object Object]" + ? errToString + : "could not extract error from 'Error' instance"; + } else if (value instanceof Object) { + const objStringified = JSON.stringify(value); + return objStringified !== "{}" ? objStringified : "could not extract error from 'Object' instance"; + } else { + return `ThrownValue: ${value.toString()}`; + } +} diff --git a/src/utils/TransactionUtils.ts b/src/utils/TransactionUtils.ts index 6670bdcce..6db601be4 100644 --- a/src/utils/TransactionUtils.ts +++ b/src/utils/TransactionUtils.ts @@ -16,6 +16,7 @@ import { Signer, toBNWei, winston, + stringifyThrownValue, } from "../utils"; dotenv.config(); @@ -107,7 +108,7 @@ export async function runTransaction( logger.debug({ at: "TxUtil#runTransaction", message: "Retrying txn due to expected error", - error: JSON.stringify(error), + error: stringifyThrownValue(error), retriesRemaining, }); @@ -143,7 +144,7 @@ export async function runTransaction( } else { logger[txnRetryable(error) ? 
"warn" : "error"]({ ...commonFields, - error: JSON.stringify(error), + error: stringifyThrownValue(error), }); } throw error; From 30b7003bac89168dd0849a419d696f8ca92e8edb Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Mon, 4 Nov 2024 16:27:17 -0500 Subject: [PATCH 06/44] improve(README): Remove more legacy references (#1886) * improve: Remove FundsDeposited and FilledRelay mentions * improve(README): Remove more legacy references forgotten commit from 7802c9420b27ea8fcce1c9dfce95dd76f80c2359 * Update README.md * Update src/dataworker/README.md * Update src/dataworker/README.md * Update src/dataworker/README.md --- src/dataworker/README.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/dataworker/README.md b/src/dataworker/README.md index 265ca7671..575190851 100644 --- a/src/dataworker/README.md +++ b/src/dataworker/README.md @@ -2,6 +2,8 @@ This document explains how the Dataworker constructs "root bundles", which instruct the Across system on how to reallocate LP capital to keep the system functioning and providing an effective bridging service. +The following README is a simplified summary of the exact protocol rules described in [UMIP-179](https://github.com/UMAprotocol/UMIPs/blob/2046d3cc228cacfb27688fc02e9674aef13445c9/UMIPs/umip-179.md) and its predecessor [UMIP-157](https://github.com/UMAprotocol/UMIPs/blob/92abcfcb676b0c24690811751521cd0782af5372/UMIPs/umip-157). These UMIP's describe the explicit rules that the Dataworker implements. In other words, the "Dataworker" code is an implementation of the UMIP-179 "interface". + ## Why does Across need to move capital around? There are two types of capital pools in Across: liquidity provider capital in the `HubPool` ("LP capital") and capital held in the `SpokePools` ("SpokePool balance"). @@ -165,9 +167,9 @@ event V3FundsDeposited( ); ``` -Therefore, `amount`, `originChainId`, `destinationChainId`, `relayerFeePct`, `depositId`, `destinationToken`, `message`, and `recipient` must match. In addition, the fill's matched deposit must have been emitted from the SpokePool at the `originChainId` of the fill. The fill must have been emitted from the SpokePool on the `destinationChainId`. +All of the shared event properties must match. In addition, the fill's matched deposit must have been emitted from the SpokePool at the `originChainId` of the fill. The fill must have been emitted from the SpokePool on the `destinationChainId`. -Finally, the fill's `realizedLpFeePct` must be correct. Currently this is deterministically linked with the deposit's `quoteTimestamp`: there is a correct `realizedLpFeePct` for each `quoteTimestamp`, which is computed by querying the HubPool's "utilization" at the Ethereum block height corresponding to the `quoteTimestamp`. This is described in the [UMIP here](https://github.com/UMAprotocol/UMIPs/blob/e3198578b1d339914afa5243a80e3ac8055fba34/UMIPs/umip-157.md#validating-realizedlpfeepct). +The only exception to the above rule is if `outputToken` is equal to the zero address, then the "equivalent" token address should be substituted in as described in [UMIP-179](https://github.com/UMAprotocol/UMIPs/blob/2046d3cc228cacfb27688fc02e9674aef13445c9/UMIPs/umip-179.md#finding-valid-relays). ## Incorporating slow fills @@ -234,7 +236,7 @@ This is everything that the Dataworker needs to construct a root bundle! 
All tha Root bundle merkle leaf formats -- [PoolRebalanceLeaf](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/HubPoolInterface.sol#L11): One per chain -- [RelayerRefundLeaf](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/SpokePoolInterface.sol#L9) One per token per chain -- [SlowFillLeaf](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/SpokePoolInterface.sol#L29) One per unfilled deposit -- [RootBundle](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/HubPoolInterface.sol#L53) how the Dataworker's proposal is stored in the HubPool throughout its pending challenge window +- [PoolRebalanceLeaf](https://github.com/across-protocol/contracts/blob/95c4f923932d597d3e63449718bba5c674b084eb/contracts/interfaces/HubPoolInterface.sol#L11): One per chain +- [RelayerRefundLeaf](https://github.com/across-protocol/contracts/blob/95c4f923932d597d3e63449718bba5c674b084eb/contracts/interfaces/SpokePoolInterface.sol#L9) One per token per chain +- [SlowFillLeaf](https://github.com/across-protocol/contracts/blob/95c4f923932d597d3e63449718bba5c674b084eb/contracts/interfaces/V3SpokePoolInterface.sol#L66) One per unfilled deposit +- [RootBundle](https://github.com/across-protocol/contracts/blob/95c4f923932d597d3e63449718bba5c674b084eb/contracts/interfaces/HubPoolInterface.sol#L53) how the Dataworker's proposal is stored in the HubPool throughout its pending challenge window From 10b8cdb6e3018791483b84cea9cd24af593ee80d Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Thu, 7 Nov 2024 17:00:30 -0500 Subject: [PATCH 07/44] feat: handle Polygon finalizer edge case when CCTP limit is breached (#1897) * feat: handle Polygon finalizer edge case when CCTP limit is breached There is a bug in the Polygon finalizer where a single transaction contains multiple token withdrawals from Spoke to Hub in which more than 1 million USDC are withdrawn, resulting in a single `TokensBridged` event getting emitted but multiple USDC "burns" happening (due to the CCTP 1mil withdrawal limit). This messes up the `logIndex` computation math for any withdrawals that take place after the USDC withdrawals. This logic isn't needed in other chains with CCTP like Arbitrum or Optimism because the `logIndex` is relative to all other non-CCTP withdrawals, whereas in Polygon it seems that CCTP withdrawals also count towards the `logIndex` of non CCTP withdrawals * Update polygon.ts --- src/finalizer/utils/polygon.ts | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/src/finalizer/utils/polygon.ts b/src/finalizer/utils/polygon.ts index c0c3d6f28..8a81bc8b3 100644 --- a/src/finalizer/utils/polygon.ts +++ b/src/finalizer/utils/polygon.ts @@ -16,6 +16,8 @@ import { compareAddressesSimple, TOKEN_SYMBOLS_MAP, CHAIN_IDs, + sortEventsAscending, + toBNWei, } from "../../utils"; import { EthersError, TokensBridged } from "../../interfaces"; import { HubPoolClient, SpokePoolClient } from "../../clients"; @@ -36,6 +38,9 @@ enum POLYGON_MESSAGE_STATUS { // canonical bridge. Do not change. const BURN_SIG = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"; +// We should ideally read this limit from a contract call, but for now we'll hardcode it. 
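+// Worked example with hypothetical amounts: a single 2.5M USDC withdrawal burns in three
+// CCTP chunks (1M + 1M + 0.5M) but emits only one TokensBridged event, so two synthetic
+// copies of that event (amountToReturn.div(limit) = 2) are injected below to keep the
+// getUniqueLogIndex() math aligned with the number of burns.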
+const CCTP_WITHDRAWAL_LIMIT_WEI = toBNWei(1_000_000, 6); + export interface PolygonTokensBridged extends TokensBridged { payload: string; } @@ -61,7 +66,26 @@ export async function polygonFinalizer( // Unlike the rollups, withdrawals process very quickly on polygon, so we can conservatively remove any events // that are older than 1 day old: - const recentTokensBridgedEvents = spokePoolClient.getTokensBridged().filter((e) => e.blockNumber >= fromBlock); + let recentTokensBridgedEvents = spokePoolClient.getTokensBridged().filter((e) => e.blockNumber >= fromBlock); + + // The SpokePool emits one TokensBridged event even if the token is USDC and it gets withdrawn in two separate + // CCTP events. We can't filter out these USDC events here (see comment below in `getFinalizableTransactions()`) + // but we do need to add in more TokensBridged events so that the call to `getUniqueLogIndex` will work. + recentTokensBridgedEvents.forEach((e) => { + if ( + compareAddressesSimple(e.l2TokenAddress, TOKEN_SYMBOLS_MAP.USDC.addresses[CHAIN_ID]) && + e.amountToReturn.gt(CCTP_WITHDRAWAL_LIMIT_WEI) + ) { + // Inject one TokensBridged event for each CCTP withdrawal that needs to be processed. + const numberOfEventsToAdd = Math.ceil(e.amountToReturn.div(CCTP_WITHDRAWAL_LIMIT_WEI).toNumber()); + for (let i = 0; i < numberOfEventsToAdd; i++) { + recentTokensBridgedEvents.push({ + ...e, + }); + } + } + }); + recentTokensBridgedEvents = sortEventsAscending(recentTokensBridgedEvents); return multicallPolygonFinalizations(recentTokensBridgedEvents, posClient, signer, hubPoolClient, logger); } From ba9a76da445ad4f78e8deaa207f2271affd70618 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Fri, 8 Nov 2024 14:14:48 +0100 Subject: [PATCH 08/44] improve(finalizer): Don't globally suppress unused variables (#1894) --- src/finalizer/utils/cctp/l1ToL2.ts | 6 ++---- src/finalizer/utils/cctp/l2ToL1.ts | 5 ++--- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/src/finalizer/utils/cctp/l1ToL2.ts b/src/finalizer/utils/cctp/l1ToL2.ts index aca9992b1..fa7985b8a 100644 --- a/src/finalizer/utils/cctp/l1ToL2.ts +++ b/src/finalizer/utils/cctp/l1ToL2.ts @@ -1,15 +1,13 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ import { TransactionReceipt, TransactionRequest } from "@ethersproject/abstract-provider"; import { ethers } from "ethers"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; -import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES, Multicall2Call, chainIdsToCctpDomains } from "../../../common"; +import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES, Multicall2Call } from "../../../common"; import { Contract, EventSearchConfig, Signer, TOKEN_SYMBOLS_MAP, assert, - formatUnitsForToken, getBlockForTimestamp, getCachedProvider, getCurrentTime, @@ -28,7 +26,7 @@ import { uniqWith } from "lodash"; export async function cctpL1toL2Finalizer( logger: winston.Logger, - signer: Signer, + _signer: Signer, hubPoolClient: HubPoolClient, spokePoolClient: SpokePoolClient, l1ToL2AddressesToFinalize: string[] diff --git a/src/finalizer/utils/cctp/l2ToL1.ts b/src/finalizer/utils/cctp/l2ToL1.ts index 7d369a096..337f53b4c 100644 --- a/src/finalizer/utils/cctp/l2ToL1.ts +++ b/src/finalizer/utils/cctp/l2ToL1.ts @@ -1,8 +1,7 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ import { TransactionRequest } from "@ethersproject/abstract-provider"; import { ethers } from "ethers"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; -import { 
CONTRACT_ADDRESSES, Multicall2Call, chainIdsToCctpDomains } from "../../../common"; +import { CONTRACT_ADDRESSES, Multicall2Call } from "../../../common"; import { Contract, Signer, @@ -23,7 +22,7 @@ import { FinalizerPromise, CrossChainMessage } from "../../types"; export async function cctpL2toL1Finalizer( logger: winston.Logger, - signer: Signer, + _signer: Signer, hubPoolClient: HubPoolClient, spokePoolClient: SpokePoolClient ): Promise { From e2fa91f5a1dad94a1d02962703d500f4e0c1642f Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Sun, 10 Nov 2024 14:36:13 +0100 Subject: [PATCH 09/44] fix(relayer): Don't fast fill any deposit made by a slow depositor (#1899) The existing logic incorrectly only short-circuits when the fill status is unfilled. Once this has happened, the fill status changes to SlowFillRequested and the relayer will proceed to evaluate it as per its normal fill logic. --- src/relayer/Relayer.ts | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/relayer/Relayer.ts b/src/relayer/Relayer.ts index 7049ebb83..08d2b67a2 100644 --- a/src/relayer/Relayer.ts +++ b/src/relayer/Relayer.ts @@ -628,13 +628,15 @@ export class Relayer { } // If depositor is on the slow deposit list, then send a zero fill to initiate a slow relay and return early. - if (slowDepositors?.includes(depositor) && fillStatus === FillStatus.Unfilled) { - this.logger.debug({ - at: "Relayer::evaluateFill", - message: "Initiating slow fill for grey listed depositor", - depositor, - }); - this.requestSlowFill(deposit); + if (slowDepositors?.includes(depositor)) { + if (fillStatus === FillStatus.Unfilled) { + this.logger.debug({ + at: "Relayer::evaluateFill", + message: "Initiating slow fill for grey listed depositor", + depositor, + }); + this.requestSlowFill(deposit); + } return; } From 80b6bad19dbc9b822ad9849ebafa4495de2f18ef Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Mon, 11 Nov 2024 11:45:25 +0100 Subject: [PATCH 10/44] fix: Use CacheProvider in script utils (#1900) This ensures that providers have cache and fallback behaviour. --- scripts/spokepool.ts | 11 ++++++----- scripts/utils.ts | 25 +++---------------------- 2 files changed, 9 insertions(+), 27 deletions(-) diff --git a/scripts/spokepool.ts b/scripts/spokepool.ts index 4d4d1b056..9f597984b 100644 --- a/scripts/spokepool.ts +++ b/scripts/spokepool.ts @@ -12,6 +12,7 @@ import { formatFeePct, getDeploymentBlockNumber, getNetworkName, + getProvider, getSigner, isDefined, resolveTokenSymbols, @@ -194,7 +195,7 @@ async function deposit(args: Record, signer: Signer): P const tokenSymbol = token.symbol.toUpperCase(); const amount = ethers.utils.parseUnits(baseAmount.toString(), args.decimals ? 
0 : token.decimals); - const provider = new ethers.providers.StaticJsonRpcProvider(utils.getProviderUrl(fromChainId)); + const provider = await getProvider(fromChainId); signer = signer.connect(provider); const spokePool = (await utils.getSpokePoolContract(fromChainId)).connect(signer); @@ -246,7 +247,7 @@ async function fillDeposit(args: Record, sign throw new Error(`Missing or malformed transaction hash: ${txnHash}`); } - const originProvider = new ethers.providers.StaticJsonRpcProvider(utils.getProviderUrl(originChainId)); + const originProvider = await getProvider(originChainId); const originSpokePool = await utils.getSpokePoolContract(originChainId); const spokePools: { [chainId: number]: Contract } = {}; @@ -321,7 +322,7 @@ async function fillDeposit(args: Record, sign } const sender = await signer.getAddress(); - const destProvider = new ethers.providers.StaticJsonRpcProvider(utils.getProviderUrl(destinationChainId)); + const destProvider = await getProvider(destinationChainId); const destSigner = signer.connect(destProvider); const erc20 = new Contract(outputToken, ERC20.abi, destSigner); @@ -348,7 +349,7 @@ async function dumpConfig(args: Record, _signer: Signer const _spokePool = await utils.getSpokePoolContract(chainId); const hubChainId = utils.resolveHubChainId(chainId); - const spokeProvider = new ethers.providers.StaticJsonRpcProvider(utils.getProviderUrl(chainId)); + const spokeProvider = await getProvider(chainId); const spokePool = _spokePool.connect(spokeProvider); const [spokePoolChainId, hubPool, crossDomainAdmin, weth, _currentTime] = await Promise.all([ @@ -436,7 +437,7 @@ async function fetchTxn(args: Record, _signer: Signer): return false; } - const provider = new ethers.providers.StaticJsonRpcProvider(utils.getProviderUrl(chainId)); + const provider = await getProvider(chainId); const spokePool = (await utils.getSpokePoolContract(chainId)).connect(provider); let deposits: Log[] = []; diff --git a/scripts/utils.ts b/scripts/utils.ts index 701620b03..2018077c9 100644 --- a/scripts/utils.ts +++ b/scripts/utils.ts @@ -1,9 +1,9 @@ import assert from "assert"; -import { Contract, ethers, utils as ethersUtils, Signer } from "ethers"; +import { Contract, utils as ethersUtils, Signer } from "ethers"; import readline from "readline"; import * as contracts from "@across-protocol/contracts"; import { utils as sdkUtils } from "@across-protocol/sdk"; -import { getDeployedContract, getNodeUrlList, CHAIN_IDs } from "../src/utils"; +import { getDeployedContract, getProvider, CHAIN_IDs } from "../src/utils"; // https://nodejs.org/api/process.html#exit-codes export const NODE_SUCCESS = 0; @@ -16,12 +16,6 @@ export type ERC20 = { symbol: string; }; -// Public RPC endpoints to be used if preferred providers are not defined in the environment. -const fallbackProviders: { [chainId: number]: string } = { - [CHAIN_IDs.MAINNET]: "https://eth.llamarpc.com", - [CHAIN_IDs.SEPOLIA]: "https://sepolia.infura.io/v3/9aa3d95b3bc440fa88ea12eaa4456161", -}; - async function askQuestion(query: string) { const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); @@ -81,19 +75,6 @@ export function validateChainIds(chainIds: number[]): boolean { ); } -/** - * @description Resolve a default provider URL. - * @param chainId Chain ID for the provider to select. - * @returns URL of the provider endpoint. 
- */ -export function getProviderUrl(chainId: number): string { - try { - return getNodeUrlList(chainId, 1)[0]; - } catch { - return fallbackProviders[chainId]; - } -} - /** * @description For a SpokePool chain ID, resolve its corresponding HubPool chain ID. * @param spokeChainId Chain ID of the SpokePool. @@ -116,7 +97,7 @@ export function resolveHubChainId(spokeChainId: number): number { */ export async function getContract(chainId: number, contractName: string): Promise { const contract = getDeployedContract(contractName, chainId); - const provider = new ethers.providers.StaticJsonRpcProvider(getProviderUrl(chainId)); + const provider = await getProvider(chainId); return contract.connect(provider); } From c5229ba23c35a8a1d9fb5db951fe2258362d6dbc Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Wed, 13 Nov 2024 02:23:51 +0100 Subject: [PATCH 11/44] refactor: Use sdk multicall3 implementation (#1881) * refactor: Use sdk multicall3 implementation * lint * Cleanup --------- Co-authored-by: nicholaspai <9457025+nicholaspai@users.noreply.github.com> --- src/clients/MultiCallerClient.ts | 3 ++- src/clients/TokenClient.ts | 2 +- src/common/Constants.ts | 33 +----------------------------- src/finalizer/index.ts | 2 +- src/finalizer/types.ts | 3 +-- src/finalizer/utils/arbitrum.ts | 3 ++- src/finalizer/utils/cctp/l1ToL2.ts | 3 ++- src/finalizer/utils/cctp/l2ToL1.ts | 3 ++- src/finalizer/utils/opStack.ts | 3 ++- src/finalizer/utils/polygon.ts | 2 +- src/finalizer/utils/scroll.ts | 5 ++--- src/finalizer/utils/zkSync.ts | 3 ++- src/utils/TransactionUtils.ts | 12 ++++++----- 13 files changed, 26 insertions(+), 51 deletions(-) diff --git a/src/clients/MultiCallerClient.ts b/src/clients/MultiCallerClient.ts index 42bbbe26b..851fbe3cc 100644 --- a/src/clients/MultiCallerClient.ts +++ b/src/clients/MultiCallerClient.ts @@ -1,5 +1,5 @@ import { utils as sdkUtils } from "@across-protocol/sdk"; -import { DEFAULT_MULTICALL_CHUNK_SIZE, Multicall2Call } from "../common"; +import { DEFAULT_MULTICALL_CHUNK_SIZE } from "../common"; import { BigNumber, winston, @@ -14,6 +14,7 @@ import { Signer, getMultisender, getProvider, + Multicall2Call, assert, } from "../utils"; import { AugmentedTransaction, TransactionClient } from "./TransactionClient"; diff --git a/src/clients/TokenClient.ts b/src/clients/TokenClient.ts index 8f3e1bd4c..f2d2b68e8 100644 --- a/src/clients/TokenClient.ts +++ b/src/clients/TokenClient.ts @@ -211,7 +211,7 @@ export class TokenClient { ): Promise> { const { spokePool } = this.spokePoolClients[chainId]; - const multicall3 = await sdkUtils.getMulticall3(chainId, spokePool.provider); + const multicall3 = sdkUtils.getMulticall3(chainId, spokePool.provider); if (!isDefined(multicall3)) { return this.fetchTokenData(chainId, hubPoolTokens); } diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 654fcad38..853db265d 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -1,4 +1,4 @@ -import { CHAIN_IDs, TOKEN_SYMBOLS_MAP, ethers, Signer, Provider, ZERO_ADDRESS, bnUint32Max } from "../utils"; +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP, Signer, Provider, ZERO_ADDRESS, bnUint32Max } from "../utils"; import { BaseBridgeAdapter, OpStackDefaultERC20Bridge, @@ -281,37 +281,6 @@ export const BLOCK_NUMBER_TTL = 60; export const PROVIDER_CACHE_TTL = 3600; export const PROVIDER_CACHE_TTL_MODIFIER = 0.15; -// Multicall3 Constants: -export const multicall3Addresses = { - [CHAIN_IDs.ARBITRUM]: "0xcA11bde05977b3631167028862bE2a173976CA11", - 
[CHAIN_IDs.BASE]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.BLAST]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.BOBA]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.LINEA]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.LISK]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.MAINNET]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.MODE]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.OPTIMISM]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.POLYGON]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.REDSTONE]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.SCROLL]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.WORLD_CHAIN]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.ZK_SYNC]: "0xF9cda624FBC7e059355ce98a31693d299FACd963", - [CHAIN_IDs.ZORA]: "0xcA11bde05977b3631167028862bE2a173976CA11", - // Testnet: - [CHAIN_IDs.POLYGON_AMOY]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.BASE_SEPOLIA]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.BLAST_SEPOLIA]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.POLYGON_AMOY]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.SCROLL_SEPOLIA]: "0xcA11bde05977b3631167028862bE2a173976CA11", - [CHAIN_IDs.SEPOLIA]: "0xcA11bde05977b3631167028862bE2a173976CA11", -}; - -export type Multicall2Call = { - callData: ethers.utils.BytesLike; - target: string; -}; - // These are the spokes that can hold both ETH and WETH, so they should be added together when calculating whether // a bundle execution is possible with the funds in the pool. export const spokesThatHoldEthAndWeth = [ diff --git a/src/finalizer/index.ts b/src/finalizer/index.ts index baeb3e720..87a4f30c7 100644 --- a/src/finalizer/index.ts +++ b/src/finalizer/index.ts @@ -8,7 +8,6 @@ import { CONTRACT_ADDRESSES, Clients, FINALIZER_TOKENBRIDGE_LOOKBACK, - Multicall2Call, ProcessEnv, constructClients, constructSpokePoolClientsWithLookback, @@ -25,6 +24,7 @@ import { disconnectRedisClients, getMultisender, getNetworkName, + Multicall2Call, processEndPollingLoop, startupLogLevel, winston, diff --git a/src/finalizer/types.ts b/src/finalizer/types.ts index b4138264e..69f0edabe 100644 --- a/src/finalizer/types.ts +++ b/src/finalizer/types.ts @@ -1,7 +1,6 @@ import { Signer } from "ethers"; import { HubPoolClient, SpokePoolClient } from "../clients"; -import { Multicall2Call } from "../common"; -import { winston } from "../utils"; +import { Multicall2Call, winston } from "../utils"; /** * A cross-chain message is a message sent from one chain to another. 
This can be a token withdrawal from L2 to L1, diff --git a/src/finalizer/utils/arbitrum.ts b/src/finalizer/utils/arbitrum.ts index 0f4877329..b660ed3f3 100644 --- a/src/finalizer/utils/arbitrum.ts +++ b/src/finalizer/utils/arbitrum.ts @@ -11,13 +11,14 @@ import { getRedisCache, getBlockForTimestamp, getL1TokenInfo, + Multicall2Call, compareAddressesSimple, TOKEN_SYMBOLS_MAP, CHAIN_IDs, } from "../../utils"; import { TokensBridged } from "../../interfaces"; import { HubPoolClient, SpokePoolClient } from "../../clients"; -import { CONTRACT_ADDRESSES, Multicall2Call } from "../../common"; +import { CONTRACT_ADDRESSES } from "../../common"; import { FinalizerPromise, CrossChainMessage } from "../types"; const CHAIN_ID = CHAIN_IDs.ARBITRUM; diff --git a/src/finalizer/utils/cctp/l1ToL2.ts b/src/finalizer/utils/cctp/l1ToL2.ts index fa7985b8a..276553a0f 100644 --- a/src/finalizer/utils/cctp/l1ToL2.ts +++ b/src/finalizer/utils/cctp/l1ToL2.ts @@ -1,7 +1,7 @@ import { TransactionReceipt, TransactionRequest } from "@ethersproject/abstract-provider"; import { ethers } from "ethers"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; -import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES, Multicall2Call } from "../../../common"; +import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES } from "../../../common"; import { Contract, EventSearchConfig, @@ -15,6 +15,7 @@ import { getRedisCache, groupObjectCountsByProp, isDefined, + Multicall2Call, paginatedEventQuery, winston, convertFromWei, diff --git a/src/finalizer/utils/cctp/l2ToL1.ts b/src/finalizer/utils/cctp/l2ToL1.ts index 337f53b4c..2695915de 100644 --- a/src/finalizer/utils/cctp/l2ToL1.ts +++ b/src/finalizer/utils/cctp/l2ToL1.ts @@ -1,7 +1,7 @@ import { TransactionRequest } from "@ethersproject/abstract-provider"; import { ethers } from "ethers"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; -import { CONTRACT_ADDRESSES, Multicall2Call } from "../../../common"; +import { CONTRACT_ADDRESSES } from "../../../common"; import { Contract, Signer, @@ -13,6 +13,7 @@ import { getNetworkName, getRedisCache, groupObjectCountsByProp, + Multicall2Call, isDefined, winston, convertFromWei, diff --git a/src/finalizer/utils/opStack.ts b/src/finalizer/utils/opStack.ts index 3d4e82883..3504f51fc 100644 --- a/src/finalizer/utils/opStack.ts +++ b/src/finalizer/utils/opStack.ts @@ -22,9 +22,10 @@ import { chainIsProd, Contract, ethers, + Multicall2Call, paginatedEventQuery, } from "../../utils"; -import { CONTRACT_ADDRESSES, Multicall2Call, OPSTACK_CONTRACT_OVERRIDES } from "../../common"; +import { CONTRACT_ADDRESSES, OPSTACK_CONTRACT_OVERRIDES } from "../../common"; import { FinalizerPromise, CrossChainMessage } from "../types"; const { utils } = ethers; diff --git a/src/finalizer/utils/polygon.ts b/src/finalizer/utils/polygon.ts index 8a81bc8b3..2ed1e39f3 100644 --- a/src/finalizer/utils/polygon.ts +++ b/src/finalizer/utils/polygon.ts @@ -14,6 +14,7 @@ import { getBlockForTimestamp, getL1TokenInfo, compareAddressesSimple, + Multicall2Call, TOKEN_SYMBOLS_MAP, CHAIN_IDs, sortEventsAscending, @@ -21,7 +22,6 @@ import { } from "../../utils"; import { EthersError, TokensBridged } from "../../interfaces"; import { HubPoolClient, SpokePoolClient } from "../../clients"; -import { Multicall2Call } from "../../common"; import { FinalizerPromise, CrossChainMessage } from "../types"; // Note!!: This client will only work for PoS tokens. 
Matic also has Plasma tokens which have a different finalization diff --git a/src/finalizer/utils/scroll.ts b/src/finalizer/utils/scroll.ts index 98ce04e3a..2446d739f 100644 --- a/src/finalizer/utils/scroll.ts +++ b/src/finalizer/utils/scroll.ts @@ -1,15 +1,15 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ import { utils as sdkUtils } from "@across-protocol/sdk"; -import { TransactionRequest } from "@ethersproject/abstract-provider"; import axios from "axios"; import { HubPoolClient, SpokePoolClient } from "../../clients"; -import { CONTRACT_ADDRESSES, Multicall2Call } from "../../common"; +import { CONTRACT_ADDRESSES } from "../../common"; import { Contract, Signer, getBlockForTimestamp, getCurrentTime, getRedisCache, + Multicall2Call, winston, convertFromWei, } from "../../utils"; @@ -57,7 +57,6 @@ export async function scrollFinalizer( // Why are we breaking with the existing pattern--is it faster? // Scroll takes up to 4 hours with finalize a withdrawal so lets search // up to 12 hours for withdrawals. - const redis = await getRedisCache(logger); logger.debug({ at: "Finalizer#ScrollFinalizer", message: "Scroll TokensBridged event filter", diff --git a/src/finalizer/utils/zkSync.ts b/src/finalizer/utils/zkSync.ts index 2afc1e829..48a970865 100644 --- a/src/finalizer/utils/zkSync.ts +++ b/src/finalizer/utils/zkSync.ts @@ -3,7 +3,7 @@ import { Contract, Wallet, Signer } from "ethers"; import { groupBy } from "lodash"; import { Provider as zksProvider, Wallet as zkWallet } from "zksync-ethers"; import { HubPoolClient, SpokePoolClient } from "../../clients"; -import { CONTRACT_ADDRESSES, Multicall2Call } from "../../common"; +import { CONTRACT_ADDRESSES } from "../../common"; import { convertFromWei, getBlockForTimestamp, @@ -12,6 +12,7 @@ import { getL1TokenInfo, getRedisCache, getUniqueLogIndex, + Multicall2Call, winston, zkSync as zkSyncUtils, } from "../../utils"; diff --git a/src/utils/TransactionUtils.ts b/src/utils/TransactionUtils.ts index 6db601be4..a8b0fa1ea 100644 --- a/src/utils/TransactionUtils.ts +++ b/src/utils/TransactionUtils.ts @@ -2,7 +2,7 @@ import { gasPriceOracle, typeguards, utils as sdkUtils } from "@across-protocol/ import { FeeData } from "@ethersproject/abstract-provider"; import dotenv from "dotenv"; import { AugmentedTransaction } from "../clients"; -import { DEFAULT_GAS_FEE_SCALERS, multicall3Addresses } from "../common"; +import { DEFAULT_GAS_FEE_SCALERS } from "../common"; import { EthersError } from "../interfaces"; import { BigNumber, @@ -29,6 +29,11 @@ export type TransactionSimulationResult = { const { isError, isEthersError } = typeguards; +export type Multicall2Call = { + callData: ethers.utils.BytesLike; + target: string; +}; + const nonceReset: { [chainId: number]: boolean } = {}; const txnRetryErrors = new Set(["INSUFFICIENT_FUNDS", "NONCE_EXPIRED", "REPLACEMENT_UNDERPRICED"]); @@ -46,10 +51,7 @@ export function getNetworkError(err: unknown): string { } export async function getMultisender(chainId: number, baseSigner: Signer): Promise { - if (!multicall3Addresses[chainId] || !baseSigner) { - return undefined; - } - return new Contract(multicall3Addresses[chainId], await sdkUtils.getABI("Multicall3"), baseSigner); + return sdkUtils.getMulticall3(chainId, baseSigner); } // Note that this function will throw if the call to the contract on method for given args reverts. 
Implementers From 668e051aeb22fe5c546cb4165dde5082664d0115 Mon Sep 17 00:00:00 2001 From: bmzig <57361391+bmzig@users.noreply.github.com> Date: Wed, 13 Nov 2024 09:02:55 -0600 Subject: [PATCH 12/44] improve: track slow fills in the monitor (#1888) * improve: track slow fills in the monitor --------- Signed-off-by: bennett Co-authored-by: nicholaspai <9457025+nicholaspai@users.noreply.github.com> --- src/monitor/Monitor.ts | 143 +++++++++++++++++++++++++++++++---------- 1 file changed, 109 insertions(+), 34 deletions(-) diff --git a/src/monitor/Monitor.ts b/src/monitor/Monitor.ts index 2cf4155ab..34ab55801 100644 --- a/src/monitor/Monitor.ts +++ b/src/monitor/Monitor.ts @@ -29,6 +29,7 @@ import { getNetworkName, getUnfilledDeposits, mapAsync, + getEndBlockBuffers, parseUnits, providers, toBN, @@ -42,6 +43,7 @@ import { isDefined, resolveTokenDecimals, sortEventsDescending, + getWidestPossibleExpectedBlockRange, } from "../utils"; import { MonitorClients, updateMonitorClients } from "./MonitorClientHelper"; @@ -629,6 +631,8 @@ export class Monitor { const hubPoolClient = this.clients.hubPoolClient; const monitoredTokenSymbols = this.monitorConfig.monitoredTokenSymbols; + + // Define the chain IDs in the same order as `enabledChainIds` so that block range ordering is preserved. const chainIds = this.monitorConfig.monitoredSpokePoolChains.length !== 0 ? this.monitorChains.filter((chain) => this.monitorConfig.monitoredSpokePoolChains.includes(chain)) @@ -637,14 +641,91 @@ export class Monitor { const l2TokenForChain = (chainId: number, symbol: string) => { return TOKEN_SYMBOLS_MAP[symbol]?.addresses[chainId]; }; - const currentSpokeBalances = {}; const pendingRelayerRefunds = {}; const pendingRebalanceRoots = {}; - // Get the pool rebalance leaves of the currently outstanding proposed root bundle. - const poolRebalanceRoot = await this.clients.bundleDataClient.getLatestPoolRebalanceRoot(); + // Take the validated bundles from the hub pool client. + const validatedBundles = sortEventsDescending(hubPoolClient.getValidatedRootBundles()).slice( + 0, + this.monitorConfig.bundlesCount + ); + + // Fetch the data from the latest root bundle. + const bundle = hubPoolClient.getLatestProposedRootBundle(); + // If there is an outstanding root bundle, then add it to the bundles to check. Otherwise, ignore it. + const bundlesToCheck = validatedBundles + .map((validatedBundle) => validatedBundle.transactionHash) + .includes(bundle.transactionHash) + ? validatedBundles + : [...validatedBundles, bundle]; + + const nextBundleMainnetStartBlock = hubPoolClient.getNextBundleStartBlockNumber( + this.clients.bundleDataClient.chainIdListForBundleEvaluationBlockNumbers, + hubPoolClient.latestBlockSearched, + hubPoolClient.chainId + ); + const enabledChainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(nextBundleMainnetStartBlock); + + const slowFillBlockRange = getWidestPossibleExpectedBlockRange( + enabledChainIds, + this.clients.spokePoolClients, + getEndBlockBuffers(enabledChainIds, this.clients.bundleDataClient.blockRangeEndBlockBuffer), + this.clients, + hubPoolClient.latestBlockSearched, + this.clients.configStoreClient.getEnabledChains(hubPoolClient.latestBlockSearched) + ); + const blockRangeTail = bundle.bundleEvaluationBlockNumbers.map((endBlockForChain, idx) => { + const endBlockNumber = Number(endBlockForChain); + const spokeLatestBlockSearched = this.clients.spokePoolClients[enabledChainIds[idx]]?.latestBlockSearched ?? 0; + return spokeLatestBlockSearched === 0 + ? 
[endBlockNumber, endBlockNumber] + : [endBlockNumber + 1, spokeLatestBlockSearched > endBlockNumber ? spokeLatestBlockSearched : endBlockNumber]; + }); + + // Do all async tasks in parallel. We want to know about the pool rebalances, slow fills in the most recent proposed bundle, refunds + // from the last `n` bundles, pending refunds which have not been made official via a root bundle proposal, and the current balances of + // all the spoke pools. + const [ + poolRebalanceRoot, + currentBundleData, + accountedBundleRefunds, + unaccountedBundleRefunds, + currentSpokeBalances, + ] = await Promise.all([ + this.clients.bundleDataClient.getLatestPoolRebalanceRoot(), + this.clients.bundleDataClient.loadData(slowFillBlockRange, this.clients.spokePoolClients, false), + mapAsync(bundlesToCheck, async (proposedBundle) => + this.clients.bundleDataClient.getPendingRefundsFromBundle(proposedBundle) + ), + this.clients.bundleDataClient.getApproximateRefundsForBlockRange(enabledChainIds, blockRangeTail), + Object.fromEntries( + await mapAsync(chainIds, async (chainId) => { + const spokePool = this.clients.spokePoolClients[chainId].spokePool.address; + const l2TokenAddresses = monitoredTokenSymbols + .map((symbol) => l2TokenForChain(chainId, symbol)) + .filter(isDefined); + const balances = Object.fromEntries( + await mapAsync(l2TokenAddresses, async (l2Token) => [ + l2Token, + ( + await this._getBalances([ + { + token: l2Token, + chainId: chainId, + account: spokePool, + }, + ]) + )[0], + ]) + ); + return [chainId, balances]; + }) + ), + ]); + const poolRebalanceLeaves = poolRebalanceRoot.root.leaves; + // Get the pool rebalance leaf amounts. const enabledTokens = [...hubPoolClient.getL1Tokens()]; for (const leaf of poolRebalanceLeaves) { if (!chainIds.includes(leaf.chainId)) { @@ -658,29 +739,14 @@ export class Monitor { }); } - // Take the validated bundles from the hub pool client. - const validatedBundles = sortEventsDescending(hubPoolClient.getValidatedRootBundles()).slice( - 0, - this.monitorConfig.bundlesCount - ); - const previouslyValidatedBundleRefunds: CombinedRefunds[] = await mapAsync( - validatedBundles, - async (bundle) => await this.clients.bundleDataClient.getPendingRefundsFromBundle(bundle) - ); - - // Here are the current outstanding refunds. - const nextBundleRefunds = await this.clients.bundleDataClient.getNextBundleRefunds(); - - // Calculate the pending refunds and the spoke pool balances in parallel. + // Calculate the pending refunds. 
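+    // Sketch of the aggregation below: for each monitored L2 token, refunds owed from
+    // already-validated bundles (accountedBundleRefunds) and from the not-yet-proposed
+    // tail (unaccountedBundleRefunds) are reduced into pendingRelayerRefunds[chainId][l2Token].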
for (const chainId of chainIds) { - const spokePool = this.clients.spokePoolClients[chainId].spokePool.address; const l2TokenAddresses = monitoredTokenSymbols .map((symbol) => l2TokenForChain(chainId, symbol)) .filter(isDefined); - currentSpokeBalances[chainId] = {}; pendingRelayerRefunds[chainId] = {}; - void (await mapAsync(l2TokenAddresses, async (l2Token) => { - const pendingValidatedDeductions = previouslyValidatedBundleRefunds + l2TokenAddresses.forEach((l2Token) => { + const pendingValidatedDeductions = accountedBundleRefunds .map((refund) => refund[chainId]?.[l2Token]) .filter(isDefined) .reduce( @@ -691,7 +757,7 @@ export class Monitor { ), bnZero ); - const nextBundleDeductions = nextBundleRefunds + const nextBundleDeductions = [unaccountedBundleRefunds] .map((refund) => refund[chainId]?.[l2Token]) .filter(isDefined) .reduce( @@ -702,20 +768,29 @@ export class Monitor { ), bnZero ); - const totalObligations = pendingValidatedDeductions.add(nextBundleDeductions); - currentSpokeBalances[chainId][l2Token] = ( - await this._getBalances([ - { - token: l2Token, - chainId: chainId, - account: spokePool, - }, - ]) - )[0]; - pendingRelayerRefunds[chainId][l2Token] = totalObligations; - })); + pendingRelayerRefunds[chainId][l2Token] = pendingValidatedDeductions.add(nextBundleDeductions); + }); } + // Get the slow fill amounts. Only do this step if there were slow fills in the most recent root bundle. + Object.entries(currentBundleData.bundleSlowFillsV3) + .filter(([chainId]) => chainIds.includes(+chainId)) + .map(([chainId, bundleSlowFills]) => { + const l2TokenAddresses = monitoredTokenSymbols + .map((symbol) => l2TokenForChain(+chainId, symbol)) + .filter(isDefined); + Object.entries(bundleSlowFills) + .filter(([l2Token]) => l2TokenAddresses.includes(l2Token)) + .map(([l2Token, fills]) => { + const pendingSlowFillAmounts = fills + .map((fill) => fill.outputAmount) + .filter(isDefined) + .reduce((totalAmounts, outputAmount) => totalAmounts.add(outputAmount), bnZero); + pendingRelayerRefunds[chainId][l2Token] = + pendingRelayerRefunds[chainId][l2Token].sub(pendingSlowFillAmounts); + }); + }); + // Print the output: The current spoke pool balance, the amount of refunds to payout, the pending pool rebalances, and then the sum of the three. 
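+    // Each row below should net out approximately as: adjusted spoke balance =
+    // current balance - pending relayer refunds + pending pool rebalances.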
let tokenMarkdown = "Token amounts: current, pending relayer refunds, pool rebalances, adjusted spoke pool balance\n"; From 28cffcd9fc6224944a6fbd09154c485c5ca03316 Mon Sep 17 00:00:00 2001 From: Melisa Guevara Date: Wed, 13 Nov 2024 14:19:47 -0500 Subject: [PATCH 13/44] feat: Support AlephZero (#1887) * Support AlephZero * change time based constants * define gasToken in BaseBridgeAdapter * set aleph zero min deposit confirmations equal to arbitrum values * fix typo * feat(alephzero): make arb finalizer generic (#1895) Signed-off-by: james-a-morris * Delete duplicated multicall address * update across constants repo * Change contract addresses name pattern * Fix arbitrum outbox definition * Rename orbit contract addresses entries * Update contracts and sdk, delete deprecated events * fix tests * fix test * use native token symbol when deciding to unwrap eth or not * improve: bump sdk (#1904) * refactor: arbitrum orbit bridge (#1903) * refactor: aleph zero bridge inherits arbitrum one bridge Signed-off-by: bennett * make arbitrum bridge generic Signed-off-by: bennett * add bridge Signed-off-by: bennett * native token Signed-off-by: bennett * build Signed-off-by: bennett --------- Signed-off-by: bennett * fix: change contract constant name Signed-off-by: james-a-morris * Rebasing and removing DATAWORKER_FAST_LOOKBACK as it is not used anymore * feat: support custom gas token bridges in the rebalancer (#1873) * feat: support custom gas token bridges in the rebalancer Signed-off-by: bennett * Support AlephZero * Support AlephZero * feat(alephzero): add multicall3 (#1893) Signed-off-by: james-a-morris * feat(alephzero): make arb finalizer generic (#1895) Signed-off-by: james-a-morris * fix: dedup approvals and encode proper outbound data Signed-off-by: bennett * remove await Signed-off-by: bennett * custom azero gas values Signed-off-by: bennett * fix package.json * Delete unused file --------- Signed-off-by: bennett Signed-off-by: james-a-morris Co-authored-by: Melisa Guevara Co-authored-by: James Morris, MS <96435344+james-a-morris@users.noreply.github.com> --------- Signed-off-by: james-a-morris Signed-off-by: bennett Co-authored-by: James Morris, MS <96435344+james-a-morris@users.noreply.github.com> Co-authored-by: nicholaspai Co-authored-by: bmzig <57361391+bmzig@users.noreply.github.com> Co-authored-by: james-a-morris --- package.json | 6 +- src/adapter/BaseChainAdapter.ts | 32 +- ...rumOneBridge.ts => ArbitrumOrbitBridge.ts} | 55 +- src/adapter/bridges/BaseBridgeAdapter.ts | 1 + src/adapter/bridges/index.ts | 2 +- src/clients/InventoryClient.ts | 7 +- src/clients/ProfitClient.ts | 1 + src/clients/bridges/AdapterManager.ts | 15 +- src/common/Constants.ts | 79 +-- src/common/ContractAddresses.ts | 26 +- src/common/abi/ArbitrumErc20GatewayL1.json | 14 + .../abi/ArbitrumErc20GatewayRouterL1.json | 12 - src/finalizer/index.ts | 8 +- .../utils/{arbitrum.ts => arbStack.ts} | 44 +- src/finalizer/utils/index.ts | 2 +- src/interfaces/index.ts | 1 - test/AdapterManager.SendTokensCrossChain.ts | 4 +- test/Relayer.BasicFill.ts | 34 +- .../AdapterManager.SendTokensCrossChain.ts | 4 +- yarn.lock | 553 +++++++++++++++++- 20 files changed, 739 insertions(+), 161 deletions(-) rename src/adapter/bridges/{ArbitrumOneBridge.ts => ArbitrumOrbitBridge.ts} (55%) create mode 100644 src/common/abi/ArbitrumErc20GatewayL1.json rename src/finalizer/utils/{arbitrum.ts => arbStack.ts} (87%) diff --git a/package.json b/package.json index 278bd9fe8..126c8ca02 100644 --- a/package.json +++ b/package.json @@ -10,9 +10,9 
@@ "node": ">=20" }, "dependencies": { - "@across-protocol/constants": "^3.1.16", - "@across-protocol/contracts": "^3.0.11", - "@across-protocol/sdk": "^3.2.11", + "@across-protocol/constants": "^3.1.19", + "@across-protocol/contracts": "^3.0.16", + "@across-protocol/sdk": "^3.2.13", "@arbitrum/sdk": "^3.1.3", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", diff --git a/src/adapter/BaseChainAdapter.ts b/src/adapter/BaseChainAdapter.ts index d72b83fd6..a7c5fc7eb 100644 --- a/src/adapter/BaseChainAdapter.ts +++ b/src/adapter/BaseChainAdapter.ts @@ -106,8 +106,10 @@ export class BaseChainAdapter { async checkTokenApprovals(l1Tokens: string[]): Promise { const unavailableTokens: string[] = []; - const tokensToApprove = ( - await mapAsync( + // Approve tokens to bridges. This includes the tokens we want to send over a bridge as well as the custom gas tokens + // each bridge supports (if applicable). + const [bridgeTokensToApprove, gasTokensToApprove] = await Promise.all([ + mapAsync( l1Tokens.map((token) => [token, this.bridges[token]?.l1Gateways] as [string, string[]]), async ([l1Token, bridges]) => { const erc20 = ERC20.connect(l1Token, this.getSigner(this.hubChainId)); @@ -126,8 +128,30 @@ export class BaseChainAdapter { }); return { token: erc20, bridges: bridgesToApprove }; } - ) - ).filter(({ bridges }) => bridges.length > 0); + ), + mapAsync( + Object.values(this.bridges).filter((bridge) => isDefined(bridge.gasToken)), + async (bridge) => { + const gasToken = bridge.gasToken; + const erc20 = ERC20.connect(gasToken, this.getSigner(this.hubChainId)); + const bridgesToApprove = await filterAsync(bridge.l1Gateways, async (gateway) => { + const senderAddress = await erc20.signer.getAddress(); + const cachedResult = await getTokenAllowanceFromCache(gasToken, senderAddress, gateway); + const allowance = cachedResult ?? (await erc20.allowance(senderAddress, gateway)); + if (!isDefined(cachedResult) && aboveAllowanceThreshold(allowance)) { + await setTokenAllowanceInCache(gasToken, senderAddress, gateway, allowance); + } + return !aboveAllowanceThreshold(allowance); + }); + return { token: erc20, bridges: bridgesToApprove }; + } + ), + ]); + // Dedup the `gasTokensToApprove` array so that we don't approve the same bridge to send the same token multiple times. 
+ const tokensToApprove = gasTokensToApprove + .filter(({ token, bridges }, idx) => gasTokensToApprove.indexOf({ token, bridges }) === idx) + .concat(bridgeTokensToApprove) + .filter(({ bridges }) => bridges.length > 0); if (unavailableTokens.length > 0) { this.log("Some tokens do not have a bridge contract", { unavailableTokens }); } diff --git a/src/adapter/bridges/ArbitrumOneBridge.ts b/src/adapter/bridges/ArbitrumOrbitBridge.ts similarity index 55% rename from src/adapter/bridges/ArbitrumOneBridge.ts rename to src/adapter/bridges/ArbitrumOrbitBridge.ts index 0dd573d6d..89d045a16 100644 --- a/src/adapter/bridges/ArbitrumOneBridge.ts +++ b/src/adapter/bridges/ArbitrumOrbitBridge.ts @@ -7,24 +7,35 @@ import { Provider, toBN, toWei, + TOKEN_SYMBOLS_MAP, + isDefined, + ethers, + bnZero, + CHAIN_IDs, } from "../../utils"; -import { CONTRACT_ADDRESSES, CUSTOM_ARBITRUM_GATEWAYS } from "../../common"; +import { CONTRACT_ADDRESSES, CUSTOM_ARBITRUM_GATEWAYS, DEFAULT_ARBITRUM_GATEWAY } from "../../common"; import { BridgeTransactionDetails, BaseBridgeAdapter, BridgeEvents } from "./BaseBridgeAdapter"; import { processEvent } from "../utils"; +import { PRODUCTION_NETWORKS } from "@across-protocol/constants"; -const DEFAULT_ERC20_GATEWAY = { - l1: "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC", - l2: "0x09e9222E96E7B4AE2a407B98d48e330053351EEe", +const bridgeSubmitValue: { [chainId: number]: BigNumber } = { + [CHAIN_IDs.ARBITRUM]: toWei(0.013), + [CHAIN_IDs.ALEPH_ZERO]: toWei(0.45), }; -export class ArbitrumOneBridge extends BaseBridgeAdapter { - protected l1Gateway: Contract; +const maxFeePerGas: { [chainId: number]: BigNumber } = { + [CHAIN_IDs.ARBITRUM]: toBN(20e9), + [CHAIN_IDs.ALEPH_ZERO]: toBN(24e10), +}; + +export class ArbitrumOrbitBridge extends BaseBridgeAdapter { + protected l1GatewayRouter: Contract; private readonly transactionSubmissionData = "0x000000000000000000000000000000000000000000000000002386f26fc1000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000"; private readonly l2GasLimit = toBN(150000); - private readonly l2GasPrice = toBN(20e9); - private readonly l1SubmitValue = toWei(0.013); + private readonly l2GasPrice; + private readonly l1SubmitValue; constructor( l2chainId: number, @@ -33,15 +44,25 @@ export class ArbitrumOneBridge extends BaseBridgeAdapter { l2SignerOrProvider: Signer | Provider, l1Token: string ) { - const { address: gatewayAddress, abi: l1Abi } = CONTRACT_ADDRESSES[hubChainId].arbitrumErc20GatewayRouter; - const { l1: l1Address, l2: l2Address } = CUSTOM_ARBITRUM_GATEWAYS[l1Token] ?? DEFAULT_ERC20_GATEWAY; + const { address: gatewayAddress, abi: gatewayRouterAbi } = + CONTRACT_ADDRESSES[hubChainId][`orbitErc20GatewayRouter_${l2chainId}`]; + const { l1: l1Address, l2: l2Address } = + CUSTOM_ARBITRUM_GATEWAYS[l2chainId]?.[l1Token] ?? DEFAULT_ARBITRUM_GATEWAY[l2chainId]; + const l1Abi = CONTRACT_ADDRESSES[hubChainId][`orbitErc20Gateway_${l2chainId}`].abi; const l2Abi = CONTRACT_ADDRESSES[l2chainId].erc20Gateway.abi; super(l2chainId, hubChainId, l1Signer, l2SignerOrProvider, [l1Address]); + const nativeToken = PRODUCTION_NETWORKS[l2chainId].nativeToken; + // Only set nonstandard gas tokens. 
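When an Orbit chain's gas token is not ETH, the retryable-ticket fees cannot ride along as msg.value; instead they are ABI-encoded into the outboundTransfer data and paid in the custom gas token, as constructL1ToL2Txn does further down. A standalone sketch of that tuple, with the fields read per their use below (the function name is illustrative):

import { BigNumber, ethers } from "ethers";

// Encode outboundTransfer data for a custom-gas-token Orbit chain: the submission cost,
// empty extra data, and the total fee (l2 gas limit * l2 gas price + submission cost).
function encodeOrbitOutboundData(
  l1SubmitValue: BigNumber,
  l2GasLimit: BigNumber,
  l2GasPrice: BigNumber
): string {
  return ethers.utils.defaultAbiCoder.encode(
    ["uint256", "bytes", "uint256"],
    [l1SubmitValue, "0x", l2GasLimit.mul(l2GasPrice).add(l1SubmitValue)]
  );
}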
+ if (nativeToken !== "ETH") { + this.gasToken = TOKEN_SYMBOLS_MAP[nativeToken].addresses[hubChainId]; + } + this.l1SubmitValue = bridgeSubmitValue[l2chainId]; + this.l2GasPrice = maxFeePerGas[l2chainId]; this.l1Bridge = new Contract(l1Address, l1Abi, l1Signer); this.l2Bridge = new Contract(l2Address, l2Abi, l2SignerOrProvider); - this.l1Gateway = new Contract(gatewayAddress, l1Abi, l1Signer); + this.l1GatewayRouter = new Contract(gatewayAddress, gatewayRouterAbi, l1Signer); } async constructL1ToL2Txn( @@ -50,12 +71,18 @@ export class ArbitrumOneBridge extends BaseBridgeAdapter { l2Token: string, amount: BigNumber ): Promise { - const { l1Gateway, l2GasLimit, l2GasPrice, transactionSubmissionData, l1SubmitValue } = this; + const { l1GatewayRouter, l2GasLimit, l2GasPrice, l1SubmitValue } = this; + const transactionSubmissionData = isDefined(this.gasToken) + ? ethers.utils.defaultAbiCoder.encode( + ["uint256", "bytes", "uint256"], + [l1SubmitValue, "0x", l2GasLimit.mul(l2GasPrice).add(l1SubmitValue)] + ) + : this.transactionSubmissionData; return Promise.resolve({ - contract: l1Gateway, + contract: l1GatewayRouter, method: "outboundTransfer", args: [l1Token, toAddress, amount, l2GasLimit, l2GasPrice, transactionSubmissionData], - value: l1SubmitValue, + value: isDefined(this.gasToken) ? bnZero : l1SubmitValue, }); } diff --git a/src/adapter/bridges/BaseBridgeAdapter.ts b/src/adapter/bridges/BaseBridgeAdapter.ts index a54dbc395..d9f4320c9 100644 --- a/src/adapter/bridges/BaseBridgeAdapter.ts +++ b/src/adapter/bridges/BaseBridgeAdapter.ts @@ -28,6 +28,7 @@ export type BridgeEvents = { [l2Token: string]: BridgeEvent[] }; export abstract class BaseBridgeAdapter { protected l1Bridge: Contract; protected l2Bridge: Contract; + public gasToken: string | undefined; constructor( protected l2chainId: number, diff --git a/src/adapter/bridges/index.ts b/src/adapter/bridges/index.ts index ef672f619..8c3a6a87c 100644 --- a/src/adapter/bridges/index.ts +++ b/src/adapter/bridges/index.ts @@ -3,7 +3,7 @@ export * from "./SnxOptimismBridge"; export * from "./BaseBridgeAdapter"; export * from "./UsdcTokenSplitterBridge"; export * from "./OpStackWethBridge"; -export * from "./ArbitrumOneBridge"; +export * from "./ArbitrumOrbitBridge"; export * from "./LineaBridge"; export * from "./LineaUSDCBridge"; export * from "./PolygonERC20Bridge"; diff --git a/src/clients/InventoryClient.ts b/src/clients/InventoryClient.ts index d02f0204a..1e25e19aa 100644 --- a/src/clients/InventoryClient.ts +++ b/src/clients/InventoryClient.ts @@ -25,6 +25,7 @@ import { assert, compareAddressesSimple, getUsdcSymbol, + getNativeTokenSymbol, } from "../utils"; import { HubPoolClient, TokenClient, BundleDataClient } from "."; import { Deposit } from "../interfaces"; @@ -981,7 +982,11 @@ export class InventoryClient { const { unwrapWethThreshold, unwrapWethTarget } = tokenConfig; // Ignore chains where ETH isn't the native gas token. Returning null will result in these being filtered. 
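The old check special-cased Polygon; the new one asks whether the chain's native token is ETH at all. A minimal sketch of that eligibility gate, where a lookup table stands in for the repo's getNativeTokenSymbol helper (the table entries are illustrative):

import { BigNumber } from "ethers";

// A chain qualifies for WETH unwrapping only when its native gas token is ETH and both
// unwrap settings are configured, matching the filter below.
const NATIVE_TOKEN_SYMBOL: { [chainId: number]: string } = { 1: "ETH", 10: "ETH", 137: "MATIC" };

function eligibleForUnwrap(
  chainId: number,
  unwrapWethThreshold?: BigNumber,
  unwrapWethTarget?: BigNumber
): boolean {
  return (
    NATIVE_TOKEN_SYMBOL[chainId] === "ETH" &&
    unwrapWethThreshold !== undefined &&
    unwrapWethTarget !== undefined
  );
}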
- if (chainId === CHAIN_IDs.POLYGON || unwrapWethThreshold === undefined || unwrapWethTarget === undefined) { + if ( + getNativeTokenSymbol(chainId) !== "ETH" || + unwrapWethThreshold === undefined || + unwrapWethTarget === undefined + ) { return; } const weth = TOKEN_SYMBOLS_MAP.WETH.addresses[chainId]; diff --git a/src/clients/ProfitClient.ts b/src/clients/ProfitClient.ts index ec5907e52..9bd93c325 100644 --- a/src/clients/ProfitClient.ts +++ b/src/clients/ProfitClient.ts @@ -587,6 +587,7 @@ export class ProfitClient { // The relayer _cannot_ be the recipient because the SpokePool skips the ERC20 transfer. Instead, use // the main RL address because it has all supported tokens and approvals in place on all chains. const testSymbols = { + [CHAIN_IDs.ALEPH_ZERO]: "USDT", // USDC is not yet supported on AlephZero, so revert to USDT. @todo: Update. [CHAIN_IDs.BLAST]: "USDB", [CHAIN_IDs.LISK]: "USDT", // USDC is not yet supported on Lisk, so revert to USDT. @todo: Update. [CHAIN_IDs.REDSTONE]: "WETH", // Redstone only supports WETH. diff --git a/src/clients/bridges/AdapterManager.ts b/src/clients/bridges/AdapterManager.ts index 3e9866064..8d83fc6c4 100644 --- a/src/clients/bridges/AdapterManager.ts +++ b/src/clients/bridges/AdapterManager.ts @@ -46,7 +46,8 @@ export class AdapterManager { ); }; - const { OPTIMISM, ARBITRUM, POLYGON, ZK_SYNC, BASE, MODE, LINEA, LISK, BLAST, REDSTONE, SCROLL, ZORA } = CHAIN_IDs; + const { OPTIMISM, ARBITRUM, POLYGON, ZK_SYNC, BASE, MODE, LINEA, LISK, BLAST, REDSTONE, SCROLL, ZORA, ALEPH_ZERO } = + CHAIN_IDs; const hubChainId = hubPoolClient.chainId; const l1Signer = spokePoolClients[hubChainId].spokePool.signer; const constructBridges = (chainId: number) => { @@ -171,6 +172,18 @@ export class AdapterManager { DEFAULT_GAS_MULTIPLIER[ZORA] ?? 1 ); } + if (this.spokePoolClients[ALEPH_ZERO] !== undefined) { + this.adapters[ALEPH_ZERO] = new BaseChainAdapter( + spokePoolClients, + ALEPH_ZERO, + hubChainId, + filterMonitoredAddresses(ALEPH_ZERO), + logger, + SUPPORTED_TOKENS[ALEPH_ZERO], + constructBridges(ALEPH_ZERO), + DEFAULT_GAS_MULTIPLIER[ALEPH_ZERO] ?? 1 + ); + } logger.debug({ at: "AdapterManager#constructor", diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 853db265d..90c0225a4 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -10,7 +10,7 @@ import { PolygonERC20Bridge, ZKSyncBridge, ZKSyncWethBridge, - ArbitrumOneBridge, + ArbitrumOrbitBridge, LineaBridge, LineaUSDCBridge, LineaWethBridge, @@ -38,25 +38,6 @@ export const INFINITE_FILL_DEADLINE = bnUint32Max; // Target ~4 hours export const MAX_RELAYER_DEPOSIT_LOOK_BACK = 4 * 60 * 60; -// Target ~4 days per chain. Should cover all events needed to construct pending bundle. -export const DATAWORKER_FAST_LOOKBACK: { [chainId: number]: number } = { - [CHAIN_IDs.ARBITRUM]: 1382400, - [CHAIN_IDs.BASE]: 172800, // Same as Optimism. - [CHAIN_IDs.BLAST]: 172800, - [CHAIN_IDs.BOBA]: 11520, - [CHAIN_IDs.LINEA]: 115200, // 1 block every 3 seconds - [CHAIN_IDs.LISK]: 172800, // Same as Optimism. - [CHAIN_IDs.MAINNET]: 28800, - [CHAIN_IDs.MODE]: 172800, // Same as Optimism. - [CHAIN_IDs.OPTIMISM]: 172800, // 1 block every 2 seconds after bedrock - [CHAIN_IDs.POLYGON]: 138240, - [CHAIN_IDs.REDSTONE]: 172800, // OP stack - [CHAIN_IDs.SCROLL]: 115200, // 4 * 24 * 20 * 60, - [CHAIN_IDs.WORLD_CHAIN]: 172800, // OP stack - [CHAIN_IDs.ZK_SYNC]: 345600, // 4 * 24 * 60 * 60, - [CHAIN_IDs.ZORA]: 172800, // OP stack -}; - // Target ~14 days per chain. 
Should cover all events that could be finalized, so 2x the optimistic // rollup challenge period seems safe. export const FINALIZER_TOKENBRIDGE_LOOKBACK = 14 * 24 * 60 * 60; @@ -70,7 +51,7 @@ export const FINALIZER_TOKENBRIDGE_LOOKBACK = 14 * 24 * 60 * 60; // The key of the following dictionary is used as the USD threshold to determine the MDC: // - Searching from highest USD threshold to lowest // - If the key is >= deposited USD amount, then use the MDC associated with the key for the origin chain -// - If no keys are >= depostied USD amount, ignore the deposit. +// - If no keys are >= deposited USD amount, ignore the deposit. // To see the latest block reorg events go to: // - Ethereum: https://etherscan.io/blocks_forked // - Polygon: https://polygonscan.com/blocks_forked @@ -79,6 +60,7 @@ export const FINALIZER_TOKENBRIDGE_LOOKBACK = 14 * 24 * 60 * 60; // anything under 7 days. export const MIN_DEPOSIT_CONFIRMATIONS: { [threshold: number | string]: { [chainId: number]: number } } = { 10000: { + [CHAIN_IDs.ALEPH_ZERO]: 0, [CHAIN_IDs.ARBITRUM]: 0, [CHAIN_IDs.BASE]: 120, [CHAIN_IDs.BLAST]: 120, @@ -95,6 +77,7 @@ export const MIN_DEPOSIT_CONFIRMATIONS: { [threshold: number | string]: { [chain [CHAIN_IDs.ZORA]: 120, }, 1000: { + [CHAIN_IDs.ALEPH_ZERO]: 0, [CHAIN_IDs.ARBITRUM]: 0, [CHAIN_IDs.BASE]: 60, [CHAIN_IDs.BLAST]: 60, @@ -111,6 +94,7 @@ export const MIN_DEPOSIT_CONFIRMATIONS: { [threshold: number | string]: { [chain [CHAIN_IDs.ZORA]: 60, }, 100: { + [CHAIN_IDs.ALEPH_ZERO]: 0, [CHAIN_IDs.ARBITRUM]: 0, [CHAIN_IDs.BASE]: 60, [CHAIN_IDs.BLAST]: 60, @@ -136,6 +120,7 @@ export const REDIS_URL_DEFAULT = "redis://localhost:6379"; // if the RPC provider allows it. This is why the user should override these lookbacks if they are not using // Quicknode for example. export const CHAIN_MAX_BLOCK_LOOKBACK = { + [CHAIN_IDs.ALEPH_ZERO]: 10000, [CHAIN_IDs.ARBITRUM]: 10000, [CHAIN_IDs.BASE]: 10000, [CHAIN_IDs.BLAST]: 10000, @@ -167,6 +152,7 @@ export const CHAIN_MAX_BLOCK_LOOKBACK = { // can be matched with a deposit on the origin chain, so something like // ~1-2 mins per chain. export const BUNDLE_END_BLOCK_BUFFERS = { + [CHAIN_IDs.ALEPH_ZERO]: 240, // Same as Arbitrum [CHAIN_IDs.ARBITRUM]: 240, // ~0.25s/block. Arbitrum is a centralized sequencer [CHAIN_IDs.BASE]: 60, // 2s/block. Same finality profile as Optimism [CHAIN_IDs.BLAST]: 60, @@ -212,6 +198,7 @@ export const IGNORED_HUB_EXECUTED_BUNDLES: number[] = []; // Provider caching will not be allowed for queries whose responses depend on blocks closer than this many blocks. // This is intended to be conservative. export const CHAIN_CACHE_FOLLOW_DISTANCE: { [chainId: number]: number } = { + [CHAIN_IDs.ALEPH_ZERO]: 60, [CHAIN_IDs.ARBITRUM]: 32, [CHAIN_IDs.BASE]: 120, [CHAIN_IDs.BLAST]: 120, @@ -242,6 +229,7 @@ export const CHAIN_CACHE_FOLLOW_DISTANCE: { [chainId: number]: number } = { // These are all intended to be roughly 2 days of blocks for each chain. // blocks = 172800 / avg_block_time export const DEFAULT_NO_TTL_DISTANCE: { [chainId: number]: number } = { + [CHAIN_IDs.ALEPH_ZERO]: 691200, [CHAIN_IDs.ARBITRUM]: 691200, [CHAIN_IDs.BASE]: 86400, [CHAIN_IDs.BLAST]: 86400, @@ -318,6 +306,7 @@ export const chainIdsToCctpDomains: { [chainId: number]: number } = { // A mapping of L2 chain IDs to an array of tokens Across supports on that chain. 
export const SUPPORTED_TOKENS: { [chainId: number]: string[] } = { + [CHAIN_IDs.ALEPH_ZERO]: ["USDT", "WETH"], [CHAIN_IDs.ARBITRUM]: ["USDC", "USDT", "WETH", "DAI", "WBTC", "UMA", "BAL", "ACX", "POOL"], [CHAIN_IDs.BASE]: ["BAL", "DAI", "ETH", "WETH", "USDC", "POOL"], [CHAIN_IDs.BLAST]: ["DAI", "WBTC", "WETH"], @@ -371,7 +360,8 @@ export const CANONICAL_BRIDGE: { ): BaseBridgeAdapter; }; } = { - [CHAIN_IDs.ARBITRUM]: ArbitrumOneBridge, + [CHAIN_IDs.ALEPH_ZERO]: ArbitrumOrbitBridge, + [CHAIN_IDs.ARBITRUM]: ArbitrumOrbitBridge, [CHAIN_IDs.BASE]: OpStackDefaultERC20Bridge, [CHAIN_IDs.BLAST]: OpStackDefaultERC20Bridge, [CHAIN_IDs.LINEA]: LineaBridge, @@ -458,22 +448,38 @@ export const DEFAULT_ARWEAVE_GATEWAY = { url: "arweave.net", port: 443, protocol // relayer to unintentionally overdraw the HubPool's available reserves. export const SLOW_WITHDRAWAL_CHAINS = [CHAIN_IDs.ARBITRUM, CHAIN_IDs.BASE, CHAIN_IDs.OPTIMISM, CHAIN_IDs.BLAST]; -export const CUSTOM_ARBITRUM_GATEWAYS: { [chainId: number]: { l1: string; l2: string } } = { - [TOKEN_SYMBOLS_MAP.USDT.addresses[CHAIN_IDs.MAINNET]]: { - l1: "0xcEe284F754E854890e311e3280b767F80797180d", // USDT - l2: "0x096760F208390250649E3e8763348E783AEF5562", - }, - [TOKEN_SYMBOLS_MAP.USDC.addresses[CHAIN_IDs.MAINNET]]: { - l1: "0xcEe284F754E854890e311e3280b767F80797180d", // USDC - l2: "0x096760F208390250649E3e8763348E783AEF5562", // If we want to bridge to USDC.e, we need to specify a unique Arbitrum Gateway. +// Arbitrum Orbit chains may have custom gateways for certain tokens. These gateways need to be specified since token approvals are directed at the +// gateway, while function calls are directed at the gateway router. +export const CUSTOM_ARBITRUM_GATEWAYS: { [chainId: number]: { [address: string]: { l1: string; l2: string } } } = { + [CHAIN_IDs.ARBITRUM]: { + [TOKEN_SYMBOLS_MAP.USDT.addresses[CHAIN_IDs.MAINNET]]: { + l1: "0xcEe284F754E854890e311e3280b767F80797180d", // USDT + l2: "0x096760F208390250649E3e8763348E783AEF5562", + }, + [TOKEN_SYMBOLS_MAP.USDC.addresses[CHAIN_IDs.MAINNET]]: { + l1: "0xcEe284F754E854890e311e3280b767F80797180d", // USDC + l2: "0x096760F208390250649E3e8763348E783AEF5562", // If we want to bridge to USDC.e, we need to specify a unique Arbitrum Gateway. + }, + [TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.MAINNET]]: { + l1: "0xd92023E9d9911199a6711321D1277285e6d4e2db", // WETH + l2: "0x6c411aD3E74De3E7Bd422b94A27770f5B86C623B", + }, + [TOKEN_SYMBOLS_MAP.DAI.addresses[CHAIN_IDs.MAINNET]]: { + l1: "0xD3B5b60020504bc3489D6949d545893982BA3011", // DAI + l2: "0x467194771dAe2967Aef3ECbEDD3Bf9a310C76C65", + }, }, - [TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.MAINNET]]: { - l1: "0xd92023E9d9911199a6711321D1277285e6d4e2db", // WETH - l2: "0x6c411aD3E74De3E7Bd422b94A27770f5B86C623B", +}; + +// The default ERC20 gateway is the generic gateway used by Arbitrum Orbit chains to mint tokens which do not have a custom gateway set. 
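As the gateway comment above notes, approvals target the token's gateway while outboundTransfer goes through the router, so the bridge constructor resolves the gateway with a custom-first fallback. A condensed sketch of that resolution:

type GatewayPair = { l1: string; l2: string };

// Resolve the gateway pair for a token on an Orbit chain: prefer the token-specific custom
// gateway, else fall back to the chain's default ERC20 gateway. Mirrors
// CUSTOM_ARBITRUM_GATEWAYS[l2chainId]?.[l1Token] ?? DEFAULT_ARBITRUM_GATEWAY[l2chainId].
function resolveGateway(
  custom: { [chainId: number]: { [l1Token: string]: GatewayPair } },
  defaults: { [chainId: number]: GatewayPair },
  l2ChainId: number,
  l1Token: string
): GatewayPair {
  return custom[l2ChainId]?.[l1Token] ?? defaults[l2ChainId];
}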
+export const DEFAULT_ARBITRUM_GATEWAY: { [chainId: number]: { l1: string; l2: string } } = { + [CHAIN_IDs.ALEPH_ZERO]: { + l1: "0xccaF21F002EAF230c9Fa810B34837a3739B70F7B", + l2: "0x2A5a79061b723BBF453ef7E07c583C750AFb9BD6", }, - [TOKEN_SYMBOLS_MAP.DAI.addresses[CHAIN_IDs.MAINNET]]: { - l1: "0xD3B5b60020504bc3489D6949d545893982BA3011", // DAI - l2: "0x467194771dAe2967Aef3ECbEDD3Bf9a310C76C65", + [CHAIN_IDs.ARBITRUM]: { + l1: "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC", + l2: "0x09e9222E96E7B4AE2a407B98d48e330053351EEe", }, }; @@ -493,6 +499,7 @@ export const SCROLL_CUSTOM_GATEWAY: { [chainId: number]: { l1: string; l2: strin // Expected worst-case time for message from L1 to propogate to L2 in seconds export const EXPECTED_L1_TO_L2_MESSAGE_TIME = { + [CHAIN_IDs.ALEPH_ZERO]: 20 * 60, [CHAIN_IDs.ARBITRUM]: 20 * 60, [CHAIN_IDs.BASE]: 20 * 60, [CHAIN_IDs.BLAST]: 20 * 60, diff --git a/src/common/ContractAddresses.ts b/src/common/ContractAddresses.ts index 4433ed226..cdd9473ee 100644 --- a/src/common/ContractAddresses.ts +++ b/src/common/ContractAddresses.ts @@ -19,6 +19,7 @@ import ZK_SYNC_DEFAULT_ERC20_BRIDGE_L1_ABI from "./abi/ZkSyncDefaultErc20BridgeL import ZK_SYNC_DEFAULT_ERC20_BRIDGE_L2_ABI from "./abi/ZkSyncDefaultErc20BridgeL2.json"; import ZK_SYNC_MAILBOX_ABI from "./abi/ZkSyncMailbox.json"; import ARBITRUM_ERC20_GATEWAY_ROUTER_L1_ABI from "./abi/ArbitrumErc20GatewayRouterL1.json"; +import ARBITRUM_ERC20_GATEWAY_L1_ABI from "./abi/ArbitrumErc20GatewayL1.json"; import ARBITRUM_ERC20_GATEWAY_L2_ABI from "./abi/ArbitrumErc20GatewayL2.json"; import ARBITRUM_OUTBOX_ABI from "./abi/ArbitrumOutbox.json"; import LINEA_MESSAGE_SERVICE_ABI from "./abi/LineaMessageService.json"; @@ -127,10 +128,24 @@ export const CONTRACT_ADDRESSES: { address: "0x8484Ef722627bf18ca5Ae6BcF031c23E6e922B30", abi: POLYGON_BRIDGE_ABI, }, - arbitrumErc20GatewayRouter: { + orbitOutbox_42161: { + address: "0x0B9857ae2D4A3DBe74ffE1d7DF045bb7F96E4840", + abi: ARBITRUM_OUTBOX_ABI, + }, + orbitErc20GatewayRouter_42161: { address: "0x72Ce9c846789fdB6fC1f34aC4AD25Dd9ef7031ef", abi: ARBITRUM_ERC20_GATEWAY_ROUTER_L1_ABI, }, + orbitErc20Gateway_42161: { + abi: ARBITRUM_ERC20_GATEWAY_L1_ABI, + }, + orbitErc20GatewayRouter_41455: { + address: "0xeBb17f398ed30d02F2e8733e7c1e5cf566e17812", + abi: ARBITRUM_ERC20_GATEWAY_ROUTER_L1_ABI, + }, + orbitErc20Gateway_41455: { + abi: ARBITRUM_ERC20_GATEWAY_L1_ABI, + }, VotingV2: { address: "0x004395edb43EFca9885CEdad51EC9fAf93Bd34ac", abi: VOTING_V2_ABI, @@ -308,10 +323,6 @@ export const CONTRACT_ADDRESSES: { erc20Gateway: { abi: ARBITRUM_ERC20_GATEWAY_L2_ABI, }, - outbox: { - address: "0x0B9857ae2D4A3DBe74ffE1d7DF045bb7F96E4840", - abi: ARBITRUM_OUTBOX_ABI, - }, cctpMessageTransmitter: { address: "0xC30362313FBBA5cf9163F0bb16a0e01f01A896ca", abi: CCTP_MESSAGE_TRANSMITTER_ABI, @@ -321,6 +332,11 @@ export const CONTRACT_ADDRESSES: { abi: CCTP_TOKEN_MESSENGER_ABI, }, }, + 41455: { + erc20Gateway: { + abi: ARBITRUM_ERC20_GATEWAY_L2_ABI, + }, + }, 59144: { l2MessageService: { address: "0x508Ca82Df566dCD1B0DE8296e70a96332cD644ec", diff --git a/src/common/abi/ArbitrumErc20GatewayL1.json b/src/common/abi/ArbitrumErc20GatewayL1.json new file mode 100644 index 000000000..47f31c802 --- /dev/null +++ b/src/common/abi/ArbitrumErc20GatewayL1.json @@ -0,0 +1,14 @@ +[ + { + "anonymous": false, + "inputs": [ + { "indexed": false, "internalType": "address", "name": "l1Token", "type": "address" }, + { "indexed": true, "internalType": "address", "name": "_from", "type": "address" }, + { "indexed": true, 
"internalType": "address", "name": "_to", "type": "address" }, + { "indexed": true, "internalType": "uint256", "name": "_sequenceNumber", "type": "uint256" }, + { "indexed": false, "internalType": "uint256", "name": "_amount", "type": "uint256" } + ], + "name": "DepositInitiated", + "type": "event" + } +] diff --git a/src/common/abi/ArbitrumErc20GatewayRouterL1.json b/src/common/abi/ArbitrumErc20GatewayRouterL1.json index a1526e9d4..28029092c 100644 --- a/src/common/abi/ArbitrumErc20GatewayRouterL1.json +++ b/src/common/abi/ArbitrumErc20GatewayRouterL1.json @@ -12,17 +12,5 @@ "outputs": [{ "internalType": "bytes", "name": "", "type": "bytes" }], "stateMutability": "payable", "type": "function" - }, - { - "anonymous": false, - "inputs": [ - { "indexed": false, "internalType": "address", "name": "l1Token", "type": "address" }, - { "indexed": true, "internalType": "address", "name": "_from", "type": "address" }, - { "indexed": true, "internalType": "address", "name": "_to", "type": "address" }, - { "indexed": true, "internalType": "uint256", "name": "_sequenceNumber", "type": "uint256" }, - { "indexed": false, "internalType": "uint256", "name": "_amount", "type": "uint256" } - ], - "name": "DepositInitiated", - "type": "event" } ] diff --git a/src/finalizer/index.ts b/src/finalizer/index.ts index 87a4f30c7..8b648dcf9 100644 --- a/src/finalizer/index.ts +++ b/src/finalizer/index.ts @@ -32,7 +32,7 @@ import { } from "../utils"; import { ChainFinalizer, CrossChainMessage } from "./types"; import { - arbitrumOneFinalizer, + arbStackFinalizer, cctpL1toL2Finalizer, cctpL2toL1Finalizer, lineaL1ToL2Finalizer, @@ -78,8 +78,12 @@ const chainFinalizers: { [chainId: number]: { finalizeOnL2: ChainFinalizer[]; fi finalizeOnL1: [opStackFinalizer, cctpL2toL1Finalizer], finalizeOnL2: [cctpL1toL2Finalizer], }, + [CHAIN_IDs.ALEPH_ZERO]: { + finalizeOnL1: [arbStackFinalizer], + finalizeOnL2: [], + }, [CHAIN_IDs.ARBITRUM]: { - finalizeOnL1: [arbitrumOneFinalizer, cctpL2toL1Finalizer], + finalizeOnL1: [arbStackFinalizer, cctpL2toL1Finalizer], finalizeOnL2: [cctpL1toL2Finalizer], }, [CHAIN_IDs.LINEA]: { diff --git a/src/finalizer/utils/arbitrum.ts b/src/finalizer/utils/arbStack.ts similarity index 87% rename from src/finalizer/utils/arbitrum.ts rename to src/finalizer/utils/arbStack.ts index b660ed3f3..0373c0cf3 100644 --- a/src/finalizer/utils/arbitrum.ts +++ b/src/finalizer/utils/arbStack.ts @@ -13,17 +13,15 @@ import { getL1TokenInfo, Multicall2Call, compareAddressesSimple, - TOKEN_SYMBOLS_MAP, CHAIN_IDs, + TOKEN_SYMBOLS_MAP, } from "../../utils"; import { TokensBridged } from "../../interfaces"; import { HubPoolClient, SpokePoolClient } from "../../clients"; import { CONTRACT_ADDRESSES } from "../../common"; import { FinalizerPromise, CrossChainMessage } from "../types"; -const CHAIN_ID = CHAIN_IDs.ARBITRUM; - -export async function arbitrumOneFinalizer( +export async function arbStackFinalizer( logger: winston.Logger, signer: Signer, hubPoolClient: HubPoolClient, @@ -49,25 +47,26 @@ export async function arbitrumOneFinalizer( (e) => e.blockNumber <= latestBlockToFinalize && // USDC withdrawals for Arbitrum should be finalized via the CCTP Finalizer. 
- !compareAddressesSimple(e.l2TokenAddress, TOKEN_SYMBOLS_MAP["USDC"].addresses[CHAIN_ID]) + !compareAddressesSimple(e.l2TokenAddress, TOKEN_SYMBOLS_MAP["USDC"].addresses[chainId]) ); - return await multicallArbitrumFinalizations(olderTokensBridgedEvents, signer, hubPoolClient, logger); + return await multicallArbitrumFinalizations(olderTokensBridgedEvents, signer, hubPoolClient, logger, chainId); } async function multicallArbitrumFinalizations( tokensBridged: TokensBridged[], hubSigner: Signer, hubPoolClient: HubPoolClient, - logger: winston.Logger + logger: winston.Logger, + chainId: number ): Promise { - const finalizableMessages = await getFinalizableMessages(logger, tokensBridged, hubSigner); - const callData = await Promise.all(finalizableMessages.map((message) => finalizeArbitrum(message.message))); + const finalizableMessages = await getFinalizableMessages(logger, tokensBridged, hubSigner, chainId); + const callData = await Promise.all(finalizableMessages.map((message) => finalizeArbitrum(message.message, chainId))); const crossChainTransfers = finalizableMessages.map(({ info: { l2TokenAddress, amountToReturn } }) => { - const l1TokenInfo = getL1TokenInfo(l2TokenAddress, CHAIN_ID); + const l1TokenInfo = getL1TokenInfo(l2TokenAddress, chainId); const amountFromWei = convertFromWei(amountToReturn.toString(), l1TokenInfo.decimals); const withdrawal: CrossChainMessage = { - originationChainId: CHAIN_ID, + originationChainId: chainId, l1TokenSymbol: l1TokenInfo.symbol, amount: amountFromWei, type: "withdrawal", @@ -82,10 +81,10 @@ async function multicallArbitrumFinalizations( }; } -async function finalizeArbitrum(message: L2ToL1MessageWriter): Promise { - const l2Provider = getCachedProvider(CHAIN_ID, true); +async function finalizeArbitrum(message: L2ToL1MessageWriter, chainId: number): Promise { + const l2Provider = getCachedProvider(chainId, true); const proof = await message.getOutboxProof(l2Provider); - const { address, abi } = CONTRACT_ADDRESSES[CHAIN_ID].outbox; + const { address, abi } = CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET][`orbitOutbox_${chainId}`]; const outbox = new Contract(address, abi); // eslint-disable-next-line @typescript-eslint/no-explicit-any const eventData = (message as any).nitroWriter.event; // nitroWriter is a private property on the @@ -111,7 +110,8 @@ async function finalizeArbitrum(message: L2ToL1MessageWriter): Promise { - const allMessagesWithStatuses = await getAllMessageStatuses(tokensBridged, logger, l1Signer); + const allMessagesWithStatuses = await getAllMessageStatuses(tokensBridged, logger, l1Signer, chainId); const statusesGrouped = groupObjectCountsByProp( allMessagesWithStatuses, (message: { status: string }) => message.status @@ -135,7 +135,8 @@ async function getFinalizableMessages( async function getAllMessageStatuses( tokensBridged: TokensBridged[], logger: winston.Logger, - mainnetSigner: Signer + mainnetSigner: Signer, + chainId: number ): Promise< { info: TokensBridged; @@ -148,7 +149,9 @@ async function getAllMessageStatuses( const logIndexesForMessage = getUniqueLogIndex(tokensBridged); return ( await Promise.all( - tokensBridged.map((e, i) => getMessageOutboxStatusAndProof(logger, e, mainnetSigner, logIndexesForMessage[i])) + tokensBridged.map((e, i) => + getMessageOutboxStatusAndProof(logger, e, mainnetSigner, logIndexesForMessage[i], chainId) + ) ) ) .map((result, i) => { @@ -164,12 +167,13 @@ async function getMessageOutboxStatusAndProof( logger: winston.Logger, event: TokensBridged, l1Signer: Signer, - logIndex: number + logIndex: 
number, + chainId: number ): Promise<{ message: L2ToL1MessageWriter; status: string; }> { - const l2Provider = getCachedProvider(CHAIN_ID, true); + const l2Provider = getCachedProvider(chainId, true); const receipt = await l2Provider.getTransactionReceipt(event.transactionHash); const l2Receipt = new L2TransactionReceipt(receipt); diff --git a/src/finalizer/utils/index.ts b/src/finalizer/utils/index.ts index b3d8d45c3..4e93f4533 100644 --- a/src/finalizer/utils/index.ts +++ b/src/finalizer/utils/index.ts @@ -1,5 +1,5 @@ export * from "./polygon"; -export * from "./arbitrum"; +export * from "./arbStack"; export * from "./opStack"; export * from "./zkSync"; export * from "./scroll"; diff --git a/src/interfaces/index.ts b/src/interfaces/index.ts index 5cbdeac7d..8993ad9c8 100644 --- a/src/interfaces/index.ts +++ b/src/interfaces/index.ts @@ -53,7 +53,6 @@ export type PendingRootBundle = interfaces.PendingRootBundle; // SpokePool interfaces export type RelayData = interfaces.RelayData; -export type FundsDepositedEvent = interfaces.FundsDepositedEvent; export type Deposit = interfaces.Deposit; export type DepositWithBlock = interfaces.DepositWithBlock; export type Fill = interfaces.Fill; diff --git a/test/AdapterManager.SendTokensCrossChain.ts b/test/AdapterManager.SendTokensCrossChain.ts index f6bd5e77f..a75541918 100644 --- a/test/AdapterManager.SendTokensCrossChain.ts +++ b/test/AdapterManager.SendTokensCrossChain.ts @@ -427,8 +427,8 @@ async function constructChainSpecificFakes() { // Arbitrum contracts l1ArbitrumBridge = await makeFake( - "arbitrumErc20GatewayRouter", - CONTRACT_ADDRESSES[1].arbitrumErc20GatewayRouter.address + "orbitErc20GatewayRouter_42161", + CONTRACT_ADDRESSES[1].orbitErc20GatewayRouter_42161.address ); // zkSync contracts diff --git a/test/Relayer.BasicFill.ts b/test/Relayer.BasicFill.ts index 19cb1b314..f9d322cc4 100644 --- a/test/Relayer.BasicFill.ts +++ b/test/Relayer.BasicFill.ts @@ -1,4 +1,5 @@ import { clients, constants, utils as sdkUtils } from "@across-protocol/sdk"; +import hre from "hardhat"; import { AcrossApiClient, ConfigStoreClient, MultiCallerClient, TokenClient } from "../src/clients"; import { FillStatus, Deposit, RelayData } from "../src/interfaces"; import { CONFIG_STORE_VERSION } from "../src/common"; @@ -510,9 +511,9 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { }); it("Correctly defers destination chain fills", async function () { - let { average: avgBlockTime } = await averageBlockTime(spokePool_2.provider); - avgBlockTime = Math.ceil(avgBlockTime); - const minFillTime = 4 * avgBlockTime; // Fill after deposit has aged 4 blocks. + const { average: avgBlockTime } = await averageBlockTime(spokePool_2.provider); + const minDepositAgeBlocks = 4; // Fill after deposit has aged this # of blocks. 
+ const minFillTime = Math.ceil(minDepositAgeBlocks * avgBlockTime); relayerInstance = new Relayer( relayer.address, @@ -537,15 +538,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { } as unknown as RelayerConfig ); - const deposit = await depositV3( - spokePool_1, - destinationChainId, - depositor, - inputToken, - inputAmount, - outputToken, - outputAmount - ); + await depositV3(spokePool_1, destinationChainId, depositor, inputToken, inputAmount, outputToken, outputAmount); await updateAllClients(); let txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); for (const receipts of Object.values(txnReceipts)) { @@ -553,19 +546,10 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { } expect(lastSpyLogIncludes(spy, "due to insufficient fill time for")).to.be.true; - // SpokePool time is overridden and does not increment; it must be cranked manually. - const startTime = Number(await spokePool_2.getCurrentTime()); - let nextTime: number; - do { - await fillV3Relay( - spokePool_2, - { ...deposit, depositId: deposit.depositId + 1, outputAmount: bnZero, recipient: randomAddress() }, - relayer - ); - nextTime = Number(await spokePool_2.getCurrentTime()) + avgBlockTime; - await spokePool_2.setCurrentTime(nextTime); - } while (startTime + minFillTime > nextTime); - + // Mine enough blocks such that the deposit has aged sufficiently. + for (let i = 0; i < minFillTime * minDepositAgeBlocks * 10; i++) { + await hre.network.provider.send("evm_mine"); + } await updateAllClients(); txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); const receipts = await txnReceipts[destinationChainId]; diff --git a/test/generic-adapters/AdapterManager.SendTokensCrossChain.ts b/test/generic-adapters/AdapterManager.SendTokensCrossChain.ts index 21890b602..9bf7e797c 100644 --- a/test/generic-adapters/AdapterManager.SendTokensCrossChain.ts +++ b/test/generic-adapters/AdapterManager.SendTokensCrossChain.ts @@ -428,8 +428,8 @@ async function constructChainSpecificFakes() { // Arbitrum contracts l1ArbitrumBridge = await makeFake( - "arbitrumErc20GatewayRouter", - CONTRACT_ADDRESSES[1].arbitrumErc20GatewayRouter.address + "orbitErc20GatewayRouter_42161", + CONTRACT_ADDRESSES[1].orbitErc20GatewayRouter_42161.address ); // zkSync contracts diff --git a/yarn.lock b/yarn.lock index 7f13b4ab5..959ce422c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11,10 +11,10 @@ "@uma/common" "^2.17.0" hardhat "^2.9.3" -"@across-protocol/constants@^3.1.16": - version "3.1.16" - resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.16.tgz#c126085d29d4d051fd02a04c833d804d37c3c219" - integrity sha512-+U+AecGWnfY4b4sSfKBvsDj/+yXKEqpTXcZgI8GVVmUTkUhs1efA0kN4q3q10yy5TXI5TtagaG7R9yZg1zgKKg== +"@across-protocol/constants@^3.1.19": + version "3.1.19" + resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.19.tgz#3c29b52ec5f2eece93a6abd50d580668b03dd7b3" + integrity sha512-XOFF+o64TDn57xNfUB38kWy8lYyE9lB7PBdyoMOadsXx00HC3KMznFi/paLRKT1iZ50vDwHp00tNZbr7Z7umzA== "@across-protocol/contracts@^0.1.4": version "0.1.4" @@ -25,12 +25,13 @@ "@openzeppelin/contracts" "4.1.0" "@uma/core" "^2.18.0" -"@across-protocol/contracts@^3.0.11": - version "3.0.11" - resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-3.0.11.tgz#d010e2a1a44a7ac8184848a54bb9c7b2d41875b0" - integrity sha512-T2C8jOetkcqFDbp8fqI894Dd9qm7D9X7h1kqsI7rYu9paXdaqAUVSR/XcMTq2aHhNAVgb0OlKY/do982ujd0xw== 
+"@across-protocol/contracts@^3.0.16": + version "3.0.16" + resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-3.0.16.tgz#22eb0c1dcdb01e8ca504dc2351d46513d9f71cc6" + integrity sha512-vwg+PmWaenlrx7kTHZdjDTTj1PwXWFU3rMlFyfKM8xBXbPWhIfMQCKCYOwFrGmZw2nRTYgoyhoKN/f6rUs/snw== dependencies: - "@across-protocol/constants" "^3.1.16" + "@across-protocol/constants" "^3.1.19" + "@coral-xyz/anchor" "^0.30.1" "@defi-wonderland/smock" "^2.3.4" "@eth-optimism/contracts" "^0.5.40" "@ethersproject/abstract-provider" "5.7.0" @@ -39,20 +40,27 @@ "@openzeppelin/contracts" "4.9.6" "@openzeppelin/contracts-upgradeable" "4.9.6" "@scroll-tech/contracts" "^0.1.0" + "@solana-developers/helpers" "^2.4.0" + "@solana/spl-token" "^0.4.6" + "@solana/web3.js" "^1.31.0" + "@types/yargs" "^17.0.33" "@uma/common" "^2.34.0" "@uma/contracts-node" "^0.4.17" "@uma/core" "^2.56.0" axios "^1.7.4" + bs58 "^6.0.0" + prettier-plugin-rust "^0.1.9" + yargs "^17.7.2" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.2.11": - version "3.2.11" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.2.11.tgz#8d5190863240e9a5062a38a03406f8e3b0390d84" - integrity sha512-eNUKYlV1ClzVHFHwDDRKaK2u/eFCTrZrCTglATtuQVJCUlpLlIR369qx/kquVbH6E5rRjSZm2Lo9hiuUJ3oH2Q== +"@across-protocol/sdk@^3.2.13": + version "3.2.13" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.2.13.tgz#8f7fd14dabdd7da0be2a548f9a3b0b49c7f19eba" + integrity sha512-lyzP7bgaABygeIwWISQtBWmzlxyZVvlmp0Au518W8TZ1vkagt7sZa24SV4do8TP9z4JhfsRJVnKGqQJIAWd5hQ== dependencies: "@across-protocol/across-token" "^1.0.0" - "@across-protocol/constants" "^3.1.16" - "@across-protocol/contracts" "^3.0.11" + "@across-protocol/constants" "^3.1.19" + "@across-protocol/contracts" "^3.0.16" "@eth-optimism/sdk" "^3.3.1" "@ethersproject/bignumber" "^5.7.0" "@pinata/sdk" "^2.1.0" @@ -257,6 +265,13 @@ dependencies: regenerator-runtime "^0.13.11" +"@babel/runtime@^7.25.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.0.tgz#8600c2f595f277c60815256418b85356a65173c1" + integrity sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw== + dependencies: + regenerator-runtime "^0.14.0" + "@babel/template@^7.22.15": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" @@ -357,6 +372,40 @@ typeorm-naming-strategies "^4.1.0" winston "^3.9.0" +"@coral-xyz/anchor-errors@^0.30.1": + version "0.30.1" + resolved "https://registry.yarnpkg.com/@coral-xyz/anchor-errors/-/anchor-errors-0.30.1.tgz#bdfd3a353131345244546876eb4afc0e125bec30" + integrity sha512-9Mkradf5yS5xiLWrl9WrpjqOrAV+/W2RQHDlbnAZBivoGpOs1ECjoDCkVk4aRG8ZdiFiB8zQEVlxf+8fKkmSfQ== + +"@coral-xyz/anchor@^0.30.1": + version "0.30.1" + resolved "https://registry.yarnpkg.com/@coral-xyz/anchor/-/anchor-0.30.1.tgz#17f3e9134c28cd0ea83574c6bab4e410bcecec5d" + integrity sha512-gDXFoF5oHgpriXAaLpxyWBHdCs8Awgf/gLHIo6crv7Aqm937CNdY+x+6hoj7QR5vaJV7MxWSQ0NGFzL3kPbWEQ== + dependencies: + "@coral-xyz/anchor-errors" "^0.30.1" + "@coral-xyz/borsh" "^0.30.1" + "@noble/hashes" "^1.3.1" + "@solana/web3.js" "^1.68.0" + bn.js "^5.1.2" + bs58 "^4.0.1" + buffer-layout "^1.2.2" + camelcase "^6.3.0" + cross-fetch "^3.1.5" + crypto-hash "^1.3.0" + eventemitter3 "^4.0.7" + pako "^2.0.3" + snake-case "^3.0.4" + superstruct "^0.15.4" + toml "^3.0.0" + +"@coral-xyz/borsh@^0.30.1": + version "0.30.1" + resolved 
"https://registry.yarnpkg.com/@coral-xyz/borsh/-/borsh-0.30.1.tgz#869d8833abe65685c72e9199b8688477a4f6b0e3" + integrity sha512-aaxswpPrCFKl8vZTbxLssA2RvwX2zmKLlRCIktJOwW+VpVwYtXRtlWiIP+c2pPRKneiTiWCN2GEMSH9j1zTlWQ== + dependencies: + bn.js "^5.1.2" + buffer-layout "^1.2.0" + "@cspotcode/source-map-support@^0.8.0": version "0.8.1" resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" @@ -1503,7 +1552,7 @@ resolved "https://registry.yarnpkg.com/@multiformats/base-x/-/base-x-4.0.1.tgz#95ff0fa58711789d53aefb2590a8b7a4e715d121" integrity sha512-eMk0b9ReBbV23xXU693TAIrLyeO5iTgBZGSJfpqriG8UkYvr/hC9u9pyMlAakDNHWmbhMZCDs6KQO0jzKD8OTw== -"@noble/curves@1.6.0", "@noble/curves@^1.4.0", "@noble/curves@~1.6.0": +"@noble/curves@1.6.0", "@noble/curves@^1.4.0", "@noble/curves@^1.4.2", "@noble/curves@~1.6.0": version "1.6.0" resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.6.0.tgz#be5296ebcd5a1730fccea4786d420f87abfeb40b" integrity sha512-TlaHRXDehJuRNR9TfZDNQ45mMEd5dwUwmicsafcIX4SsNiqnCHKjE/1alYPd/lDRVhxdhUAlv8uEhMCI5zjIJQ== @@ -1515,7 +1564,7 @@ resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.0.0.tgz#d5e38bfbdaba174805a4e649f13be9a9ed3351ae" integrity sha512-DZVbtY62kc3kkBtMHqwCOfXrT/hnoORy5BJ4+HU1IR59X0KWAOqsfzQPcUl/lQLlG7qXbe/fZ3r/emxtAl+sqg== -"@noble/hashes@1.5.0", "@noble/hashes@^1.4.0", "@noble/hashes@~1.5.0": +"@noble/hashes@1.5.0", "@noble/hashes@^1.3.1", "@noble/hashes@^1.4.0", "@noble/hashes@~1.5.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.5.0.tgz#abadc5ca20332db2b1b2aa3e496e9af1213570b0" integrity sha512-1j6kQFb7QRru7eKN3ZDvRcP13rugwdxZqCjbiAVZfIJwgj2A65UmT4TgARXGlXgnRkORLTDTrO19ZErt7+QXgA== @@ -2542,6 +2591,143 @@ resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5" integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== +"@solana-developers/helpers@^2.4.0": + version "2.5.6" + resolved "https://registry.yarnpkg.com/@solana-developers/helpers/-/helpers-2.5.6.tgz#2af7613ea6848ce087c0dec7cf38e6f172abcbd4" + integrity sha512-NPWZblVMl4LuVVSJOZG0ZF0VYnrMUjCyMNTiGwNUXPK2WWYJCqpuDyzs/PMqwvM4gMTjk4pEToBX8N2UxDvZkQ== + dependencies: + "@solana/spl-token" "^0.4.8" + "@solana/spl-token-metadata" "^0.1.4" + "@solana/web3.js" "^1.95.2" + bs58 "^6.0.0" + dotenv "^16.4.5" + +"@solana/buffer-layout-utils@^0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@solana/buffer-layout-utils/-/buffer-layout-utils-0.2.0.tgz#b45a6cab3293a2eb7597cceb474f229889d875ca" + integrity sha512-szG4sxgJGktbuZYDg2FfNmkMi0DYQoVjN2h7ta1W1hPrwzarcFLBq9UpX1UjNXsNpT9dn+chgprtWGioUAr4/g== + dependencies: + "@solana/buffer-layout" "^4.0.0" + "@solana/web3.js" "^1.32.0" + bigint-buffer "^1.1.5" + bignumber.js "^9.0.1" + +"@solana/buffer-layout@^4.0.0", "@solana/buffer-layout@^4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@solana/buffer-layout/-/buffer-layout-4.0.1.tgz#b996235eaec15b1e0b5092a8ed6028df77fa6c15" + integrity sha512-E1ImOIAD1tBZFRdjeM4/pzTiTApC0AOBGwyAMS4fwIodCWArzJ3DWdoh8cKxeFM2fElkxBh2Aqts1BPC373rHA== + dependencies: + buffer "~6.0.3" + +"@solana/codecs-core@2.0.0-rc.1": + version "2.0.0-rc.1" + resolved "https://registry.yarnpkg.com/@solana/codecs-core/-/codecs-core-2.0.0-rc.1.tgz#1a2d76b9c7b9e7b7aeb3bd78be81c2ba21e3ce22" + integrity 
sha512-bauxqMfSs8EHD0JKESaNmNuNvkvHSuN3bbWAF5RjOfDu2PugxHrvRebmYauvSumZ3cTfQ4HJJX6PG5rN852qyQ== + dependencies: + "@solana/errors" "2.0.0-rc.1" + +"@solana/codecs-data-structures@2.0.0-rc.1": + version "2.0.0-rc.1" + resolved "https://registry.yarnpkg.com/@solana/codecs-data-structures/-/codecs-data-structures-2.0.0-rc.1.tgz#d47b2363d99fb3d643f5677c97d64a812982b888" + integrity sha512-rinCv0RrAVJ9rE/rmaibWJQxMwC5lSaORSZuwjopSUE6T0nb/MVg6Z1siNCXhh/HFTOg0l8bNvZHgBcN/yvXog== + dependencies: + "@solana/codecs-core" "2.0.0-rc.1" + "@solana/codecs-numbers" "2.0.0-rc.1" + "@solana/errors" "2.0.0-rc.1" + +"@solana/codecs-numbers@2.0.0-rc.1": + version "2.0.0-rc.1" + resolved "https://registry.yarnpkg.com/@solana/codecs-numbers/-/codecs-numbers-2.0.0-rc.1.tgz#f34978ddf7ea4016af3aaed5f7577c1d9869a614" + integrity sha512-J5i5mOkvukXn8E3Z7sGIPxsThRCgSdgTWJDQeZvucQ9PT6Y3HiVXJ0pcWiOWAoQ3RX8e/f4I3IC+wE6pZiJzDQ== + dependencies: + "@solana/codecs-core" "2.0.0-rc.1" + "@solana/errors" "2.0.0-rc.1" + +"@solana/codecs-strings@2.0.0-rc.1": + version "2.0.0-rc.1" + resolved "https://registry.yarnpkg.com/@solana/codecs-strings/-/codecs-strings-2.0.0-rc.1.tgz#e1d9167075b8c5b0b60849f8add69c0f24307018" + integrity sha512-9/wPhw8TbGRTt6mHC4Zz1RqOnuPTqq1Nb4EyuvpZ39GW6O2t2Q7Q0XxiB3+BdoEjwA2XgPw6e2iRfvYgqty44g== + dependencies: + "@solana/codecs-core" "2.0.0-rc.1" + "@solana/codecs-numbers" "2.0.0-rc.1" + "@solana/errors" "2.0.0-rc.1" + +"@solana/codecs@2.0.0-rc.1": + version "2.0.0-rc.1" + resolved "https://registry.yarnpkg.com/@solana/codecs/-/codecs-2.0.0-rc.1.tgz#146dc5db58bd3c28e04b4c805e6096c2d2a0a875" + integrity sha512-qxoR7VybNJixV51L0G1RD2boZTcxmwUWnKCaJJExQ5qNKwbpSyDdWfFJfM5JhGyKe9DnPVOZB+JHWXnpbZBqrQ== + dependencies: + "@solana/codecs-core" "2.0.0-rc.1" + "@solana/codecs-data-structures" "2.0.0-rc.1" + "@solana/codecs-numbers" "2.0.0-rc.1" + "@solana/codecs-strings" "2.0.0-rc.1" + "@solana/options" "2.0.0-rc.1" + +"@solana/errors@2.0.0-rc.1": + version "2.0.0-rc.1" + resolved "https://registry.yarnpkg.com/@solana/errors/-/errors-2.0.0-rc.1.tgz#3882120886eab98a37a595b85f81558861b29d62" + integrity sha512-ejNvQ2oJ7+bcFAYWj225lyRkHnixuAeb7RQCixm+5mH4n1IA4Qya/9Bmfy5RAAHQzxK43clu3kZmL5eF9VGtYQ== + dependencies: + chalk "^5.3.0" + commander "^12.1.0" + +"@solana/options@2.0.0-rc.1": + version "2.0.0-rc.1" + resolved "https://registry.yarnpkg.com/@solana/options/-/options-2.0.0-rc.1.tgz#06924ba316dc85791fc46726a51403144a85fc4d" + integrity sha512-mLUcR9mZ3qfHlmMnREdIFPf9dpMc/Bl66tLSOOWxw4ml5xMT2ohFn7WGqoKcu/UHkT9CrC6+amEdqCNvUqI7AA== + dependencies: + "@solana/codecs-core" "2.0.0-rc.1" + "@solana/codecs-data-structures" "2.0.0-rc.1" + "@solana/codecs-numbers" "2.0.0-rc.1" + "@solana/codecs-strings" "2.0.0-rc.1" + "@solana/errors" "2.0.0-rc.1" + +"@solana/spl-token-group@^0.0.7": + version "0.0.7" + resolved "https://registry.yarnpkg.com/@solana/spl-token-group/-/spl-token-group-0.0.7.tgz#83c00f0cd0bda33115468cd28b89d94f8ec1fee4" + integrity sha512-V1N/iX7Cr7H0uazWUT2uk27TMqlqedpXHRqqAbVO2gvmJyT0E0ummMEAVQeXZ05ZhQ/xF39DLSdBp90XebWEug== + dependencies: + "@solana/codecs" "2.0.0-rc.1" + +"@solana/spl-token-metadata@^0.1.4", "@solana/spl-token-metadata@^0.1.6": + version "0.1.6" + resolved "https://registry.yarnpkg.com/@solana/spl-token-metadata/-/spl-token-metadata-0.1.6.tgz#d240947aed6e7318d637238022a7b0981b32ae80" + integrity sha512-7sMt1rsm/zQOQcUWllQX9mD2O6KhSAtY1hFR2hfFwgqfFWzSY9E9GDvFVNYUI1F0iQKcm6HmePU9QbKRXTEBiA== + dependencies: + "@solana/codecs" "2.0.0-rc.1" + +"@solana/spl-token@^0.4.6", 
"@solana/spl-token@^0.4.8": + version "0.4.9" + resolved "https://registry.yarnpkg.com/@solana/spl-token/-/spl-token-0.4.9.tgz#24d032d2935f237925c3b058ba6bb1e1ece5428c" + integrity sha512-g3wbj4F4gq82YQlwqhPB0gHFXfgsC6UmyGMxtSLf/BozT/oKd59465DbnlUK8L8EcimKMavxsVAMoLcEdeCicg== + dependencies: + "@solana/buffer-layout" "^4.0.0" + "@solana/buffer-layout-utils" "^0.2.0" + "@solana/spl-token-group" "^0.0.7" + "@solana/spl-token-metadata" "^0.1.6" + buffer "^6.0.3" + +"@solana/web3.js@^1.31.0", "@solana/web3.js@^1.32.0", "@solana/web3.js@^1.68.0", "@solana/web3.js@^1.95.2": + version "1.95.4" + resolved "https://registry.yarnpkg.com/@solana/web3.js/-/web3.js-1.95.4.tgz#771603f60d75cf7556ad867e1fd2efae32f9ad09" + integrity sha512-sdewnNEA42ZSMxqkzdwEWi6fDgzwtJHaQa5ndUGEJYtoOnM6X5cvPmjoTUp7/k7bRrVAxfBgDnvQQHD6yhlLYw== + dependencies: + "@babel/runtime" "^7.25.0" + "@noble/curves" "^1.4.2" + "@noble/hashes" "^1.4.0" + "@solana/buffer-layout" "^4.0.1" + agentkeepalive "^4.5.0" + bigint-buffer "^1.1.5" + bn.js "^5.2.1" + borsh "^0.7.0" + bs58 "^4.0.1" + buffer "6.0.3" + fast-stable-stringify "^1.0.0" + jayson "^4.1.1" + node-fetch "^2.7.0" + rpc-websockets "^9.0.2" + superstruct "^2.0.2" + "@solidity-parser/parser@^0.14.0", "@solidity-parser/parser@^0.14.1": version "0.14.1" resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.1.tgz#179afb29f4e295a77cc141151f26b3848abc3c46" @@ -2561,6 +2747,13 @@ resolved "https://registry.yarnpkg.com/@sqltools/formatter/-/formatter-1.2.5.tgz#3abc203c79b8c3e90fd6c156a0c62d5403520e12" integrity sha512-Uy0+khmZqUrUGm5dmMqVlnvufZRSK0FbYzVgp0UMstm+F5+W2/jnEEQyc9vo1ZR/E5ZI/B1WjjoTqBqwJL6Krw== +"@swc/helpers@^0.5.11": + version "0.5.13" + resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.13.tgz#33e63ff3cd0cade557672bd7888a39ce7d115a8c" + integrity sha512-UoKGxQ3r5kYI9dALKJapMmuK+1zWM/H17Z1+iwnNmzcJRnfFuevZs375TA5rW31pu4BS4NoSy1fRsexDXfWn5w== + dependencies: + tslib "^2.4.0" + "@szmarczak/http-timer@^1.1.2": version "1.1.2" resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" @@ -2904,6 +3097,13 @@ dependencies: "@types/node" "*" +"@types/connect@^3.4.33": + version "3.4.38" + resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.38.tgz#5ba7f3bc4fbbdeaff8dded952e5ff2cc53f8d858" + integrity sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug== + dependencies: + "@types/node" "*" + "@types/ethereum-protocol@^1.0.0": version "1.0.2" resolved "https://registry.yarnpkg.com/@types/ethereum-protocol/-/ethereum-protocol-1.0.2.tgz#e765d4c6f4b5ebe906932bd20333e307c56a9bc7" @@ -3060,6 +3260,11 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b" integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw== +"@types/node@^12.12.54": + version "12.20.55" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.55.tgz#c329cbd434c42164f846b909bd6f85b5537f6240" + integrity sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ== + "@types/node@^12.12.6": version "12.20.48" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.48.tgz#55f70bd432b6515828c0298689776861b90ca4fa" @@ -3134,11 +3339,30 @@ resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.2.tgz#bf2e02a3dbd4aecaf95942ecd99b7402e03fad5e" integrity 
sha512-9GcLXF0/v3t80caGs5p2rRfkB+a8VBGLJZVih6CNFkx8IZ994wiKKLSRs9nuFwk1HevWs/1mnUmkApGrSGsShA== +"@types/uuid@^8.3.4": + version "8.3.4" + resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-8.3.4.tgz#bd86a43617df0594787d38b735f55c805becf1bc" + integrity sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw== + "@types/validator@^13.11.8": version "13.11.8" resolved "https://registry.yarnpkg.com/@types/validator/-/validator-13.11.8.tgz#bb1162ec0fe6f87c95ca812f15b996fcc5e1e2dc" integrity sha512-c/hzNDBh7eRF+KbCf+OoZxKbnkpaK/cKp9iLQWqB7muXtM+MtL9SUUH8vCFcLn6dH1Qm05jiexK0ofWY7TfOhQ== +"@types/ws@^7.4.4": + version "7.4.7" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-7.4.7.tgz#f7c390a36f7a0679aa69de2d501319f4f8d9b702" + integrity sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww== + dependencies: + "@types/node" "*" + +"@types/ws@^8.2.2": + version "8.5.13" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.13.tgz#6414c280875e2691d0d1e080b05addbf5cb91e20" + integrity sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA== + dependencies: + "@types/node" "*" + "@types/ws@^8.5.5": version "8.5.7" resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.7.tgz#1ca585074fe5d2c81dec7a3d451f244a2a6d83cb" @@ -3146,6 +3370,18 @@ dependencies: "@types/node" "*" +"@types/yargs-parser@*": + version "21.0.3" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" + integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ== + +"@types/yargs@^17.0.33": + version "17.0.33" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.33.tgz#8c32303da83eec050a84b3c7ae7b9f922d13e32d" + integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA== + dependencies: + "@types/yargs-parser" "*" + "@typescript-eslint/eslint-plugin@^4.29.1": version "4.33.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.33.0.tgz#c24dc7c8069c7706bc40d99f6fa87edcb2005276" @@ -3436,6 +3672,14 @@ resolved "https://registry.yarnpkg.com/@zxing/text-encoding/-/text-encoding-0.9.0.tgz#fb50ffabc6c7c66a0c96b4c03e3d9be74864b70b" integrity sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA== +JSONStream@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" + integrity sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ== + dependencies: + jsonparse "^1.2.0" + through ">=2.2.7 <3" + abbrev@1, abbrev@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" @@ -3607,6 +3851,13 @@ agentkeepalive@^4.1.3: depd "^1.1.2" humanize-ms "^1.2.1" +agentkeepalive@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923" + integrity sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew== + dependencies: + humanize-ms "^1.2.1" + aggregate-error@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" @@ -4092,6 +4343,11 @@ base-x@^3.0.2, 
base-x@^3.0.8: dependencies: safe-buffer "^5.0.1" +base-x@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/base-x/-/base-x-5.0.0.tgz#6d835ceae379130e1a4cb846a70ac4746f28ea9b" + integrity sha512-sMW3VGSX1QWVFA6l8U62MLKz29rRfpTlYdCqLdpLo1/Yd4zZwSbnUaDfciIAowAqvq7YFnWq9hrhdg1KYgc1lQ== + base64-js@^1.0.2, base64-js@^1.3.0, base64-js@^1.3.1, base64-js@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" @@ -4137,6 +4393,13 @@ big.js@^6.0.3: resolved "https://registry.yarnpkg.com/big.js/-/big.js-6.1.1.tgz#63b35b19dc9775c94991ee5db7694880655d5537" integrity sha512-1vObw81a8ylZO5ePrtMay0n018TcftpTA5HFKDaSuiUDBo8biRBtjIobw60OpwuvrGk+FsxKamqN4cnmj/eXdg== +bigint-buffer@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/bigint-buffer/-/bigint-buffer-1.1.5.tgz#d038f31c8e4534c1f8d0015209bf34b4fa6dd442" + integrity sha512-trfYco6AoZ+rKhKnxA0hgX0HAbVP/s808/EuDSe2JDzUnCp/xAsli35Orvk67UrTEcwuxZqYZDmfA2RXJgxVvA== + dependencies: + bindings "^1.3.0" + bigint-crypto-utils@^3.0.23: version "3.1.7" resolved "https://registry.yarnpkg.com/bigint-crypto-utils/-/bigint-crypto-utils-3.1.7.tgz#c4c1b537c7c1ab7aadfaecf3edfd45416bf2c651" @@ -4186,7 +4449,7 @@ binary-extensions@^2.0.0, binary-extensions@^2.2.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -bindings@^1.5.0: +bindings@^1.3.0, bindings@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== @@ -4287,6 +4550,15 @@ borc@^2.1.2: json-text-sequence "~0.1.0" readable-stream "^3.6.0" +borsh@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/borsh/-/borsh-0.7.0.tgz#6e9560d719d86d90dc589bca60ffc8a6c51fec2a" + integrity sha512-CLCsZGIBCFnPtkNnieW/a8wmreDmfUtjU2m9yHrzPXIlNbqVs0AQrSatSG6vdNYUqdc83tkQi2eHfF98ubzQLA== + dependencies: + bn.js "^5.2.0" + bs58 "^4.0.0" + text-encoding-utf-8 "^1.0.2" + brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" @@ -4411,6 +4683,13 @@ bs58@^4.0.0, bs58@^4.0.1: dependencies: base-x "^3.0.2" +bs58@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/bs58/-/bs58-6.0.0.tgz#a2cda0130558535dd281a2f8697df79caaf425d8" + integrity sha512-PD0wEnEYg6ijszw/u8s+iI3H17cTymlrwkKhDhPZq+Sokl3AU4htyBFTjAeNAlCCmg0f53g6ih3jATyCKftTfw== + dependencies: + base-x "^5.0.0" + bs58check@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/bs58check/-/bs58check-2.1.2.tgz#53b018291228d82a5aa08e7d796fdafda54aebfc" @@ -4435,6 +4714,11 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== +buffer-layout@^1.2.0, buffer-layout@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/buffer-layout/-/buffer-layout-1.2.2.tgz#b9814e7c7235783085f9ca4966a0cfff112259d5" + integrity sha512-kWSuLN694+KTk8SrYvCqwP2WcgQjoRCiF5b4QDvkkz8EmgD+aWAIceGFKMIAdmF/pH+vpgNV3d3kAKorcdAmWA== + buffer-reverse@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/buffer-reverse/-/buffer-reverse-1.0.1.tgz#49283c8efa6f901bc01fa3304d06027971ae2f60" @@ -4471,7 +4755,7 @@ buffer@4.9.2: ieee754 "^1.1.4" isarray "^1.0.0" -buffer@6.0.3, buffer@^6.0.1, buffer@^6.0.3: +buffer@6.0.3, buffer@^6.0.1, buffer@^6.0.3, buffer@~6.0.3: version "6.0.3" resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== @@ -4630,7 +4914,7 @@ camelcase@^5.0.0: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -camelcase@^6.0.0: +camelcase@^6.0.0, camelcase@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== @@ -4743,6 +5027,11 @@ chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" + integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== + change-case@3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/change-case/-/change-case-3.0.2.tgz#fd48746cce02f03f0a672577d1d3a8dc2eceb037" @@ -5174,7 +5463,12 @@ commander@^10.0.0: resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug== -commander@^2.15.0, commander@^2.19.0: +commander@^12.1.0: + version "12.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3" + integrity sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA== + +commander@^2.15.0, commander@^2.19.0, commander@^2.20.3: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== @@ -5400,6 +5694,13 @@ cross-fetch@^3.0.6, cross-fetch@^3.1.4: dependencies: node-fetch "2.6.7" +cross-fetch@^3.1.5: + version "3.1.8" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.8.tgz#0327eba65fd68a7d119f8fb2bf9334a1a7956f82" + integrity sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg== + dependencies: + node-fetch "^2.6.12" + cross-spawn@^7.0.0, cross-spawn@^7.0.2: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" @@ -5444,6 +5745,11 @@ crypto-browserify@3.12.0: randombytes "^2.0.0" randomfill "^1.0.3" +crypto-hash@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/crypto-hash/-/crypto-hash-1.3.0.tgz#b402cb08f4529e9f4f09346c3e275942f845e247" + integrity sha512-lyAZ0EMyjDkVvz8WOeVnuCPvKVBXcMv1l5SVqO1yC7PzTwrD/pPje/BIRbWhMoPe436U+Y2nD7f5bFx0kt+Sbg== + crypto-js@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-4.2.0.tgz#4d931639ecdfd12ff80e8186dba6af2c2e856631" @@ -5640,6 +5946,11 @@ 
define-properties@^1.1.2, define-properties@^1.1.3, define-properties@^1.1.4, de has-property-descriptors "^1.0.0" object-keys "^1.1.1" +delay@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/delay/-/delay-5.0.0.tgz#137045ef1b96e5071060dd5be60bf9334436bd1d" + integrity sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw== + delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" @@ -5837,6 +6148,14 @@ dot-case@^2.1.0: dependencies: no-case "^2.2.0" +dot-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + dot-prop@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-6.0.1.tgz#fc26b3cf142b9e59b74dbd39ed66ce620c681083" @@ -5854,6 +6173,11 @@ dotenv@^16.3.1: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== +dotenv@^16.4.5: + version "16.4.5" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" + integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== + dotenv@^9.0.0: version "9.0.2" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-9.0.2.tgz#dacc20160935a37dea6364aa1bef819fb9b6ab05" @@ -6163,11 +6487,18 @@ es6-iterator@^2.0.3: es5-ext "^0.10.35" es6-symbol "^3.1.1" -es6-promise@4.2.8, es6-promise@^4.2.8: +es6-promise@4.2.8, es6-promise@^4.0.3, es6-promise@^4.2.8: version "4.2.8" resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== +es6-promisify@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203" + integrity sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ== + dependencies: + es6-promise "^4.0.3" + es6-symbol@^3.1.1, es6-symbol@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" @@ -6880,6 +7211,16 @@ eventemitter3@4.0.4: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.4.tgz#b5463ace635a083d018bdc7c917b4c5f10a85384" integrity sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ== +eventemitter3@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +eventemitter3@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" + integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== + eventid@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/eventid/-/eventid-2.0.1.tgz#574e860149457a79a2efe788c459f0c3062d02ec" @@ -6979,6 +7320,11 @@ extsprintf@^1.2.0: resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== +eyes@^0.1.8: + version "0.1.8" + resolved "https://registry.yarnpkg.com/eyes/-/eyes-0.1.8.tgz#62cf120234c683785d902348a800ef3e0cc20bc0" + integrity sha512-GipyPsXO1anza0AOZdy69Im7hGFCNB7Y/NGjDlZGJ3GJJLtwNSb2vrzYrTYJRrRloVx7pl+bhUaTB8yiccPvFQ== + fake-merkle-patricia-tree@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/fake-merkle-patricia-tree/-/fake-merkle-patricia-tree-1.0.1.tgz#4b8c3acfb520afadf9860b1f14cd8ce3402cddd3" @@ -7039,6 +7385,11 @@ fast-safe-stringify@^2.0.6: resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== +fast-stable-stringify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fast-stable-stringify/-/fast-stable-stringify-1.0.0.tgz#5c5543462b22aeeefd36d05b34e51c78cb86d313" + integrity sha512-wpYMUmFu5f00Sm0cj2pfivpmawLZ0NKdviQ4w9zJeR8JVtOpOxHmLaJuj0vxvGqMJQWyP/COUkF75/57OKyRag== + fast-text-encoding@^1.0.0, fast-text-encoding@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz#ec02ac8e01ab8a319af182dae2681213cfe9ce53" @@ -9074,6 +9425,11 @@ isomorphic-unfetch@^3.0.0: node-fetch "^2.6.1" unfetch "^4.2.0" +isomorphic-ws@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz#55fd4cd6c5e6491e76dc125938dd863f5cd4f2dc" + integrity sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w== + isows@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/isows/-/isows-1.0.6.tgz#0da29d706fa51551c663c627ace42769850f86e7" @@ -9179,6 +9535,29 @@ jackspeak@^2.3.5: optionalDependencies: "@pkgjs/parseargs" "^0.11.0" +jayson@^4.1.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/jayson/-/jayson-4.1.2.tgz#443c26a8658703e0b2e881117b09395d88b6982e" + integrity sha512-5nzMWDHy6f+koZOuYsArh2AXs73NfWYVlFyJJuCedr93GpY+Ku8qq10ropSXVfHK+H0T6paA88ww+/dV+1fBNA== + dependencies: + "@types/connect" "^3.4.33" + "@types/node" "^12.12.54" + "@types/ws" "^7.4.4" + JSONStream "^1.3.5" + commander "^2.20.3" + delay "^5.0.0" + es6-promisify "^5.0.0" + eyes "^0.1.8" + isomorphic-ws "^4.0.1" + json-stringify-safe "^5.0.1" + uuid "^8.3.2" + ws "^7.5.10" + +jinx-rust@0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/jinx-rust/-/jinx-rust-0.1.6.tgz#c7bce55d97bfbad76a9b930c01fe6a8629a170d7" + integrity sha512-qP+wtQL1PrDDFwtPKhNGtjWOmijCrKdfUHWTV2G/ikxfjrh+cjdvkQTmny9RAsVF0jiui9m+F0INWu4cuRcZeQ== + js-cookie@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8" @@ -9339,7 +9718,7 @@ json-stringify-nice@^1.1.4: resolved "https://registry.yarnpkg.com/json-stringify-nice/-/json-stringify-nice-1.1.4.tgz#2c937962b80181d3f317dd39aa323e14f5a60a67" integrity sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw== -json-stringify-safe@~5.0.1: +json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1: version "5.0.1" resolved 
"https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= @@ -9386,10 +9765,10 @@ jsonify@~0.0.0: resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM= -jsonparse@^1.3.1: +jsonparse@^1.2.0, jsonparse@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" - integrity sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA= + integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== jsonschema@^1.2.4: version "1.4.0" @@ -10018,6 +10397,13 @@ lower-case@^1.1.0, lower-case@^1.1.1, lower-case@^1.1.2: resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= +lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" @@ -10949,6 +11335,14 @@ no-case@^2.2.0, no-case@^2.3.2: dependencies: lower-case "^1.1.1" +no-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + node-abi@^3.3.0: version "3.54.0" resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.54.0.tgz#f6386f7548817acac6434c6cba02999c9aebcc69" @@ -10990,7 +11384,7 @@ node-fetch@2.6.7: dependencies: whatwg-url "^5.0.0" -node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9: +node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.12, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: version "2.7.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== @@ -11616,6 +12010,11 @@ pacote@^11.1.11, pacote@^11.2.6, pacote@^11.3.0, pacote@^11.3.1, pacote@^11.3.5: ssri "^8.0.1" tar "^6.1.0" +pako@^2.0.3: + version "2.1.0" + resolved "https://registry.yarnpkg.com/pako/-/pako-2.1.0.tgz#266cc37f98c7d883545d11335c00fbd4062c9a86" + integrity sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug== + param-case@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" @@ -12011,6 +12410,14 @@ prettier-linter-helpers@^1.0.0: dependencies: fast-diff "^1.1.2" +prettier-plugin-rust@^0.1.9: + version "0.1.9" + resolved "https://registry.yarnpkg.com/prettier-plugin-rust/-/prettier-plugin-rust-0.1.9.tgz#1a93b035743fa02a006b4980a1035a260ea9e501" + integrity sha512-n1DTTJQaHMdnoG/+nKUvBm3EKsMVWsYES2UPCiOPiZdBrmuAO/pX++m7L3+Hz3uuhtddpH0HRKHB2F3jbtJBOQ== + dependencies: + jinx-rust "0.1.6" + prettier "^2.7.1" + prettier-plugin-solidity@^1.1.3: version "1.1.3" resolved 
"https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.1.3.tgz#9a35124f578404caf617634a8cab80862d726cba" @@ -12020,7 +12427,7 @@ prettier-plugin-solidity@^1.1.3: semver "^7.3.8" solidity-comments-extractor "^0.0.7" -prettier@^2.3.1, prettier@^2.8.3, prettier@^2.8.8: +prettier@^2.3.1, prettier@^2.7.1, prettier@^2.8.3, prettier@^2.8.8: version "2.8.8" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== @@ -12622,6 +13029,11 @@ regenerator-runtime@^0.13.11: resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== +regenerator-runtime@^0.14.0: + version "0.14.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" + integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== + regexp.prototype.flags@^1.4.3: version "1.5.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz#fe7ce25e7e4cca8db37b6634c8a2c7009199b9cb" @@ -12922,6 +13334,22 @@ rlp@^2.0.0, rlp@^2.2.3, rlp@^2.2.4, rlp@^2.2.7: dependencies: bn.js "^5.2.0" +rpc-websockets@^9.0.2: + version "9.0.4" + resolved "https://registry.yarnpkg.com/rpc-websockets/-/rpc-websockets-9.0.4.tgz#9d8ee82533b5d1e13d9ded729e3e38d0d8fa083f" + integrity sha512-yWZWN0M+bivtoNLnaDbtny4XchdAIF5Q4g/ZsC5UC61Ckbp0QczwO8fg44rV3uYmY4WHd+EZQbn90W1d8ojzqQ== + dependencies: + "@swc/helpers" "^0.5.11" + "@types/uuid" "^8.3.4" + "@types/ws" "^8.2.2" + buffer "^6.0.3" + eventemitter3 "^5.0.1" + uuid "^8.3.2" + ws "^8.5.0" + optionalDependencies: + bufferutil "^4.0.1" + utf-8-validate "^5.0.2" + run-parallel-limit@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz#be80e936f5768623a38a963262d6bef8ff11e7ba" @@ -13371,6 +13799,14 @@ snake-case@^2.1.0: dependencies: no-case "^2.2.0" +snake-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c" + integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + socks-proxy-agent@^6.0.0: version "6.2.0" resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.2.0.tgz#f6b5229cc0cbd6f2f202d9695f09d871e951c85e" @@ -13668,7 +14104,7 @@ string-format@^2.0.0: resolved "https://registry.yarnpkg.com/string-format/-/string-format-2.0.0.tgz#f2df2e7097440d3b65de31b6d40d54c96eaffb9b" integrity sha512-bbEs3scLeYNXLecRRuk6uJxdXUSj6le/8rNPHChIJTn2V79aXVTR1EH2OH5zLKKoz0V02fOUKZZcw01pLUShZA== -"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: +"string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -13694,6 +14130,15 @@ string-width@^1.0.1: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" +"string-width@^1.0.2 
|| 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -13763,7 +14208,7 @@ stringify-package@^1.0.1: resolved "https://registry.yarnpkg.com/stringify-package/-/stringify-package-1.0.1.tgz#e5aa3643e7f74d0f28628b72f3dad5cecfc3ba85" integrity sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg== -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -13791,6 +14236,13 @@ strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -13852,6 +14304,11 @@ superstruct@^1.0.3: resolved "https://registry.yarnpkg.com/superstruct/-/superstruct-1.0.3.tgz#de626a5b49c6641ff4d37da3c7598e7a87697046" integrity sha512-8iTn3oSS8nRGn+C2pgXSKPI3jmpm6FExNazNpjvqS6ZUJQCej3PUXEKM8NjHBOs54ExM+LPW/FBRhymrdcCiSg== +superstruct@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/superstruct/-/superstruct-2.0.2.tgz#3f6d32fbdc11c357deff127d591a39b996300c54" + integrity sha512-uV+TFRZdXsqXTL2pRvujROjdZQ4RAlBUS5BTh9IGm+jTqQntYThciG/qu57Gs69yjnVUSqdxF9YLmSnpupBW9A== + supports-color@6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a" @@ -14027,6 +14484,11 @@ testrpc@0.0.1: resolved "https://registry.yarnpkg.com/testrpc/-/testrpc-0.0.1.tgz#83e2195b1f5873aec7be1af8cbe6dcf39edb7aed" integrity sha512-afH1hO+SQ/VPlmaLUFj2636QMeDvPCeQMc/9RBMW0IfjNe9gFD9Ra3ShqYkB7py0do1ZcCna/9acHyzTJ+GcNA== +text-encoding-utf-8@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/text-encoding-utf-8/-/text-encoding-utf-8-1.0.2.tgz#585b62197b0ae437e3c7b5d0af27ac1021e10d13" + integrity sha512-8bw4MY9WjdsD2aMtO0OzOCY3pXGYNx2d2FfHRVUKkiCPDWjKuOlhLVASS+pD7VkLTVjW268LYJHwsnPFlBpbAg== + text-hex@1.0.x: version "1.0.0" resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5" @@ -14068,6 +14530,11 @@ thenify-all@^1.0.0: dependencies: any-promise "^1.0.0" +"through@>=2.2.7 <3": + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity 
sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== + timed-out@^4.0.0, timed-out@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" @@ -14130,6 +14597,11 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== +toml@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/toml/-/toml-3.0.0.tgz#342160f1af1904ec9d204d03a5d61222d762c5ee" + integrity sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w== + tough-cookie@>=2.3.3: version "4.0.0" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4" @@ -14239,6 +14711,11 @@ tslib@^1.11.1, tslib@^1.8.1, tslib@^1.9.3: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== +tslib@^2.0.3, tslib@^2.4.0: + version "2.8.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== + tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.1, tslib@^2.5.0, tslib@^2.6.1, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" @@ -15373,7 +15850,7 @@ workerpool@6.2.1: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -15399,6 +15876,15 @@ wrap-ansi@^5.1.0: string-width "^3.0.0" strip-ansi "^5.0.0" +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" @@ -15428,7 +15914,7 @@ ws@7.4.6: resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== -ws@8.18.0: +ws@8.18.0, ws@^8.5.0: version "8.18.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc" integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw== @@ -15454,6 +15940,11 @@ ws@^7.4.6: resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.7.tgz#9e0ac77ee50af70d58326ecff7e85eb3fa375e67" integrity 
sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A== +ws@^7.5.10: + version "7.5.10" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" + integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== + ws@^8.13.0: version "8.14.2" resolved "https://registry.yarnpkg.com/ws/-/ws-8.14.2.tgz#6c249a806eb2db7a20d26d51e7709eab7b2e6c7f" From f644828099a2d3b865325a138f2fd556d247086a Mon Sep 17 00:00:00 2001 From: bmzig <57361391+bmzig@users.noreply.github.com> Date: Wed, 13 Nov 2024 15:52:46 -0600 Subject: [PATCH 14/44] fix: dedup bridges to approve properly (#1908) * fix: dedup bridges to approve properly Signed-off-by: bennett --------- Signed-off-by: bennett --- src/adapter/BaseChainAdapter.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/adapter/BaseChainAdapter.ts b/src/adapter/BaseChainAdapter.ts index a7c5fc7eb..d2a110e07 100644 --- a/src/adapter/BaseChainAdapter.ts +++ b/src/adapter/BaseChainAdapter.ts @@ -148,8 +148,12 @@ export class BaseChainAdapter { ), ]); // Dedup the `gasTokensToApprove` array so that we don't approve the same bridge to send the same token multiple times. + const tokenBridgePairs = gasTokensToApprove.map(({ token, bridges }) => `${token.address}_${bridges.join("_")}`); const tokensToApprove = gasTokensToApprove - .filter(({ token, bridges }, idx) => gasTokensToApprove.indexOf({ token, bridges }) === idx) + .filter(({ token, bridges }, idx) => { + const tokenBridgePair = `${token.address}_${bridges.join("_")}`; + return tokenBridgePairs.indexOf(tokenBridgePair) === idx; + }) .concat(bridgeTokensToApprove) .filter(({ bridges }) => bridges.length > 0); if (unavailableTokens.length > 0) { From 0731f5c28a72f8077d6afbe7ce5e27c96278ee5c Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Thu, 14 Nov 2024 11:54:51 -0500 Subject: [PATCH 15/44] feat: Manual withdrawals of ERC20 on OpStack lite chains (#1910) * feat: Manual withdrawals of ERC20 on OpStack lite chains Updates script `withdrawFromOpStack` to support ERC20 withdrawals (e.g. USDT) and updates finalizer to automate these. Helps with withdrawing ERC20's from Lite chains * Update OpStackStandardBridgeL2.json * Update withdrawFromOpStack.ts * Update opStack.ts * Refactor * Update scripts/withdrawFromOpStack.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> * Update scripts/withdrawFromOpStack.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> * Revert "Refactor" This reverts commit 19345e001deffb4a9d399e93f8be82de28a85eff. 
* Update withdrawFromOpStack.ts * Update withdrawFromOpStack.ts --------- Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> --- scripts/withdrawFromOpStack.ts | 65 +++++++++++------ src/common/abi/OpStackStandardBridgeL2.json | 81 +++++++++++++++++++++ src/finalizer/utils/opStack.ts | 68 +++++++++++++---- 3 files changed, 178 insertions(+), 36 deletions(-) diff --git a/scripts/withdrawFromOpStack.ts b/scripts/withdrawFromOpStack.ts index 10bd05b61..3b7b7121b 100644 --- a/scripts/withdrawFromOpStack.ts +++ b/scripts/withdrawFromOpStack.ts @@ -14,13 +14,14 @@ import { fromWei, blockExplorerLink, CHAIN_IDs, + ZERO_ADDRESS, } from "../src/utils"; import { CONTRACT_ADDRESSES } from "../src/common"; import { askYesNoQuestion, getOvmSpokePoolContract } from "./utils"; import minimist from "minimist"; -const cliArgs = ["amount", "chainId"]; +const cliArgs = ["amount", "chainId", "token"]; const args = minimist(process.argv.slice(2), { string: cliArgs, }); @@ -30,6 +31,7 @@ const args = minimist(process.argv.slice(2), { // \ --amount 3000000000000000000 // \ --chainId 1135 // \ --wallet gckms +// \ --token WETH // \ --keys bot1 export async function run(): Promise { @@ -41,11 +43,10 @@ export async function run(): Promise { const signerAddr = await baseSigner.getAddress(); const chainId = parseInt(args.chainId); const connectedSigner = baseSigner.connect(await getProvider(chainId)); - const l2Token = TOKEN_SYMBOLS_MAP.WETH?.addresses[chainId]; - assert(l2Token, `WETH not found on chain ${chainId} in TOKEN_SYMBOLS_MAP`); + const l2Token = TOKEN_SYMBOLS_MAP[args.token]?.addresses[chainId]; + assert(l2Token, `${args.token} not found on chain ${chainId} in TOKEN_SYMBOLS_MAP`); const l1TokenInfo = getL1TokenInfo(l2Token, chainId); console.log("Fetched L1 token info:", l1TokenInfo); - assert(l1TokenInfo.symbol === "ETH", "Only WETH withdrawals are supported for now."); const amount = args.amount; const amountFromWei = ethers.utils.formatUnits(amount, l1TokenInfo.decimals); console.log(`Amount to bridge from chain ${chainId}: ${amountFromWei} ${l2Token}`); @@ -54,33 +55,51 @@ export async function run(): Promise { const currentBalance = await erc20.balanceOf(signerAddr); const currentEthBalance = await connectedSigner.getBalance(); console.log( - `Current WETH balance for account ${signerAddr}: ${fromWei(currentBalance, l1TokenInfo.decimals)} ${l2Token}` + `Current ${l1TokenInfo.symbol} balance for account ${signerAddr}: ${fromWei( + currentBalance, + l1TokenInfo.decimals + )} ${l2Token}` ); console.log(`Current ETH balance for account ${signerAddr}: ${fromWei(currentEthBalance, l1TokenInfo.decimals)}`); - // First offer user option to unwrap WETH into ETH. 
- const weth = new Contract(l2Token, WETH9.abi, connectedSigner); - if (await askYesNoQuestion(`\nUnwrap ${amount} of WETH @ ${weth.address}?`)) { - const unwrap = await weth.withdraw(amount); - console.log(`Submitted transaction: ${blockExplorerLink(unwrap.hash, chainId)}.`); - const receipt = await unwrap.wait(); - console.log("Unwrap complete...", receipt); + // First offer user option to unwrap WETH into ETH + if (l1TokenInfo.symbol === "ETH") { + const weth = new Contract(l2Token, WETH9.abi, connectedSigner); + if (await askYesNoQuestion(`\nUnwrap ${amount} of WETH @ ${weth.address}?`)) { + const unwrap = await weth.withdraw(amount); + console.log(`Submitted transaction: ${blockExplorerLink(unwrap.hash, chainId)}.`); + const receipt = await unwrap.wait(); + console.log("Unwrap complete...", receipt); + } } - // Now, submit a withdrawal: + // Now, submit a withdrawal. This might fail if the ERC20 uses a non-standard OVM bridge to withdraw. const ovmStandardBridgeObj = CONTRACT_ADDRESSES[chainId].ovmStandardBridge; assert(CONTRACT_ADDRESSES[chainId].ovmStandardBridge, "ovmStandardBridge for chain not found in CONTRACT_ADDRESSES"); const ovmStandardBridge = new Contract(ovmStandardBridgeObj.address, ovmStandardBridgeObj.abi, connectedSigner); - const bridgeETHToArgs = [ - signerAddr, // to - 200_000, // minGasLimit - "0x", // extraData - { value: amount }, // msg.value - ]; + const bridgeArgs = + l1TokenInfo.symbol === "ETH" + ? [ + signerAddr, // to + 200_000, // minGasLimit + "0x", // extraData + { value: amount }, // msg.value + ] + : [ + l2Token, // _localToken + TOKEN_SYMBOLS_MAP[args.token]?.addresses[CHAIN_IDs.MAINNET], // Remote token to be received on L1 side. If the + // remoteL1Token on the other chain does not recognize the local token as the correct + // pair token, the ERC20 bridge will fail and the tokens will be returned to sender on + // this chain. + signerAddr, // _to + amount, // _amount + 200_000, // minGasLimit + "0x", // _data + ]; console.log( `Submitting bridgeETHTo on the OVM standard bridge @ ${ovmStandardBridge.address} with the following args: `, - ...bridgeETHToArgs + ...bridgeArgs ); // Sanity check that the ovmStandardBridge contract is the one we expect by comparing its stored addresses @@ -98,10 +117,14 @@ export async function run(): Promise { l1StandardBridge === expectedL1StandardBridge, `Unexpected L1 standard bridge address in ovmStandardBridge contract, expected: ${expectedL1StandardBridge}, got: ${l1StandardBridge}` ); + const customTokenBridge = await spokePool.tokenBridges(l2Token); + assert(customTokenBridge === ZERO_ADDRESS, `Custom token bridge set for token ${l2Token} (${customTokenBridge})`); if (!(await askYesNoQuestion("\nDo you want to proceed?"))) { return; } - const withdrawal = await ovmStandardBridge.bridgeETHTo(...bridgeETHToArgs); + const withdrawal = await ovmStandardBridge[l1TokenInfo.symbol === "ETH" ? 
"bridgeETHTo" : "bridgeERC20To"]( + ...bridgeArgs + ); console.log(`Submitted withdrawal: ${blockExplorerLink(withdrawal.hash, chainId)}.`); const receipt = await withdrawal.wait(); console.log("Receipt", receipt); diff --git a/src/common/abi/OpStackStandardBridgeL2.json b/src/common/abi/OpStackStandardBridgeL2.json index 72431186a..bdf4c29a7 100644 --- a/src/common/abi/OpStackStandardBridgeL2.json +++ b/src/common/abi/OpStackStandardBridgeL2.json @@ -23,6 +23,49 @@ "name": "ETHBridgeInitiated", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "localToken", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "remoteToken", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "extraData", + "type": "bytes" + } + ], + "name": "ERC20BridgeInitiated", + "type": "event" + }, { "inputs": [ { "internalType": "address", "name": "_to", "type": "address" }, @@ -34,6 +77,44 @@ "stateMutability": "payable", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "_localToken", + "type": "address" + }, + { + "internalType": "address", + "name": "_remoteToken", + "type": "address" + }, + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "bridgeERC20To", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [], "name": "MESSENGER", diff --git a/src/finalizer/utils/opStack.ts b/src/finalizer/utils/opStack.ts index 3504f51fc..288cf659a 100644 --- a/src/finalizer/utils/opStack.ts +++ b/src/finalizer/utils/opStack.ts @@ -96,10 +96,10 @@ export async function opStackFinalizer( latestBlockToProve, }); - // Experimental feature: Add in all ETH withdrawals from OPStack chain to the finalizer. This will help us - // in the short term to automate ETH withdrawals from Lite chains, which can build up ETH balances over time - // and because they are lite chains, our only way to withdraw them is to initiate a slow bridge of ETH from the - // the lite chain to Ethereum. + // Add in all manual withdrawals from other EOA's from OPStack chain to the finalizer. This will help us + // automate token withdrawals from Lite chains, which can build up ETH and ERC20 balances over time + // and because they are lite chains, our only way to withdraw them is to initiate a manual bridge from the + // the lite chain to Ethereum via the canonical OVM standard bridge. const withdrawalToAddresses: string[] = process.env.FINALIZER_WITHDRAWAL_TO_ADDRESSES ? 
JSON.parse(process.env.FINALIZER_WITHDRAWAL_TO_ADDRESSES).map((address) => ethers.utils.getAddress(address)) : []; @@ -114,17 +114,55 @@ export async function opStackFinalizer( CONTRACT_ADDRESSES[chainId].ovmStandardBridge.abi, spokePoolClient.spokePool.provider ); - const withdrawalEvents = await paginatedEventQuery( - ovmStandardBridge, - ovmStandardBridge.filters.ETHBridgeInitiated( - null, // from - withdrawalToAddresses // to - ), - { - ...spokePoolClient.eventSearchConfig, - toBlock: spokePoolClient.latestBlockSearched, + const withdrawalEthEvents = ( + await paginatedEventQuery( + ovmStandardBridge, + ovmStandardBridge.filters.ETHBridgeInitiated( + null, // from + withdrawalToAddresses // to + ), + { + ...spokePoolClient.eventSearchConfig, + toBlock: spokePoolClient.latestBlockSearched, + } + ) + ).map((event) => { + return { + ...event, + l2TokenAddress: TOKEN_SYMBOLS_MAP.WETH.addresses[chainId], + }; + }); + const withdrawalErc20Events = ( + await paginatedEventQuery( + ovmStandardBridge, + ovmStandardBridge.filters.ERC20BridgeInitiated( + null, // localToken + null, // remoteToken + withdrawalToAddresses // from + ), + { + ...spokePoolClient.eventSearchConfig, + toBlock: spokePoolClient.latestBlockSearched, + } + ) + ).map((event) => { + // If we're aware of this token, then save the event as one we can finalize. + try { + getL1TokenInfo(event.args.localToken, chainId); + return { + ...event, + l2TokenAddress: event.args.localToken, + }; + } catch (err) { + logger.debug({ + at: "opStackFinalizer", + message: `Skipping ERC20 withdrawal event for unknown token ${event.args.localToken} on chain ${networkName}`, + event: event, + }); + return undefined; } - ); + }); + const withdrawalEvents = [...withdrawalEthEvents, ...withdrawalErc20Events].filter((event) => event !== undefined); // If there are any found withdrawal initiated events, then add them to the list of TokenBridged events we'll // submit proofs and finalizations for. withdrawalEvents.forEach((event) => { @@ -133,7 +171,7 @@ export async function opStackFinalizer( amountToReturn: event.args.amount, chainId, leafId: 0, - l2TokenAddress: TOKEN_SYMBOLS_MAP.WETH.addresses[chainId], + l2TokenAddress: event.l2TokenAddress, }; if (event.blockNumber >= latestBlockToProve) { recentTokensBridgedEvents.push(tokenBridgedEvent); From 6bfb2c837eeecf4856a6820606c56dfbb3f873b6 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Fri, 15 Nov 2024 23:16:11 +0100 Subject: [PATCH 16/44] improve(relayer): Better support for custom Across API host (#1911) Permit the ACROSS_API_HOST env var to override the standard app.across.to and testnet.across.to API hostnames. While here, sub out some hardcoded chain IDs. 
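For clarity, a minimal illustrative sketch of the resolution order (assumed
behavior of the getAcrossHost() helper added below; "staging.across.to" is a
made-up host for the example, not a real endpoint):

    // ACROSS_API_HOST, when set, overrides the per-chain default host.
    process.env.ACROSS_API_HOST = "staging.across.to";
    getAcrossHost(CHAIN_IDs.MAINNET); // "staging.across.to"

    // When unset, the host falls back to the hub chain's standard hostname.
    delete process.env.ACROSS_API_HOST;
    getAcrossHost(CHAIN_IDs.MAINNET); // "app.across.to"
    getAcrossHost(CHAIN_IDs.SEPOLIA); // "testnet.across.to"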
--- scripts/spokepool.ts | 7 ++++--- src/clients/AcrossAPIClient.ts | 6 +++++- src/clients/ProfitClient.ts | 9 +++++---- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/scripts/spokepool.ts b/scripts/spokepool.ts index 9f597984b..1816fcbb9 100644 --- a/scripts/spokepool.ts +++ b/scripts/spokepool.ts @@ -4,9 +4,11 @@ import { groupBy } from "lodash"; import { config } from "dotenv"; import { Contract, ethers, Signer } from "ethers"; import { LogDescription } from "@ethersproject/abi"; +import { CHAIN_IDs } from "@across-protocol/constants"; import { constants as sdkConsts, utils as sdkUtils } from "@across-protocol/sdk"; import { ExpandedERC20__factory as ERC20 } from "@across-protocol/contracts"; import { RelayData } from "../src/interfaces"; +import { getAcrossHost } from "../src/clients"; import { BigNumber, formatFeePct, @@ -33,8 +35,6 @@ type RelayerFeeQuery = { timestamp?: number; }; -const { ACROSS_API_HOST = "across.to" } = process.env; - const { NODE_SUCCESS, NODE_INPUT_ERR, NODE_APP_ERR } = utils; const { fixedPointAdjustment: fixedPoint } = sdkUtils; const { AddressZero } = ethers.constants; @@ -87,8 +87,9 @@ function printFill(destinationChainId: number, log: LogDescription): void { } async function getSuggestedFees(params: RelayerFeeQuery, timeout: number) { + const hubChainId = sdkUtils.chainIsProd(params.originChainId) ? CHAIN_IDs.MAINNET : CHAIN_IDs.SEPOLIA; const path = "api/suggested-fees"; - const url = `https://${ACROSS_API_HOST}/${path}`; + const url = `https://${getAcrossHost(hubChainId)}/${path}`; try { const quote = await axios.get(url, { timeout, params }); diff --git a/src/clients/AcrossAPIClient.ts b/src/clients/AcrossAPIClient.ts index 7877c7edc..189f51976 100644 --- a/src/clients/AcrossAPIClient.ts +++ b/src/clients/AcrossAPIClient.ts @@ -19,6 +19,10 @@ export interface DepositLimits { const API_UPDATE_RETENTION_TIME = 60; // seconds +export function getAcrossHost(hubChainId: number): string { + return process.env.ACROSS_API_HOST ?? (hubChainId === CHAIN_IDs.MAINNET ? "app.across.to" : "testnet.across.to"); +} + export class AcrossApiClient { private endpoint: string; private chainIds: number[]; @@ -36,7 +40,7 @@ export class AcrossApiClient { readonly timeout: number = 3000 ) { const hubChainId = hubPoolClient.chainId; - this.endpoint = `https://${hubChainId === CHAIN_IDs.MAINNET ? 
"app.across.to" : "testnet.across.to"}/api`; + this.endpoint = `https://${getAcrossHost(hubChainId)}/api`; if (Object.keys(tokensQuery).length === 0) { this.tokensQuery = dedupArray(Object.values(TOKEN_SYMBOLS_MAP).map(({ addresses }) => addresses[hubChainId])); } diff --git a/src/clients/ProfitClient.ts b/src/clients/ProfitClient.ts index 9bd93c325..afbb31f30 100644 --- a/src/clients/ProfitClient.ts +++ b/src/clients/ProfitClient.ts @@ -30,7 +30,8 @@ import { ZERO_ADDRESS, } from "../utils"; import { Deposit, DepositWithBlock, L1Token, SpokePoolClientsByChain } from "../interfaces"; -import { HubPoolClient } from "."; +import { getAcrossHost } from "./AcrossAPIClient"; +import { HubPoolClient } from "./HubPoolClient"; type TransactionCostEstimate = sdkUtils.TransactionCostEstimate; @@ -129,7 +130,7 @@ export class ProfitClient { ); this.priceClient = new PriceClient(logger, [ - new acrossApi.PriceFeed(), + new acrossApi.PriceFeed({ host: getAcrossHost(hubPoolClient.chainId) }), new coingecko.PriceFeed({ apiKey: process.env.COINGECKO_PRO_API_KEY }), new defiLlama.PriceFeed(), ]); @@ -527,7 +528,7 @@ export class ProfitClient { .filter(({ symbol }) => isDefined(TOKEN_SYMBOLS_MAP[symbol])) .map(({ symbol }) => { const { addresses } = TOKEN_SYMBOLS_MAP[symbol]; - const address = addresses[1]; + const address = addresses[CHAIN_IDs.MAINNET]; return [symbol, address]; }) ); @@ -550,7 +551,7 @@ export class ProfitClient { // Also ensure all gas tokens are included in the lookup. this.enabledChainIds.forEach((chainId) => { const symbol = getNativeTokenSymbol(chainId); - tokens[symbol] ??= TOKEN_SYMBOLS_MAP[symbol].addresses[1]; + tokens[symbol] ??= TOKEN_SYMBOLS_MAP[symbol].addresses[CHAIN_IDs.MAINNET]; }); this.logger.debug({ at: "ProfitClient", message: "Updating Profit client", tokens }); From 0823a68f1f70bed68150e2064b3449e9df6418d5 Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:53:15 -0500 Subject: [PATCH 17/44] improve(arweave): Change arweave data key from list of chain ID's to map (#1916) * improve(arweave): Change arweave data key from list of chain ID's to map We've received feedback from a user of the arweave data that its easier if the arweave `tag` (i.e. data key) is a mapping of chainId to bundle block ranges. I see no issue with this outside of any topic length constraints, which I've added logic to check for and protect against. The tag now only uses the end blocks, to reduce its length. * refactor * Update Dataworker.ts * Update Dataworker.ts --- src/common/Constants.ts | 2 ++ src/dataworker/Dataworker.ts | 29 +++++++++++++++++++++++++---- src/dataworker/DataworkerUtils.ts | 10 +++++++++- 3 files changed, 36 insertions(+), 5 deletions(-) diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 90c0225a4..73e67b531 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -442,6 +442,8 @@ export const RELAYER_DEFAULT_SPOKEPOOL_INDEXER = "./dist/src/libexec/RelayerSpok export const DEFAULT_ARWEAVE_GATEWAY = { url: "arweave.net", port: 443, protocol: "https" }; +export const ARWEAVE_TAG_BYTE_LIMIT = 2048; + // Chains with slow (> 2 day liveness) canonical L2-->L1 bridges that we prioritize taking repayment on. // This does not include all 7-day withdrawal chains because we don't necessarily prefer being repaid on some of these 7-day chains, like Mode. 
// This list should generally exclude Lite chains because the relayer ignores HubPool liquidity in that case which could cause the diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts index 215a7d224..5bb82d3e7 100644 --- a/src/dataworker/Dataworker.ts +++ b/src/dataworker/Dataworker.ts @@ -619,16 +619,34 @@ export class Dataworker { // Root bundle is valid, attempt to persist the raw bundle data and the merkle leaf data to DA layer // if not already there. if (persistBundleData && isDefined(bundleData)) { + const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(nextBundleMainnetStartBlock); + // Store the bundle block ranges on Arweave as a map of chainId to block range to aid users in querying. + const bundleBlockRangeMap = Object.fromEntries( + bundleData.bundleBlockRanges.map((range, i) => { + const chainIdForRange = chainIds[i]; + // The arweave tag cannot exceed 2048 bytes so only keep the end block in the tag. + return [chainIdForRange, range]; + }) + ); + // As a unique key for this bundle, use the next bundle mainnet start block, which should + // never be duplicated between bundles as long as the mainnet end block in the bundle block range + // always progresses forwards, which I think is a safe assumption. Other chains might pause + // but mainnet should never pause. + const partialArweaveDataKey = nextBundleMainnetStartBlock; await Promise.all([ persistDataToArweave( this.clients.arweaveClient, - bundleData, + { + ...bundleData, + bundleBlockRanges: bundleBlockRangeMap, + }, this.logger, - `bundles-${bundleData.bundleBlockRanges}` + `bundles-${partialArweaveDataKey}` ), persistDataToArweave( this.clients.arweaveClient, { + bundleBlockRanges: bundleBlockRangeMap, poolRebalanceLeaves: expectedTrees.poolRebalanceTree.leaves.map((leaf) => { return { ...leaf, @@ -652,7 +670,7 @@ export class Dataworker { slowRelayRoot: expectedTrees.slowRelayTree.tree.getHexRoot(), }, this.logger, - `merkletree-${bundleData.bundleBlockRanges}` + `merkletree-${partialArweaveDataKey}` ), ]); } @@ -2297,7 +2315,10 @@ export class Dataworker { at: "Dataworker#_getPoolRebalanceRoot", message: "Constructed new pool rebalance root", key, - root: this.rootCache[key], + root: { + ...this.rootCache[key], + tree: this.rootCache[key].tree.getHexRoot(), + }, }); return _.cloneDeep(this.rootCache[key]); diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts index a87f3ae8e..9e2736b45 100644 --- a/src/dataworker/DataworkerUtils.ts +++ b/src/dataworker/DataworkerUtils.ts @@ -1,7 +1,11 @@ import assert from "assert"; import { utils, interfaces, caching } from "@across-protocol/sdk"; import { SpokePoolClient } from "../clients"; -import { CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS, spokesThatHoldEthAndWeth } from "../common/Constants"; +import { + ARWEAVE_TAG_BYTE_LIMIT, + CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS, + spokesThatHoldEthAndWeth, +} from "../common/Constants"; import { CONTRACT_ADDRESSES } from "../common/ContractAddresses"; import { PoolRebalanceLeaf, @@ -342,6 +346,10 @@ export async function persistDataToArweave( logger: winston.Logger, tag?: string ): Promise { + assert( + Buffer.from(tag).length <= ARWEAVE_TAG_BYTE_LIMIT, + `Arweave tag cannot exceed ${ARWEAVE_TAG_BYTE_LIMIT} bytes` + ); const startTime = performance.now(); // Check if data already exists on Arweave with the given tag. // If so, we don't need to persist it again. 
From fc59ea0ceacee20d9b2255a51cd77c8e9ee937d9 Mon Sep 17 00:00:00 2001 From: Matt Rice Date: Wed, 20 Nov 2024 07:33:11 +0700 Subject: [PATCH 18/44] Revert "improve(arweave): Change arweave data key from list of chain ID's to map (#1916)" (#1917) This reverts commit 0823a68f1f70bed68150e2064b3449e9df6418d5. --- src/common/Constants.ts | 2 -- src/dataworker/Dataworker.ts | 29 ++++------------------------- src/dataworker/DataworkerUtils.ts | 10 +--------- 3 files changed, 5 insertions(+), 36 deletions(-) diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 73e67b531..90c0225a4 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -442,8 +442,6 @@ export const RELAYER_DEFAULT_SPOKEPOOL_INDEXER = "./dist/src/libexec/RelayerSpok export const DEFAULT_ARWEAVE_GATEWAY = { url: "arweave.net", port: 443, protocol: "https" }; -export const ARWEAVE_TAG_BYTE_LIMIT = 2048; - // Chains with slow (> 2 day liveness) canonical L2-->L1 bridges that we prioritize taking repayment on. // This does not include all 7-day withdrawal chains because we don't necessarily prefer being repaid on some of these 7-day chains, like Mode. // This list should generally exclude Lite chains because the relayer ignores HubPool liquidity in that case which could cause the diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts index 5bb82d3e7..215a7d224 100644 --- a/src/dataworker/Dataworker.ts +++ b/src/dataworker/Dataworker.ts @@ -619,34 +619,16 @@ export class Dataworker { // Root bundle is valid, attempt to persist the raw bundle data and the merkle leaf data to DA layer // if not already there. if (persistBundleData && isDefined(bundleData)) { - const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(nextBundleMainnetStartBlock); - // Store the bundle block ranges on Arweave as a map of chainId to block range to aid users in querying. - const bundleBlockRangeMap = Object.fromEntries( - bundleData.bundleBlockRanges.map((range, i) => { - const chainIdForRange = chainIds[i]; - // The arweave tag cannot exceed 2048 bytes so only keep the end block in the tag. - return [chainIdForRange, range]; - }) - ); - // As a unique key for this bundle, use the next bundle mainnet start block, which should - // never be duplicated between bundles as long as the mainnet end block in the bundle block range - // always progresses forwards, which I think is a safe assumption. Other chains might pause - // but mainnet should never pause. 
- const partialArweaveDataKey = nextBundleMainnetStartBlock; await Promise.all([ persistDataToArweave( this.clients.arweaveClient, - { - ...bundleData, - bundleBlockRanges: bundleBlockRangeMap, - }, + bundleData, this.logger, - `bundles-${partialArweaveDataKey}` + `bundles-${bundleData.bundleBlockRanges}` ), persistDataToArweave( this.clients.arweaveClient, { - bundleBlockRanges: bundleBlockRangeMap, poolRebalanceLeaves: expectedTrees.poolRebalanceTree.leaves.map((leaf) => { return { ...leaf, @@ -670,7 +652,7 @@ export class Dataworker { slowRelayRoot: expectedTrees.slowRelayTree.tree.getHexRoot(), }, this.logger, - `merkletree-${partialArweaveDataKey}` + `merkletree-${bundleData.bundleBlockRanges}` ), ]); } @@ -2315,10 +2297,7 @@ export class Dataworker { at: "Dataworker#_getPoolRebalanceRoot", message: "Constructed new pool rebalance root", key, - root: { - ...this.rootCache[key], - tree: this.rootCache[key].tree.getHexRoot(), - }, + root: this.rootCache[key], }); return _.cloneDeep(this.rootCache[key]); diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts index 9e2736b45..a87f3ae8e 100644 --- a/src/dataworker/DataworkerUtils.ts +++ b/src/dataworker/DataworkerUtils.ts @@ -1,11 +1,7 @@ import assert from "assert"; import { utils, interfaces, caching } from "@across-protocol/sdk"; import { SpokePoolClient } from "../clients"; -import { - ARWEAVE_TAG_BYTE_LIMIT, - CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS, - spokesThatHoldEthAndWeth, -} from "../common/Constants"; +import { CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS, spokesThatHoldEthAndWeth } from "../common/Constants"; import { CONTRACT_ADDRESSES } from "../common/ContractAddresses"; import { PoolRebalanceLeaf, @@ -346,10 +342,6 @@ export async function persistDataToArweave( logger: winston.Logger, tag?: string ): Promise { - assert( - Buffer.from(tag).length <= ARWEAVE_TAG_BYTE_LIMIT, - `Arweave tag cannot exceed ${ARWEAVE_TAG_BYTE_LIMIT} bytes` - ); const startTime = performance.now(); // Check if data already exists on Arweave with the given tag. // If so, we don't need to persist it again. From c4ada0fd9684f9757e83ad0e9e5bc259f3d2b87e Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Wed, 20 Nov 2024 16:20:51 +0100 Subject: [PATCH 19/44] improve(ProviderUtils): Tag 429 responses for datadog export (#1915) --- src/utils/ProviderUtils.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/utils/ProviderUtils.ts b/src/utils/ProviderUtils.ts index 108f37e7a..9f6e8e24a 100644 --- a/src/utils/ProviderUtils.ts +++ b/src/utils/ProviderUtils.ts @@ -139,6 +139,7 @@ export async function getProvider( rpc: getOriginFromURL(url), retryAfter: `${delayMs} ms`, workers: nodeMaxConcurrency, + datadog: true, }); } await delay(delayMs); From 92c1f9308bf131fde80b75e81217a3bd3e8868e6 Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Wed, 20 Nov 2024 16:31:23 -0500 Subject: [PATCH 20/44] improve(dataworker): Update arweave bundle data (#1918) * Reapply "improve(arweave): Change arweave data key from list of chain ID's to map (#1916)" (#1917) This reverts commit fc59ea0ceacee20d9b2255a51cd77c8e9ee937d9. * improve(dataworker): Update arweave bundle data Duplicate of https://github.com/across-protocol/relayer/commit/0823a68f1f70bed68150e2064b3449e9df6418d5 but updates the arweave key logic to read from the SDK so we don't have two separate computations of the arweave key. 
* bump sdk * Update Dataworker.ts --- package.json | 2 +- src/common/Constants.ts | 2 ++ src/dataworker/Dataworker.ts | 30 +++++++++++++--- src/dataworker/DataworkerUtils.ts | 10 +++++- yarn.lock | 59 +++++++++++++++++++++++-------- 5 files changed, 81 insertions(+), 22 deletions(-) diff --git a/package.json b/package.json index 126c8ca02..574ef3d45 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "dependencies": { "@across-protocol/constants": "^3.1.19", "@across-protocol/contracts": "^3.0.16", - "@across-protocol/sdk": "^3.2.13", + "@across-protocol/sdk": "^3.2.16", "@arbitrum/sdk": "^3.1.3", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 90c0225a4..73e67b531 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -442,6 +442,8 @@ export const RELAYER_DEFAULT_SPOKEPOOL_INDEXER = "./dist/src/libexec/RelayerSpok export const DEFAULT_ARWEAVE_GATEWAY = { url: "arweave.net", port: 443, protocol: "https" }; +export const ARWEAVE_TAG_BYTE_LIMIT = 2048; + // Chains with slow (> 2 day liveness) canonical L2-->L1 bridges that we prioritize taking repayment on. // This does not include all 7-day withdrawal chains because we don't necessarily prefer being repaid on some of these 7-day chains, like Mode. // This list should generally exclude Lite chains because the relayer ignores HubPool liquidity in that case which could cause the diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts index 215a7d224..0aca04370 100644 --- a/src/dataworker/Dataworker.ts +++ b/src/dataworker/Dataworker.ts @@ -31,7 +31,7 @@ import { FillStatus, } from "../interfaces"; import { DataworkerClients } from "./DataworkerClientHelper"; -import { SpokePoolClient, BalanceAllocator } from "../clients"; +import { SpokePoolClient, BalanceAllocator, BundleDataClient } from "../clients"; import * as PoolRebalanceUtils from "./PoolRebalanceUtils"; import { blockRangesAreInvalidForSpokeClients, @@ -619,16 +619,33 @@ export class Dataworker { // Root bundle is valid, attempt to persist the raw bundle data and the merkle leaf data to DA layer // if not already there. if (persistBundleData && isDefined(bundleData)) { + const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(nextBundleMainnetStartBlock); + // Store the bundle block ranges on Arweave as a map of chainId to block range to aid users in querying. + const bundleBlockRangeMap = Object.fromEntries( + bundleData.bundleBlockRanges.map((range, i) => { + const chainIdForRange = chainIds[i]; + return [chainIdForRange, range]; + }) + ); + // As a unique key for this bundle, use the next bundle mainnet start block, which should + // never be duplicated between bundles as long as the mainnet end block in the bundle block range + // always progresses forwards, which I think is a safe assumption. Other chains might pause + // but mainnet should never pause. 
+      const partialArweaveDataKey = BundleDataClient.getArweaveClientKey(bundleData.bundleBlockRanges);
       await Promise.all([
         persistDataToArweave(
           this.clients.arweaveClient,
-          bundleData,
+          {
+            ...bundleData,
+            bundleBlockRanges: bundleBlockRangeMap,
+          },
           this.logger,
-          `bundles-${bundleData.bundleBlockRanges}`
+          `bundles-${partialArweaveDataKey}`
         ),
         persistDataToArweave(
           this.clients.arweaveClient,
           {
+            bundleBlockRanges: bundleBlockRangeMap,
             poolRebalanceLeaves: expectedTrees.poolRebalanceTree.leaves.map((leaf) => {
               return {
                 ...leaf,
@@ -652,7 +669,7 @@ export class Dataworker {
             slowRelayRoot: expectedTrees.slowRelayTree.tree.getHexRoot(),
           },
           this.logger,
-          `merkletree-${bundleData.bundleBlockRanges}`
+          `merkletree-${partialArweaveDataKey}`
         ),
       ]);
     }
@@ -2297,7 +2314,10 @@ export class Dataworker {
       at: "Dataworker#_getPoolRebalanceRoot",
       message: "Constructed new pool rebalance root",
       key,
-      root: this.rootCache[key],
+      root: {
+        ...this.rootCache[key],
+        tree: this.rootCache[key].tree.getHexRoot(),
+      },
     });
 
     return _.cloneDeep(this.rootCache[key]);
diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts
index a87f3ae8e..9e2736b45 100644
--- a/src/dataworker/DataworkerUtils.ts
+++ b/src/dataworker/DataworkerUtils.ts
@@ -1,7 +1,11 @@
 import assert from "assert";
 import { utils, interfaces, caching } from "@across-protocol/sdk";
 import { SpokePoolClient } from "../clients";
-import { CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS, spokesThatHoldEthAndWeth } from "../common/Constants";
+import {
+  ARWEAVE_TAG_BYTE_LIMIT,
+  CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS,
+  spokesThatHoldEthAndWeth,
+} from "../common/Constants";
 import { CONTRACT_ADDRESSES } from "../common/ContractAddresses";
 import {
   PoolRebalanceLeaf,
@@ -342,6 +346,10 @@ export async function persistDataToArweave(
   logger: winston.Logger,
   tag?: string
 ): Promise<void> {
+  assert(
+    Buffer.from(tag).length <= ARWEAVE_TAG_BYTE_LIMIT,
+    `Arweave tag cannot exceed ${ARWEAVE_TAG_BYTE_LIMIT} bytes`
+  );
   const startTime = performance.now();
   // Check if data already exists on Arweave with the given tag.
   // If so, we don't need to persist it again.
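For illustration, the re-keyed block-range map and the tag byte-limit guard introduced above combine as in the following minimal, self-contained TypeScript sketch. The chain IDs, block ranges, and tag string are hypothetical stand-ins; the real key comes from BundleDataClient.getArweaveClientKey in the SDK.

import assert from "assert";

const ARWEAVE_TAG_BYTE_LIMIT = 2048;

// Hypothetical inputs: block ranges ordered by the config store's chain ID indices.
const chainIds = [1, 10, 137, 42161];
const bundleBlockRanges: [number, number][] = [
  [21_000_000, 21_000_100],
  [128_000_000, 128_000_800],
  [64_000_000, 64_000_600],
  [270_000_000, 270_002_000],
];

// Re-key the ranges by chain ID, mirroring bundleBlockRangeMap in the diff above.
const bundleBlockRangeMap = Object.fromEntries(bundleBlockRanges.map((range, i) => [chainIds[i], range]));

// Any tag handed to persistDataToArweave must clear the byte limit, or the new assert throws.
const tag = `bundles-${chainIds.join(",")}`; // stand-in for `bundles-${partialArweaveDataKey}`
assert(Buffer.from(tag).length <= ARWEAVE_TAG_BYTE_LIMIT, `Arweave tag cannot exceed ${ARWEAVE_TAG_BYTE_LIMIT} bytes`);
console.log({ tag, bundleBlockRangeMap });

Keying the ranges by chain ID rather than by array position means Arweave consumers do not need the config store's chain ID ordering to interpret a stored bundle, which is the querying aid the commit message describes.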
diff --git a/yarn.lock b/yarn.lock index 959ce422c..0d0782d70 100644 --- a/yarn.lock +++ b/yarn.lock @@ -53,10 +53,10 @@ yargs "^17.7.2" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.2.13": - version "3.2.13" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.2.13.tgz#8f7fd14dabdd7da0be2a548f9a3b0b49c7f19eba" - integrity sha512-lyzP7bgaABygeIwWISQtBWmzlxyZVvlmp0Au518W8TZ1vkagt7sZa24SV4do8TP9z4JhfsRJVnKGqQJIAWd5hQ== +"@across-protocol/sdk@^3.2.16": + version "3.2.16" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.2.16.tgz#b9cf704420432ce7f27e165afd44985152a08a07" + integrity sha512-ZrFSh9FOMYzwWBlkqGUrhYzm1mhvfixzukzUQGR96ASVbR+qCPjXiUuRryy1l1xhaT+5u5jO/qPLdbMsUAHQFQ== dependencies: "@across-protocol/across-token" "^1.0.0" "@across-protocol/constants" "^3.1.19" @@ -76,8 +76,9 @@ lodash.get "^4.4.2" superstruct "^0.15.4" tslib "^2.6.2" + viem "^2.21.15" -"@adraffy/ens-normalize@1.11.0": +"@adraffy/ens-normalize@1.11.0", "@adraffy/ens-normalize@^1.10.1": version "1.11.0" resolved "https://registry.yarnpkg.com/@adraffy/ens-normalize/-/ens-normalize-1.11.0.tgz#42cc67c5baa407ac25059fcd7d405cc5ecdb0c33" integrity sha512-/3DDPKHqqIqxUULp8yP4zODUY1i+2xvVWsv8A79xGWdCAG+8sb0hRh0Rk2QyOJUnnbyPUAZYcpBuRe3nS2OIUg== @@ -1552,7 +1553,7 @@ resolved "https://registry.yarnpkg.com/@multiformats/base-x/-/base-x-4.0.1.tgz#95ff0fa58711789d53aefb2590a8b7a4e715d121" integrity sha512-eMk0b9ReBbV23xXU693TAIrLyeO5iTgBZGSJfpqriG8UkYvr/hC9u9pyMlAakDNHWmbhMZCDs6KQO0jzKD8OTw== -"@noble/curves@1.6.0", "@noble/curves@^1.4.0", "@noble/curves@^1.4.2", "@noble/curves@~1.6.0": +"@noble/curves@1.6.0", "@noble/curves@^1.4.0", "@noble/curves@^1.4.2", "@noble/curves@^1.6.0", "@noble/curves@~1.6.0": version "1.6.0" resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.6.0.tgz#be5296ebcd5a1730fccea4786d420f87abfeb40b" integrity sha512-TlaHRXDehJuRNR9TfZDNQ45mMEd5dwUwmicsafcIX4SsNiqnCHKjE/1alYPd/lDRVhxdhUAlv8uEhMCI5zjIJQ== @@ -1564,7 +1565,7 @@ resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.0.0.tgz#d5e38bfbdaba174805a4e649f13be9a9ed3351ae" integrity sha512-DZVbtY62kc3kkBtMHqwCOfXrT/hnoORy5BJ4+HU1IR59X0KWAOqsfzQPcUl/lQLlG7qXbe/fZ3r/emxtAl+sqg== -"@noble/hashes@1.5.0", "@noble/hashes@^1.3.1", "@noble/hashes@^1.4.0", "@noble/hashes@~1.5.0": +"@noble/hashes@1.5.0", "@noble/hashes@^1.3.1", "@noble/hashes@^1.4.0", "@noble/hashes@^1.5.0", "@noble/hashes@~1.5.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.5.0.tgz#abadc5ca20332db2b1b2aa3e496e9af1213570b0" integrity sha512-1j6kQFb7QRru7eKN3ZDvRcP13rugwdxZqCjbiAVZfIJwgj2A65UmT4TgARXGlXgnRkORLTDTrO19ZErt7+QXgA== @@ -2453,7 +2454,7 @@ "@noble/secp256k1" "~1.5.2" "@scure/base" "~1.0.0" -"@scure/bip32@1.5.0": +"@scure/bip32@1.5.0", "@scure/bip32@^1.5.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@scure/bip32/-/bip32-1.5.0.tgz#dd4a2e1b8a9da60e012e776d954c4186db6328e6" integrity sha512-8EnFYkqEQdnkuGBVpCzKxyIwDCBLDVj3oiX0EKUFre/tOjL/Hqba1D6n/8RcmaQy4f95qQFrO2A8Sr6ybh4NRw== @@ -2470,7 +2471,7 @@ "@noble/hashes" "~1.0.0" "@scure/base" "~1.0.0" -"@scure/bip39@1.4.0": +"@scure/bip39@1.4.0", "@scure/bip39@^1.4.0": version "1.4.0" resolved "https://registry.yarnpkg.com/@scure/bip39/-/bip39-1.4.0.tgz#664d4f851564e2e1d4bffa0339f9546ea55960a6" integrity sha512-BEEm6p8IueV/ZTfQLp/0vhw4NPnT9oWf5+28nvmeUICjP99f4vr2d+qc7AVGDDtwRep6ifR43Yed9ERVmiITzw== @@ -3697,7 +3698,7 @@ abbrev@1.0.x: web3-eth-abi "^1.2.1" web3-utils "^1.2.1" -abitype@1.0.6: +abitype@1.0.6, abitype@^1.0.6: version "1.0.6" 
resolved "https://registry.yarnpkg.com/abitype/-/abitype-1.0.6.tgz#76410903e1d88e34f1362746e2d407513c38565b" integrity sha512-MMSqYh4+C/aVqI2RQaWqbvI4Kxo5cQV40WQ4QFtDnNzCkqChm8MuENhElmynZlO0qUy/ObkEUaXtKqYnx1Kp3A== @@ -7211,16 +7212,16 @@ eventemitter3@4.0.4: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.4.tgz#b5463ace635a083d018bdc7c917b4c5f10a85384" integrity sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ== +eventemitter3@5.0.1, eventemitter3@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" + integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== + eventemitter3@^4.0.7: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== -eventemitter3@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" - integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== - eventid@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/eventid/-/eventid-2.0.1.tgz#574e860149457a79a2efe788c459f0c3062d02ec" @@ -11874,6 +11875,19 @@ os-tmpdir@~1.0.2: resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= +ox@0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ox/-/ox-0.1.2.tgz#0f791be2ccabeaf4928e6d423498fe1c8094e560" + integrity sha512-ak/8K0Rtphg9vnRJlbOdaX9R7cmxD2MiSthjWGaQdMk3D7hrAlDoM+6Lxn7hN52Za3vrXfZ7enfke/5WjolDww== + dependencies: + "@adraffy/ens-normalize" "^1.10.1" + "@noble/curves" "^1.6.0" + "@noble/hashes" "^1.5.0" + "@scure/bip32" "^1.5.0" + "@scure/bip39" "^1.4.0" + abitype "^1.0.6" + eventemitter3 "5.0.1" + p-cancelable@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa" @@ -15168,6 +15182,21 @@ verror@1.10.0: core-util-is "1.0.2" extsprintf "^1.2.0" +viem@^2.21.15: + version "2.21.48" + resolved "https://registry.yarnpkg.com/viem/-/viem-2.21.48.tgz#f8f1d0bf5381282e22e6a1f8b72ebd6e64426480" + integrity sha512-/hBHyG1gdIIuiQv0z9YmzXl5eWJa0UCZGwkeuQzH2Bmg6FIEwZeEcxgiytXZydip+p2wMBFa1jdr7o5O1+mrIg== + dependencies: + "@noble/curves" "1.6.0" + "@noble/hashes" "1.5.0" + "@scure/bip32" "1.5.0" + "@scure/bip39" "1.4.0" + abitype "1.0.6" + isows "1.0.6" + ox "0.1.2" + webauthn-p256 "0.0.10" + ws "8.18.0" + viem@^2.21.18: version "2.21.21" resolved "https://registry.yarnpkg.com/viem/-/viem-2.21.21.tgz#11a5001fa18c8a47548a4b20ae9ddd8cfb14de3f" From ff30e83fe6114ae5de0c390ea53712f8c1f022bc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 11:57:03 -0500 Subject: [PATCH 21/44] chore(deps): bump cross-spawn from 7.0.3 to 7.0.6 (#1914) Bumps [cross-spawn](https://github.com/moxystudio/node-cross-spawn) from 7.0.3 to 7.0.6. - [Changelog](https://github.com/moxystudio/node-cross-spawn/blob/master/CHANGELOG.md) - [Commits](https://github.com/moxystudio/node-cross-spawn/compare/v7.0.3...v7.0.6) --- updated-dependencies: - dependency-name: cross-spawn dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: nicholaspai <9457025+nicholaspai@users.noreply.github.com> --- yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn.lock b/yarn.lock index 0d0782d70..ebf42b8c6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5703,9 +5703,9 @@ cross-fetch@^3.1.5: node-fetch "^2.6.12" cross-spawn@^7.0.0, cross-spawn@^7.0.2: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + version "7.0.6" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" + integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== dependencies: path-key "^3.1.0" shebang-command "^2.0.0" From e4f5e39d227bfefb4c041975501567f7dce78ffe Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Thu, 21 Nov 2024 13:18:38 -0500 Subject: [PATCH 22/44] feat(AlephZero): Add manual withdrawal script and automate finalizations (#1909) * feat(AlephZero): Add manual withdrawal script and automate finalizations Similar to #1866 for OpStack but for Arbitrum Orbit. Replaces original PR #1898 with smaller commit history. I essentially took the core changes and copied and pasted here, to avoid rebase hell * WIP * WIP * wip * Update ContractAddresses.ts * Update arbStack.ts * Update arbStack.ts * Update arbStack.ts * fix --- package.json | 2 +- scripts/withdrawFromArbitrumOrbit.ts | 117 ++++++++++ scripts/withdrawFromOpStack.ts | 2 +- src/common/ContractAddresses.ts | 10 + src/common/abi/ArbSysL2.json | 82 +++++++ src/common/abi/ArbitrumErc20GatewayL2.json | 55 +++++ src/finalizer/utils/arbStack.ts | 239 ++++++++++++++++++--- yarn.lock | 16 +- 8 files changed, 489 insertions(+), 34 deletions(-) create mode 100644 scripts/withdrawFromArbitrumOrbit.ts create mode 100644 src/common/abi/ArbSysL2.json diff --git a/package.json b/package.json index 574ef3d45..2d7ce4a6b 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,7 @@ "@across-protocol/constants": "^3.1.19", "@across-protocol/contracts": "^3.0.16", "@across-protocol/sdk": "^3.2.16", - "@arbitrum/sdk": "^3.1.3", + "@arbitrum/sdk": "^4.0.2", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", "@eth-optimism/sdk": "^3.3.2", diff --git a/scripts/withdrawFromArbitrumOrbit.ts b/scripts/withdrawFromArbitrumOrbit.ts new file mode 100644 index 000000000..3efe2fdbf --- /dev/null +++ b/scripts/withdrawFromArbitrumOrbit.ts @@ -0,0 +1,117 @@ +// Submits a bridge from Arbitrum Orbit L2 to L1. +// For now, this script only supports WETH withdrawals on AlephZero. 
+ +import { + ethers, + retrieveSignerFromCLIArgs, + getProvider, + ERC20, + TOKEN_SYMBOLS_MAP, + assert, + getL1TokenInfo, + Contract, + fromWei, + blockExplorerLink, + getNativeTokenSymbol, +} from "../src/utils"; +import { CONTRACT_ADDRESSES } from "../src/common"; +import { askYesNoQuestion } from "./utils"; + +import minimist from "minimist"; + +const cliArgs = ["amount", "chainId", "token"]; +const args = minimist(process.argv.slice(2), { + string: cliArgs, +}); + +// Example run: +// ts-node ./scripts/withdrawFromArbitrumOrbit.ts +// \ --amount 3000000000000000000 +// \ --chainId 41455 +// \ --token WETH +// \ --wallet gckms +// \ --keys bot1 + +export async function run(): Promise { + assert( + cliArgs.every((cliArg) => Object.keys(args).includes(cliArg)), + `Missing cliArg, expected: ${cliArgs}` + ); + const baseSigner = await retrieveSignerFromCLIArgs(); + const signerAddr = await baseSigner.getAddress(); + const chainId = parseInt(args.chainId); + const connectedSigner = baseSigner.connect(await getProvider(chainId)); + const l2Token = TOKEN_SYMBOLS_MAP[args.token]?.addresses[chainId]; + assert(l2Token, `${args.token} not found on chain ${chainId} in TOKEN_SYMBOLS_MAP`); + const l1TokenInfo = getL1TokenInfo(l2Token, chainId); + console.log("Fetched L1 token info:", l1TokenInfo); + const amount = args.amount; + const amountFromWei = ethers.utils.formatUnits(amount, l1TokenInfo.decimals); + console.log(`Amount to bridge from chain ${chainId}: ${amountFromWei} ${l2Token}`); + + const erc20 = new Contract(l2Token, ERC20.abi, connectedSigner); + const currentBalance = await erc20.balanceOf(signerAddr); + const nativeTokenSymbol = getNativeTokenSymbol(chainId); + const currentNativeBalance = await connectedSigner.getBalance(); + console.log( + `Current ${l1TokenInfo.symbol} balance for account ${signerAddr}: ${fromWei( + currentBalance, + l1TokenInfo.decimals + )} ${l2Token}` + ); + console.log( + `Current native ${nativeTokenSymbol} token balance for account ${signerAddr}: ${fromWei(currentNativeBalance, 18)}` + ); + + // Now, submit a withdrawal: + let contract: Contract, functionName: string, functionArgs: any[]; + if (l1TokenInfo.symbol !== nativeTokenSymbol) { + const arbErc20GatewayObj = CONTRACT_ADDRESSES[chainId].erc20Gateway; + contract = new Contract(arbErc20GatewayObj.address, arbErc20GatewayObj.abi, connectedSigner); + functionName = "outboundTransfer"; + functionArgs = [ + l1TokenInfo.address, // l1Token + signerAddr, // to + amount, // amount + "0x", // data + ]; + + console.log( + `Submitting ${functionName} on the Arbitrum ERC20 gateway router @ ${contract.address} with the following args: `, + ...functionArgs + ); + } else { + const arbSys = CONTRACT_ADDRESSES[chainId].arbSys; + contract = new Contract(arbSys.address, arbSys.abi, connectedSigner); + functionName = "withdrawEth"; + functionArgs = [ + signerAddr, // to + { value: amount }, + ]; + console.log( + `Submitting ${functionName} on the ArbSys contract @ ${contract.address} with the following args: `, + ...functionArgs + ); + } + + if (!(await askYesNoQuestion("\nDo you want to proceed?"))) { + return; + } + const withdrawal = await contract[functionName](...functionArgs); + console.log(`Submitted withdrawal: ${blockExplorerLink(withdrawal.hash, chainId)}.`); + const receipt = await withdrawal.wait(); + console.log("Receipt", receipt); +} + +if (require.main === module) { + run() + .then(async () => { + // eslint-disable-next-line no-process-exit + process.exit(0); + }) + .catch(async (error) => { + 
console.error("Process exited with", error); + // eslint-disable-next-line no-process-exit + process.exit(1); + }); +} diff --git a/scripts/withdrawFromOpStack.ts b/scripts/withdrawFromOpStack.ts index 3b7b7121b..e4ba5041d 100644 --- a/scripts/withdrawFromOpStack.ts +++ b/scripts/withdrawFromOpStack.ts @@ -60,7 +60,7 @@ export async function run(): Promise { l1TokenInfo.decimals )} ${l2Token}` ); - console.log(`Current ETH balance for account ${signerAddr}: ${fromWei(currentEthBalance, l1TokenInfo.decimals)}`); + console.log(`Current ETH balance for account ${signerAddr}: ${fromWei(currentEthBalance)}`); // First offer user option to unwrap WETH into ETH if (l1TokenInfo.symbol === "ETH") { diff --git a/src/common/ContractAddresses.ts b/src/common/ContractAddresses.ts index cdd9473ee..2c4b78e39 100644 --- a/src/common/ContractAddresses.ts +++ b/src/common/ContractAddresses.ts @@ -22,6 +22,7 @@ import ARBITRUM_ERC20_GATEWAY_ROUTER_L1_ABI from "./abi/ArbitrumErc20GatewayRout import ARBITRUM_ERC20_GATEWAY_L1_ABI from "./abi/ArbitrumErc20GatewayL1.json"; import ARBITRUM_ERC20_GATEWAY_L2_ABI from "./abi/ArbitrumErc20GatewayL2.json"; import ARBITRUM_OUTBOX_ABI from "./abi/ArbitrumOutbox.json"; +import ARBSYS_L2_ABI from "./abi/ArbSysL2.json"; import LINEA_MESSAGE_SERVICE_ABI from "./abi/LineaMessageService.json"; import LINEA_TOKEN_BRIDGE_ABI from "./abi/LineaTokenBridge.json"; import LINEA_USDC_BRIDGE_ABI from "./abi/LineaUsdcBridge.json"; @@ -132,6 +133,10 @@ export const CONTRACT_ADDRESSES: { address: "0x0B9857ae2D4A3DBe74ffE1d7DF045bb7F96E4840", abi: ARBITRUM_OUTBOX_ABI, }, + orbitOutbox_41455: { + address: "0x73bb50c32a3BD6A1032aa5cFeA048fBDA3D6aF6e", + abi: ARBITRUM_OUTBOX_ABI, + }, orbitErc20GatewayRouter_42161: { address: "0x72Ce9c846789fdB6fC1f34aC4AD25Dd9ef7031ef", abi: ARBITRUM_ERC20_GATEWAY_ROUTER_L1_ABI, @@ -334,8 +339,13 @@ export const CONTRACT_ADDRESSES: { }, 41455: { erc20Gateway: { + address: "0x2A5a79061b723BBF453ef7E07c583C750AFb9BD6", abi: ARBITRUM_ERC20_GATEWAY_L2_ABI, }, + arbSys: { + address: "0x0000000000000000000000000000000000000064", + abi: ARBSYS_L2_ABI, + }, }, 59144: { l2MessageService: { diff --git a/src/common/abi/ArbSysL2.json b/src/common/abi/ArbSysL2.json new file mode 100644 index 000000000..f3b864195 --- /dev/null +++ b/src/common/abi/ArbSysL2.json @@ -0,0 +1,82 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "caller", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "destination", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "hash", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "position", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "arbBlockNum", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "ethBlockNum", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "callvalue", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "name": "L2ToL1Tx", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "destination", + "type": "address" + } + ], + "name": "withdrawEth", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + 
"stateMutability": "payable", + "type": "function" + } +] diff --git a/src/common/abi/ArbitrumErc20GatewayL2.json b/src/common/abi/ArbitrumErc20GatewayL2.json index 3ed24368a..48a1ae4af 100644 --- a/src/common/abi/ArbitrumErc20GatewayL2.json +++ b/src/common/abi/ArbitrumErc20GatewayL2.json @@ -9,5 +9,60 @@ ], "name": "DepositFinalized", "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "_from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "_l2ToL1Id", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "_exitNum", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + } + ], + "name": "WithdrawalInitiated", + "type": "event" + }, + { + "inputs": [ + { "internalType": "address", "name": "_token", "type": "address" }, + { "internalType": "address", "name": "_to", "type": "address" }, + { "internalType": "uint256", "name": "_amount", "type": "uint256" }, + { "internalType": "bytes", "name": "_data", "type": "bytes" } + ], + "name": "outboundTransfer", + "outputs": [{ "internalType": "bytes", "name": "", "type": "bytes" }], + "stateMutability": "payable", + "type": "function" } ] diff --git a/src/finalizer/utils/arbStack.ts b/src/finalizer/utils/arbStack.ts index 0373c0cf3..b38e34723 100644 --- a/src/finalizer/utils/arbStack.ts +++ b/src/finalizer/utils/arbStack.ts @@ -1,4 +1,10 @@ -import { L2ToL1MessageStatus, L2TransactionReceipt, L2ToL1MessageWriter } from "@arbitrum/sdk"; +import { + ChildToParentMessageStatus, + ChildTransactionReceipt, + ChildToParentMessageWriter, + registerCustomArbitrumNetwork, + ArbitrumNetwork, +} from "@arbitrum/sdk"; import { winston, convertFromWei, @@ -15,41 +21,206 @@ import { compareAddressesSimple, CHAIN_IDs, TOKEN_SYMBOLS_MAP, + getProvider, + averageBlockTime, + paginatedEventQuery, + getNetworkName, + ethers, + getL2TokenAddresses, + getNativeTokenSymbol, + fromWei, } from "../../utils"; import { TokensBridged } from "../../interfaces"; import { HubPoolClient, SpokePoolClient } from "../../clients"; import { CONTRACT_ADDRESSES } from "../../common"; import { FinalizerPromise, CrossChainMessage } from "../types"; +let LATEST_MAINNET_BLOCK: number; +let MAINNET_BLOCK_TIME: number; + +type PartialArbitrumNetwork = Omit & { + challengePeriodSeconds: number; + registered: boolean; +}; +// These network configs are defined in the Arbitrum SDK, and we need to register them in the SDK's memory. +// We should export this out of a common file but we don't use this SDK elsewhere currentlyl. 
+export const ARB_ORBIT_NETWORK_CONFIGS: PartialArbitrumNetwork[] = [ + { + // Addresses are available here: + // https://raas.gelato.network/rollups/details/public/aleph-zero-evm + chainId: CHAIN_IDs.ALEPH_ZERO, + name: "Aleph Zero", + parentChainId: CHAIN_IDs.MAINNET, + ethBridge: { + bridge: "0x41Ec9456AB918f2aBA81F38c03Eb0B93b78E84d9", + inbox: "0x56D8EC76a421063e1907503aDd3794c395256AEb ", + sequencerInbox: "0xF75206c49c1694594E3e69252E519434f1579876", + outbox: CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET][`orbitOutbox_${CHAIN_IDs.ALEPH_ZERO}`].address, + rollup: "0x1CA12290D954CFe022323b6A6Df92113ed6b1C98", + }, + challengePeriodSeconds: 6 * 60 * 60, // ~ 6 hours + retryableLifetimeSeconds: 7 * 24 * 60 * 60, + nativeToken: TOKEN_SYMBOLS_MAP.AZERO.addresses[CHAIN_IDs.MAINNET], + isTestnet: false, + registered: false, + // Must be set to true for L3's + isCustom: true, + }, +]; + +export function getOrbitNetwork(chainId: number): PartialArbitrumNetwork | undefined { + return ARB_ORBIT_NETWORK_CONFIGS.find((network) => network.chainId === chainId); +} +export function getArbitrumOrbitFinalizationTime(chainId: number): number { + return getOrbitNetwork(chainId)?.challengePeriodSeconds ?? 7 * 60 * 60 * 24; +} + export async function arbStackFinalizer( logger: winston.Logger, signer: Signer, hubPoolClient: HubPoolClient, spokePoolClient: SpokePoolClient ): Promise { + LATEST_MAINNET_BLOCK = hubPoolClient.latestBlockSearched; + const hubPoolProvider = await getProvider(hubPoolClient.chainId, logger); + MAINNET_BLOCK_TIME = (await averageBlockTime(hubPoolProvider)).average; + // Now that we know the L1 block time, we can calculate the confirmPeriodBlocks. + + ARB_ORBIT_NETWORK_CONFIGS.forEach((_networkConfig) => { + if (_networkConfig.registered) { + return; + } + const networkConfig: ArbitrumNetwork = { + ..._networkConfig, + confirmPeriodBlocks: _networkConfig.challengePeriodSeconds / MAINNET_BLOCK_TIME, + }; + // The network config object should be full now. + registerCustomArbitrumNetwork(networkConfig); + _networkConfig.registered = true; + }); + const { chainId } = spokePoolClient; + const networkName = getNetworkName(chainId); - // Arbitrum takes 7 days to finalize withdrawals, so don't look up events younger than that. + // Arbitrum orbit takes 7 days to finalize withdrawals, so don't look up events younger than that. const redis = await getRedisCache(logger); const latestBlockToFinalize = await getBlockForTimestamp( chainId, - getCurrentTime() - 7 * 60 * 60 * 24, + getCurrentTime() - getArbitrumOrbitFinalizationTime(chainId), undefined, redis ); logger.debug({ - at: "Finalizer#ArbitrumFinalizer", - message: "Arbitrum TokensBridged event filter", + at: `Finalizer#${networkName}Finalizer`, + message: `${networkName} TokensBridged event filter`, toBlock: latestBlockToFinalize, }); // Skip events that are likely not past the seven day challenge period. const olderTokensBridgedEvents = spokePoolClient.getTokensBridged().filter( (e) => e.blockNumber <= latestBlockToFinalize && - // USDC withdrawals for Arbitrum should be finalized via the CCTP Finalizer. - !compareAddressesSimple(e.l2TokenAddress, TOKEN_SYMBOLS_MAP["USDC"].addresses[chainId]) + // USDC withdrawals for chains that support CCTP should be finalized via the CCTP Finalizer. 
+      // The way we detect if a chain supports CCTP is by checking if there is a `cctpMessageTransmitter`
+      // entry in CONTRACT_ADDRESSES.
+      (CONTRACT_ADDRESSES[chainId].cctpMessageTransmitter === undefined ||
+        !compareAddressesSimple(e.l2TokenAddress, TOKEN_SYMBOLS_MAP["USDC"].addresses[chainId]))
   );
 
+  // Experimental feature: Add all ETH withdrawals from the Arbitrum Orbit chain to the finalizer. This will help us
+  // in the short term to automate ETH withdrawals from Lite chains, which can build up ETH balances over time.
+  // Because they are lite chains, our only way to withdraw them is to initiate a slow bridge of ETH from
+  // the lite chain to Ethereum.
+  const withdrawalToAddresses: string[] = process.env.FINALIZER_WITHDRAWAL_TO_ADDRESSES
+    ? JSON.parse(process.env.FINALIZER_WITHDRAWAL_TO_ADDRESSES).map((address) => ethers.utils.getAddress(address))
+    : [];
+  if (getOrbitNetwork(chainId) !== undefined && withdrawalToAddresses.length > 0) {
+    // ERC20 withdrawals emit events in the erc20Gateway.
+    // Native token withdrawals emit events in the ArbSys contract.
+    const l2ArbSys = CONTRACT_ADDRESSES[chainId].arbSys;
+    const arbSys = new Contract(l2ArbSys.address, l2ArbSys.abi, spokePoolClient.spokePool.provider);
+    const l2Erc20Gateway = CONTRACT_ADDRESSES[chainId].erc20Gateway;
+    const arbitrumGateway = new Contract(
+      l2Erc20Gateway.address,
+      l2Erc20Gateway.abi,
+      spokePoolClient.spokePool.provider
+    );
+    // TODO: For this to work for ArbitrumOrbit, we need to first query ERC20GatewayRouter.getGateway(l2Token) to
+    // get the ERC20 Gateway. Then, on the ERC20 Gateway, query the WithdrawalInitiated event.
+    // See example txn: https://evm-explorer.alephzero.org/tx/0xb493174af0822c1a5a5983c2cbd4fe74055ee70409c777b9c665f417f89bde92
+    // which withdraws WETH to mainnet using dev wallet.
+    const withdrawalErc20Events = await paginatedEventQuery(
+      arbitrumGateway,
+      arbitrumGateway.filters.WithdrawalInitiated(
+        null, // l1Token, not-indexed so can't filter
+        null, // from
+        withdrawalToAddresses // to
+      ),
+      {
+        ...spokePoolClient.eventSearchConfig,
+        toBlock: spokePoolClient.latestBlockSearched,
+      }
+    );
+    const withdrawalNativeEvents = await paginatedEventQuery(
+      arbSys,
+      arbSys.filters.L2ToL1Tx(
+        null, // caller, not-indexed so can't filter
+        withdrawalToAddresses // destination
+      ),
+      {
+        ...spokePoolClient.eventSearchConfig,
+        toBlock: spokePoolClient.latestBlockSearched,
+      }
+    );
+    const withdrawalEvents = [
+      ...withdrawalErc20Events.map((e) => {
+        const l2Token = getL2TokenAddresses(e.args.l1Token)[chainId];
+        return {
+          ...e,
+          amount: e.args._amount,
+          l2TokenAddress: l2Token,
+        };
+      }),
+      ...withdrawalNativeEvents.map((e) => {
+        const nativeTokenSymbol = getNativeTokenSymbol(chainId);
+        const l2Token = TOKEN_SYMBOLS_MAP[nativeTokenSymbol].addresses[chainId];
+        return {
+          ...e,
+          amount: e.args.callvalue,
+          l2TokenAddress: l2Token,
+        };
+      }),
+    ];
+    // If any withdrawal initiated events are found, then add them to the list of TokensBridged events we'll
+    // submit proofs and finalizations for.
+ withdrawalEvents.forEach((event) => { + try { + const tokenBridgedEvent: TokensBridged = { + ...event, + amountToReturn: event.amount, + chainId, + leafId: 0, + l2TokenAddress: event.l2TokenAddress, + }; + if (event.blockNumber <= latestBlockToFinalize) { + olderTokensBridgedEvents.push(tokenBridgedEvent); + } else { + const l1TokenInfo = getL1TokenInfo(tokenBridgedEvent.l2TokenAddress, chainId); + const amountFromWei = fromWei(tokenBridgedEvent.amountToReturn.toString(), l1TokenInfo.decimals); + logger.debug({ + at: `Finalizer#${networkName}Finalizer`, + message: `Withdrawal event for ${amountFromWei} of ${l1TokenInfo.symbol} is too recent to finalize`, + }); + } + } catch (err) { + logger.debug({ + at: `Finalizer#${networkName}Finalizer`, + message: `Skipping ERC20 withdrawal event for unknown token ${event.l2TokenAddress} on chain ${networkName}`, + event: event, + }); + } + }); + } + return await multicallArbitrumFinalizations(olderTokensBridgedEvents, signer, hubPoolClient, logger, chainId); } @@ -81,14 +252,14 @@ async function multicallArbitrumFinalizations( }; } -async function finalizeArbitrum(message: L2ToL1MessageWriter, chainId: number): Promise { +async function finalizeArbitrum(message: ChildToParentMessageWriter, chainId: number): Promise { const l2Provider = getCachedProvider(chainId, true); const proof = await message.getOutboxProof(l2Provider); const { address, abi } = CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET][`orbitOutbox_${chainId}`]; const outbox = new Contract(address, abi); // eslint-disable-next-line @typescript-eslint/no-explicit-any const eventData = (message as any).nitroWriter.event; // nitroWriter is a private property on the - // L2ToL1MessageWriter class, which we need to form the calldata so unfortunately we must cast to `any`. + // ChildToParentMessageWriter class, which we need to form the calldata so unfortunately we must cast to `any`. 
const callData = await outbox.populateTransaction.executeTransaction( proof, eventData.position, @@ -115,7 +286,7 @@ async function getFinalizableMessages( ): Promise< { info: TokensBridged; - message: L2ToL1MessageWriter; + message: ChildToParentMessageWriter; status: string; }[] > { @@ -124,12 +295,15 @@ async function getFinalizableMessages( allMessagesWithStatuses, (message: { status: string }) => message.status ); + const networkName = getNetworkName(chainId); logger.debug({ - at: "ArbitrumFinalizer", - message: "Arbitrum outbox message statuses", + at: `Finalizer#${networkName}Finalizer`, + message: `${networkName} outbox message statuses`, statusesGrouped, }); - return allMessagesWithStatuses.filter((x) => x.status === L2ToL1MessageStatus[L2ToL1MessageStatus.CONFIRMED]); + return allMessagesWithStatuses.filter( + (x) => x.status === ChildToParentMessageStatus[ChildToParentMessageStatus.CONFIRMED] + ); } async function getAllMessageStatuses( @@ -140,7 +314,7 @@ async function getAllMessageStatuses( ): Promise< { info: TokensBridged; - message: L2ToL1MessageWriter; + message: ChildToParentMessageWriter; status: string; }[] > { @@ -170,22 +344,23 @@ async function getMessageOutboxStatusAndProof( logIndex: number, chainId: number ): Promise<{ - message: L2ToL1MessageWriter; + message: ChildToParentMessageWriter; status: string; }> { + const networkName = getNetworkName(chainId); const l2Provider = getCachedProvider(chainId, true); const receipt = await l2Provider.getTransactionReceipt(event.transactionHash); - const l2Receipt = new L2TransactionReceipt(receipt); + const l2Receipt = new ChildTransactionReceipt(receipt); try { - const l2ToL1Messages = await l2Receipt.getL2ToL1Messages(l1Signer); + const l2ToL1Messages = await l2Receipt.getChildToParentMessages(l1Signer); if (l2ToL1Messages.length === 0 || l2ToL1Messages.length - 1 < logIndex) { const error = new Error( `No outgoing messages found in transaction:${event.transactionHash} for l2 token ${event.l2TokenAddress}` ); logger.warn({ - at: "ArbitrumFinalizer", - message: "Arbitrum transaction that emitted TokensBridged event unexpectedly contains 0 L2-to-L1 messages 🤢!", + at: `Finalizer#${networkName}Finalizer`, + message: "Transaction that emitted TokensBridged event unexpectedly contains 0 L2-to-L1 messages 🤢!", logIndex, l2ToL1Messages: l2ToL1Messages.length, event, @@ -199,30 +374,38 @@ async function getMessageOutboxStatusAndProof( // Check if already executed or unconfirmed (i.e. 
not yet available to be executed on L1 following dispute // window) const outboxMessageExecutionStatus = await l2Message.status(l2Provider); - if (outboxMessageExecutionStatus === L2ToL1MessageStatus.EXECUTED) { + if (outboxMessageExecutionStatus === ChildToParentMessageStatus.EXECUTED) { return { message: l2Message, - status: L2ToL1MessageStatus[L2ToL1MessageStatus.EXECUTED], + status: ChildToParentMessageStatus[ChildToParentMessageStatus.EXECUTED], }; } - if (outboxMessageExecutionStatus !== L2ToL1MessageStatus.CONFIRMED) { - return { - message: l2Message, - status: L2ToL1MessageStatus[L2ToL1MessageStatus.UNCONFIRMED], - }; + if (outboxMessageExecutionStatus !== ChildToParentMessageStatus.CONFIRMED) { + const estimatedFinalizationBlock = await l2Message.getFirstExecutableBlock(l2Provider); + const estimatedFinalizationBlockDelta = estimatedFinalizationBlock.toNumber() - LATEST_MAINNET_BLOCK; + logger.debug({ + at: `Finalizer#${networkName}Finalizer`, + message: `Unconfirmed withdrawal can be finalized in ${ + (estimatedFinalizationBlockDelta * MAINNET_BLOCK_TIME) / 60 / 60 + } hours`, + chainId, + token: event.l2TokenAddress, + amount: event.amountToReturn, + receipt: l2Receipt.transactionHash, + }); } // Now that its confirmed and not executed, we can execute our // message in its outbox entry. return { message: l2Message, - status: L2ToL1MessageStatus[outboxMessageExecutionStatus], + status: ChildToParentMessageStatus[outboxMessageExecutionStatus], }; } catch (error) { // Likely L1 message hasn't been included in an arbitrum batch yet, so ignore it for now. return { message: undefined, - status: L2ToL1MessageStatus[L2ToL1MessageStatus.UNCONFIRMED], + status: ChildToParentMessageStatus[ChildToParentMessageStatus.UNCONFIRMED], }; } } diff --git a/yarn.lock b/yarn.lock index ebf42b8c6..9ff89fe7c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -83,14 +83,15 @@ resolved "https://registry.yarnpkg.com/@adraffy/ens-normalize/-/ens-normalize-1.11.0.tgz#42cc67c5baa407ac25059fcd7d405cc5ecdb0c33" integrity sha512-/3DDPKHqqIqxUULp8yP4zODUY1i+2xvVWsv8A79xGWdCAG+8sb0hRh0Rk2QyOJUnnbyPUAZYcpBuRe3nS2OIUg== -"@arbitrum/sdk@^3.1.3": - version "3.1.3" - resolved "https://registry.yarnpkg.com/@arbitrum/sdk/-/sdk-3.1.3.tgz#75236043717a450b569faaa087687c51d525b0c3" - integrity sha512-Dn1or7/Guc3dItuiiWaoYQ37aCDwiWTZGPIrg4yBJW27BgiDGbo0mjPDAhKTh4p5NDOWyE8bZ0vZai86COZIUA== +"@arbitrum/sdk@^4.0.2": + version "4.0.2" + resolved "https://registry.yarnpkg.com/@arbitrum/sdk/-/sdk-4.0.2.tgz#23555858f49e2b237b94a65bd486c65edb7b1690" + integrity sha512-KkuXNwbG5c/hCT66EG2tFMHXxIDCvt9dxAIeykZYnW7KyEH5GNlRwaPzwo6MU0shHNc0qg6pZzy2XakJWuSw2Q== dependencies: "@ethersproject/address" "^5.0.8" "@ethersproject/bignumber" "^5.1.1" "@ethersproject/bytes" "^5.0.8" + async-mutex "^0.4.0" ethers "^5.1.0" "@aws-crypto/sha256-js@1.2.2": @@ -4211,6 +4212,13 @@ async-listener@^0.6.0: semver "^5.3.0" shimmer "^1.1.0" +async-mutex@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/async-mutex/-/async-mutex-0.4.1.tgz#bccf55b96f2baf8df90ed798cb5544a1f6ee4c2c" + integrity sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA== + dependencies: + tslib "^2.4.0" + async-retry@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/async-retry/-/async-retry-1.3.3.tgz#0e7f36c04d8478e7a58bdbed80cedf977785f280" From 88a79c41862b655f2e8dfa1970f07295c1cdb28a Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Thu, 21 Nov 2024 19:37:43 -0500 Subject: 
[PATCH 23/44] improve: bump sdk (#1922) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 2d7ce4a6b..56293b809 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "dependencies": { "@across-protocol/constants": "^3.1.19", "@across-protocol/contracts": "^3.0.16", - "@across-protocol/sdk": "^3.2.16", + "@across-protocol/sdk": "^3.3.17", "@arbitrum/sdk": "^4.0.2", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", diff --git a/yarn.lock b/yarn.lock index 9ff89fe7c..488b0c089 100644 --- a/yarn.lock +++ b/yarn.lock @@ -53,10 +53,10 @@ yargs "^17.7.2" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.2.16": - version "3.2.16" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.2.16.tgz#b9cf704420432ce7f27e165afd44985152a08a07" - integrity sha512-ZrFSh9FOMYzwWBlkqGUrhYzm1mhvfixzukzUQGR96ASVbR+qCPjXiUuRryy1l1xhaT+5u5jO/qPLdbMsUAHQFQ== +"@across-protocol/sdk@^3.3.17": + version "3.3.17" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.17.tgz#8d4c2982e99e6908412675d7892c881bc78f8778" + integrity sha512-IxRbsFrQYsu4urs9hApaEFHhSMZWAFdgI/6Vj05fKw3j8iJ6H2b2N6ia7Q2FzziMcvM7UH8+gbWdLyL4XQfjig== dependencies: "@across-protocol/across-token" "^1.0.0" "@across-protocol/constants" "^3.1.19" From d38be08bb2ef5a7994677599ed4ac42ba2a6b507 Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Thu, 21 Nov 2024 20:16:32 -0500 Subject: [PATCH 24/44] improve: Update sdk (#1924) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 56293b809..d8fb23d01 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "dependencies": { "@across-protocol/constants": "^3.1.19", "@across-protocol/contracts": "^3.0.16", - "@across-protocol/sdk": "^3.3.17", + "@across-protocol/sdk": "^3.3.18", "@arbitrum/sdk": "^4.0.2", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", diff --git a/yarn.lock b/yarn.lock index 488b0c089..ff63b9d57 100644 --- a/yarn.lock +++ b/yarn.lock @@ -53,10 +53,10 @@ yargs "^17.7.2" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.3.17": - version "3.3.17" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.17.tgz#8d4c2982e99e6908412675d7892c881bc78f8778" - integrity sha512-IxRbsFrQYsu4urs9hApaEFHhSMZWAFdgI/6Vj05fKw3j8iJ6H2b2N6ia7Q2FzziMcvM7UH8+gbWdLyL4XQfjig== +"@across-protocol/sdk@^3.3.18": + version "3.3.18" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.18.tgz#d39ef359f9f639921fb412a1355167354014a80f" + integrity sha512-Ea40yDPL94T3uc6HhqDj8X7vovPSyOVSmA6Z3C1uZmdwRdDKt8hlg8k7yxIg+8aR5aEJJ7hCZy6bHdI5XHpbFQ== dependencies: "@across-protocol/across-token" "^1.0.0" "@across-protocol/constants" "^3.1.19" From f4043b785c6b6e461493093433fee41539ea1a27 Mon Sep 17 00:00:00 2001 From: bmzig <57361391+bmzig@users.noreply.github.com> Date: Fri, 22 Nov 2024 08:14:43 -0600 Subject: [PATCH 25/44] improve(reporter): log rebalance information for debugging (#1921) Signed-off-by: bennett --- src/monitor/Monitor.ts | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/src/monitor/Monitor.ts b/src/monitor/Monitor.ts index 34ab55801..9bee157a5 100644 --- a/src/monitor/Monitor.ts +++ b/src/monitor/Monitor.ts @@ -666,6 +666,13 @@ export class Monitor { ); const enabledChainIds = 
this.clients.configStoreClient.getChainIdIndicesForBlock(nextBundleMainnetStartBlock); + this.logger.debug({ + at: "Monitor#checkSpokePoolRunningBalances", + message: "Mainnet root bundles in scope", + validatedBundles, + outstandingBundle: bundle, + }); + const slowFillBlockRange = getWidestPossibleExpectedBlockRange( enabledChainIds, this.clients.spokePoolClients, @@ -682,6 +689,13 @@ export class Monitor { : [endBlockNumber + 1, spokeLatestBlockSearched > endBlockNumber ? spokeLatestBlockSearched : endBlockNumber]; }); + this.logger.debug({ + at: "Monitor#checkSpokePoolRunningBalances", + message: "Block ranges to search", + slowFillBlockRange, + blockRangeTail, + }); + // Do all async tasks in parallel. We want to know about the pool rebalances, slow fills in the most recent proposed bundle, refunds // from the last `n` bundles, pending refunds which have not been made official via a root bundle proposal, and the current balances of // all the spoke pools. @@ -739,6 +753,12 @@ export class Monitor { }); } + this.logger.debug({ + at: "Monitor#checkSpokePoolRunningBalances", + message: "Print pool rebalance leaves", + poolRebalanceRootLeaves: poolRebalanceLeaves, + }); + // Calculate the pending refunds. for (const chainId of chainIds) { const l2TokenAddresses = monitoredTokenSymbols @@ -770,6 +790,13 @@ export class Monitor { ); pendingRelayerRefunds[chainId][l2Token] = pendingValidatedDeductions.add(nextBundleDeductions); }); + + this.logger.debug({ + at: "Monitor#checkSpokePoolRunningBalances", + message: "Print refund amounts for chainId", + chainId, + pendingDeductions: pendingRelayerRefunds[chainId], + }); } // Get the slow fill amounts. Only do this step if there were slow fills in the most recent root bundle. From 5bf9721df894dcf52b59cd62f612e0e4ffd2f10f Mon Sep 17 00:00:00 2001 From: Gerhard Steenkamp <51655063+gsteenkamp89@users.noreply.github.com> Date: Tue, 26 Nov 2024 12:12:33 +0200 Subject: [PATCH 26/44] Feat/performance tracking (#1896) Goal: Provide a standardised way of logging performance metrics. The API is similar to using Node's native perf_hooks, but slightly easier to use. The Profiler will ensure { datadog: true } is appended to all logs so we can ingest this by searching for this key. Since the Profiler class holds some state, each instance should be created within a function scope so it can be garbage collected. 
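A minimal usage sketch of this pattern, pieced together from the call sites in this diff. The import path and exact method signatures are assumptions inferred from the patch (the bot re-exports the SDK's Profiler from src/utils, per the SDKUtils.ts hunk below); treat it as an illustration rather than the SDK's documented API.

import winston from "winston";
import { Profiler } from "../utils"; // assumed re-export of the SDK's Profiler (see SDKUtils.ts below)

const logger = winston.createLogger({ transports: [new winston.transports.Console()] });

async function exampleLoop(): Promise<void> {
  // Instantiate inside a function scope so the Profiler's state can be garbage collected.
  const profiler = new Profiler({ at: "Example#exampleLoop", logger });

  // start()/stop() brackets one operation; stop() logs the elapsed time plus any metadata,
  // with { datadog: true } appended so the log line can be found by searching for that key.
  const mark = profiler.start("updateClients");
  await new Promise((resolve) => setTimeout(resolve, 250)); // stand-in for real work
  mark.stop({ message: "Updated clients." });

  // mark()/measure() records named points and logs the span between any two of them.
  profiler.mark("loopStart");
  await new Promise((resolve) => setTimeout(resolve, 100)); // more stand-in work
  profiler.mark("loopEnd");
  profiler.measure("loopTime", { message: "Time to loop", from: "loopStart", to: "loopEnd" });

  // measureAsync() wraps a promise and logs its duration under the given label.
  const answer = await profiler.measureAsync(Promise.resolve(42), "answer", { message: "Resolved stand-in promise" });
  console.log({ answer });
}

void exampleLoop();

Compared with the previous ad-hoc performance.now() arithmetic, the call sites in the diff below shrink to a start()/stop() pair or to named marks.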
---------

Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com>
---
 package.json                      |  2 +-
 src/clients/InventoryClient.ts    | 38 +++++++++++++----------
 src/clients/TokenClient.ts        | 15 +++++----
 src/dataworker/DataworkerUtils.ts | 15 ++++++---
 src/dataworker/index.ts           | 51 +++++++++++++++++++------------
 src/finalizer/index.ts            | 35 ++++++++++++++++-----
 src/libexec/util/evm/util.ts      | 19 +++++++-----
 src/relayer/Relayer.ts            | 22 ++++++++-----
 src/relayer/index.ts              | 28 ++++++++---------
 src/utils/SDKUtils.ts             |  1 +
 yarn.lock                         | 15 ++++++---
 11 files changed, 150 insertions(+), 91 deletions(-)

diff --git a/package.json b/package.json
index d8fb23d01..8f14ef3b4 100644
--- a/package.json
+++ b/package.json
@@ -12,7 +12,7 @@
   "dependencies": {
     "@across-protocol/constants": "^3.1.19",
     "@across-protocol/contracts": "^3.0.16",
-    "@across-protocol/sdk": "^3.3.18",
+    "@across-protocol/sdk": "^3.3.21",
     "@arbitrum/sdk": "^4.0.2",
     "@consensys/linea-sdk": "^0.2.1",
     "@defi-wonderland/smock": "^2.3.5",
diff --git a/src/clients/InventoryClient.ts b/src/clients/InventoryClient.ts
index 1e25e19aa..ad42aad66 100644
--- a/src/clients/InventoryClient.ts
+++ b/src/clients/InventoryClient.ts
@@ -25,6 +25,7 @@ import {
   assert,
   compareAddressesSimple,
   getUsdcSymbol,
+  Profiler,
   getNativeTokenSymbol,
 } from "../utils";
 import { HubPoolClient, TokenClient, BundleDataClient } from ".";
@@ -59,6 +60,7 @@ export class InventoryClient {
   private readonly formatWei: ReturnType<typeof createFormatFunction>;
   private bundleRefundsPromise: Promise<CombinedRefunds[]> = undefined;
   private excessRunningBalancePromises: { [l1Token: string]: Promise<{ [chainId: number]: BigNumber }> } = {};
+  private profiler: InstanceType<typeof Profiler>;
 
   constructor(
     readonly relayer: string,
@@ -75,6 +77,10 @@
   ) {
     this.scalar = sdkUtils.fixedPointAdjustment;
     this.formatWei = createFormatFunction(2, 4, false, 18);
+    this.profiler = new Profiler({
+      logger: this.logger,
+      at: "InventoryClient",
+    });
   }
 
   /**
@@ -299,13 +305,15 @@ export class InventoryClient {
 
   async getBundleRefunds(l1Token: string): Promise<{ [chainId: string]: BigNumber }> {
     let refundsToConsider: CombinedRefunds[] = [];
+    let mark: ReturnType<typeof this.profiler.start>;
     // Increase virtual balance by pending relayer refunds from the latest valid bundle and the
     // upcoming bundle. We can assume that all refunds from the second latest valid bundle have already
     // been executed.
-    let startTimer: number;
     if (!isDefined(this.bundleRefundsPromise)) {
-      startTimer = performance.now();
       // @dev Save this as a promise so that other parallel calls to this function don't make the same call.
+ mark = this.profiler.start("bundleRefunds", { + l1Token, + }); this.bundleRefundsPromise = this.getAllBundleRefunds(); } refundsToConsider = lodash.cloneDeep(await this.bundleRefundsPromise); @@ -327,12 +335,12 @@ export class InventoryClient { }, {} ); - if (startTimer) { - this.log(`Time taken to get bundle refunds: ${Math.round((performance.now() - startTimer) / 1000)}s`, { - l1Token, - totalRefundsPerChain, - }); - } + + mark?.stop({ + message: "Time to calculate total refunds per chain", + l1Token, + }); + return totalRefundsPerChain; } @@ -618,7 +626,8 @@ export class InventoryClient { ): Promise<{ [chainId: number]: BigNumber }> { const { root: latestPoolRebalanceRoot, blockRanges } = await this.bundleDataClient.getLatestPoolRebalanceRoot(); const chainIds = this.hubPoolClient.configStoreClient.getChainIdIndicesForBlock(); - const start = performance.now(); + + const mark = this.profiler.start("getLatestRunningBalances"); const runningBalances = Object.fromEntries( await sdkUtils.mapAsync(chainsToEvaluate, async (chainId) => { const chainIdIndex = chainIds.indexOf(chainId); @@ -674,13 +683,10 @@ export class InventoryClient { ]; }) ); - this.log( - `Approximated latest (abs. val) running balance for ORU chains for token ${l1Token} in ${ - Math.round(performance.now() - start) / 1000 - }s`, - { runningBalances } - ); - + mark.stop({ + message: "Time to get running balances", + runningBalances, + }); return Object.fromEntries(Object.entries(runningBalances).map(([k, v]) => [k, v.absLatestRunningBalance])); } diff --git a/src/clients/TokenClient.ts b/src/clients/TokenClient.ts index f2d2b68e8..613535260 100644 --- a/src/clients/TokenClient.ts +++ b/src/clients/TokenClient.ts @@ -12,8 +12,8 @@ import { MAX_UINT_VAL, assign, blockExplorerLink, - getCurrentTime, getNetworkName, + Profiler, runTransaction, toBN, winston, @@ -27,6 +27,7 @@ type TokenShortfallType = { }; export class TokenClient { + private profiler: InstanceType; tokenData: TokenDataType = {}; tokenShortfall: TokenShortfallType = {}; @@ -35,7 +36,9 @@ export class TokenClient { readonly relayerAddress: string, readonly spokePoolClients: { [chainId: number]: SpokePoolClient }, readonly hubPoolClient: HubPoolClient - ) {} + ) { + this.profiler = new Profiler({ at: "TokenClient", logger }); + } getAllTokenData(): TokenDataType { return this.tokenData; @@ -238,7 +241,7 @@ export class TokenClient { } async update(): Promise { - const start = getCurrentTime(); + const mark = this.profiler.start("update"); this.logger.debug({ at: "TokenBalanceClient", message: "Updating TokenBalance client" }); const { hubPoolClient } = this; @@ -272,11 +275,7 @@ export class TokenClient { }) ); - this.logger.debug({ - at: "TokenBalanceClient", - message: `Updated TokenBalance client in ${getCurrentTime() - start} seconds.`, - balanceData, - }); + mark.stop({ message: "Updated TokenBalance client.", balanceData }); } async fetchTokenData( diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts index 9e2736b45..3352c5a30 100644 --- a/src/dataworker/DataworkerUtils.ts +++ b/src/dataworker/DataworkerUtils.ts @@ -24,6 +24,7 @@ import { getTimestampsForBundleEndBlocks, isDefined, MerkleTree, + Profiler, TOKEN_SYMBOLS_MAP, winston, } from "../utils"; @@ -350,7 +351,13 @@ export async function persistDataToArweave( Buffer.from(tag).length <= ARWEAVE_TAG_BYTE_LIMIT, `Arweave tag cannot exceed ${ARWEAVE_TAG_BYTE_LIMIT} bytes` ); - const startTime = performance.now(); + + const profiler = new Profiler({ + logger, + at: 
"DataworkerUtils#persistDataToArweave", + }); + const mark = profiler.start("persistDataToArweave"); + // Check if data already exists on Arweave with the given tag. // If so, we don't need to persist it again. const [matchingTxns, address, balance] = await Promise.all([ @@ -397,10 +404,8 @@ export async function persistDataToArweave( balance: formatWinston(balance), notificationPath: "across-arweave", }); - const endTime = performance.now(); - logger.debug({ - at: "Dataworker#index", - message: `Time to persist data to Arweave: ${endTime - startTime}ms`, + mark.stop({ + message: "Time to persist to Arweave", }); } } diff --git a/src/dataworker/index.ts b/src/dataworker/index.ts index 5458f6443..1c1bd646c 100644 --- a/src/dataworker/index.ts +++ b/src/dataworker/index.ts @@ -6,6 +6,7 @@ import { Signer, disconnectRedisClients, isDefined, + Profiler, } from "../utils"; import { spokePoolClientsToProviders } from "../common"; import { Dataworker } from "./Dataworker"; @@ -52,21 +53,29 @@ export async function createDataworker( dataworker, }; } + export async function runDataworker(_logger: winston.Logger, baseSigner: Signer): Promise { - logger = _logger; - let loopStart = performance.now(); - const { clients, config, dataworker } = await createDataworker(logger, baseSigner); - logger.debug({ + const profiler = new Profiler({ at: "Dataworker#index", - message: `Time to update non-spoke clients: ${(performance.now() - loopStart) / 1000}s`, + logger: _logger, }); - loopStart = performance.now(); + logger = _logger; + + const { clients, config, dataworker } = await profiler.measureAsync( + createDataworker(logger, baseSigner), + "createDataworker", + { + message: "Time to update non-spoke clients", + } + ); + let proposedBundleData: BundleData | undefined = undefined; let poolRebalanceLeafExecutionCount = 0; try { logger[startupLogLevel(config)]({ at: "Dataworker#index", message: "Dataworker started 👩‍🔬", config }); for (;;) { + profiler.mark("loopStart"); // Determine the spoke client's lookback: // 1. We initiate the spoke client event search windows based on a start bundle's bundle block end numbers and // how many bundles we want to look back from the start bundle blocks. 
@@ -108,7 +117,7 @@ export async function runDataworker(_logger: winston.Logger, baseSigner: Signer) fromBlocks, toBlocks ); - const dataworkerFunctionLoopTimerStart = performance.now(); + profiler.mark("dataworkerFunctionLoopTimerStart"); // Validate and dispute pending proposal before proposing a new one if (config.disputerEnabled) { await dataworker.validatePendingRootBundle( @@ -191,19 +200,23 @@ export async function runDataworker(_logger: winston.Logger, baseSigner: Signer) } else { await clients.multiCallerClient.executeTxnQueues(); } - - const dataworkerFunctionLoopTimerEnd = performance.now(); - logger.debug({ - at: "Dataworker#index", - message: `Time to update spoke pool clients and run dataworker function: ${Math.round( - (dataworkerFunctionLoopTimerEnd - loopStart) / 1000 - )}s`, - timeToLoadSpokes: Math.round((dataworkerFunctionLoopTimerStart - loopStart) / 1000), - timeToRunDataworkerFunctions: Math.round( - (dataworkerFunctionLoopTimerEnd - dataworkerFunctionLoopTimerStart) / 1000 - ), + profiler.mark("dataworkerFunctionLoopTimerEnd"); + profiler.measure("timeToLoadSpokes", { + message: "Time to load spokes in data worker loop", + from: "loopStart", + to: "dataworkerFunctionLoopTimerStart", + }); + profiler.measure("timeToRunDataworkerFunctions", { + message: "Time to run data worker functions in data worker loop", + from: "dataworkerFunctionLoopTimerStart", + to: "dataworkerFunctionLoopTimerEnd", + }); + // do we need to add an additional log for the sum of the previous? + profiler.measure("dataWorkerTotal", { + message: "Total time taken for dataworker loop", + from: "loopStart", + to: "dataworkerFunctionLoopTimerEnd", }); - loopStart = performance.now(); if (await processEndPollingLoop(logger, "Dataworker", config.pollingDelay)) { break; diff --git a/src/finalizer/index.ts b/src/finalizer/index.ts index 8b648dcf9..0e4f6fe3f 100644 --- a/src/finalizer/index.ts +++ b/src/finalizer/index.ts @@ -29,6 +29,7 @@ import { startupLogLevel, winston, CHAIN_IDs, + Profiler, } from "../utils"; import { ChainFinalizer, CrossChainMessage } from "./types"; import { @@ -471,17 +472,23 @@ export class FinalizerConfig extends DataworkerConfig { export async function runFinalizer(_logger: winston.Logger, baseSigner: Signer): Promise { logger = _logger; + // Same config as Dataworker for now. 
const config = new FinalizerConfig(process.env); + const profiler = new Profiler({ + logger, + at: "Finalizer#index", + config, + }); logger[startupLogLevel(config)]({ at: "Finalizer#index", message: "Finalizer started 🏋🏿‍♀️", config }); const { commonClients, spokePoolClients } = await constructFinalizerClients(logger, config, baseSigner); try { for (;;) { - const loopStart = performance.now(); + profiler.mark("loopStart"); await updateSpokePoolClients(spokePoolClients, ["TokensBridged"]); - const loopStartPostSpokePoolUpdates = performance.now(); + profiler.mark("loopStartPostSpokePoolUpdates"); if (config.finalizerEnabled) { const availableChains = commonClients.configStoreClient @@ -501,13 +508,25 @@ export async function runFinalizer(_logger: winston.Logger, baseSigner: Signer): } else { logger[startupLogLevel(config)]({ at: "Dataworker#index", message: "Finalizer disabled" }); } - const loopEndPostFinalizations = performance.now(); - logger.debug({ - at: "Finalizer#index", - message: `Time to loop: ${Math.round((loopEndPostFinalizations - loopStart) / 1000)}s`, - timeToUpdateSpokeClients: Math.round((loopStartPostSpokePoolUpdates - loopStart) / 1000), - timeToFinalize: Math.round((loopEndPostFinalizations - loopStartPostSpokePoolUpdates) / 1000), + profiler.mark("loopEndPostFinalizations"); + + profiler.measure("timeToUpdateSpokeClients", { + from: "loopStart", + to: "loopStartPostSpokePoolUpdates", + strategy: config.finalizationStrategy, + }); + + profiler.measure("timeToFinalize", { + from: "loopStartPostSpokePoolUpdates", + to: "loopEndPostFinalizations", + strategy: config.finalizationStrategy, + }); + + profiler.measure("loopTime", { + message: "Time to loop", + from: "loopStart", + to: "loopEndPostFinalizations", strategy: config.finalizationStrategy, }); diff --git a/src/libexec/util/evm/util.ts b/src/libexec/util/evm/util.ts index c19a86199..ff0272566 100644 --- a/src/libexec/util/evm/util.ts +++ b/src/libexec/util/evm/util.ts @@ -1,6 +1,6 @@ import assert from "assert"; import { Contract, EventFilter } from "ethers"; -import { getNetworkName, isDefined, paginatedEventQuery, winston } from "../../../utils"; +import { getNetworkName, isDefined, paginatedEventQuery, Profiler, winston } from "../../../utils"; import { Log, ScraperOpts } from "../../types"; /** @@ -45,8 +45,12 @@ export async function scrapeEvents( spokePool: Contract, eventName: string, opts: ScraperOpts & { toBlock: number }, - logger: winston.Logger + logger?: winston.Logger ): Promise { + const profiler = new Profiler({ + logger, + at: "scrapeEvents", + }); const { lookback, deploymentBlock, filterArgs, maxBlockRange, toBlock } = opts; const { chainId } = await spokePool.provider.getNetwork(); const chain = getNetworkName(chainId); @@ -55,13 +59,14 @@ export async function scrapeEvents( assert(toBlock > fromBlock, `${toBlock} > ${fromBlock}`); const searchConfig = { fromBlock, toBlock, maxBlockLookBack: maxBlockRange }; - const tStart = performance.now(); + const mark = profiler.start("paginatedEventQuery"); const filter = getEventFilter(spokePool, eventName, filterArgs[eventName]); const events = await paginatedEventQuery(spokePool, filter, searchConfig); - const tStop = performance.now(); - logger.debug({ - at: "scrapeEvents", - message: `Scraped ${events.length} ${chain} ${eventName} events in ${Math.round((tStop - tStart) / 1000)} seconds`, + mark.stop({ + message: `Scraped ${events.length} ${chain} ${eventName} events.`, + numEvents: events.length, + chain, + eventName, searchConfig, }); diff --git 
a/src/relayer/Relayer.ts b/src/relayer/Relayer.ts index 08d2b67a2..fa2428dd9 100644 --- a/src/relayer/Relayer.ts +++ b/src/relayer/Relayer.ts @@ -20,6 +20,7 @@ import { fixedPointAdjustment, TransactionResponse, ZERO_ADDRESS, + Profiler, } from "../utils"; import { RelayerClients } from "./RelayerClientHelper"; import { RelayerConfig } from "./RelayerConfig"; @@ -45,7 +46,7 @@ export class Relayer { private pendingTxnReceipts: { [chainId: number]: Promise } = {}; private lastLogTime = 0; private lastMaintenance = 0; - + private profiler: InstanceType; private hubPoolBlockBuffer: number; protected fillLimits: { [originChainId: number]: { fromBlock: number; limit: BigNumber }[] }; protected inventoryChainIds: number[]; @@ -69,7 +70,10 @@ export class Relayer { ]; } }); - + this.profiler = new Profiler({ + at: "Relayer", + logger: this.logger, + }); this.relayerAddress = getAddress(relayerAddress); this.inventoryChainIds = this.config.pollingDelay === 0 ? Object.values(clients.spokePoolClients).map(({ chainId }) => chainId) : []; @@ -1034,7 +1038,7 @@ export class Relayer { const originChain = getNetworkName(originChainId); const destinationChain = getNetworkName(destinationChainId); - const start = performance.now(); + const mark = this.profiler.start("resolveRepaymentChain"); const preferredChainIds = await inventoryClient.determineRefundChainId(deposit, hubPoolToken.address); if (preferredChainIds.length === 0) { // @dev If the origin chain is a lite chain and there are no preferred repayment chains, then we can assume @@ -1058,14 +1062,16 @@ export class Relayer { }; } - this.logger.debug({ - at: "Relayer::resolveRepaymentChain", + mark.stop({ message: `Determined eligible repayment chains ${JSON.stringify( preferredChainIds - )} for deposit ${depositId} from ${originChain} to ${destinationChain} in ${ - Math.round(performance.now() - start) / 1000 - }s.`, + )} for deposit ${depositId} from ${originChain} to ${destinationChain}.`, + preferredChainIds, + depositId, + originChain, + destinationChain, }); + const _repaymentFees = preferredChainIds.map((chainId) => repaymentFees.find(({ paymentChainId }) => paymentChainId === chainId) ); diff --git a/src/relayer/index.ts b/src/relayer/index.ts index a0187748e..a8e9afda7 100644 --- a/src/relayer/index.ts +++ b/src/relayer/index.ts @@ -3,9 +3,9 @@ import { config, delay, disconnectRedisClients, - getCurrentTime, getNetworkName, getRedisCache, + Profiler, Signer, winston, } from "../utils"; @@ -17,11 +17,12 @@ let logger: winston.Logger; const ACTIVE_RELAYER_EXPIRY = 600; // 10 minutes. const { RUN_IDENTIFIER: runIdentifier, BOT_IDENTIFIER: botIdentifier = "across-relayer" } = process.env; -const randomNumber = () => Math.floor(Math.random() * 1_000_000); export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): Promise { - const relayerRun = randomNumber(); - const startTime = getCurrentTime(); + const profiler = new Profiler({ + at: "Relayer#run", + logger: _logger, + }); logger = _logger; const config = new RelayerConfig(process.env); @@ -42,7 +43,8 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P // Explicitly don't log ignoredAddresses because it can be huge and can overwhelm log transports. 
const { ignoredAddresses: _ignoredConfig, ...loggedConfig } = config; - logger.debug({ at: "Relayer#run", message: "Relayer started 🏃‍♂️", loggedConfig, relayerRun }); + logger.debug({ at: "Relayer#run", message: "Relayer started 🏃‍♂️", loggedConfig }); + const mark = profiler.start("relayer"); const relayerClients = await constructRelayerClients(logger, config, baseSigner); const relayer = new Relayer(await baseSigner.getAddress(), logger, relayerClients, config); await relayer.init(); @@ -56,8 +58,7 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P if (loop) { logger.debug({ at: "relayer#run", message: `Starting relayer execution loop ${run}.` }); } - - const tLoopStart = performance.now(); + const tLoopStart = profiler.start("Relayer execution loop"); const ready = await relayer.update(); const activeRelayer = redis ? await redis.get(botIdentifier) : undefined; @@ -65,7 +66,7 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P // If this instance can't update, throw an error (for now). if (!ready && activeRelayer) { if (run * pollingDelay < 120) { - const runTime = Math.round((performance.now() - tLoopStart) / 1000); + const runTime = Math.round((performance.now() - tLoopStart.startTime) / 1000); const delta = pollingDelay - runTime; logger.debug({ at: "Relayer#run", message: `Not ready to relay, waiting ${delta} seconds.` }); await delay(delta); @@ -100,11 +101,11 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P if (!loop) { stop = true; } else { - const runTime = Math.round((performance.now() - tLoopStart) / 1000); - logger.debug({ - at: "Relayer#run", - message: `Completed relayer execution loop ${run} in ${runTime} seconds.`, + const runTimeMilliseconds = tLoopStart.stop({ + message: "Completed relayer execution loop.", + loopCount: run, }); + const runTime = Math.round(runTimeMilliseconds / 1000); if (!stop && runTime < pollingDelay) { const delta = pollingDelay - runTime; @@ -136,6 +137,5 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P } } - const runtime = getCurrentTime() - startTime; - logger.debug({ at: "Relayer#index", message: `Completed relayer run ${relayerRun} in ${runtime} seconds.` }); + mark.stop({ message: "Relayer instance completed." 
}); } diff --git a/src/utils/SDKUtils.ts b/src/utils/SDKUtils.ts index f8ccb5287..9914e05bb 100644 --- a/src/utils/SDKUtils.ts +++ b/src/utils/SDKUtils.ts @@ -53,6 +53,7 @@ export const { getTokenInfo, getL1TokenInfo, getUsdcSymbol, + Profiler, } = sdk.utils; export const { diff --git a/yarn.lock b/yarn.lock index ff63b9d57..1b90d9793 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16,6 +16,11 @@ resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.19.tgz#3c29b52ec5f2eece93a6abd50d580668b03dd7b3" integrity sha512-XOFF+o64TDn57xNfUB38kWy8lYyE9lB7PBdyoMOadsXx00HC3KMznFi/paLRKT1iZ50vDwHp00tNZbr7Z7umzA== +"@across-protocol/constants@^3.1.20": + version "3.1.20" + resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.20.tgz#305bd41f5644b7db5d9fd12a6a6b4bbbbe2fd016" + integrity sha512-B5RsvuOQsZdFgLk0WcFZGmoivm6g6gv95a+YKVBydcxZkNxAsyP065UQEDAmvRXvPhqGyehhd52515Xa/3bzyg== + "@across-protocol/contracts@^0.1.4": version "0.1.4" resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-0.1.4.tgz#64b3d91e639d2bb120ea94ddef3d160967047fa5" @@ -53,13 +58,13 @@ yargs "^17.7.2" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.3.18": - version "3.3.18" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.18.tgz#d39ef359f9f639921fb412a1355167354014a80f" - integrity sha512-Ea40yDPL94T3uc6HhqDj8X7vovPSyOVSmA6Z3C1uZmdwRdDKt8hlg8k7yxIg+8aR5aEJJ7hCZy6bHdI5XHpbFQ== +"@across-protocol/sdk@^3.3.21": + version "3.3.21" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.21.tgz#f223a0d88b09c5f2335723b89e777a36df5255ca" + integrity sha512-N/0H5KwPS+iyMh8m1QvIPBNJuPFhHlRW1841AzcmhXdzwRtbarmSWXaXNDifeWDWMf3Fie8TN2WnSW4oQKd1HQ== dependencies: "@across-protocol/across-token" "^1.0.0" - "@across-protocol/constants" "^3.1.19" + "@across-protocol/constants" "^3.1.20" "@across-protocol/contracts" "^3.0.16" "@eth-optimism/sdk" "^3.3.1" "@ethersproject/bignumber" "^5.7.0" From be49e3c42f93da84582abba3785f85661ae5b680 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Tue, 26 Nov 2024 12:29:47 +0100 Subject: [PATCH 27/44] chore: Bump constants (#1925) For new token defs. 
--- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8f14ef3b4..59fd040d0 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ "node": ">=20" }, "dependencies": { - "@across-protocol/constants": "^3.1.19", + "@across-protocol/constants": "^3.1.20", "@across-protocol/contracts": "^3.0.16", "@across-protocol/sdk": "^3.3.21", "@arbitrum/sdk": "^4.0.2", From 3e9c1e4108568b39fe007f8fcd71721db4bbe090 Mon Sep 17 00:00:00 2001 From: Matt Rice Date: Tue, 26 Nov 2024 20:52:41 -0500 Subject: [PATCH 28/44] fix: make datadog types numbers rather than strings (#1932) --- src/monitor/Monitor.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/monitor/Monitor.ts b/src/monitor/Monitor.ts index 9bee157a5..1122f8c5a 100644 --- a/src/monitor/Monitor.ts +++ b/src/monitor/Monitor.ts @@ -308,7 +308,7 @@ export class Monitor { throw new Error(`No decimals found for ${tokenSymbol}`); } return lodash.mapValues(columns, (cell: RelayerBalanceCell) => - lodash.mapValues(cell, (balance: BigNumber) => convertFromWei(balance.toString(), decimals)) + lodash.mapValues(cell, (balance: BigNumber) => Number(convertFromWei(balance.toString(), decimals))) ); }) ); From b7beea983d2649e8cd122bc3031c56620b5ec84f Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Sat, 30 Nov 2024 09:03:18 +0100 Subject: [PATCH 29/44] improve(relayer): Pass SpokePool addresses to listener processes (#1902) This skips an RPC call per SpokePool listener in the fast relayer configuration, and permits each SpokePool listener instance to communicate with only a single chain, instead of having to always query mainnet to resolve the current SpokePool address. --- src/clients/SpokePoolClient.ts | 7 ++-- src/common/Constants.ts | 2 +- src/libexec/RelayerSpokePoolIndexer.ts | 20 ++++++++--- src/libexec/SpokePoolListenerExperimental.ts | 22 +++++++++--- src/utils/ContractUtils.ts | 35 +++++++++++++------- 5 files changed, 61 insertions(+), 25 deletions(-) diff --git a/src/clients/SpokePoolClient.ts b/src/clients/SpokePoolClient.ts index dc2518cb6..cb6b51e52 100644 --- a/src/clients/SpokePoolClient.ts +++ b/src/clients/SpokePoolClient.ts @@ -78,14 +78,15 @@ export class IndexedSpokePoolClient extends clients.SpokePoolClient { */ protected startWorker(): void { const { - eventSearchConfig: { fromBlock, maxBlockLookBack: blockRange }, + eventSearchConfig: { fromBlock, maxBlockLookBack: blockrange }, + spokePool: { address: spokepool }, } = this; - const opts = { blockRange, lookback: `@${fromBlock}` }; + const opts = { spokepool, blockrange, lookback: `@${fromBlock}` }; const args = Object.entries(opts) .map(([k, v]) => [`--${k}`, `${v}`]) .flat(); - this.worker = spawn("node", [this.indexerPath, "--chainId", this.chainId.toString(), ...args], { + this.worker = spawn("node", [this.indexerPath, "--chainid", this.chainId.toString(), ...args], { stdio: ["ignore", "inherit", "inherit", "ipc"], }); diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 73e67b531..59fe6b34e 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -127,7 +127,7 @@ export const CHAIN_MAX_BLOCK_LOOKBACK = { [CHAIN_IDs.BOBA]: 4990, [CHAIN_IDs.LINEA]: 5000, [CHAIN_IDs.LISK]: 10000, - [CHAIN_IDs.MAINNET]: 10000, + [CHAIN_IDs.MAINNET]: 5000, [CHAIN_IDs.MODE]: 10000, [CHAIN_IDs.OPTIMISM]: 10000, // Quick [CHAIN_IDs.POLYGON]: 10000, diff --git a/src/libexec/RelayerSpokePoolIndexer.ts b/src/libexec/RelayerSpokePoolIndexer.ts index 
e6b211734..cca604523 100644
--- a/src/libexec/RelayerSpokePoolIndexer.ts
+++ b/src/libexec/RelayerSpokePoolIndexer.ts
@@ -15,6 +15,7 @@ import {
   getOriginFromURL,
   getProvider,
   getRedisCache,
+  getSpokePool,
   getWSProviders,
   Logger,
   winston,
@@ -117,11 +118,11 @@ async function listen(
  */
 async function run(argv: string[]): Promise<void> {
   const minimistOpts = {
-    string: ["lookback", "relayer"],
+    string: ["lookback", "relayer", "spokepool"],
   };
   const args = minimist(argv, minimistOpts);

-  const { chainId, lookback, relayer = null, maxBlockRange = 10_000 } = args;
+  const { chainId: chainId, lookback, relayer = null, blockrange: maxBlockRange = 10_000 } = args;
   assert(Number.isInteger(chainId), "chainId must be numeric ");
   assert(Number.isInteger(maxBlockRange), "maxBlockRange must be numeric");
   assert(!isDefined(relayer) || ethersUtils.isAddress(relayer), `relayer address is invalid (${relayer})`);
@@ -129,6 +130,12 @@ async function run(argv: string[]): Promise<void> {
   const { quorum = getChainQuorum(chainId) } = args;
   assert(Number.isInteger(quorum), "quorum must be numeric ");

+  let { spokepool: spokePoolAddr } = args;
+  assert(
+    !isDefined(spokePoolAddr) || ethersUtils.isAddress(spokePoolAddr),
+    `Invalid SpokePool address (${spokePoolAddr})`
+  );
+
   chain = getNetworkName(chainId);

   const quorumProvider = await getProvider(chainId);
@@ -152,16 +159,21 @@ async function run(argv: string[]): Promise<void> {
     logger.debug({ at: "RelayerSpokePoolIndexer::run", message: `Skipping lookback on ${chain}.` });
   }

+  const spokePool = getSpokePool(chainId, spokePoolAddr);
+  if (!isDefined(spokePoolAddr)) {
+    ({ address: spokePoolAddr } = spokePool);
+  }
+
   const opts = {
-    quorum,
+    spokePool: spokePoolAddr,
     deploymentBlock,
     lookback: latestBlock.number - startBlock,
     maxBlockRange,
     filterArgs: getEventFilterArgs(relayer),
+    quorum,
   };

   logger.debug({ at: "RelayerSpokePoolIndexer::run", message: `Starting ${chain} SpokePool Indexer.`, opts });
-  const spokePool = await utils.getSpokePoolContract(chainId);

   process.on("SIGHUP", () => {
     logger.debug({ at: "Relayer#run", message: `Received SIGHUP in ${chain} listener, stopping...` });
diff --git a/src/libexec/SpokePoolListenerExperimental.ts b/src/libexec/SpokePoolListenerExperimental.ts
index 5236575c7..0a14c9d4f 100644
--- a/src/libexec/SpokePoolListenerExperimental.ts
+++ b/src/libexec/SpokePoolListenerExperimental.ts
@@ -18,6 +18,7 @@ import {
   getNodeUrlList,
   getOriginFromURL,
   getProvider,
+  getSpokePool,
   getRedisCache,
   Logger,
   winston,
@@ -161,12 +162,12 @@ async function listen(eventMgr: EventManager, spokePool: Contract, eventNames: s
  */
 async function run(argv: string[]): Promise<void> {
   const minimistOpts = {
-    string: ["lookback", "relayer"],
+    string: ["lookback", "relayer", "spokepool"],
   };
   const args = minimist(argv, minimistOpts);

-  ({ chainId } = args);
-  const { lookback, relayer = null, maxBlockRange = 10_000 } = args;
+  ({ chainid: chainId } = args);
+  const { lookback, relayer = null, blockrange: maxBlockRange = 10_000 } = args;
   assert(Number.isInteger(chainId), "chainId must be numeric ");
   assert(Number.isInteger(maxBlockRange), "maxBlockRange must be numeric");
   assert(!isDefined(relayer) || ethersUtils.isAddress(relayer), `relayer address is invalid (${relayer})`);
@@ -174,6 +175,12 @@ async function run(argv: string[]): Promise<void> {
   const { quorum = getChainQuorum(chainId) } = args;
   assert(Number.isInteger(quorum), "quorum must be numeric ");

+  let { spokepool: spokePoolAddr } = args;
+  assert(
+    !isDefined(spokePoolAddr) ||
ethersUtils.isAddress(spokePoolAddr), + `Invalid SpokePool address (${spokePoolAddr})` + ); + chain = getNetworkName(chainId); const quorumProvider = await getProvider(chainId); @@ -197,16 +204,21 @@ async function run(argv: string[]): Promise { logger.debug({ at: "RelayerSpokePoolListener::run", message: `Skipping lookback on ${chain}.` }); } + const spokePool = getSpokePool(chainId, spokePoolAddr); + if (!isDefined(spokePoolAddr)) { + ({ address: spokePoolAddr } = spokePool); + } + const opts = { - quorum, + spokePool: spokePoolAddr, deploymentBlock, lookback: latestBlock.number - startBlock, maxBlockRange, filterArgs: getEventFilterArgs(relayer), + quorum, }; logger.debug({ at: "RelayerSpokePoolListener::run", message: `Starting ${chain} SpokePool Indexer.`, opts }); - const spokePool = await utils.getSpokePoolContract(chainId); process.on("SIGHUP", () => { logger.debug({ at: "Relayer#run", message: `Received SIGHUP in ${chain} listener, stopping...` }); diff --git a/src/utils/ContractUtils.ts b/src/utils/ContractUtils.ts index 31bf09e60..d4e4206b0 100644 --- a/src/utils/ContractUtils.ts +++ b/src/utils/ContractUtils.ts @@ -1,32 +1,43 @@ -import { getNetworkName, Contract, Signer, getDeployedAddress, getDeployedBlockNumber } from "."; - import * as typechain from "@across-protocol/contracts"; // TODO: refactor once we've fixed export from contract repo +import { CHAIN_IDs, getNetworkName, Contract, Signer, getDeployedAddress, getDeployedBlockNumber } from "."; // Return an ethers contract instance for a deployed contract, imported from the Across-protocol contracts repo. export function getDeployedContract(contractName: string, networkId: number, signer?: Signer): Contract { try { const address = getDeployedAddress(contractName, networkId); // If the contractName is SpokePool then we need to modify it to find the correct contract factory artifact. - const factoryName = contractName === "SpokePool" ? castSpokePoolName(networkId) : contractName; - const artifact = typechain[`${[factoryName.replace("_", "")]}__factory`]; + const factoryName = `${contractName === "SpokePool" ? castSpokePoolName(networkId) : contractName}__factory`; + const artifact = typechain[factoryName]; return new Contract(address, artifact.abi, signer); } catch (error) { - throw new Error(`Could not find address for contract ${contractName} on ${networkId}`); + throw new Error(`Could not find address for contract ${contractName} on ${networkId} (${error})`); } } // If the name of the contract is SpokePool then we need to apply a transformation on the name to get the correct // contract factory name. For example, if the network is "mainnet" then the contract is called Ethereum_SpokePool. 
export function castSpokePoolName(networkId: number): string { - let networkName = getNetworkName(networkId); - if (networkName == "Mainnet" || networkName == "Rinkeby" || networkName == "Kovan" || networkName == "Goerli") { - return "Ethereum_SpokePool"; + let networkName: string; + switch (networkId) { + case CHAIN_IDs.MAINNET: + case CHAIN_IDs.SEPOLIA: + return "Ethereum_SpokePool"; + case CHAIN_IDs.ARBITRUM: + return "Arbitrum_SpokePool"; + case CHAIN_IDs.ZK_SYNC: + return "ZkSync_SpokePool"; + default: + networkName = getNetworkName(networkId); } - if (networkName.includes("-")) { - networkName = networkName.substring(0, networkName.indexOf("-")); - } - return `${networkName}_SpokePool`; + return `${networkName.replace(" ", "")}_SpokePool`; +} + +// For a chain ID and optional SpokePool address, return a Contract instance with the corresponding ABI. +export function getSpokePool(chainId: number, address?: string): Contract { + const factoryName = castSpokePoolName(chainId); + const artifact = typechain[`${factoryName}__factory`]; + return new Contract(address ?? getDeployedAddress("SpokePool", chainId), artifact.abi); } export function getParamType(contractName: string, functionName: string, paramName: string): string { From 86368be75dc2d79c1bed435b780842c4b0616e1b Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:06:24 +0100 Subject: [PATCH 30/44] fix(relayer): Respect chainId input argument in ethers listener (#1934) --- src/libexec/RelayerSpokePoolIndexer.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexec/RelayerSpokePoolIndexer.ts b/src/libexec/RelayerSpokePoolIndexer.ts index cca604523..618413037 100644 --- a/src/libexec/RelayerSpokePoolIndexer.ts +++ b/src/libexec/RelayerSpokePoolIndexer.ts @@ -122,7 +122,7 @@ async function run(argv: string[]): Promise { }; const args = minimist(argv, minimistOpts); - const { chainId: chainId, lookback, relayer = null, blockrange: maxBlockRange = 10_000 } = args; + const { chainid: chainId, lookback, relayer = null, blockrange: maxBlockRange = 10_000 } = args; assert(Number.isInteger(chainId), "chainId must be numeric "); assert(Number.isInteger(maxBlockRange), "maxBlockRange must be numeric"); assert(!isDefined(relayer) || ethersUtils.isAddress(relayer), `relayer address is invalid (${relayer})`); From e393a92aad9273925160684eae6483b1aede228e Mon Sep 17 00:00:00 2001 From: Matt Rice Date: Thu, 5 Dec 2024 05:10:31 -0500 Subject: [PATCH 31/44] fix: make datadog logs more digestible in datadog (#1937) --- src/monitor/Monitor.ts | 37 +++++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/src/monitor/Monitor.ts b/src/monitor/Monitor.ts index 1122f8c5a..0b488694e 100644 --- a/src/monitor/Monitor.ts +++ b/src/monitor/Monitor.ts @@ -8,8 +8,6 @@ import { L1Token, RelayerBalanceReport, RelayerBalanceTable, - RelayerBalanceColumns, - RelayerBalanceCell, TokenTransfer, } from "../interfaces"; import { @@ -44,14 +42,13 @@ import { resolveTokenDecimals, sortEventsDescending, getWidestPossibleExpectedBlockRange, + utils, } from "../utils"; import { MonitorClients, updateMonitorClients } from "./MonitorClientHelper"; import { MonitorConfig } from "./MonitorConfig"; import { CombinedRefunds } from "../dataworker/DataworkerUtils"; -import lodash from "lodash"; - // 60 minutes, which is the length of the challenge window, so if a rebalance takes longer than this to finalize, // then its finalizing after the subsequent challenge period 
has started, which is sub-optimal. export const REBALANCE_FINALIZE_GRACE_PERIOD = Number(process.env.REBALANCE_FINALIZE_GRACE_PERIOD ?? 60 * 60); @@ -301,22 +298,30 @@ export class Monitor { }); // Note: types are here for clarity, not necessity. - const machineReadableReport = lodash.mapValues(reports, (table: RelayerBalanceTable) => - lodash.mapValues(table, (columns: RelayerBalanceColumns, tokenSymbol: string) => { + + Object.entries(reports).forEach(([relayer, balanceTable]) => { + Object.entries(balanceTable).forEach(([tokenSymbol, columns]) => { const decimals = allL1Tokens.find((token) => token.symbol === tokenSymbol)?.decimals; if (!decimals) { throw new Error(`No decimals found for ${tokenSymbol}`); } - return lodash.mapValues(columns, (cell: RelayerBalanceCell) => - lodash.mapValues(cell, (balance: BigNumber) => Number(convertFromWei(balance.toString(), decimals))) - ); - }) - ); - - this.logger.debug({ - at: "Monitor#reportRelayerBalances", - message: "Machine-readable balance report", - report: machineReadableReport, + Object.entries(columns).forEach(([chainName, cell]) => { + Object.entries(cell).forEach(([balanceType, balance]) => { + this.logger.debug({ + at: "Monitor#reportRelayerBalances", + message: "Machine-readable single balance report", + relayer, + tokenSymbol, + decimals, + chainName, + balanceType, + balanceInWei: balance.toString(), + balance: Number(utils.formatUnits(balance, decimals)), + datadog: true, + }); + }); + }); + }); }); } } From 8d79323ff527250078eab54353473faddeec3550 Mon Sep 17 00:00:00 2001 From: bmzig <57361391+bmzig@users.noreply.github.com> Date: Fri, 6 Dec 2024 12:06:56 -0600 Subject: [PATCH 32/44] improve: only log relayer balances for tokens on supported chains (#1938) Signed-off-by: bennett --- src/monitor/Monitor.ts | 39 +++++++++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 14 deletions(-) diff --git a/src/monitor/Monitor.ts b/src/monitor/Monitor.ts index 0b488694e..7202af238 100644 --- a/src/monitor/Monitor.ts +++ b/src/monitor/Monitor.ts @@ -44,10 +44,10 @@ import { getWidestPossibleExpectedBlockRange, utils, } from "../utils"; - import { MonitorClients, updateMonitorClients } from "./MonitorClientHelper"; import { MonitorConfig } from "./MonitorConfig"; import { CombinedRefunds } from "../dataworker/DataworkerUtils"; +import { PUBLIC_NETWORKS } from "@across-protocol/constants"; // 60 minutes, which is the length of the challenge window, so if a rebalance takes longer than this to finalize, // then its finalizing after the subsequent challenge period has started, which is sub-optimal. 
@@ -306,20 +306,22 @@ export class Monitor { throw new Error(`No decimals found for ${tokenSymbol}`); } Object.entries(columns).forEach(([chainName, cell]) => { - Object.entries(cell).forEach(([balanceType, balance]) => { - this.logger.debug({ - at: "Monitor#reportRelayerBalances", - message: "Machine-readable single balance report", - relayer, - tokenSymbol, - decimals, - chainName, - balanceType, - balanceInWei: balance.toString(), - balance: Number(utils.formatUnits(balance, decimals)), - datadog: true, + if (this._tokenEnabledForNetwork(tokenSymbol, chainName)) { + Object.entries(cell).forEach(([balanceType, balance]) => { + this.logger.debug({ + at: "Monitor#reportRelayerBalances", + message: "Machine-readable single balance report", + relayer, + tokenSymbol, + decimals, + chainName, + balanceType, + balanceInWei: balance.toString(), + balance: Number(utils.formatUnits(balance, decimals)), + datadog: true, + }); }); - }); + } }); }); }); @@ -1289,4 +1291,13 @@ export class Monitor { }) ); } + + private _tokenEnabledForNetwork(tokenSymbol: string, networkName: string): boolean { + for (const [chainId, network] of Object.entries(PUBLIC_NETWORKS)) { + if (network.name === networkName) { + return isDefined(TOKEN_SYMBOLS_MAP[tokenSymbol]?.addresses[chainId]); + } + } + return false; + } } From e32d3de5b043f12188319661623d96fcd2c09710 Mon Sep 17 00:00:00 2001 From: Matt Rice Date: Fri, 6 Dec 2024 14:58:04 -0500 Subject: [PATCH 33/44] fix: remove excess reporter datadog logs (#1939) Signed-off-by: Matt Rice --- src/monitor/Monitor.ts | 55 +++++++++++++++++++++--------------------- 1 file changed, 28 insertions(+), 27 deletions(-) diff --git a/src/monitor/Monitor.ts b/src/monitor/Monitor.ts index 7202af238..0182ef912 100644 --- a/src/monitor/Monitor.ts +++ b/src/monitor/Monitor.ts @@ -296,36 +296,37 @@ export class Monitor { message: `Balance report for ${relayer} 📖`, mrkdwn, }); - - // Note: types are here for clarity, not necessity. - - Object.entries(reports).forEach(([relayer, balanceTable]) => { - Object.entries(balanceTable).forEach(([tokenSymbol, columns]) => { - const decimals = allL1Tokens.find((token) => token.symbol === tokenSymbol)?.decimals; - if (!decimals) { - throw new Error(`No decimals found for ${tokenSymbol}`); - } - Object.entries(columns).forEach(([chainName, cell]) => { - if (this._tokenEnabledForNetwork(tokenSymbol, chainName)) { - Object.entries(cell).forEach(([balanceType, balance]) => { - this.logger.debug({ - at: "Monitor#reportRelayerBalances", - message: "Machine-readable single balance report", - relayer, - tokenSymbol, - decimals, - chainName, - balanceType, - balanceInWei: balance.toString(), - balance: Number(utils.formatUnits(balance, decimals)), - datadog: true, - }); + } + Object.entries(reports).forEach(([relayer, balanceTable]) => { + Object.entries(balanceTable).forEach(([tokenSymbol, columns]) => { + const decimals = allL1Tokens.find((token) => token.symbol === tokenSymbol)?.decimals; + if (!decimals) { + throw new Error(`No decimals found for ${tokenSymbol}`); + } + Object.entries(columns).forEach(([chainName, cell]) => { + if (this._tokenEnabledForNetwork(tokenSymbol, chainName)) { + Object.entries(cell).forEach(([balanceType, balance]) => { + // Don't log zero balances. 
+ if (balance.isZero()) { + return; + } + this.logger.debug({ + at: "Monitor#reportRelayerBalances", + message: "Machine-readable single balance report", + relayer, + tokenSymbol, + decimals, + chainName, + balanceType, + balanceInWei: balance.toString(), + balance: Number(utils.formatUnits(balance, decimals)), + datadog: true, }); - } - }); + }); + } }); }); - } + }); } // Update current balances of all tokens on each supported chain for each relayer. From 452ed8138d863627157c32167e00cf08b8adff88 Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Tue, 10 Dec 2024 09:51:18 -0600 Subject: [PATCH 34/44] improve(Relayer): Don't include log about outstanding xchain txfers if there are 0 txfers (#1944) * improve(Relayer): Don't include log about outstanding xchain txfers if there are 0 txfers * Update Relayer.ts --- src/relayer/Relayer.ts | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/src/relayer/Relayer.ts b/src/relayer/Relayer.ts index fa2428dd9..3f992385e 100644 --- a/src/relayer/Relayer.ts +++ b/src/relayer/Relayer.ts @@ -1251,21 +1251,29 @@ export class Relayer { if (this.clients.inventoryClient.isInventoryManagementEnabled() && chainId !== hubChainId) { // Shortfalls are mapped to deposit output tokens so look up output token in token symbol map. const l1Token = this.clients.hubPoolClient.getL1TokenInfoForAddress(token, chainId); - crossChainLog = - "There is " + - formatter( - this.clients.inventoryClient.crossChainTransferClient - .getOutstandingCrossChainTransferAmount(this.relayerAddress, chainId, l1Token.address, token) - // TODO: Add in additional l2Token param here once we can specify it - .toString() - ) + - ` inbound L1->L2 ${symbol} transfers. `; + const outstandingCrossChainTransferAmount = + this.clients.inventoryClient.crossChainTransferClient.getOutstandingCrossChainTransferAmount( + this.relayerAddress, + chainId, + l1Token.address, + token + ); + crossChainLog = outstandingCrossChainTransferAmount.gt(0) + ? " There is " + + formatter( + this.clients.inventoryClient.crossChainTransferClient + .getOutstandingCrossChainTransferAmount(this.relayerAddress, chainId, l1Token.address, token) + // TODO: Add in additional l2Token param here once we can specify it + .toString() + ) + + ` inbound L1->L2 ${symbol} transfers. ` + : undefined; } mrkdwn += ` - ${symbol} cumulative shortfall of ` + `${formatter(shortfall.toString())} ` + `(have ${formatter(balance.toString())} but need ` + - `${formatter(needed.toString())}). ${crossChainLog}` + + `${formatter(needed.toString())}).${crossChainLog}` + `This is blocking deposits: ${deposits}.\n`; }); }); From 5ed67e430dc91d841528ffb333e51ad777781b96 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Tue, 10 Dec 2024 21:35:18 +0100 Subject: [PATCH 35/44] chore: Bump constants (#1945) For POOL on World Chain. 
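For reference, a minimal sketch of how the new definition should surface once this lands (`CHAIN_IDs` and `TOKEN_SYMBOLS_MAP` are the same exports this repo already consumes from `@across-protocol/constants`; the lookup below is illustrative, not code in this diff):

```ts
import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants";

// With the bumped constants, the POOL token definition is expected to carry a
// World Chain address; on older versions this lookup returns undefined.
const poolOnWorldChain: string | undefined = TOKEN_SYMBOLS_MAP.POOL?.addresses[CHAIN_IDs.WORLD_CHAIN];
```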
--- package.json | 2 +- yarn.lock | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 59fd040d0..81c5304f4 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ "node": ">=20" }, "dependencies": { - "@across-protocol/constants": "^3.1.20", + "@across-protocol/constants": "^3.1.21", "@across-protocol/contracts": "^3.0.16", "@across-protocol/sdk": "^3.3.21", "@arbitrum/sdk": "^4.0.2", diff --git a/yarn.lock b/yarn.lock index 1b90d9793..117f6cb6d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -21,6 +21,11 @@ resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.20.tgz#305bd41f5644b7db5d9fd12a6a6b4bbbbe2fd016" integrity sha512-B5RsvuOQsZdFgLk0WcFZGmoivm6g6gv95a+YKVBydcxZkNxAsyP065UQEDAmvRXvPhqGyehhd52515Xa/3bzyg== +"@across-protocol/constants@^3.1.21": + version "3.1.21" + resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.21.tgz#e5852daa51b4e1a215a32672c252287fea593256" + integrity sha512-ajDGLpsbmse3XYPFKsih98RO/CSzpRj4iiPIzfOUvmslBfm3vIYj5nVdLKahgPumsQ+Yq2W3+PF+ZSr6Ac3tRg== + "@across-protocol/contracts@^0.1.4": version "0.1.4" resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-0.1.4.tgz#64b3d91e639d2bb120ea94ddef3d160967047fa5" From 3ea50ffe512a9adfd98678075df5f3d54f2ed5fe Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Mon, 16 Dec 2024 16:50:52 +0100 Subject: [PATCH 36/44] chore: Bump constants, contracts & sdk (#1941) For some RPC-related improvements & bugfixes, as well as: - WBTC on Lisk. - POOL on World Chain. - USDC.e on Aleph Zero. --- package.json | 6 +- test/Dataworker.loadData.slowFill.ts | 2 - test/Relayer.BasicFill.ts | 2 - test/Relayer.SlowFill.ts | 1 - yarn.lock | 152 ++++++++++++++++++--------- 5 files changed, 106 insertions(+), 57 deletions(-) diff --git a/package.json b/package.json index 81c5304f4..d0c2e86e3 100644 --- a/package.json +++ b/package.json @@ -10,9 +10,9 @@ "node": ">=20" }, "dependencies": { - "@across-protocol/constants": "^3.1.21", - "@across-protocol/contracts": "^3.0.16", - "@across-protocol/sdk": "^3.3.21", + "@across-protocol/constants": "^3.1.22", + "@across-protocol/contracts": "^3.0.18", + "@across-protocol/sdk": "^3.3.25", "@arbitrum/sdk": "^4.0.2", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", diff --git a/test/Dataworker.loadData.slowFill.ts b/test/Dataworker.loadData.slowFill.ts index 1fb54e715..926c9e873 100644 --- a/test/Dataworker.loadData.slowFill.ts +++ b/test/Dataworker.loadData.slowFill.ts @@ -321,8 +321,6 @@ describe("BundleDataClient: Slow fill handling & validation", async function () const destinationChainDeposit = spokePoolClient_2.getDeposits()[0]; // Generate slow fill requests for the slow fill-eligible deposits - await spokePool_1.setCurrentTime(depositsWithSlowFillRequests[1].exclusivityDeadline + 1); // Temporary workaround - await spokePool_2.setCurrentTime(depositsWithSlowFillRequests[0].exclusivityDeadline + 1); // Temporary workaround await requestSlowFill(spokePool_2, relayer, depositsWithSlowFillRequests[0]); await requestSlowFill(spokePool_1, relayer, depositsWithSlowFillRequests[1]); const lastDestinationChainSlowFillRequestBlock = await spokePool_2.provider.getBlockNumber(); diff --git a/test/Relayer.BasicFill.ts b/test/Relayer.BasicFill.ts index f9d322cc4..3522cc43a 100644 --- a/test/Relayer.BasicFill.ts +++ b/test/Relayer.BasicFill.ts @@ -937,7 +937,6 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async 
function () { spy.getCalls().find(({ lastArg }) => lastArg.message.includes("Skipping fill for deposit with message")) ).to.not.be.undefined; } else { - await spokePool_2.setCurrentTime(deposit.exclusivityDeadline + 1); // Temporary workaround. // Now speed up deposit again with a higher fee and a message of 0x. This should be filled. expect((await txnReceipts[destinationChainId]).length).to.equal(1); expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; @@ -1011,7 +1010,6 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { depositor ); - await spokePool_2.setCurrentTime(deposit.exclusivityDeadline + 1); // Temporary workaround. await updateAllClients(); txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); expect((await txnReceipts[destinationChainId]).length).to.equal(1); diff --git a/test/Relayer.SlowFill.ts b/test/Relayer.SlowFill.ts index 15e4d305a..008559a1b 100644 --- a/test/Relayer.SlowFill.ts +++ b/test/Relayer.SlowFill.ts @@ -209,7 +209,6 @@ describe("Relayer: Initiates slow fill requests", async function () { ); expect(deposit).to.exist; - await spokePool_2.setCurrentTime(deposit.exclusivityDeadline + 1); // Temporary workaround await updateAllClients(); const _txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); const txnHashes = await _txnReceipts[destinationChainId]; diff --git a/yarn.lock b/yarn.lock index 117f6cb6d..5c34e9a5e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11,20 +11,10 @@ "@uma/common" "^2.17.0" hardhat "^2.9.3" -"@across-protocol/constants@^3.1.19": - version "3.1.19" - resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.19.tgz#3c29b52ec5f2eece93a6abd50d580668b03dd7b3" - integrity sha512-XOFF+o64TDn57xNfUB38kWy8lYyE9lB7PBdyoMOadsXx00HC3KMznFi/paLRKT1iZ50vDwHp00tNZbr7Z7umzA== - -"@across-protocol/constants@^3.1.20": - version "3.1.20" - resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.20.tgz#305bd41f5644b7db5d9fd12a6a6b4bbbbe2fd016" - integrity sha512-B5RsvuOQsZdFgLk0WcFZGmoivm6g6gv95a+YKVBydcxZkNxAsyP065UQEDAmvRXvPhqGyehhd52515Xa/3bzyg== - -"@across-protocol/constants@^3.1.21": - version "3.1.21" - resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.21.tgz#e5852daa51b4e1a215a32672c252287fea593256" - integrity sha512-ajDGLpsbmse3XYPFKsih98RO/CSzpRj4iiPIzfOUvmslBfm3vIYj5nVdLKahgPumsQ+Yq2W3+PF+ZSr6Ac3tRg== +"@across-protocol/constants@^3.1.22": + version "3.1.22" + resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.22.tgz#888fb6852b9781aa9f872ac44e888d7bf2a643c7" + integrity sha512-l9CteL0FGHPPIbLaAztANpm/uNk8jV7hmDuecAToZdqAgqcN9E9Hfi44Fflr6H882uVsNlTU0/h1oWkTeifUnA== "@across-protocol/contracts@^0.1.4": version "0.1.4" @@ -35,12 +25,12 @@ "@openzeppelin/contracts" "4.1.0" "@uma/core" "^2.18.0" -"@across-protocol/contracts@^3.0.16": - version "3.0.16" - resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-3.0.16.tgz#22eb0c1dcdb01e8ca504dc2351d46513d9f71cc6" - integrity sha512-vwg+PmWaenlrx7kTHZdjDTTj1PwXWFU3rMlFyfKM8xBXbPWhIfMQCKCYOwFrGmZw2nRTYgoyhoKN/f6rUs/snw== +"@across-protocol/contracts@^3.0.18": + version "3.0.18" + resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-3.0.18.tgz#b5acbebcb249b193a4d9a019a7cd5af00131a70b" + integrity sha512-4eWgmK8D33ezNZPx/ePLnh3Za0FfcHj5mmDK5FfWbeOM+AjXOzV7miV2/xBcEZXclhIc52e3GcPdbegI15lA2w== dependencies: - "@across-protocol/constants" "^3.1.19" + "@across-protocol/constants" 
"^3.1.22" "@coral-xyz/anchor" "^0.30.1" "@defi-wonderland/smock" "^2.3.4" "@eth-optimism/contracts" "^0.5.40" @@ -54,28 +44,28 @@ "@solana/spl-token" "^0.4.6" "@solana/web3.js" "^1.31.0" "@types/yargs" "^17.0.33" - "@uma/common" "^2.34.0" + "@uma/common" "^2.37.3" "@uma/contracts-node" "^0.4.17" - "@uma/core" "^2.56.0" + "@uma/core" "^2.61.0" axios "^1.7.4" bs58 "^6.0.0" prettier-plugin-rust "^0.1.9" yargs "^17.7.2" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.3.21": - version "3.3.21" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.21.tgz#f223a0d88b09c5f2335723b89e777a36df5255ca" - integrity sha512-N/0H5KwPS+iyMh8m1QvIPBNJuPFhHlRW1841AzcmhXdzwRtbarmSWXaXNDifeWDWMf3Fie8TN2WnSW4oQKd1HQ== +"@across-protocol/sdk@^3.3.25": + version "3.3.25" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.25.tgz#6eec255fb7a1025050e0415b56f1bf8681936b1e" + integrity sha512-nBBrXY/kslvfsYnVd6kTNOuDSomlfRTw6v4uI40au/rEzPQ6G8X5d/F+DGN3iPfi3ltHY5BEiqE+E6s7AxHA8A== dependencies: "@across-protocol/across-token" "^1.0.0" - "@across-protocol/constants" "^3.1.20" - "@across-protocol/contracts" "^3.0.16" + "@across-protocol/constants" "^3.1.22" + "@across-protocol/contracts" "^3.0.18" "@eth-optimism/sdk" "^3.3.1" "@ethersproject/bignumber" "^5.7.0" "@pinata/sdk" "^2.1.0" "@types/mocha" "^10.0.1" - "@uma/sdk" "^0.34.1" + "@uma/sdk" "^0.34.10" arweave "^1.14.4" async "^3.2.5" axios "^0.27.2" @@ -3539,17 +3529,63 @@ web3 "^1.6.0" winston "^3.2.1" -"@uma/contracts-frontend@^0.4.18": - version "0.4.18" - resolved "https://registry.yarnpkg.com/@uma/contracts-frontend/-/contracts-frontend-0.4.18.tgz#339093239ea6f2ba2914de424ad609e6f9346379" - integrity sha512-0UcA0Io+RB8p2BAeoyubNb0wQzvIWynQ2805ZzbwWhDB2jlW2xRNAKPRP6kcxaqtzCYcEoLnsTLwOP0ojmeWjw== +"@uma/common@^2.37.3": + version "2.37.3" + resolved "https://registry.yarnpkg.com/@uma/common/-/common-2.37.3.tgz#0d7fda1227e3a05563544bb36f418a790c81129d" + integrity sha512-DLcM2xtiFWDbty21r2gsL6AJbOc8G/CMqg0iMxssvkKbz8varsWS44zJF85XGxMlY8fE40w0ZS8MR92xpbsu4g== + dependencies: + "@across-protocol/contracts" "^0.1.4" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.0.5" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@google-cloud/kms" "^3.0.1" + "@google-cloud/storage" "^6.4.2" + "@nomicfoundation/hardhat-verify" "^1.0.4" + "@nomiclabs/hardhat-ethers" "^2.2.1" + "@nomiclabs/hardhat-web3" "^2.0.0" + "@truffle/contract" "4.6.17" + "@truffle/hdwallet-provider" eip1559-beta + "@types/ethereum-protocol" "^1.0.0" + "@uniswap/v3-core" "^1.0.0-rc.2" + abi-decoder "github:UMAprotocol/abi-decoder" + async-retry "^1.3.3" + axios "^1.6.1" + bignumber.js "^8.0.1" + chalk-pipe "^3.0.0" + decimal.js "^10.2.1" + dotenv "^9.0.0" + eth-crypto "^2.4.0" + hardhat-deploy "0.9.1" + hardhat-gas-reporter "^1.0.4" + hardhat-typechain "^0.3.5" + lodash.uniqby "^4.7.0" + minimist "^1.2.0" + moment "^2.24.0" + node-fetch "^2.6.0" + node-metamask "github:UMAprotocol/node-metamask" + require-context "^1.1.0" + solidity-coverage "^0.7.13" + truffle-deploy-registry "^0.5.1" + web3 "^1.6.0" + winston "^3.2.1" -"@uma/contracts-node@^0.4.0", "@uma/contracts-node@^0.4.17", "@uma/contracts-node@^0.4.18": +"@uma/contracts-frontend@^0.4.25": + version "0.4.25" + resolved "https://registry.yarnpkg.com/@uma/contracts-frontend/-/contracts-frontend-0.4.25.tgz#86fd9e07d0466e04be41c48856e292b0f0de0722" + integrity sha512-LfkMw0lO+H+hUPevoAFogVu5iJTXp+Q2ChddqiynvvrwZ/lrNHrOjj0uEX1winjJXTLFs78jBK1AsIkkYK2VTQ== + 
+"@uma/contracts-node@^0.4.0", "@uma/contracts-node@^0.4.17": version "0.4.18" resolved "https://registry.yarnpkg.com/@uma/contracts-node/-/contracts-node-0.4.18.tgz#3d3e0ad7dc70b81b3d0dbe722b4eba93bd29f9bf" integrity sha512-JiICiNPEfL18JrddxjSNQUs0/gRMAvdqejIu7UP8JTG4Cup8tDJ6TejZJxBVHlmtB6hSOBnbLoPXb/uLtfdQiw== -"@uma/core@^2.18.0", "@uma/core@^2.56.0": +"@uma/contracts-node@^0.4.25": + version "0.4.25" + resolved "https://registry.yarnpkg.com/@uma/contracts-node/-/contracts-node-0.4.25.tgz#d5c82f1f2c7e0dc2dec26fe876db73ba3f0689d7" + integrity sha512-WaFojX4qyMmXpy5MBS7g0M0KnWESGusdSfTmlkZpCh65TksGaJwAyOM1YBRLL3xm3xSgxPoG+n6tTilSomUmOw== + +"@uma/core@^2.18.0": version "2.56.0" resolved "https://registry.yarnpkg.com/@uma/core/-/core-2.56.0.tgz#c19aa427f08691a85e99ec523d23abf359a6b0c3" integrity sha512-unylWwHeD/1mYcj1t2UPVgj1V+ceBLSo/BcYKgZyyBIHYAkC6bOx4egV/2NrhWPt3sX5CZFG1I1kMAqgp245tQ== @@ -3566,6 +3602,22 @@ "@uniswap/v3-core" "^1.0.0-rc.2" "@uniswap/v3-periphery" "^1.0.0-beta.23" +"@uma/core@^2.61.0": + version "2.61.0" + resolved "https://registry.yarnpkg.com/@uma/core/-/core-2.61.0.tgz#29580736349a47af8fb10beb4bb3b50bfcf912f5" + integrity sha512-bnk+CWW+uWpRilrgUny/gDXHKomG+h1Ug84OXdx+AAvj1/BtlMDOCNNt1OX8LSAz+a0hkiN9s24/zgHclTC/sg== + dependencies: + "@gnosis.pm/safe-contracts" "^1.3.0" + "@gnosis.pm/zodiac" "3.2.0" + "@maticnetwork/fx-portal" "^1.0.4" + "@openzeppelin/contracts" "4.9.6" + "@uma/common" "^2.37.3" + "@uniswap/lib" "4.0.1-alpha" + "@uniswap/v2-core" "1.0.0" + "@uniswap/v2-periphery" "1.1.0-beta.0" + "@uniswap/v3-core" "^1.0.0-rc.2" + "@uniswap/v3-periphery" "^1.0.0-beta.23" + "@uma/logger@^1.3.0": version "1.3.0" resolved "https://registry.yarnpkg.com/@uma/logger/-/logger-1.3.0.tgz#df5beb2efb4333aa3da320ba3a02168a627dbe72" @@ -3599,19 +3651,19 @@ mocha "^8.3.0" node-fetch "^2.6.1" -"@uma/sdk@^0.34.1": - version "0.34.3" - resolved "https://registry.yarnpkg.com/@uma/sdk/-/sdk-0.34.3.tgz#cd358e11df02abcf163703d94d4c5f448220b999" - integrity sha512-1DqzkculvR5qlRv0R1by9F/RJfMWgFgQa4nn4W2pWhyTvhsTi7/9ZFDbRgm5iU9xRHnWWZiEQE89SIZ8Vl+UiQ== +"@uma/sdk@^0.34.10": + version "0.34.10" + resolved "https://registry.yarnpkg.com/@uma/sdk/-/sdk-0.34.10.tgz#ae2bb4d1f5f4140aef0f7d6141620d70dbd57f35" + integrity sha512-Jo64XpbCxquuPIIktQCWFMNN/vCTyA1SbVXMrlmXgO7NAtPPMyPBlsKJr+N0/QrqymBQcO5wzdmo+EqJaeKIHw== dependencies: "@eth-optimism/core-utils" "^0.7.7" "@ethersproject/abstract-signer" "^5.4.0" "@ethersproject/providers" "^5.4.2" "@google-cloud/datastore" "^8.2.1" "@types/lodash-es" "^4.17.5" - "@uma/contracts-frontend" "^0.4.18" - "@uma/contracts-node" "^0.4.18" - axios "^0.24.0" + "@uma/contracts-frontend" "^0.4.25" + "@uma/contracts-node" "^0.4.25" + axios "^1.6.0" bluebird "^3.7.2" bn.js "^4.11.9" decimal.js "^10.3.1" @@ -4295,13 +4347,6 @@ axios@^0.21.1, axios@^0.21.2: dependencies: follow-redirects "^1.14.0" -axios@^0.24.0: - version "0.24.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6" - integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA== - dependencies: - follow-redirects "^1.14.4" - axios@^0.27.2: version "0.27.2" resolved "https://registry.yarnpkg.com/axios/-/axios-0.27.2.tgz#207658cc8621606e586c85db4b41a750e756d972" @@ -4310,6 +4355,15 @@ axios@^0.27.2: follow-redirects "^1.14.9" form-data "^4.0.0" +axios@^1.6.0, axios@^1.6.1: + version "1.7.9" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.9.tgz#d7d071380c132a24accda1b2cfc1535b79ec650a" + 
integrity sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw== + dependencies: + follow-redirects "^1.15.6" + form-data "^4.0.0" + proxy-from-env "^1.1.0" + axios@^1.7.4: version "1.7.4" resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.4.tgz#4c8ded1b43683c8dd362973c393f3ede24052aa2" @@ -7557,7 +7611,7 @@ fn.name@1.x.x: resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.12.1, follow-redirects@^1.14.0, follow-redirects@^1.14.4, follow-redirects@^1.14.9, follow-redirects@^1.15.6: +follow-redirects@^1.12.1, follow-redirects@^1.14.0, follow-redirects@^1.14.9, follow-redirects@^1.15.6: version "1.15.9" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== From 4d9d83ddb00b7f2566ce3fc3e61b8e5db4e79739 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Mon, 16 Dec 2024 18:11:52 +0100 Subject: [PATCH 37/44] chore: Enable WBTC/Lisk and POOL/WorldChain (#1951) --- src/common/Constants.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 59fe6b34e..b6dc7d6c7 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -311,13 +311,13 @@ export const SUPPORTED_TOKENS: { [chainId: number]: string[] } = { [CHAIN_IDs.BASE]: ["BAL", "DAI", "ETH", "WETH", "USDC", "POOL"], [CHAIN_IDs.BLAST]: ["DAI", "WBTC", "WETH"], [CHAIN_IDs.LINEA]: ["USDC", "USDT", "WETH", "WBTC", "DAI"], - [CHAIN_IDs.LISK]: ["WETH", "USDT", "LSK"], + [CHAIN_IDs.LISK]: ["WETH", "USDT", "LSK", "WBTC"], [CHAIN_IDs.MODE]: ["ETH", "WETH", "USDC", "USDT", "WBTC"], [CHAIN_IDs.OPTIMISM]: ["DAI", "SNX", "BAL", "WETH", "USDC", "POOL", "USDT", "WBTC", "UMA", "ACX"], [CHAIN_IDs.POLYGON]: ["USDC", "USDT", "WETH", "DAI", "WBTC", "UMA", "BAL", "ACX", "POOL"], [CHAIN_IDs.REDSTONE]: ["WETH"], [CHAIN_IDs.SCROLL]: ["WETH", "USDC", "USDT", "WBTC", "POOL"], - [CHAIN_IDs.WORLD_CHAIN]: ["WETH", "WBTC", "USDC"], + [CHAIN_IDs.WORLD_CHAIN]: ["WETH", "WBTC", "USDC", "POOL"], [CHAIN_IDs.ZK_SYNC]: ["USDC", "USDT", "WETH", "WBTC", "DAI"], [CHAIN_IDs.ZORA]: ["USDC", "WETH"], From 71c609db87844e1c6d006e61313ee3f0c3b39c44 Mon Sep 17 00:00:00 2001 From: "James Morris, MS" <96435344+james-a-morris@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:45:15 -0600 Subject: [PATCH 38/44] chore: bump secp256k1 deps (#1936) Signed-off-by: james-a-morris --- package.json | 10 ++++++++++ yarn.lock | 48 +++++++++++++++++++++++++++++++++--------------- 2 files changed, 43 insertions(+), 15 deletions(-) diff --git a/package.json b/package.json index d0c2e86e3..3112de3fd 100644 --- a/package.json +++ b/package.json @@ -112,5 +112,15 @@ "publishConfig": { "registry": "https://registry.npmjs.com/", "access": "public" + }, + "resolutions": { + "secp256k1": "4.0.4", + "**/secp256k1": "4.0.4", + "eccrypto/secp256k1": "3.8.1" + }, + "overrides": { + "secp256k1@3.7.1": "3.8.1", + "secp256k1@4.0.3": "4.0.4", + "secp256k1@5.0.0": "5.0.1" } } diff --git a/yarn.lock b/yarn.lock index 5c34e9a5e..309ba67e0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6329,7 +6329,7 @@ electron-to-chromium@^1.4.84: resolved 
"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.110.tgz#269208d7cf7e32123b1d87bf4e6e1fd9ac7ff51d" integrity sha512-TvHZrkj9anfWkxgblHlNr4IMQdm2N6D0o8Wu1BDpSL/RKT4DHyUt/tvDFtApgZ+LGFL3U9EO4LRZ1eSlQ8xMYA== -elliptic@6.5.4, elliptic@^6.4.0, elliptic@^6.4.1, elliptic@^6.5.2, elliptic@^6.5.3, elliptic@^6.5.4: +elliptic@6.5.4, elliptic@^6.4.0, elliptic@^6.5.2, elliptic@^6.5.3, elliptic@^6.5.4: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== @@ -6342,6 +6342,19 @@ elliptic@6.5.4, elliptic@^6.4.0, elliptic@^6.4.1, elliptic@^6.5.2, elliptic@^6.5 minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" +elliptic@^6.5.7: + version "6.6.1" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.6.1.tgz#3b8ffb02670bf69e382c7f65bf524c97c5405c06" + integrity sha512-RaddvvMatK2LJHqFJ+YA4WysVN5Ita9E35botqIYspQ4TkRAlCicdzKOjlyv/1Za5RyTNn7di//eEV0uTAfe3g== + dependencies: + bn.js "^4.11.9" + brorand "^1.1.0" + hash.js "^1.0.0" + hmac-drbg "^1.0.1" + inherits "^2.0.4" + minimalistic-assert "^1.0.1" + minimalistic-crypto-utils "^1.0.1" + emitter-listener@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/emitter-listener/-/emitter-listener-1.1.2.tgz#56b140e8f6992375b3d7cb2cab1cc7432d9632e8" @@ -11428,6 +11441,11 @@ node-addon-api@^2.0.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.2.tgz#432cfa82962ce494b132e9d72a15b29f71ff5d32" integrity sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA== +node-addon-api@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.1.0.tgz#49da1ca055e109a23d537e9de43c09cca21eb762" + integrity sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA== + node-dir@^0.1.17: version "0.1.17" resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.17.tgz#5f5665d93351335caabef8f1c554516cf5f1e4e5" @@ -13535,29 +13553,29 @@ scrypt-js@3.0.1, scrypt-js@^3.0.0, scrypt-js@^3.0.1: resolved "https://registry.yarnpkg.com/scrypt-js/-/scrypt-js-3.0.1.tgz#d314a57c2aef69d1ad98a138a21fe9eafa9ee312" integrity sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA== -secp256k1@3.7.1: - version "3.7.1" - resolved "https://registry.yarnpkg.com/secp256k1/-/secp256k1-3.7.1.tgz#12e473e0e9a7c2f2d4d4818e722ad0e14cc1e2f1" - integrity sha512-1cf8sbnRreXrQFdH6qsg2H71Xw91fCCS9Yp021GnUNJzWJS/py96fS4lHbnTnouLp08Xj6jBoBB6V78Tdbdu5g== +secp256k1@3.7.1, secp256k1@4.0.3, secp256k1@4.0.4, secp256k1@^4.0.0, secp256k1@^4.0.1: + version "4.0.4" + resolved "https://registry.yarnpkg.com/secp256k1/-/secp256k1-4.0.4.tgz#58f0bfe1830fe777d9ca1ffc7574962a8189f8ab" + integrity sha512-6JfvwvjUOn8F/jUoBY2Q1v5WY5XS+rj8qSe0v8Y4ezH4InLgTEeOOPQsRll9OV429Pvo6BCHGavIyJfr3TAhsw== + dependencies: + elliptic "^6.5.7" + node-addon-api "^5.0.0" + node-gyp-build "^4.2.0" + +secp256k1@3.8.1: + version "3.8.1" + resolved "https://registry.yarnpkg.com/secp256k1/-/secp256k1-3.8.1.tgz#b62a62a882d6b16f9b51fe599c6b3a861e36c59f" + integrity sha512-tArjQw2P0RTdY7QmkNehgp6TVvQXq6ulIhxv8gaH6YubKG/wxxAoNKcbuXjDhybbc+b2Ihc7e0xxiGN744UIiQ== dependencies: bindings "^1.5.0" bip66 "^1.1.5" bn.js "^4.11.8" create-hash "^1.2.0" drbg.js "^1.0.1" - elliptic "^6.4.1" + elliptic "^6.5.7" nan "^2.14.0" safe-buffer "^5.1.2" 
-secp256k1@4.0.3, secp256k1@^4.0.0, secp256k1@^4.0.1:
-  version "4.0.3"
-  resolved "https://registry.yarnpkg.com/secp256k1/-/secp256k1-4.0.3.tgz#c4559ecd1b8d3c1827ed2d1b94190d69ce267303"
-  integrity sha512-NLZVf+ROMxwtEj3Xa562qgv2BK5e2WNmXPiOdVIPLgs6lyTzMvBq0aWTYMI5XCP9jZMVKOcqZLw/Wc4vDkuxhA==
-  dependencies:
-    elliptic "^6.5.4"
-    node-addon-api "^2.0.0"
-    node-gyp-build "^4.2.0"
-
 seedrandom@3.0.5:
   version "3.0.5"
   resolved "https://registry.yarnpkg.com/seedrandom/-/seedrandom-3.0.5.tgz#54edc85c95222525b0c7a6f6b3543d8e0b3aa0a7"

From 79c6f704b7303259debc16c97690db389cdfadbd Mon Sep 17 00:00:00 2001
From: nicholaspai <9457025+nicholaspai@users.noreply.github.com>
Date: Mon, 16 Dec 2024 15:21:09 -0500
Subject: [PATCH 39/44] fix(Dataworker): Account for total required netSendAmount when executing PoolRebalanceLeaves (#1933)

* fix(Dataworker): Update balanceAllocator properly when executing PoolRebalanceLeaves

## Context

The dataworker executor functionality is supposed to detect when to call `sync()` before executing L1 PoolRebalance and RelayerRefund leaves, depending on the `liquidReserves` value of L1 tokens at execution time.

We pass around the `balanceAllocator` when simulating execution of the [PoolRebalanceLeaves](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1491) and the [RelayerRefundLeaves](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1512) in order to keep track of how many L1 tokens are withdrawn from and deposited to the HubPool following Hub-chain leaf executions. This way, we can use the `balanceAllocator` in [this function](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1531) to detect when we're not going to have enough funds in LP reserves to execute a leaf [here](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1823).

## Problem

The problem is that when accounting for PoolRebalanceLeaf executions, we were ADDING balance to, not subtracting it from, the balanceAllocator's count of the HubPool's reserves. This means that if the current `liquidReserves` were good enough to cover execution of the Ethereum PoolRebalanceLeaf [here](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1484), then the balance allocator would accidentally inflate the HubPool's balance [here](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1488). This function [here](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1529C40-L1529C92) would then make decisions based on the inflated balance. Within this function, if any individual PoolRebalanceLeaf's `netSendAmount` was less than the HubPool's liquid reserves, then a `sync` would be skipped [here](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1802). This [line](https://github.com/across-protocol/relayer/blob/3e9c1e4108568b39fe007f8fcd71721db4bbe090/src/dataworker/Dataworker.ts#L1822) would then artificially inflate the hub pool's balance in the balance allocator, leading to a much more downstream simulation error when the pool rebalance leaf execution fails for unknown reasons.
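Put differently, the gating check has to be made against the sum of the `netSendAmount`s rather than each amount in isolation. A minimal sketch of the intended invariant (a hypothetical helper using ethers' `BigNumber`, as used elsewhere in this repo — not the literal implementation in `Dataworker.ts`):

```ts
import { BigNumber } from "ethers";

// A sync() is required whenever the aggregate netSendAmount of all leaves to be
// executed exceeds the HubPool's current liquid reserves.
function hubPoolNeedsSync(netSendAmounts: BigNumber[], liquidReserves: BigNumber): boolean {
  const totalRequired = netSendAmounts.reduce((sum, amount) => sum.add(amount), BigNumber.from(0));
  return totalRequired.gt(liquidReserves);
}

// Using the numbers from the example below: 40, 90 and 70 each pass an individual
// check against reserves of 180, but 40 + 90 + 70 = 200 > 180, so the aggregate
// check correctly fails.
const needsSync = hubPoolNeedsSync([40, 90, 70].map((n) => BigNumber.from(n)), BigNumber.from(180)); // true
```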
## Examples of problems We saw a bundle of PoolRebalanceLeaves today fail to execute because three of the leaves, one Ethereum leaf and two non-Ethereum leaves, had a total `netSendAmount` greater than the HubPool's `liquidReserves`, but each individually had a `netSendAmount` < the `liquidReserves`. For example, the three leaves had `netSendAmounts` of: - 40 - 90 - 70 While the hubPool's liquidReserves was 180: - 40 + 90 + 70 = 200 > 180 - 40 < 180 - 90 < 180 - 70 < 180 If you take these numbers and run them through the `executePoolRebalanceLeaves` code above, you'll see how a PoolRebalanceLeaf execution was able to be submitted but then fail in simulation, without preceding the leaf executions with a `sync` transaction. * fix issue * Update Dataworker.executePoolRebalances.ts * Add more test cases * Update Dataworker.executePoolRebalances.ts * Update Monitor.ts * comment on tests * make test better * Add orbit-fee handling, remove balance allocator * Fix balanceAllocator call in _executePoolRebalanceLeaves * throw error if can't fund the DonationBox or loadEthForL2Calls call * add lifecycle test Signed-off-by: nicholaspai * Exit early if aggregate net send amount == 0 * Update Dataworker.executePoolRebalances.ts * Update log in _updateExchangeRatesBeforeExecutingNonHubChainLeaves when skipping exchange rate update early * Update Dataworker.ts * Fund more AZERO whenever we're short * remove hardcodes * Improve logs about lookback window being too short * Improve logs on funding orbit chain message * Update Dataworker.customSpokePoolClients.ts * Update index.ts * Update index.ts * Add invariant unit test * Remove l1 tokens with 0 net send amounts from _updateOldExchangeRates * Rename to amountWei * Refactor blockRangesAreInvalid to internal helper func * Squash feeData * Update src/dataworker/Dataworker.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> * Update src/dataworker/Dataworker.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> * Update src/dataworker/Dataworker.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> * result * Add unit testing about exiting early if leaves are already executed * Add ability for some nonHubChain leaves to be executed even if they all cannot * Skip mainnet leaf execution if we cannot execute instead of throwing * Skip sync in _updateExchangeRatesBeforeExecutingNonHubChainLeaves if liquid reserves won't increase * refactor block range pretty printing * update comments * Add assert error messages * Add _getSpokeBalanceForL2Tokens helper and add to logs * Re-add balance allocator * Update Dataworker.executeRelayerRefunds.ts * Update Dataworker.ts * Remove canExecute return value from _updateExchangeRatesBeforeExecutingHubChainLeaves * Update Dataworker.executePoolRebalances.ts * Update Dataworker.executePoolRebalances.ts * Refactor error log * Clean up logs * Consider state of liquid reserves following eth pool rebalance leaf executions * Improve tests * Update name * Add unit test, split executePoolRebalanceLeaf tests in two files to take advantage of parallel test runs in CI * Remove SIMULATE_L1_EXECUTION * Add test for amountToReturn * add tests * Add test about hub chain slow fill leaves * Update BalanceAllocator.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> * Update Dataworker.ts Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> * change blockRangesAreInvalidForSpokeClients to return list of chain ID's that are invalid; add DISABLED_CHAINS unit tests to BundleDataClient 
unit test files

Signed-off-by: nicholaspai

---------

Signed-off-by: nicholaspai
Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com>
---
 src/clients/BalanceAllocator.ts              |  12 +
 src/common/Constants.ts                      |  28 +
 src/dataworker/Dataworker.ts                 | 731 +++++++++------
 src/dataworker/DataworkerUtils.ts            | 139 +--
 src/dataworker/index.ts                      |   4 +
 test/BalanceAllocator.ts                     |   6 +
 test/Dataworker.blockRangeUtils.ts           | 262 +++---
 test/Dataworker.customSpokePoolClients.ts    |   2 +-
 test/Dataworker.executePoolRebalanceUtils.ts | 901 ++++++++++++++++++
 test/Dataworker.executePoolRebalances.ts     | 933 +++++++++++--------
 test/Dataworker.executeRelayerRefunds.ts     |  34 +-
 test/Dataworker.loadData.fill.ts             |  45 +
 test/Dataworker.loadData.slowFill.ts         |  40 +
 test/Monitor.ts                              |   2 +-
 test/fixtures/Dataworker.Fixture.ts          |   7 +-
 test/utils/utils.ts                          |   4 +
 16 files changed, 2302 insertions(+), 848 deletions(-)
 create mode 100644 test/Dataworker.executePoolRebalanceUtils.ts

diff --git a/src/clients/BalanceAllocator.ts b/src/clients/BalanceAllocator.ts
index 666dd6f5e..c74bd7d01 100644
--- a/src/clients/BalanceAllocator.ts
+++ b/src/clients/BalanceAllocator.ts
@@ -97,6 +97,12 @@ export class BalanceAllocator {
     return this.requestBalanceAllocations([{ chainId, tokens, holder, amount }]);
   }

+  async getBalanceSubUsed(chainId: number, token: string, holder: string): Promise<BigNumber> {
+    const balance = await this.getBalance(chainId, token, holder);
+    const used = this.getUsed(chainId, token, holder);
+    return balance.sub(used);
+  }
+
   async getBalance(chainId: number, token: string, holder: string): Promise<BigNumber> {
     if (!this.balances?.[chainId]?.[token]?.[holder]) {
       const balance = await this._queryBalance(chainId, token, holder);
@@ -114,6 +120,12 @@
     return this.balances[chainId][token][holder];
   }

+  testSetBalance(chainId: number, token: string, holder: string, balance: BigNumber): void {
+    this.balances[chainId] ??= {};
+    this.balances[chainId][token] ??= {};
+    this.balances[chainId][token][holder] = balance;
+  }
+
   getUsed(chainId: number, token: string, holder: string): BigNumber {
     if (!this.used?.[chainId]?.[token]?.[holder]) {
       // Note: cannot use assign because it breaks the BigNumber object.
diff --git a/src/common/Constants.ts b/src/common/Constants.ts
index b6dc7d6c7..fb554b217 100644
--- a/src/common/Constants.ts
+++ b/src/common/Constants.ts
@@ -611,3 +611,31 @@ export const DEFAULT_GAS_MULTIPLIER: { [chainId: number]: number } = {
 };

 export const CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS = 3 * 60 * 60; // 3 hours is a safe assumption for the time
+
+export const ARBITRUM_ORBIT_L1L2_MESSAGE_FEE_DATA: {
+  [chainId: number]: {
+    // Amount of tokens required to send a single message to the L2
+    amountWei: number;
+    // Multiple of the required amount above to send to the feePayer in case
+    // we are short funds. For example, if set to 10, then everytime we need to load more funds
+    // we'll send 10x the required amount.
+    amountMultipleToFund: number;
+    // Account that pays the fees on-chain that we will load more fee tokens into.
+    feePayer?: string;
+    // Token that the feePayer will pay the fees in.
+    feeToken?: string;
+  };
+} = {
+  // Leave feePayer undefined if feePayer is HubPool.
+  // Leave feeToken undefined if feeToken is ETH.
+ [CHAIN_IDs.ARBITRUM]: { + amountWei: 0.02, + amountMultipleToFund: 1, + }, + [CHAIN_IDs.ALEPH_ZERO]: { + amountWei: 0.49, + amountMultipleToFund: 20, + feePayer: "0x0d57392895Db5aF3280e9223323e20F3951E81B1", // DonationBox + feeToken: TOKEN_SYMBOLS_MAP.AZERO.addresses[CHAIN_IDs.MAINNET], + }, +}; diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts index 0aca04370..f3db13fdc 100644 --- a/src/dataworker/Dataworker.ts +++ b/src/dataworker/Dataworker.ts @@ -18,6 +18,7 @@ import { getWidestPossibleExpectedBlockRange, getEndBlockBuffers, _buildPoolRebalanceRoot, + ERC20, } from "../utils"; import { ProposedRootBundle, @@ -37,12 +38,13 @@ import { blockRangesAreInvalidForSpokeClients, getBlockRangeForChain, getImpliedBundleBlockRanges, + InvalidBlockRange, l2TokensToCountTowardsSpokePoolLeafExecutionCapital, persistDataToArweave, } from "../dataworker/DataworkerUtils"; import { _buildRelayerRefundRoot, _buildSlowRelayRoot } from "./DataworkerUtils"; import _ from "lodash"; -import { CONTRACT_ADDRESSES, spokePoolClientsToProviders } from "../common"; +import { ARBITRUM_ORBIT_L1L2_MESSAGE_FEE_DATA, CONTRACT_ADDRESSES, spokePoolClientsToProviders } from "../common"; import * as sdk from "@across-protocol/sdk"; import { BundleData, @@ -309,24 +311,19 @@ export class Dataworker { // Exit early if spoke pool clients don't have early enough event data to satisfy block ranges for the // potential proposal - if ( - Object.keys(earliestBlocksInSpokePoolClients).length > 0 && - (await blockRangesAreInvalidForSpokeClients( - spokePoolClients, - blockRangesForProposal, - chainIds, - earliestBlocksInSpokePoolClients, - this.isV3(mainnetBlockRange[0]) - )) - ) { + const invalidBlockRanges = await this._validateBlockRanges( + spokePoolClients, + blockRangesForProposal, + chainIds, + earliestBlocksInSpokePoolClients, + this.isV3(mainnetBlockRange[0]) + ); + if (invalidBlockRanges.length > 0) { this.logger.warn({ at: "Dataworke#propose", message: "Cannot propose bundle with insufficient event data. Set a larger DATAWORKER_FAST_LOOKBACK_COUNT", - rootBundleRanges: blockRangesForProposal, - earliestBlocksInSpokePoolClients, - spokeClientsEventSearchConfigs: Object.fromEntries( - Object.entries(spokePoolClients).map(([chainId, client]) => [chainId, client.eventSearchConfig]) - ), + invalidBlockRanges, + bundleBlockRanges: this._prettifyBundleBlockRanges(chainIds, blockRangesForProposal), }); return; } @@ -836,25 +833,19 @@ export class Dataworker { // Exit early if spoke pool clients don't have early enough event data to satisfy block ranges for the // pending proposal. Log an error loudly so that user knows that disputer needs to increase its lookback. - if ( - Object.keys(earliestBlocksInSpokePoolClients).length > 0 && - (await blockRangesAreInvalidForSpokeClients( - spokePoolClients, - blockRangesImpliedByBundleEndBlocks, - chainIds, - earliestBlocksInSpokePoolClients, - this.isV3(mainnetBlockRange[0]) - )) - ) { - this.logger.debug({ + const invalidBlockRanges = await this._validateBlockRanges( + spokePoolClients, + blockRangesImpliedByBundleEndBlocks, + chainIds, + earliestBlocksInSpokePoolClients, + this.isV3(mainnetBlockRange[0]) + ); + if (invalidBlockRanges.length > 0) { + this.logger.warn({ at: "Dataworke#validate", message: "Cannot validate bundle with insufficient event data. 
Set a larger DATAWORKER_FAST_LOOKBACK_COUNT", - rootBundleRanges: blockRangesImpliedByBundleEndBlocks, - availableSpokePoolClients: Object.keys(spokePoolClients), - earliestBlocksInSpokePoolClients, - spokeClientsEventSearchConfigs: Object.fromEntries( - Object.entries(spokePoolClients).map(([chainId, client]) => [chainId, client.eventSearchConfig]) - ), + invalidBlockRanges, + bundleBlockRanges: this._prettifyBundleBlockRanges(chainIds, blockRangesImpliedByBundleEndBlocks), }); return { valid: false, @@ -1074,27 +1065,20 @@ export class Dataworker { ); const mainnetBlockRange = blockNumberRanges[0]; const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(mainnetBlockRange[0]); - if ( - Object.keys(earliestBlocksInSpokePoolClients).length > 0 && - (await blockRangesAreInvalidForSpokeClients( - spokePoolClients, - blockNumberRanges, - chainIds, - earliestBlocksInSpokePoolClients, - this.isV3(mainnetBlockRange[0]) - )) - ) { + const invalidBlockRanges = await this._validateBlockRanges( + spokePoolClients, + blockNumberRanges, + chainIds, + earliestBlocksInSpokePoolClients, + this.isV3(mainnetBlockRange[0]) + ); + if (invalidBlockRanges.length > 0) { this.logger.warn({ at: "Dataworke#executeSlowRelayLeaves", message: "Cannot validate bundle with insufficient event data. Set a larger DATAWORKER_FAST_LOOKBACK_COUNT", - chainId, - rootBundleRanges: blockNumberRanges, - availableSpokePoolClients: Object.keys(spokePoolClients), - earliestBlocksInSpokePoolClients, - spokeClientsEventSearchConfigs: Object.fromEntries( - Object.entries(spokePoolClients).map(([chainId, client]) => [chainId, client.eventSearchConfig]) - ), + invalidBlockRanges, + bundleTxn: matchingRootBundle.transactionHash, }); continue; } @@ -1279,6 +1263,12 @@ export class Dataworker { chainId: destinationChainId, token: outputToken, amount: outputAmount, + spokeBalance: await this._getSpokeBalanceForL2Tokens( + balanceAllocator, + destinationChainId, + outputToken, + client.spokePool.address + ), }); } @@ -1356,7 +1346,7 @@ export class Dataworker { submitExecution = true, earliestBlocksInSpokePoolClients: { [chainId: number]: number } = {} ): Promise { - let leafCount = 0; + const leafCount = 0; this.logger.debug({ at: "Dataworker#executePoolRebalanceLeaves", message: "Executing pool rebalance leaves", @@ -1463,6 +1453,32 @@ export class Dataworker { return leafCount; } + return this._executePoolLeavesAndSyncL1Tokens( + spokePoolClients, + balanceAllocator, + unexecutedLeaves, + expectedTrees.poolRebalanceTree.tree, + expectedTrees.relayerRefundTree.leaves, + expectedTrees.relayerRefundTree.tree, + expectedTrees.slowRelayTree.leaves, + expectedTrees.slowRelayTree.tree, + submitExecution + ); + } + + async _executePoolLeavesAndSyncL1Tokens( + spokePoolClients: { [chainId: number]: SpokePoolClient }, + balanceAllocator: BalanceAllocator, + poolLeaves: PoolRebalanceLeaf[], + poolRebalanceTree: MerkleTree, + relayerRefundLeaves: RelayerRefundLeaf[], + relayerRefundTree: MerkleTree, + slowFillLeaves: SlowFillLeaf[], + slowFillTree: MerkleTree, + submitExecution: boolean + ): Promise { + const hubPoolChainId = this.clients.hubPoolClient.chainId; + // There are three times that we should look to update the HubPool's liquid reserves: // 1. First, before we attempt to execute the HubChain PoolRebalance leaves and RelayerRefund leaves. 
// We should see if there are new liquid reserves we need to account for before sending out these @@ -1473,15 +1489,21 @@ export class Dataworker { // back from the Ethereum RelayerRefundLeaves. // 3. Third, we haven't updated the exchange rate for an L1 token on a PoolRebalanceLeaf in a while that // we're going to execute, so we should batch in an update. - let updatedLiquidReserves: Record = {}; + + // Keep track of the HubPool.pooledTokens.liquidReserves state value before entering into any possible + // LP token update. This way we can efficiently update LP liquid reserves values if and only if we need to do so + // to execute a pool leaf. + let latestLiquidReserves: Record = {}; + let leafCount = 0; // First, execute mainnet pool rebalance leaves. Then try to execute any relayer refund and slow leaves for the // expected relayed root hash, then proceed with remaining pool rebalance leaves. This is an optimization that // takes advantage of the fact that mainnet transfers between HubPool and SpokePool are atomic. - const mainnetLeaves = unexecutedLeaves.filter((leaf) => leaf.chainId === hubPoolChainId); + const mainnetLeaves = poolLeaves.filter((leaf) => leaf.chainId === hubPoolChainId); if (mainnetLeaves.length > 0) { - assert(mainnetLeaves.length === 1); - updatedLiquidReserves = await this._updateExchangeRatesBeforeExecutingHubChainLeaves( + assert(mainnetLeaves.length === 1, "There should only be one Ethereum PoolRebalanceLeaf"); + latestLiquidReserves = await this._updateExchangeRatesBeforeExecutingHubChainLeaves( + balanceAllocator, mainnetLeaves[0], submitExecution ); @@ -1489,7 +1511,7 @@ export class Dataworker { spokePoolClients, mainnetLeaves, balanceAllocator, - expectedTrees.poolRebalanceTree.tree, + poolRebalanceTree, submitExecution ); @@ -1497,21 +1519,21 @@ export class Dataworker { // will be relayed after executing the above pool rebalance root. const nextRootBundleIdForMainnet = spokePoolClients[hubPoolChainId].getLatestRootBundleId(); - // Now, execute refund and slow fill leaves for Mainnet using new funds. These methods will return early if there + // Now, execute refund and slow fill leaves for Mainnet using any new funds. These methods will return early if there // are no relevant leaves to execute. await this._executeSlowFillLeaf( - expectedTrees.slowRelayTree.leaves.filter((leaf) => leaf.chainId === hubPoolChainId), + slowFillLeaves.filter((leaf) => leaf.chainId === hubPoolChainId), balanceAllocator, spokePoolClients[hubPoolChainId], - expectedTrees.slowRelayTree.tree, + slowFillTree, submitExecution, nextRootBundleIdForMainnet ); await this._executeRelayerRefundLeaves( - expectedTrees.relayerRefundTree.leaves.filter((leaf) => leaf.chainId === hubPoolChainId), + relayerRefundLeaves.filter((leaf) => leaf.chainId === hubPoolChainId), balanceAllocator, spokePoolClients[hubPoolChainId], - expectedTrees.relayerRefundTree.tree, + relayerRefundTree, submitExecution, nextRootBundleIdForMainnet ); @@ -1519,30 +1541,29 @@ export class Dataworker { // Before executing the other pool rebalance leaves, see if we should update any exchange rates to account for // any tokens returned to the hub pool via the EthereumSpokePool that we'll need to use to execute - // any of the remaining pool rebalance leaves. This might include tokens we've already enqueued to update - // in the previous step, but this captures any tokens that are sent back from the Ethereum_SpokePool to the - // HubPool that we want to capture an increased liquidReserves for. 
- const nonHubChainPoolRebalanceLeaves = unexecutedLeaves.filter((leaf) => leaf.chainId !== hubPoolChainId); + // any of the remaining pool rebalance leaves. This is also important if we failed to execute + // the mainnet leaf and haven't enqueued a sync call that could be used to execute some of the other leaves. + const nonHubChainPoolRebalanceLeaves = poolLeaves.filter((leaf) => leaf.chainId !== hubPoolChainId); if (nonHubChainPoolRebalanceLeaves.length === 0) { return leafCount; } - const updatedL1Tokens = await this._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - updatedLiquidReserves, + const syncedL1Tokens = await this._updateExchangeRatesBeforeExecutingNonHubChainLeaves( + latestLiquidReserves, balanceAllocator, nonHubChainPoolRebalanceLeaves, submitExecution ); - Object.keys(updatedLiquidReserves).forEach((token) => { - if (!updatedL1Tokens.has(token)) { - updatedL1Tokens.add(token); + Object.keys(latestLiquidReserves).forEach((token) => { + if (!syncedL1Tokens.has(token)) { + syncedL1Tokens.add(token); } }); // Save all L1 tokens that we haven't updated exchange rates for in a different step. - const l1TokensWithPotentiallyOlderUpdate = expectedTrees.poolRebalanceTree.leaves.reduce((l1TokenSet, leaf) => { + const l1TokensWithPotentiallyOlderUpdate = poolLeaves.reduce((l1TokenSet, leaf) => { const currLeafL1Tokens = leaf.l1Tokens; - currLeafL1Tokens.forEach((l1Token) => { - if (!l1TokenSet.includes(l1Token) && !updatedL1Tokens.has(l1Token)) { + currLeafL1Tokens.forEach((l1Token, i) => { + if (leaf.netSendAmounts[i].gt(0) && !l1TokenSet.includes(l1Token) && !syncedL1Tokens.has(l1Token)) { l1TokenSet.push(l1Token); } }); @@ -1550,112 +1571,166 @@ export class Dataworker { }, []); await this._updateOldExchangeRates(l1TokensWithPotentiallyOlderUpdate, submitExecution); - // Perform similar funding checks for remaining non-mainnet pool rebalance leaves. + // Figure out which non-mainnet pool rebalance leaves we can execute and execute them: leafCount += await this._executePoolRebalanceLeaves( spokePoolClients, nonHubChainPoolRebalanceLeaves, balanceAllocator, - expectedTrees.poolRebalanceTree.tree, + poolRebalanceTree, submitExecution ); return leafCount; } + async _getExecutablePoolRebalanceLeaves( + poolLeaves: PoolRebalanceLeaf[], + balanceAllocator: BalanceAllocator + ): Promise { + // We evaluate these leaves iteratively rather than in parallel so we can keep track + // of the used balances after "executing" each leaf. + const executableLeaves: PoolRebalanceLeaf[] = []; + for (const leaf of poolLeaves) { + // We can evaluate the l1 tokens within the leaf in parallel because we can assume + // that there are not duplicate L1 tokens within the leaf. 
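+      // Note that a successful requestBalanceAllocation() below also records the requested amount
+      // as "used" in the BalanceAllocator, so each leaf accepted here reduces the virtual balance
+      // available to the leaves evaluated after it.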
+ const isExecutable = await sdkUtils.everyAsync(leaf.l1Tokens, async (l1Token, i) => { + const netSendAmountForLeaf = leaf.netSendAmounts[i]; + if (netSendAmountForLeaf.lte(0)) { + return true; + } + const hubChainId = this.clients.hubPoolClient.chainId; + const hubPoolAddress = this.clients.hubPoolClient.hubPool.address; + const success = await balanceAllocator.requestBalanceAllocation( + hubChainId, + [l1Token], + hubPoolAddress, + netSendAmountForLeaf + ); + return success; + }); + if (isExecutable) { + executableLeaves.push(leaf); + } else { + this.logger.error({ + at: "Dataworker#_getExecutablePoolRebalanceLeaves", + message: `Not enough funds to execute pool rebalance leaf for chain ${leaf.chainId}`, + l1Tokens: leaf.l1Tokens, + netSendAmounts: leaf.netSendAmounts, + }); + } + } + return executableLeaves; + } + async _executePoolRebalanceLeaves( spokePoolClients: { [chainId: number]: SpokePoolClient; }, - leaves: PoolRebalanceLeaf[], + allLeaves: PoolRebalanceLeaf[], balanceAllocator: BalanceAllocator, tree: MerkleTree, submitExecution: boolean ): Promise { const hubPoolChainId = this.clients.hubPoolClient.chainId; - const fundedLeaves = ( - await Promise.all( - leaves.map(async (leaf) => { - const requests = leaf.netSendAmounts.map((amount, i) => ({ - amount: amount.gt(bnZero) ? amount : bnZero, - tokens: [leaf.l1Tokens[i]], - holder: this.clients.hubPoolClient.hubPool.address, - chainId: hubPoolChainId, - })); - - if (sdkUtils.chainIsArbitrum(leaf.chainId)) { - const hubPoolBalance = await this.clients.hubPoolClient.hubPool.provider.getBalance( - this.clients.hubPoolClient.hubPool.address - ); - if (hubPoolBalance.lt(this._getRequiredEthForArbitrumPoolRebalanceLeaf(leaf))) { - requests.push({ - tokens: [ZERO_ADDRESS], - amount: this._getRequiredEthForArbitrumPoolRebalanceLeaf(leaf), - holder: await this.clients.hubPoolClient.hubPool.signer.getAddress(), - chainId: hubPoolChainId, - }); - } - } - - const success = await balanceAllocator.requestBalanceAllocations( - requests.filter((req) => req.amount.gt(bnZero)) - ); + const signer = this.clients.hubPoolClient.hubPool.signer; + + // Evaluate leaves iteratively because we will be modifying virtual balances and we want + // to make sure we are getting the virtual balance computations correct. + const fundedLeaves = await this._getExecutablePoolRebalanceLeaves(allLeaves, balanceAllocator); + const executableLeaves: PoolRebalanceLeaf[] = []; + for (const leaf of fundedLeaves) { + // For orbit leaves we need to check if we have enough gas tokens to pay for the L1 to L2 message. + if (!sdkUtils.chainIsArbitrum(leaf.chainId) && !sdkUtils.chainIsOrbit(leaf.chainId)) { + executableLeaves.push(leaf); + continue; + } - if (!success) { - // Note: this is an error because the HubPool should generally not run out of funds to put into - // netSendAmounts. This means that no new bundles can be proposed until this leaf is funded. + // Check if orbit leaf can be executed. 
+ const { + amount: requiredAmount, + token: feeToken, + holder, + } = await this._getRequiredEthForOrbitPoolRebalanceLeaf(leaf); + const feeData = { + tokens: [feeToken], + amount: requiredAmount, + chainId: hubPoolChainId, + }; + const success = await balanceAllocator.requestBalanceAllocations([{ ...feeData, holder }]); + if (!success) { + this.logger.debug({ + at: "Dataworker#_executePoolRebalanceLeaves", + message: `Loading more orbit gas token to pay for L1->L2 message submission fees to ${getNetworkName( + leaf.chainId + )} 📨!`, + leaf, + feeToken, + requiredAmount, + }); + if (submitExecution) { + const canFund = await balanceAllocator.requestBalanceAllocations([ + { ...feeData, holder: await signer.getAddress() }, + ]); + if (!canFund) { this.logger.error({ - at: "Dataworker#executePoolRebalanceLeaves", - message: "Not executing pool rebalance leaf on HubPool due to lack of funds to send.", - root: tree.getHexRoot(), - leafId: leaf.leafId, - rebalanceChain: leaf.chainId, - token: leaf.l1Tokens, - netSendAmounts: leaf.netSendAmounts, + at: "Dataworker#_executePoolRebalanceLeaves", + message: `Failed to fund ${requiredAmount.toString()} of orbit gas token ${feeToken} for message to ${getNetworkName( + leaf.chainId + )}!`, }); - } else { - // Add balances to spoke pool on mainnet since we know it will be sent atomically. - if (leaf.chainId === hubPoolChainId) { - await Promise.all( - leaf.netSendAmounts.map(async (amount, i) => { - if (amount.gt(bnZero)) { - await balanceAllocator.addUsed( - leaf.chainId, - leaf.l1Tokens[i], - spokePoolClients[leaf.chainId].spokePool.address, - amount.mul(-1) - ); - } - }) - ); - } + continue; } - return success ? leaf : undefined; - }) - ) - ).filter(isDefined); - - let hubPoolBalance; - if (fundedLeaves.some((leaf) => sdkUtils.chainIsArbitrum(leaf.chainId))) { - hubPoolBalance = await this.clients.hubPoolClient.hubPool.provider.getBalance( - this.clients.hubPoolClient.hubPool.address - ); - } - fundedLeaves.forEach((leaf) => { - const proof = tree.getHexProof(leaf); - const mrkdwn = `Root hash: ${tree.getHexRoot()}\nLeaf: ${leaf.leafId}\nChain: ${leaf.chainId}`; - if (submitExecution) { - if (sdkUtils.chainIsArbitrum(leaf.chainId)) { - if (hubPoolBalance.lt(this._getRequiredEthForArbitrumPoolRebalanceLeaf(leaf))) { + if (feeToken === ZERO_ADDRESS) { this.clients.multiCallerClient.enqueueTransaction({ contract: this.clients.hubPoolClient.hubPool, chainId: hubPoolChainId, method: "loadEthForL2Calls", args: [], message: `Loaded ETH for message to ${getNetworkName(leaf.chainId)} 📨!`, - mrkdwn, - value: this._getRequiredEthForArbitrumPoolRebalanceLeaf(leaf), + mrkdwn: `Root hash: ${tree.getHexRoot()}\nLeaf: ${leaf.leafId}\nChain: ${leaf.chainId}`, + value: requiredAmount, + }); + } else { + this.clients.multiCallerClient.enqueueTransaction({ + contract: new Contract(feeToken, ERC20.abi, signer), + chainId: hubPoolChainId, + method: "transfer", + args: [holder, requiredAmount], + message: `Loaded orbit gas token for message to ${getNetworkName(leaf.chainId)} 📨!`, + mrkdwn: `Root hash: ${tree.getHexRoot()}\nLeaf: ${leaf.leafId}\nChain: ${leaf.chainId}`, }); } } + } else { + this.logger.debug({ + at: "Dataworker#_executePoolRebalanceLeaves", + message: `feePayer ${holder} has sufficient orbit gas token to pay for L1->L2 message submission fees to ${getNetworkName( + leaf.chainId + )}`, + feeToken, + requiredAmount, + feePayerBalance: await balanceAllocator.getBalanceSubUsed(hubPoolChainId, feeToken, holder), + }); + } + executableLeaves.push(leaf); + } + + 
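+    // At this point, executableLeaves contains only leaves whose net send amounts (and, for orbit
+    // chains, L1 -> L2 message fees) could be reserved against available balances above.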
// Execute the leaves: + executableLeaves.forEach((leaf) => { + // Add balances to spoke pool on mainnet since we know it will be sent atomically. + if (leaf.chainId === hubPoolChainId) { + leaf.netSendAmounts.forEach((amount, i) => { + if (amount.gt(bnZero)) { + balanceAllocator.addUsed( + leaf.chainId, + leaf.l1Tokens[i], + spokePoolClients[leaf.chainId].spokePool.address, + amount.mul(-1) + ); + } + }); + } + const mrkdwn = `Root hash: ${tree.getHexRoot()}\nLeaf: ${leaf.leafId}\nChain: ${leaf.chainId}`; + if (submitExecution) { this.clients.multiCallerClient.enqueueTransaction({ contract: this.clients.hubPoolClient.hubPool, chainId: hubPoolChainId, @@ -1668,9 +1743,9 @@ export class Dataworker { leaf.runningBalances, leaf.leafId, leaf.l1Tokens, - proof, + tree.getHexProof(leaf), ], - message: "Executed PoolRebalanceLeaf 🌿!", + message: `Executed PoolRebalanceLeaf for chain ${leaf.chainId} 🌿!`, mrkdwn, unpermissioned: true, // If simulating execution of leaves for non-mainnet chains, can fail as it may require funds to be returned @@ -1678,22 +1753,27 @@ export class Dataworker { canFailInSimulation: leaf.chainId !== hubPoolChainId, }); } else { - this.logger.debug({ at: "Dataworker#executePoolRebalanceLeaves", message: mrkdwn }); + this.logger.debug({ at: "Dataworker#_executePoolRebalanceLeaves", message: mrkdwn }); } }); - return fundedLeaves.length; + + return executableLeaves.length; } async _updateExchangeRatesBeforeExecutingHubChainLeaves( + balanceAllocator: BalanceAllocator, poolRebalanceLeaf: Pick, submitExecution: boolean ): Promise> { const hubPool = this.clients.hubPoolClient.hubPool; const chainId = this.clients.hubPoolClient.chainId; - const updatedL1Tokens: Record = {}; + const updatedLiquidReserves: Record = {}; const { netSendAmounts, l1Tokens } = poolRebalanceLeaf; await sdk.utils.forEachAsync(l1Tokens, async (l1Token, idx) => { + const currentLiquidReserves = this.clients.hubPoolClient.getLpTokenInfoForL1Token(l1Token)?.liquidReserves; + updatedLiquidReserves[l1Token] = currentLiquidReserves; + assert(currentLiquidReserves !== undefined && currentLiquidReserves.gte(0), "Liquid reserves should be >= 0"); const tokenSymbol = this.clients.hubPoolClient.getTokenInfo(chainId, l1Token)?.symbol; // If netSendAmounts is negative, there is no need to update this exchange rate. @@ -1701,17 +1781,6 @@ export class Dataworker { return; } - const multicallInput = [ - hubPool.interface.encodeFunctionData("pooledTokens", [l1Token]), - hubPool.interface.encodeFunctionData("sync", [l1Token]), - hubPool.interface.encodeFunctionData("pooledTokens", [l1Token]), - ]; - const multicallOutput = await hubPool.callStatic.multicall(multicallInput); - const currentPooledTokens = hubPool.interface.decodeFunctionResult("pooledTokens", multicallOutput[0]); - const updatedPooledTokens = hubPool.interface.decodeFunctionResult("pooledTokens", multicallOutput[2]); - const currentLiquidReserves = currentPooledTokens.liquidReserves; - const updatedLiquidReserves = updatedPooledTokens.liquidReserves; - // If current liquid reserves can cover the netSendAmount, then there is no need to update the exchange rate. if (currentLiquidReserves.gte(netSendAmounts[idx])) { this.logger.debug({ @@ -1721,45 +1790,47 @@ export class Dataworker { netSendAmount: netSendAmounts[idx], l1Token, }); + updatedLiquidReserves[l1Token] = currentLiquidReserves.sub(netSendAmounts[idx]); return; } - // If updated liquid reserves are not enough to cover the payment, then send a warning that - // we're short on funds. 
- if (updatedLiquidReserves.lt(netSendAmounts[idx])) { - this.logger.error({ + // @dev: post-sync liquid reserves should be equal to ERC20 balanceOf the HubPool. + const postSyncLiquidReserves = await balanceAllocator.getBalanceSubUsed(chainId, l1Token, hubPool.address); + + // If updated liquid reserves are not enough to cover the payment, then send an error log that + // we're short on funds. Otherwise, enqueue a sync() call and then update the availableLiquidReserves. + if (postSyncLiquidReserves.lt(netSendAmounts[idx])) { + this.logger.warn({ at: "Dataworker#_updateExchangeRatesBeforeExecutingHubChainLeaves", - message: `Not enough funds to execute pool rebalance leaf on HubPool for token: ${tokenSymbol}`, - poolRebalanceLeaf, + message: `Not enough funds to execute Ethereum pool rebalance leaf on HubPool for token: ${tokenSymbol}`, netSendAmount: netSendAmounts[idx], - currentPooledTokens, - updatedPooledTokens, + currentLiquidReserves, + postSyncLiquidReserves, }); - return; - } - - this.logger.debug({ - at: "Dataworker#_updateExchangeRatesBeforeExecutingHubChainLeaves", - message: `Updating exchange rate update for ${tokenSymbol} because we need to update the liquid reserves of the contract to execute the hubChain poolRebalanceLeaf.`, - poolRebalanceLeaf, - netSendAmount: netSendAmounts[idx], - currentPooledTokens, - updatedPooledTokens, - }); - updatedL1Tokens[l1Token] = updatedPooledTokens.liquidReserves; - if (submitExecution) { - this.clients.multiCallerClient.enqueueTransaction({ - contract: hubPool, - chainId, - method: "exchangeRateCurrent", - args: [l1Token], - message: "Updated exchange rate ♻️!", - mrkdwn: `Updated exchange rate for l1 token: ${tokenSymbol}`, - unpermissioned: true, + } else { + // At this point, we can assume that the liquid reserves increased post-sync so we'll enqueue an update. + updatedLiquidReserves[l1Token] = postSyncLiquidReserves.sub(netSendAmounts[idx]); + this.logger.debug({ + at: "Dataworker#_updateExchangeRatesBeforeExecutingHubChainLeaves", + message: `Updating exchange rate for ${tokenSymbol} because we need to update the liquid reserves of the contract to execute the hubChain poolRebalanceLeaf.`, + netSendAmount: netSendAmounts[idx], + currentLiquidReserves, + postSyncLiquidReserves, }); + if (submitExecution) { + this.clients.multiCallerClient.enqueueTransaction({ + contract: hubPool, + chainId, + method: "exchangeRateCurrent", + args: [l1Token], + message: "Updated exchange rate ♻️!", + mrkdwn: `Updated exchange rate for l1 token: ${tokenSymbol}`, + unpermissioned: true, + }); + } } }); - return updatedL1Tokens; + return updatedLiquidReserves; } async _updateExchangeRatesBeforeExecutingNonHubChainLeaves( @@ -1772,81 +1843,103 @@ export class Dataworker { const hubPool = this.clients.hubPoolClient.hubPool; const hubPoolChainId = this.clients.hubPoolClient.chainId; + const aggregateNetSendAmounts: Record = {}; + await sdkUtils.forEachAsync(poolRebalanceLeaves, async (leaf) => { await sdkUtils.forEachAsync(leaf.l1Tokens, async (l1Token, idx) => { - const tokenSymbol = this.clients.hubPoolClient.getTokenInfo(hubPoolChainId, l1Token)?.symbol; + aggregateNetSendAmounts[l1Token] ??= bnZero; - if (updatedL1Tokens.has(l1Token)) { - return; - } // If leaf's netSendAmount is negative, then we don't need to updateExchangeRates since the Hub will not // have a liquidity constraint because it won't be sending any tokens. 
if (leaf.netSendAmounts[idx].lte(0)) { return; } - // The "used" balance kept in the BalanceAllocator should have adjusted for the netSendAmounts and relayer refund leaf - // executions above. Therefore, check if the current liquidReserves is less than the pool rebalance leaf's netSendAmount - // and the virtual hubPoolBalance would be enough to execute it. If so, then add an update exchange rate call to make sure that - // the HubPool becomes "aware" of its inflow following the relayre refund leaf execution. - let currHubPoolLiquidReserves = latestLiquidReserves[l1Token]; - if (!currHubPoolLiquidReserves) { - // @dev If there aren't liquid reserves for this token then set them to max value so we won't update them. - currHubPoolLiquidReserves = this.clients.hubPoolClient.getLpTokenInfoForL1Token(l1Token).liquidReserves; - } - assert(currHubPoolLiquidReserves !== undefined); - // We only need to update the exchange rate in the case where tokens are returned to the HubPool increasing - // its balance enough that it can execute a pool rebalance leaf it otherwise would not be able to. - // This would only happen if the starting hub pool balance is below the net send amount. If it started - // above, then the dataworker would not purposefully send tokens out of it to fulfill the Ethereum - // PoolRebalanceLeaf and then return tokens to it to execute another chain's PoolRebalanceLeaf. - if (currHubPoolLiquidReserves.gte(leaf.netSendAmounts[idx])) { - this.logger.debug({ - at: "Dataworker#_updateExchangeRatesBeforeExecutingNonHubChainLeaves", - message: `Skipping exchange rate update for ${tokenSymbol} because current liquid reserves > netSendAmount for chain ${leaf.chainId}`, - l2ChainId: leaf.chainId, - currHubPoolLiquidReserves, - netSendAmount: leaf.netSendAmounts[idx], - l1Token, - }); - return; - } + aggregateNetSendAmounts[l1Token] = aggregateNetSendAmounts[l1Token].add(leaf.netSendAmounts[idx]); + }); + }); - // @dev: Virtual balance = post-sync liquid reserves + any used balance. - const multicallInput = [ - hubPool.interface.encodeFunctionData("sync", [l1Token]), - hubPool.interface.encodeFunctionData("pooledTokens", [l1Token]), - ]; - const multicallOutput = await hubPool.callStatic.multicall(multicallInput); - const updatedPooledTokens = hubPool.interface.decodeFunctionResult("pooledTokens", multicallOutput[1]); - const updatedLiquidReserves = updatedPooledTokens.liquidReserves; - const virtualHubPoolBalance = updatedLiquidReserves.sub( - balanceAllocator.getUsed(hubPoolChainId, l1Token, hubPool.address) - ); + // Now, go through each L1 token and see if we need to update the exchange rate for it. + await sdkUtils.forEachAsync(Object.keys(aggregateNetSendAmounts), async (l1Token) => { + const currHubPoolLiquidReserves = + latestLiquidReserves[l1Token] ?? this.clients.hubPoolClient.getLpTokenInfoForL1Token(l1Token)?.liquidReserves; + assert( + currHubPoolLiquidReserves !== undefined && currHubPoolLiquidReserves.gte(0), + "Liquid reserves should be >= 0" + ); - // If the virtual balance is still too low to execute the pool leaf, then log an error that this will - // pool rebalance leaf execution will fail. 
- if (virtualHubPoolBalance.lt(leaf.netSendAmounts[idx])) { - this.logger.error({ - at: "Dataworker#executePoolRebalanceLeaves", - message: "Executing pool rebalance leaf on HubPool will fail due to lack of funds to send.", - leaf: leaf, - l1Token, - netSendAmount: leaf.netSendAmounts[idx], - updatedLiquidReserves, - virtualHubPoolBalance, - }); - return; - } + const requiredNetSendAmountForL1Token = aggregateNetSendAmounts[l1Token]; + // If netSendAmounts is 0, there is no need to update this exchange rate. + assert(requiredNetSendAmountForL1Token.gte(0), "Aggregate net send amount should be >= 0"); + if (requiredNetSendAmountForL1Token.eq(0)) { + return; + } + + const tokenSymbol = this.clients.hubPoolClient.getTokenInfo(hubPoolChainId, l1Token)?.symbol; + if (currHubPoolLiquidReserves.gte(requiredNetSendAmountForL1Token)) { this.logger.debug({ - at: "Dataworker#executePoolRebalanceLeaves", - message: `Relayer refund leaf will return enough funds to HubPool to execute PoolRebalanceLeaf, updating exchange rate for ${tokenSymbol}`, + at: "Dataworker#_updateExchangeRatesBeforeExecutingNonHubChainLeaves", + message: `Skipping exchange rate update for ${tokenSymbol} because current liquid reserves > required netSendAmount for non-hubChain pool leaves`, + leavesWithNetSendAmountRequirementsFromHubPoolLiquidReserves: Object.fromEntries( + poolRebalanceLeaves + .filter((leaf) => { + const l1TokenIndex = leaf.l1Tokens.indexOf(l1Token); + if (l1TokenIndex === -1) { + return false; + } + const netSendAmount = leaf.netSendAmounts[l1TokenIndex]; + return netSendAmount.gt(0); + }) + .map((leaf) => [leaf.chainId, leaf.netSendAmounts[leaf.l1Tokens.indexOf(l1Token)]]) + ), + currHubPoolLiquidReserves, + requiredNetSendAmountForL1Token, + l1Token, + }); + return; + } + + // Current liquid reserves are insufficient to execute aggregate net send amount for this token so + // look at the updated liquid reserves post-sync. This will be equal the ERC20 balanceOf the hub pool + // including any netSendAmounts used in a prior pool leaf execution. + const updatedLiquidReserves = await balanceAllocator.getBalanceSubUsed(hubPoolChainId, l1Token, hubPool.address); + + // If the post-sync balance is still too low to execute all the pool leaves, then log an error + if (updatedLiquidReserves.lt(requiredNetSendAmountForL1Token)) { + this.logger.warn({ + at: "Dataworker#_updateExchangeRatesBeforeExecutingNonHubChainLeaves", + message: `Not enough funds to execute ALL non-Ethereum pool rebalance leaf on HubPool for token: ${tokenSymbol}, updating exchange rate anyways to try to execute as many leaves as possible`, + l1Token, + requiredNetSendAmountForL1Token, + currHubPoolLiquidReserves, + updatedLiquidReserves, + }); + } else { + this.logger.debug({ + at: "Dataworker#_updateExchangeRatesBeforeExecutingNonHubChainLeaves", + message: `Post-sync liquid reserves are sufficient to execute PoolRebalanceLeaf, updating exchange rate for ${tokenSymbol}`, + l1Token, + requiredNetSendAmountForL1Token, + currHubPoolLiquidReserves, updatedLiquidReserves, - virtualHubPoolBalance, - netSendAmount: leaf.netSendAmounts[idx], - leaf, }); + } + + // We don't know yet which leaves we can execute so we'll update the exchange rate for this token even if + // some leaves might not be executable. + // TODO: Be more precise about whether updating this l1 token is worth it. 
For example, if we update this l1 + // token and its reserves increase, depending on which other tokens are contained in the pool rebalance leaf + // with this token, increasing this token's reserves might not help us execute those leaves. + if (updatedLiquidReserves.gt(currHubPoolLiquidReserves)) { updatedL1Tokens.add(l1Token); - }); + } else { + this.logger.debug({ + at: "Dataworker#_updateExchangeRatesBeforeExecutingNonHubChainLeaves", + message: `Skipping exchange rate update for ${tokenSymbol} because liquid reserves would not increase`, + currHubPoolLiquidReserves, + updatedLiquidReserves, + l1Token, + }); + } }); // Submit executions at the end since the above double loop runs in parallel and we don't want to submit @@ -2008,26 +2101,19 @@ export class Dataworker { const blockNumberRanges = getImpliedBundleBlockRanges(hubPoolClient, configStoreClient, matchingRootBundle); const mainnetBlockRanges = blockNumberRanges[0]; const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(mainnetBlockRanges[0]); - if ( - Object.keys(earliestBlocksInSpokePoolClients).length > 0 && - (await blockRangesAreInvalidForSpokeClients( - spokePoolClients, - blockNumberRanges, - chainIds, - earliestBlocksInSpokePoolClients, - this.isV3(mainnetBlockRanges[0]) - )) - ) { + const invalidBlockRanges = await this._validateBlockRanges( + spokePoolClients, + blockNumberRanges, + chainIds, + earliestBlocksInSpokePoolClients, + this.isV3(mainnetBlockRanges[0]) + ); + if (invalidBlockRanges.length > 0) { this.logger.warn({ at: "Dataworke#executeRelayerRefundLeaves", message: "Cannot validate bundle with insufficient event data. Set a larger DATAWORKER_FAST_LOOKBACK_COUNT", - chainId, - rootBundleRanges: blockNumberRanges, - availableSpokePoolClients: Object.keys(spokePoolClients), - earliestBlocksInSpokePoolClients, - spokeClientsEventSearchConfigs: Object.fromEntries( - Object.entries(spokePoolClients).map(([chainId, client]) => [chainId, client.eventSearchConfig]) - ), + invalidBlockRanges, + bundleTxn: matchingRootBundle.transactionHash, }); continue; } @@ -2163,15 +2249,27 @@ export class Dataworker { const success = await balanceAllocator.requestBalanceAllocations(balanceRequestsToQuery); if (!success) { this.logger.warn({ - at: "Dataworker#executeRelayerRefundLeaves", - message: "Not executing relayer refund leaf on SpokePool due to lack of funds.", + at: "Dataworker#_executeRelayerRefundLeaves", + message: `Not executing relayer refund leaf on chain ${leaf.chainId} due to lack of spoke or msg.sender funds for token ${l1TokenInfo?.symbol}`, root: relayerRefundTree.getHexRoot(), bundle: rootBundleId, leafId: leaf.leafId, - token: l1TokenInfo?.symbol, - chainId: leaf.chainId, amountToReturn: leaf.amountToReturn, - refunds: leaf.refundAmounts, + totalRefundAmount: leaf.refundAmounts.reduce((acc, curr) => acc.add(curr), BigNumber.from(0)), + spokeBalance: await this._getSpokeBalanceForL2Tokens( + balanceAllocator, + leaf.chainId, + leaf.l2TokenAddress, + client.spokePool.address + ), + requiredEthValue: valueToPassViaPayable, + senderEthValue: + valueToPassViaPayable && + (await balanceAllocator.getBalanceSubUsed( + leaf.chainId, + ZERO_ADDRESS, + await client.spokePool.signer.getAddress() + )), }); } else { // If mainnet leaf, then allocate balance to the HubPool since it will be atomically transferred. 
@@ -2214,7 +2312,7 @@ export class Dataworker { canFailInSimulation: leaf.chainId === this.clients.hubPoolClient.chainId, }); } else { - this.logger.debug({ at: "Dataworker#executeRelayerRefundLeaves", message: mrkdwn }); + this.logger.debug({ at: "Dataworker#_executeRelayerRefundLeaves", message: mrkdwn }); } }); } @@ -2281,6 +2379,19 @@ export class Dataworker { } } + _getSpokeBalanceForL2Tokens( + balanceAllocator: BalanceAllocator, + chainId: number, + token: string, + holder: string + ): Promise { + return sdkUtils.reduceAsync( + l2TokensToCountTowardsSpokePoolLeafExecutionCapital(token, chainId), + async (acc, token) => acc.add(await balanceAllocator.getBalanceSubUsed(chainId, token, holder)), + bnZero + ); + } + _getPoolRebalanceRoot( blockRangesForChains: number[][], latestMainnetBlock: number, @@ -2323,21 +2434,53 @@ export class Dataworker { return _.cloneDeep(this.rootCache[key]); } - _getRequiredEthForArbitrumPoolRebalanceLeaf(leaf: PoolRebalanceLeaf): BigNumber { - // For arbitrum, the bot needs enough ETH to pay for each L1 -> L2 message. + async _getRequiredEthForOrbitPoolRebalanceLeaf(leaf: PoolRebalanceLeaf): Promise<{ + amount: BigNumber; + token: string; + holder: string; + }> { + // TODO: Make this code more dynamic in the future. For now, hard code custom gas token fees. + let relayMessageFee: BigNumber; + let token: string; + let holder: string; + if (leaf.chainId === CHAIN_IDs.ALEPH_ZERO) { + // Unlike when handling native ETH, the monitor bot does NOT support sending arbitrary ERC20 tokens to any other + // EOA, so if we're short a custom gas token like AZERO, then we're going to have to keep sending over token + // amounts to the DonationBox contract. Therefore, we'll multiply the final amount by 10 to ensure we don't incur + // a transfer() gas cost on every single pool rebalance leaf execution involving this arbitrum orbit chain. + const { amountWei, feePayer, feeToken, amountMultipleToFund } = + ARBITRUM_ORBIT_L1L2_MESSAGE_FEE_DATA[CHAIN_IDs.ALEPH_ZERO]; + relayMessageFee = toBNWei(amountWei).mul(amountMultipleToFund); + token = feeToken; + holder = feePayer; + } else { + // For now, assume arbitrum message fees are the same for all non-custom gas token chains. This obviously needs + // to be changed if we add support for an orbit chains where we pay message fees in ETH but they are different + // parameters than for Arbitrum mainnet. + const { amountWei, amountMultipleToFund } = ARBITRUM_ORBIT_L1L2_MESSAGE_FEE_DATA[CHAIN_IDs.ARBITRUM]; + relayMessageFee = toBNWei(amountWei).mul(amountMultipleToFund); + token = ZERO_ADDRESS; + holder = this.clients.hubPoolClient.hubPool.address; + } + + // For orbit chains, the bot needs enough ETH to pay for each L1 -> L2 message. // The following executions trigger an L1 -> L2 message: - // 1. The first arbitrum leaf for a particular set of roots. This means the roots must be sent and is + // 1. The first orbit leaf for a particular set of roots. This means the roots must be sent and is // signified by groupIndex === 0. // 2. Any netSendAmount > 0 triggers an L1 -> L2 token send, which costs 0.02 ETH. let requiredAmount = leaf.netSendAmounts.reduce( - (acc, curr) => (curr.gt(0) ? acc.add(toBNWei("0.02")) : acc), + (acc, curr) => (curr.gt(0) ? 
acc.add(relayMessageFee) : acc), BigNumber.from(0) ); if (leaf.groupIndex === 0) { - requiredAmount = requiredAmount.add(toBNWei("0.02")); + requiredAmount = requiredAmount.add(relayMessageFee); } - return requiredAmount; + return { + amount: requiredAmount, + token, + holder, + }; } /** @@ -2406,4 +2549,24 @@ export class Dataworker { this.clients.configStoreClient.getEnabledChains(mainnetBundleStartBlock) ); } + + async _validateBlockRanges( + spokePoolClients: SpokePoolClientsByChain, + blockRanges: number[][], + chainIds: number[], + earliestBlocksInSpokePoolClients: { [chainId: number]: number }, + isV3: boolean + ): Promise { + return await blockRangesAreInvalidForSpokeClients( + spokePoolClients, + blockRanges, + chainIds, + earliestBlocksInSpokePoolClients, + isV3 + ); + } + + _prettifyBundleBlockRanges(chainIds: number[], blockRanges: number[][]): Record { + return Object.fromEntries(chainIds.map((chainId, i) => [chainId, blockRanges[i]])); + } } diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts index 3352c5a30..8a592a4e2 100644 --- a/src/dataworker/DataworkerUtils.ts +++ b/src/dataworker/DataworkerUtils.ts @@ -42,13 +42,14 @@ import { any } from "superstruct"; // TODO: Move to SDK since this implements UMIP logic about validating block ranges. // Return true if we won't be able to construct a root bundle for the bundle block ranges ("blockRanges") because // the bundle wants to look up data for events that weren't in the spoke pool client's search range. +export type InvalidBlockRange = { chainId: number; reason: string }; export async function blockRangesAreInvalidForSpokeClients( spokePoolClients: Record, blockRanges: number[][], chainIdListForBundleEvaluationBlockNumbers: number[], earliestValidBundleStartBlock: { [chainId: number]: number }, isV3 = false -): Promise { +): Promise { assert(blockRanges.length === chainIdListForBundleEvaluationBlockNumbers.length); let endBlockTimestamps: { [chainId: number]: number } | undefined; if (isV3) { @@ -60,68 +61,94 @@ export async function blockRangesAreInvalidForSpokeClients( // There should be a spoke pool client instantiated for every bundle timestamp. assert(!Object.keys(endBlockTimestamps).some((chainId) => !isDefined(spokePoolClients[chainId]))); } - return utils.someAsync(blockRanges, async ([start, end], index) => { - const chainId = chainIdListForBundleEvaluationBlockNumbers[index]; - // If block range is 0 then chain is disabled, we don't need to query events for this chain. - if (isNaN(end) || isNaN(start)) { - return true; - } - if (start === end) { - return false; - } - const spokePoolClient = spokePoolClients[chainId]; - - // If spoke pool client doesn't exist for enabled chain then we clearly cannot query events for this chain. - if (spokePoolClient === undefined) { - return true; - } + // Return an undefined object if the block ranges are valid + return ( + await utils.mapAsync(blockRanges, async ([start, end], index): Promise => { + const chainId = chainIdListForBundleEvaluationBlockNumbers[index]; + if (isNaN(end) || isNaN(start)) { + return { + reason: `block range contains undefined block for: [isNaN(start): ${isNaN(start)}, isNaN(end): ${isNaN( + end + )}]`, + chainId, + }; + } + if (start === end) { + // If block range is 0 then chain is disabled, we don't need to query events for this chain. 
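+        // A valid (or skippable) range yields undefined here; only invalid ranges produce a
+        // { chainId, reason } object.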
+ return undefined; + } - const clientLastBlockQueried = spokePoolClient.latestBlockSearched; + const spokePoolClient = spokePoolClients[chainId]; - const earliestValidBundleStartBlockForChain = - earliestValidBundleStartBlock[chainId] ?? spokePoolClient.deploymentBlock; + // If spoke pool client doesn't exist for enabled chain then we clearly cannot query events for this chain. + if (spokePoolClient === undefined) { + return { + reason: "spoke pool client undefined", + chainId, + }; + } - // If range start block is less than the earliest spoke pool client we can validate or the range end block - // is greater than the latest client end block, then ranges are invalid. - // Note: Math.max the from block with the registration block of the spoke pool to handle the edge case for the first - // bundle that set its start blocks equal 0. - const bundleRangeFromBlock = Math.max(spokePoolClient.deploymentBlock, start); - if (bundleRangeFromBlock < earliestValidBundleStartBlockForChain || end > clientLastBlockQueried) { - return true; - } + const clientLastBlockQueried = spokePoolClient.latestBlockSearched; + + const earliestValidBundleStartBlockForChain = + earliestValidBundleStartBlock?.[chainId] ?? spokePoolClient.deploymentBlock; + + // If range start block is less than the earliest spoke pool client we can validate or the range end block + // is greater than the latest client end block, then ranges are invalid. + // Note: Math.max the from block with the registration block of the spoke pool to handle the edge case for the first + // bundle that set its start blocks equal 0. + const bundleRangeFromBlock = Math.max(spokePoolClient.deploymentBlock, start); + const bundleRangeFromBlockTooEarly = bundleRangeFromBlock < earliestValidBundleStartBlockForChain; + const endGreaterThanClientLastBlockQueried = end > clientLastBlockQueried; + if (bundleRangeFromBlockTooEarly || endGreaterThanClientLastBlockQueried) { + return { + reason: `${ + bundleRangeFromBlockTooEarly + ? `bundleRangeFromBlock ${bundleRangeFromBlock} < earliestValidBundleStartBlockForChain ${earliestValidBundleStartBlockForChain}` + : `end ${end} > clientLastBlockQueried ${clientLastBlockQueried}` + }`, + chainId, + }; + } - if (endBlockTimestamps !== undefined) { - const maxFillDeadlineBufferInBlockRange = await spokePoolClient.getMaxFillDeadlineInRange( - bundleRangeFromBlock, - end - ); - // Skip this check if the spokePoolClient.fromBlock is less than or equal to the spokePool deployment block. - // In this case, we have all the information for this SpokePool possible so there are no older deposits - // that might have expired that we might miss. - const conservativeBundleFrequencySeconds = Number( - process.env.CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS ?? CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS - ); - if ( - spokePoolClient.eventSearchConfig.fromBlock > spokePoolClient.deploymentBlock && - // @dev The maximum lookback window we need to evaluate expired deposits is the max fill deadline buffer, - // which captures all deposits that newly expired, plus the bundle time (e.g. 1 hour) to account for the - // maximum time it takes for a newly expired deposit to be included in a bundle. A conservative value for - // this bundle time is 3 hours. This `conservativeBundleFrequencySeconds` buffer also ensures that all deposits - // that are technically "expired", but have fills in the bundle, are also included. This can happen if a fill - // is sent pretty late into the deposit's expiry period. 
- endBlockTimestamps[chainId] - spokePoolClient.getOldestTime() < - maxFillDeadlineBufferInBlockRange + conservativeBundleFrequencySeconds - ) { - return true; + if (endBlockTimestamps !== undefined) { + const maxFillDeadlineBufferInBlockRange = await spokePoolClient.getMaxFillDeadlineInRange( + bundleRangeFromBlock, + end + ); + // Skip this check if the spokePoolClient.fromBlock is less than or equal to the spokePool deployment block. + // In this case, we have all the information for this SpokePool possible so there are no older deposits + // that might have expired that we might miss. + const conservativeBundleFrequencySeconds = Number( + process.env.CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS ?? CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS + ); + if ( + spokePoolClient.eventSearchConfig.fromBlock > spokePoolClient.deploymentBlock && + // @dev The maximum lookback window we need to evaluate expired deposits is the max fill deadline buffer, + // which captures all deposits that newly expired, plus the bundle time (e.g. 1 hour) to account for the + // maximum time it takes for a newly expired deposit to be included in a bundle. A conservative value for + // this bundle time is 3 hours. This `conservativeBundleFrequencySeconds` buffer also ensures that all deposits + // that are technically "expired", but have fills in the bundle, are also included. This can happen if a fill + // is sent pretty late into the deposit's expiry period. + endBlockTimestamps[chainId] - spokePoolClient.getOldestTime() < + maxFillDeadlineBufferInBlockRange + conservativeBundleFrequencySeconds + ) { + return { + reason: `cannot evaluate all possible expired deposits; endBlockTimestamp ${ + endBlockTimestamps[chainId] + } - spokePoolClient.getOldestTime ${spokePoolClient.getOldestTime()} < maxFillDeadlineBufferInBlockRange ${maxFillDeadlineBufferInBlockRange} + conservativeBundleFrequencySeconds ${conservativeBundleFrequencySeconds}`, + chainId, + }; + } } - } - // We must now assume that all newly expired deposits at the time of the bundle end blocks are contained within - // the spoke pool client's memory. + // We must now assume that all newly expired deposits at the time of the bundle end blocks are contained within + // the spoke pool client's memory. - // If we get to here, block ranges are valid, return false. - return false; - }); + // If we get to here, block ranges are valid, return false. 
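+      // Concretely: return undefined for this chain so it is dropped by the isDefined filter below.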
+ return undefined; + }) + ).filter(isDefined); } export function _buildSlowRelayRoot(bundleSlowFillsV3: BundleSlowFills): { diff --git a/src/dataworker/index.ts b/src/dataworker/index.ts index 1c1bd646c..3a6f893ee 100644 --- a/src/dataworker/index.ts +++ b/src/dataworker/index.ts @@ -194,7 +194,11 @@ export async function runDataworker(_logger: winston.Logger, baseSigner: Signer) at: "Dataworker#index", message: "Exiting early due to dataworker function collision", proposalCollision, + proposedBundleDataDefined: isDefined(proposedBundleData), executorCollision, + poolRebalanceLeafExecutionCount, + unclaimedPoolRebalanceLeafCount: pendingProposal.unclaimedPoolRebalanceLeafCount, + challengePeriodNotPassed: pendingProposal.challengePeriodEndTimestamp > clients.hubPoolClient.currentTime, pendingProposal, }); } else { diff --git a/test/BalanceAllocator.ts b/test/BalanceAllocator.ts index 8645b1566..4383da472 100644 --- a/test/BalanceAllocator.ts +++ b/test/BalanceAllocator.ts @@ -54,6 +54,12 @@ describe("BalanceAllocator", async function () { expect(balanceAllocator.getUsed(1, testToken1, testAccount1)).to.equal(BigNumber.from(100)); }); + it("Returns balance sub used", async function () { + balanceAllocator.addUsed(1, testToken1, testAccount1, BigNumber.from(100)); + balanceAllocator.setMockBalances(1, testToken1, testAccount1, BigNumber.from(150)); + expect(await balanceAllocator.getBalanceSubUsed(1, testToken1, testAccount1)).to.equal(BigNumber.from(50)); + }); + it("Simple request", async function () { balanceAllocator.setMockBalances(1, testToken1, testAccount1, BigNumber.from(100)); expect(await balanceAllocator.requestBalanceAllocation(1, [testToken1], testAccount1, BigNumber.from(50))).to.be diff --git a/test/Dataworker.blockRangeUtils.ts b/test/Dataworker.blockRangeUtils.ts index 418828bc9..f97e199f3 100644 --- a/test/Dataworker.blockRangeUtils.ts +++ b/test/Dataworker.blockRangeUtils.ts @@ -5,7 +5,7 @@ import { setupDataworker } from "./fixtures/Dataworker.Fixture"; import { DataworkerClients } from "../src/dataworker/DataworkerClientHelper"; import { HubPoolClient, SpokePoolClient } from "../src/clients"; import { originChainId } from "./constants"; -import { blockRangesAreInvalidForSpokeClients } from "../src/dataworker/DataworkerUtils"; +import { blockRangesAreInvalidForSpokeClients, InvalidBlockRange } from "../src/dataworker/DataworkerUtils"; import { getDeployedBlockNumber } from "@across-protocol/contracts"; import { MockHubPoolClient, MockSpokePoolClient } from "./mocks"; import { getTimestampsForBundleEndBlocks } from "../src/utils/BlockUtils"; @@ -155,6 +155,32 @@ describe("Dataworker block range-related utility methods", async function () { // and getDeploymentBlockNumber should be changed to work in test environments. 
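+    // The assertions below exercise each InvalidBlockRange reason in turn: undefined spoke pool
+    // client, NaN range endpoints, end block beyond the client's last queried block, start block
+    // before the earliest valid bundle start block, and unevaluable expired deposits.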
const _spokePoolClients = { [chainId]: spokePoolClients[chainId] }; let chainIds = [chainId]; + let result: InvalidBlockRange[]; + + // Block ranges are invalid if any spoke pool client for a chain is undefined + result = await blockRangesAreInvalidForSpokeClients( + {}, + [[0, spokePoolClients[chainId].latestBlockSearched]], + chainIds, + {} + ); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainId); + expect(result[0].reason).to.contain("spoke pool client undefined"); + + // Block ranges are valid if the range = 0 + result = await blockRangesAreInvalidForSpokeClients(_spokePoolClients, [[0, 0]], chainIds, {}); + expect(result.length).to.equal(0); + + // Block ranges are invalid if a from or to block is undefined + result = await blockRangesAreInvalidForSpokeClients(_spokePoolClients, [[0, undefined]], chainIds, {}); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainId); + expect(result[0].reason).to.contain("isNaN(end)"); + result = await blockRangesAreInvalidForSpokeClients(_spokePoolClients, [[undefined, 0]], chainIds, {}); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainId); + expect(result[0].reason).to.contain("isNaN(start)"); // Look if bundle range from block is before the latest invalid // bundle start block. If so, then the range is invalid. @@ -169,6 +195,15 @@ describe("Dataworker block range-related utility methods", async function () { throw new Error(`Chain ${originChainId} SpokePoolClient has not been updated`); } + // Does not error if earliest block range object is empty: + result = await blockRangesAreInvalidForSpokeClients( + _spokePoolClients, + [[0, spokePoolClients[chainId].latestBlockSearched]], + chainIds, + {} + ); + expect(result.length).to.equal(0); + // latestInvalidBundleStartBlock is only used if its greater than the spoke pool deployment block, so in the // following tests, set latestInvalidBundleStartBlock > deployment blocks. @@ -178,81 +213,88 @@ describe("Dataworker block range-related utility methods", async function () { // Bundle block range fromBlocks are greater than // latest invalid bundle start blocks below and toBlocks are >= client's last block queried, return false meaning // that block ranges can be validated by spoke pool clients. - expect( - await blockRangesAreInvalidForSpokeClients( - _spokePoolClients, - [[mainnetDeploymentBlock + 3, spokePoolClients[chainId].latestBlockSearched]], - chainIds, - { [chainId]: mainnetDeploymentBlock + 2 } - ) - ).to.equal(false); + result = await blockRangesAreInvalidForSpokeClients( + _spokePoolClients, + [[mainnetDeploymentBlock + 3, spokePoolClients[chainId].latestBlockSearched]], + chainIds, + { [chainId]: mainnetDeploymentBlock + 2 } + ); + expect(result.length).to.equal(0); // Set block range toBlock > client's last block queried. Clients can no longer validate this block range. 
- expect( - await blockRangesAreInvalidForSpokeClients( - _spokePoolClients, - [[mainnetDeploymentBlock + 3, spokePoolClients[chainId].latestBlockSearched + 3]], - chainIds, - { [chainId]: mainnetDeploymentBlock + 2 } - ) - ).to.equal(true); + result = await blockRangesAreInvalidForSpokeClients( + _spokePoolClients, + [[mainnetDeploymentBlock + 3, spokePoolClients[chainId].latestBlockSearched + 3]], + chainIds, + { [chainId]: mainnetDeploymentBlock + 2 } + ); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainIds[0]); + expect(result[0].reason).to.contain("> clientLastBlockQueried"); // Bundle block range toBlocks is less than // latest invalid bundle start blocks below, so block ranges can't be validated by clients. - expect( - await blockRangesAreInvalidForSpokeClients( - _spokePoolClients, - [[mainnetDeploymentBlock + 1, spokePoolClients[chainId].latestBlockSearched]], - chainIds, - { [chainId]: mainnetDeploymentBlock + 2 } - ) - ).to.equal(true); + result = await blockRangesAreInvalidForSpokeClients( + _spokePoolClients, + [[mainnetDeploymentBlock + 1, spokePoolClients[chainId].latestBlockSearched]], + chainIds, + { [chainId]: mainnetDeploymentBlock + 2 } + ); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainIds[0]); + expect(result[0].reason).to.contain("< earliestValidBundleStartBlockForChain"); + // Works even if the condition is true for one chain. const optimismDeploymentBlock = getDeployedBlockNumber("SpokePool", 10); - expect( - await blockRangesAreInvalidForSpokeClients( - { [chainId]: spokePoolClients[chainId], [10]: spokePoolClients[originChainId] }, - [ - [mainnetDeploymentBlock + 1, spokePoolClients[chainId].latestBlockSearched], - [optimismDeploymentBlock + 3, spokePoolClients[originChainId].latestBlockSearched], - ], - [chainId, 10], - { [chainId]: mainnetDeploymentBlock + 2, [10]: optimismDeploymentBlock + 2 } - ) - ).to.equal(true); - expect( - await blockRangesAreInvalidForSpokeClients( - { [chainId]: spokePoolClients[chainId], [10]: spokePoolClients[originChainId] }, - [ - [mainnetDeploymentBlock + 3, spokePoolClients[chainId].latestBlockSearched], - [optimismDeploymentBlock + 3, spokePoolClients[originChainId].latestBlockSearched], - ], - [chainId, 10], - { [chainId]: mainnetDeploymentBlock + 2, [10]: optimismDeploymentBlock + 2 } - ) - ).to.equal(false); + result = await blockRangesAreInvalidForSpokeClients( + { [chainId]: spokePoolClients[chainId], [10]: spokePoolClients[originChainId] }, + [ + [mainnetDeploymentBlock + 1, spokePoolClients[chainId].latestBlockSearched], + [optimismDeploymentBlock + 3, spokePoolClients[originChainId].latestBlockSearched], + ], + [chainId, 10], + // hub chain start block is higher than block range from block passed in for hub chain above + { [chainId]: mainnetDeploymentBlock + 2, [10]: optimismDeploymentBlock + 2 } + ); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainIds[0]); + expect(result[0].reason).to.contain("< earliestValidBundleStartBlockForChain"); + + // Now both from blocks are above the earliest invalid start block. 
+ result = await blockRangesAreInvalidForSpokeClients( + { [chainId]: spokePoolClients[chainId], [10]: spokePoolClients[originChainId] }, + [ + [mainnetDeploymentBlock + 3, spokePoolClients[chainId].latestBlockSearched], + [optimismDeploymentBlock + 3, spokePoolClients[originChainId].latestBlockSearched], + ], + [chainId, 10], + { [chainId]: mainnetDeploymentBlock + 2, [10]: optimismDeploymentBlock + 2 } + ); + expect(result.length).to.equal(0); // On these tests, set block range fromBlock < deployment block. The deployment block is now compared against // the latest invalid start block. This means that the dataworker will refuse to validate any bundles with clients // that don't have early enough data for the first bundle, which started at the deployment block height. - expect( - await blockRangesAreInvalidForSpokeClients( - _spokePoolClients, - [[0, spokePoolClients[chainId].latestBlockSearched]], - chainIds, - { - [chainId]: mainnetDeploymentBlock + 2, - } - ) - ).to.equal(true); - expect( - await blockRangesAreInvalidForSpokeClients( - _spokePoolClients, - [[0, spokePoolClients[chainId].latestBlockSearched]], - chainIds, - { - [chainId]: mainnetDeploymentBlock - 1, - } - ) - ).to.equal(false); + result = await blockRangesAreInvalidForSpokeClients( + _spokePoolClients, + [[0, spokePoolClients[chainId].latestBlockSearched]], + chainIds, + { + [chainId]: mainnetDeploymentBlock + 2, + } + ); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainIds[0]); + expect(result[0].reason).to.contain("< earliestValidBundleStartBlockForChain"); + + // This time, the deployment block is higher than the earliestValidBundleStartBlockForChain so the range is valid. + result = await blockRangesAreInvalidForSpokeClients( + _spokePoolClients, + [[0, spokePoolClients[chainId].latestBlockSearched]], + chainIds, + { + [chainId]: mainnetDeploymentBlock - 1, + } + ); + expect(result.length).to.equal(0); // Override spoke pool client fill deadline buffer and oldest time searched and check that it returns false // buffer if not great enough to cover the time between the end block and the oldest time searched by @@ -288,30 +330,30 @@ describe("Dataworker block range-related utility methods", async function () { ); const fillDeadlineOverride = expectedTimeBetweenOldestAndEndBlockTimestamp + 1; mockSpokePoolClient.setMaxFillDeadlineOverride(fillDeadlineOverride); - expect( - await blockRangesAreInvalidForSpokeClients( - { [originChainId]: mockSpokePoolClient as SpokePoolClient }, - blockRanges, - chainIds, - { - [originChainId]: mainnetDeploymentBlock, - }, - true // isV3 - ) - ).to.equal(true); + result = await blockRangesAreInvalidForSpokeClients( + { [originChainId]: mockSpokePoolClient as SpokePoolClient }, + blockRanges, + chainIds, + { + [originChainId]: mainnetDeploymentBlock, + }, + true // isV3 + ); + expect(result.length).to.equal(1); + expect(result[0].chainId).to.equal(chainIds[0]); + expect(result[0].reason).to.contain("cannot evaluate all possible expired deposits"); // Should be valid if not V3 - expect( - await blockRangesAreInvalidForSpokeClients( - { [originChainId]: mockSpokePoolClient as SpokePoolClient }, - blockRanges, - chainIds, - { - [originChainId]: mainnetDeploymentBlock, - }, - false // isV3 - ) - ).to.equal(false); + result = await blockRangesAreInvalidForSpokeClients( + { [originChainId]: mockSpokePoolClient as SpokePoolClient }, + blockRanges, + chainIds, + { + [originChainId]: mainnetDeploymentBlock, + }, + false // isV3 + ); + 
expect(result.length).to.equal(0);

// Set the oldest time earlier such that the time between the end block and the oldest time now exceeds
// the fill deadline buffer plus the conservative bundle time. Block ranges should now be valid.
@@ -319,17 +361,16 @@ describe("Dataworker block range-related utility methods", async function () {
endBlockTimestamps[originChainId] - fillDeadlineOverride - CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS - 1;
assert(oldestBlockTimestampOverride > 0, "unrealistic oldest block timestamp");
mockSpokePoolClient.setOldestBlockTimestampOverride(oldestBlockTimestampOverride);
- expect(
- await blockRangesAreInvalidForSpokeClients(
- { [originChainId]: mockSpokePoolClient as SpokePoolClient },
- blockRanges,
- chainIds,
- {
- [originChainId]: mainnetDeploymentBlock,
- },
- true // isV3
- )
- ).to.equal(false);
+ result = await blockRangesAreInvalidForSpokeClients(
+ { [originChainId]: mockSpokePoolClient as SpokePoolClient },
+ blockRanges,
+ chainIds,
+ {
+ [originChainId]: mainnetDeploymentBlock,
+ },
+ true // isV3
+ );
+ expect(result.length).to.equal(0);

// Finally, reset the fill deadline buffer in the contracts and reset the override in the mock to test that
// the client reads the buffer from the contracts.
@@ -337,16 +378,15 @@ describe("Dataworker block range-related utility methods", async function () {
mockSpokePoolClient.setMaxFillDeadlineOverride(undefined);
fakeSpokePool.fillDeadlineBuffer.returns(expectedTimeBetweenOldestAndEndBlockTimestamp); // This is the same
// length as the time between the oldest time and the end block timestamp, so it should be a valid block range.
- expect(
- await blockRangesAreInvalidForSpokeClients(
- { [originChainId]: mockSpokePoolClient as SpokePoolClient },
- blockRanges,
- chainIds,
- {
- [originChainId]: mainnetDeploymentBlock,
- },
- true // isV3
- )
- ).to.equal(false);
+ result = await blockRangesAreInvalidForSpokeClients(
+ { [originChainId]: mockSpokePoolClient as SpokePoolClient },
+ blockRanges,
+ chainIds,
+ {
+ [originChainId]: mainnetDeploymentBlock,
+ },
+ true // isV3
+ );
+ expect(result.length).to.equal(0);
});
});
diff --git a/test/Dataworker.customSpokePoolClients.ts b/test/Dataworker.customSpokePoolClients.ts
index 73509bb58..5cca2f878 100644
--- a/test/Dataworker.customSpokePoolClients.ts
+++ b/test/Dataworker.customSpokePoolClients.ts
@@ -70,7 +70,7 @@ describe("Dataworker: Using SpokePool clients with short lookback windows", asyn
expect(lastSpyLogIncludes(spy, "Skipping dispute")).to.be.true;
expect(spyLogLevel(spy, -1)).to.equal("error");
expect(spyLogIncludes(spy, -2, "Cannot validate bundle with insufficient event data")).to.be.true;
- expect(spyLogLevel(spy, -2)).to.equal("debug");
+ expect(spyLogLevel(spy, -2)).to.equal("warn");
expect(multiCallerClient.transactionCount()).to.equal(0);
});
});
diff --git a/test/Dataworker.executePoolRebalanceUtils.ts b/test/Dataworker.executePoolRebalanceUtils.ts
new file mode 100644
index 000000000..601743e40
--- /dev/null
+++ b/test/Dataworker.executePoolRebalanceUtils.ts
@@ -0,0 +1,901 @@
+import { ConfigStoreClient, HubPoolClient, MultiCallerClient, SpokePoolClient } from "../src/clients";
+import {
+ BaseContract,
+ bnZero,
+ buildPoolRebalanceLeafTree,
+ CHAIN_IDs,
+ ERC20,
+ getCurrentTime,
+ toBNWei,
+ TOKEN_SYMBOLS_MAP,
+} from "../src/utils";
+import { MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, ZERO_ADDRESS } from "./constants";
+import { setupDataworker } from "./fixtures/Dataworker.Fixture";
+import {
+ Contract,
+ FakeContract,
+ ethers,
+ expect,
+ smock,
+ sinon,
+ randomAddress,
+ lastSpyLogIncludes,
+ assert,
+ lastSpyLogLevel,
+} from "./utils";
+
+// Tested
+import { BalanceAllocator } from "../src/clients/BalanceAllocator";
+import { ARBITRUM_ORBIT_L1L2_MESSAGE_FEE_DATA, spokePoolClientsToProviders } from "../src/common";
+import { Dataworker } from "../src/dataworker/Dataworker";
+import { MockHubPoolClient } from "./mocks/MockHubPoolClient";
+import { PoolRebalanceLeaf } from "../src/interfaces";
+
+// Set to Arbitrum to verify that the dataworker sends ETH to the HubPool for L1 --> Arbitrum message transfers.
+const destinationChainId = 42161;
+
+let erc20_1: Contract;
+let l1Token_1: Contract, hubPool: Contract;
+let spy: sinon.SinonSpy;
+
+let hubPoolClient: HubPoolClient;
+let dataworkerInstance: Dataworker, multiCallerClient: MultiCallerClient;
+let spokePoolClients: { [chainId: number]: SpokePoolClient };
+
+let updateAllClients: () => Promise<void>;
+
+describe("Dataworker: Utilities to execute pool rebalance leaves", async function () {
+ function getNewBalanceAllocator(): BalanceAllocator {
+ const providers = {
+ ...spokePoolClientsToProviders(spokePoolClients),
+ [hubPoolClient.chainId]: hubPool.provider,
+ };
+ return new BalanceAllocator(providers);
+ }
+ async function createMockHubPoolClient(): Promise<{
+ mockHubPoolClient: MockHubPoolClient;
+ fakeHubPool: FakeContract;
+ }> {
+ const fakeHubPool = await smock.fake(hubPool.interface, { address: hubPool.address });
+ const mockHubPoolClient = new MockHubPoolClient(
+ hubPoolClient.logger,
+ fakeHubPool as unknown as Contract,
+ hubPoolClient.configStoreClient as unknown as ConfigStoreClient
+ );
+ mockHubPoolClient.chainId = hubPoolClient.chainId;
+ mockHubPoolClient.setTokenInfoToReturn({ address: l1Token_1.address, decimals: 18, symbol: "TEST" });
+
+ // Sub in a dummy root bundle proposal for use in HubPoolClient update.
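+ // The HubPoolClient update reads getCurrentTime and rootBundleProposal via a single multicall, which is
+ // why both encoded results are stubbed below.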
+ const zero = "0x0000000000000000000000000000000000000000000000000000000000000000"; + fakeHubPool.multicall.returns([ + hubPool.interface.encodeFunctionResult("getCurrentTime", [getCurrentTime().toString()]), + hubPool.interface.encodeFunctionResult("rootBundleProposal", [zero, zero, zero, 0, ZERO_ADDRESS, 0, 0]), + ]); + return { + mockHubPoolClient, + fakeHubPool, + }; + } + beforeEach(async function () { + ({ + hubPool, + erc20_1, + hubPoolClient, + l1Token_1, + dataworkerInstance, + multiCallerClient, + updateAllClients, + spokePoolClients, + spy, + } = await setupDataworker( + ethers, + MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, + MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, + 0, + destinationChainId + )); + }); + describe("update exchange rates", function () { + let mockHubPoolClient: MockHubPoolClient, fakeHubPool: FakeContract; + beforeEach(async function () { + ({ mockHubPoolClient, fakeHubPool } = await createMockHubPoolClient()); + dataworkerInstance.clients.hubPoolClient = mockHubPoolClient; + + await updateAllClients(); + }); + describe("_updateExchangeRatesBeforeExecutingHubChainLeaves", function () { + let balanceAllocator: BalanceAllocator; + beforeEach(function () { + balanceAllocator = getNewBalanceAllocator(); + }); + it("ignores negative net send amounts", async function () { + const liquidReserves = toBNWei("1"); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + const latestReserves = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( + balanceAllocator, + { netSendAmounts: [toBNWei(-1)], l1Tokens: [l1Token_1.address] }, + true + ); + expect(latestReserves[l1Token_1.address]).to.equal(liquidReserves); + expect(multiCallerClient.transactionCount()).to.equal(0); + }); + it("considers positive net send amounts", async function () { + const currentReserves = toBNWei("2"); + const netSendAmount = toBNWei("1"); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, currentReserves); + + const latestReserves = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( + balanceAllocator, + { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address] }, + true + ); + expect(latestReserves[l1Token_1.address]).to.equal(currentReserves.sub(netSendAmount)); + expect(multiCallerClient.transactionCount()).to.equal(0); + expect(lastSpyLogIncludes(spy, "current liquid reserves > netSendAmount")).to.be.true; + }); + it("logs error if updated liquid reserves aren't enough to execute leaf", async function () { + const netSendAmount = toBNWei("1"); + const liquidReserves = netSendAmount.sub(1); + const postUpdateLiquidReserves = liquidReserves.sub(1); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + balanceAllocator.testSetBalance( + hubPoolClient.chainId, + l1Token_1.address, + hubPool.address, + postUpdateLiquidReserves + ); + + const latestReserves = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( + balanceAllocator, + { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address] }, + true + ); + expect(lastSpyLogLevel(spy)).to.equal("warn"); + expect(lastSpyLogIncludes(spy, "Not enough funds to execute Ethereum pool rebalance leaf")).to.be.true; + expect(latestReserves[l1Token_1.address]).to.equal(liquidReserves); + expect(multiCallerClient.transactionCount()).to.equal(0); + }); + it("submits update if updated liquid reserves cover execution of pool leaf", async function () { + const netSendAmount = toBNWei("1"); + const updatedLiquidReserves = netSendAmount.add(1); 
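+ // Current reserves start at zero; only the post-sync balance set on the allocator below covers the net
+ // send amount.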
+ mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, bnZero); + balanceAllocator.testSetBalance( + hubPoolClient.chainId, + l1Token_1.address, + hubPool.address, + updatedLiquidReserves + ); + + const latestReserves = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( + balanceAllocator, + { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address] }, + true + ); + expect(latestReserves[l1Token_1.address]).to.equal(updatedLiquidReserves.sub(netSendAmount)); + expect(multiCallerClient.transactionCount()).to.equal(1); + }); + }); + describe("_updateExchangeRatesBeforeExecutingNonHubChainLeaves", function () { + let balanceAllocator: BalanceAllocator; + beforeEach(function () { + balanceAllocator = getNewBalanceAllocator(); + }); + it("uses input liquid reserves value for a token if it exists", async function () { + // In this test the `liquidReserves` > `netSendAmount` but we pass in the + // `passedInLiquidReserves` value which is less than `liquidReserves`. So, the function + // should attempt an update. + const netSendAmount = toBNWei("1"); + const liquidReserves = toBNWei("3"); + const passedInLiquidReserves = toBNWei("0"); + + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + balanceAllocator.testSetBalance(hubPoolClient.chainId, l1Token_1.address, hubPool.address, netSendAmount); + + const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( + { + [l1Token_1.address]: passedInLiquidReserves, + }, + balanceAllocator, + [ + { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 1 }, + { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 10 }, + ], + true + ); + expect(updated.size).to.equal(1); + expect(updated.has(l1Token_1.address)).to.be.true; + const errorLogs = spy.getCalls().filter((call) => call.lastArg.level === "warn"); + expect(errorLogs.length).to.equal(1); + expect(errorLogs[0].lastArg.message).to.contain("Not enough funds to execute ALL non-Ethereum"); + }); + it("exits early if current liquid reserves are greater than all individual net send amount", async function () { + const netSendAmount = toBNWei("1"); + const liquidReserves = toBNWei("3"); + // For this test, do not pass in a liquid reserves object and force dataworker to load + // from HubPoolClient + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( + {}, + balanceAllocator, + [ + { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 1 }, + { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 10 }, + ], + true + ); + expect(updated.size).to.equal(0); + expect(multiCallerClient.transactionCount()).to.equal(0); + expect(lastSpyLogIncludes(spy, "Skipping exchange rate update")).to.be.true; + }); + it("exits early if total required net send amount is 0", async function () { + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, toBNWei("0")); + const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( + {}, + balanceAllocator, + [{ netSendAmounts: [toBNWei(0)], l1Tokens: [l1Token_1.address], chainId: 1 }], + true + ); + expect(updated.size).to.equal(0); + expect(multiCallerClient.transactionCount()).to.equal(0); + expect( + spy.getCalls().filter((call) => call.lastArg.message.includes("Skipping exchange rate update")).length + ).to.equal(0); + }); + it("groups aggregate net send amounts by 
L1 token", async function () { + // Total net send amount is 1 for each token but they are not summed together because they are different, + // so the liquid reserves of 1 for each individual token is enough. + const liquidReserves = toBNWei("1"); + const l1Token2 = erc20_1.address; + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + mockHubPoolClient.setLpTokenInfo(l1Token2, 0, liquidReserves); + const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( + {}, + balanceAllocator, + [ + { netSendAmounts: [liquidReserves], l1Tokens: [l1Token_1.address], chainId: 1 }, + { netSendAmounts: [liquidReserves], l1Tokens: [l1Token2], chainId: 10 }, + ], + true + ); + expect(updated.size).to.equal(0); + expect(multiCallerClient.transactionCount()).to.equal(0); + }); + it("Logs error if any l1 token's aggregate net send amount exceeds post-sync liquid reserves", async function () { + const liquidReserves = toBNWei("1"); + const postUpdateLiquidReserves = liquidReserves.mul(toBNWei("1.1")).div(toBNWei("1")); + const l1Token2 = erc20_1.address; + + // Current reserves are 1 which is insufficient to execute all leaves. + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + mockHubPoolClient.setLpTokenInfo(l1Token2, 0, liquidReserves); + + // Post-sync reserves are still insufficient to execute all leaves. + balanceAllocator.testSetBalance( + hubPoolClient.chainId, + l1Token_1.address, + hubPool.address, + postUpdateLiquidReserves + ); + balanceAllocator.testSetBalance(hubPoolClient.chainId, l1Token2, hubPool.address, postUpdateLiquidReserves); + + const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( + {}, + balanceAllocator, + [ + { netSendAmounts: [liquidReserves], l1Tokens: [l1Token_1.address], chainId: 1 }, + // This one exceeds the post-update liquid reserves for the l1 token. 
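+ // (A net send amount of 2 against post-update reserves of 1.1: the sync still increases reserves, so the
+ // update is submitted, but a warning is logged because the leaf still cannot be covered.)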
+ { netSendAmounts: [liquidReserves.mul(2)], l1Tokens: [l1Token2], chainId: 10 },
+ ],
+ true
+ );
+ expect(updated.size).to.equal(1);
+ expect(updated.has(l1Token2)).to.be.true;
+ const errorLogs = spy.getCalls().filter((call) => call.lastArg.level === "warn");
+ expect(errorLogs.length).to.equal(1);
+ expect(errorLogs[0].lastArg.message).to.contain("Not enough funds to execute ALL non-Ethereum");
+ });
+ it("Logs one error for each L1 token whose aggregate net send amount exceeds post-sync liquid reserves", async function () {
+ const liquidReserves = toBNWei("1");
+ const postUpdateLiquidReserves = liquidReserves.mul(toBNWei("1.1")).div(toBNWei("1"));
+ const l1Token2 = erc20_1.address;
+
+ mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves);
+ mockHubPoolClient.setLpTokenInfo(l1Token2, 0, liquidReserves);
+
+ balanceAllocator.testSetBalance(
+ hubPoolClient.chainId,
+ l1Token_1.address,
+ hubPool.address,
+ postUpdateLiquidReserves
+ );
+ balanceAllocator.testSetBalance(hubPoolClient.chainId, l1Token2, hubPool.address, postUpdateLiquidReserves);
+
+ const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves(
+ {},
+ balanceAllocator,
+ [
+ // Both net send amounts exceed the post-update liquid reserves
+ { netSendAmounts: [liquidReserves.mul(2)], l1Tokens: [l1Token_1.address], chainId: 1 },
+ { netSendAmounts: [liquidReserves.mul(2)], l1Tokens: [l1Token2], chainId: 10 },
+ ],
+ true
+ );
+ expect(updated.size).to.equal(2);
+ expect(updated.has(l1Token2)).to.be.true;
+ expect(updated.has(l1Token_1.address)).to.be.true;
+ const errorLogs = spy.getCalls().filter((call) => call.lastArg.level === "warn");
+ expect(errorLogs.length).to.equal(2);
+ });
+ it("ignores negative net send amounts", async function () {
+ const liquidReserves = toBNWei("2");
+ const postUpdateLiquidReserves = liquidReserves;
+
+ mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves);
+
+ balanceAllocator.testSetBalance(
+ hubPoolClient.chainId,
+ l1Token_1.address,
+ hubPool.address,
+ postUpdateLiquidReserves
+ );
+
+ await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves(
+ {},
+ balanceAllocator,
+ [
+ { netSendAmounts: [liquidReserves.mul(2)], l1Tokens: [l1Token_1.address], chainId: 1 },
+ // This negative net send amount doesn't offset the positive one, it just gets ignored.
+ { netSendAmounts: [liquidReserves.mul(-10)], l1Tokens: [l1Token_1.address], chainId: 10 },
+ ],
+ true
+ );
+ const errorLog = spy.getCalls().filter((call) => call.lastArg.level === "warn");
+ expect(errorLog.length).to.equal(1);
+ expect(errorLog[0].lastArg.message).to.contain("Not enough funds to execute ALL non-Ethereum");
+ });
+ it("submits update: liquid reserves post-sync are enough to execute leaf", async function () {
+ // Post-sync liquid reserves (20) cover all three leaves below (4 + 9 + 7 = 20).
+ const postUpdateLiquidReserves = toBNWei("20");
+
+ // Current reserves are insufficient to cover the three leaves:
+ mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, bnZero);
+
+ balanceAllocator.testSetBalance(
+ hubPoolClient.chainId,
+ l1Token_1.address,
+ hubPool.address,
+ postUpdateLiquidReserves
+ );
+
+ const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves(
+ {},
+ balanceAllocator,
+ // Each leaf's net send amount is individually less than the post-update liquid reserves, and their sum
+ // is covered by the post-update liquid reserves but not by the current (zero) liquid reserves.
+ // This should force the dataworker to submit an update.
+ [ + { netSendAmounts: [toBNWei("4")], l1Tokens: [l1Token_1.address], chainId: 1 }, + { netSendAmounts: [toBNWei("9")], l1Tokens: [l1Token_1.address], chainId: 10 }, + { netSendAmounts: [toBNWei("7")], l1Tokens: [l1Token_1.address], chainId: 137 }, + ], + true + ); + expect(updated.size).to.equal(1); + expect(updated.has(l1Token_1.address)).to.be.true; + expect(multiCallerClient.transactionCount()).to.equal(1); + }); + it("Logs error and does not submit update if liquid reserves post-sync are <= current liquid reserves and are insufficient to execute leaf", async function () { + const liquidReserves = toBNWei("1"); + const postUpdateLiquidReserves = liquidReserves.sub(1); + + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + + balanceAllocator.testSetBalance( + hubPoolClient.chainId, + l1Token_1.address, + hubPool.address, + postUpdateLiquidReserves + ); + + const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( + {}, + balanceAllocator, + [{ netSendAmounts: [liquidReserves.mul(2)], l1Tokens: [l1Token_1.address], chainId: 1 }], + true + ); + expect(updated.size).to.equal(0); + const errorLogs = spy.getCalls().filter((call) => call.lastArg.level === "warn"); + expect(errorLogs.length).to.equal(1); + expect(errorLogs[0].lastArg.message).to.contain("Not enough funds to execute ALL non-Ethereum"); + expect(lastSpyLogIncludes(spy, "liquid reserves would not increase")).to.be.true; + }); + }); + describe("_updateOldExchangeRates", function () { + it("exits early if we recently synced l1 token", async function () { + mockHubPoolClient.currentTime = 10_000; + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 10_000, toBNWei("0")); + await dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true); + expect(multiCallerClient.transactionCount()).to.equal(0); + }); + it("exits early if liquid reserves wouldn't increase for token post-update", async function () { + // Last update was at time 0, current time is at 1_000_000, so definitely past the update threshold + mockHubPoolClient.currentTime = 1_000_000; + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0); + + // Hardcode multicall output such that it looks like liquid reserves stayed the same + fakeHubPool.multicall.returns([ + hubPool.interface.encodeFunctionResult("pooledTokens", [ + ZERO_ADDRESS, // lp token address + true, // enabled + 0, // last lp fee update + bnZero, // utilized reserves + bnZero, // liquid reserves + bnZero, // unaccumulated fees + ]), + ZERO_ADDRESS, // sync output + hubPool.interface.encodeFunctionResult("pooledTokens", [ + ZERO_ADDRESS, // lp token address + true, // enabled + 0, // last lp fee update + bnZero, // utilized reserves + bnZero, // liquid reserves, equal to "current" reserves + bnZero, // unaccumulated fees + ]), + ]); + + await dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true); + expect(multiCallerClient.transactionCount()).to.equal(0); + + // Add test when liquid reserves decreases + fakeHubPool.multicall.returns([ + hubPool.interface.encodeFunctionResult("pooledTokens", [ + ZERO_ADDRESS, // lp token address + true, // enabled + 0, // last lp fee update + bnZero, // utilized reserves + toBNWei(1), // liquid reserves + bnZero, // unaccumulated fees + ]), + ZERO_ADDRESS, // sync output + hubPool.interface.encodeFunctionResult("pooledTokens", [ + ZERO_ADDRESS, // lp token address + true, // enabled + 0, // last lp fee update + bnZero, // utilized reserves + toBNWei(1).sub(1), // liquid reserves, less than 
"current" reserves + bnZero, // unaccumulated fees + ]), + ]); + + await dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true); + expect(multiCallerClient.transactionCount()).to.equal(0); + }); + it("submits update if liquid reserves would increase for token post-update and last update was old enough", async function () { + // Last update was at time 0, current time is at 1_000_000, so definitely past the update threshold + mockHubPoolClient.currentTime = 1_000_000; + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0); + + // Hardcode multicall output such that it looks like liquid reserves increased + fakeHubPool.multicall.returns([ + hubPool.interface.encodeFunctionResult("pooledTokens", [ + ZERO_ADDRESS, // lp token address + true, // enabled + 0, // last lp fee update + bnZero, // utilized reserves + toBNWei(1), // liquid reserves + bnZero, // unaccumulated fees + ]), + ZERO_ADDRESS, + hubPool.interface.encodeFunctionResult("pooledTokens", [ + ZERO_ADDRESS, // lp token address + true, // enabled + 0, // last lp fee update + bnZero, // utilized reserves + toBNWei(1).add(1), // liquid reserves, higher than "current" reserves + bnZero, // unaccumulated fees + ]), + ]); + + await dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true); + expect(multiCallerClient.transactionCount()).to.equal(1); + }); + }); + }); + describe("_executePoolRebalanceLeaves", async function () { + let token1: string, token2: string, balanceAllocator: BalanceAllocator; + beforeEach(function () { + token1 = randomAddress(); + token2 = randomAddress(); + balanceAllocator = getNewBalanceAllocator(); + balanceAllocator.testSetBalance(hubPoolClient.chainId, token1, hubPool.address, toBNWei("2")); + balanceAllocator.testSetBalance(hubPoolClient.chainId, token2, hubPool.address, toBNWei("2")); + }); + it("non-orbit leaf", async function () { + // Should just submit execution + const leaves: PoolRebalanceLeaf[] = [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + { + chainId: 137, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ]; + const result = await dataworkerInstance._executePoolRebalanceLeaves( + spokePoolClients, + leaves, + balanceAllocator, + buildPoolRebalanceLeafTree(leaves), + true + ); + expect(result).to.equal(2); + + expect(multiCallerClient.transactionCount()).to.equal(2); + const queuedTransactions = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(queuedTransactions[0].method).to.equal("executeRootBundle"); + expect(queuedTransactions[0].message).to.match(/chain 10/); + expect(queuedTransactions[1].method).to.equal("executeRootBundle"); + expect(queuedTransactions[1].message).to.match(/chain 137/); + }); + it("Subtracts virtual balance from hub pool", async function () { + // All chain leaves remove virtual balance from hub pool + const leaves: PoolRebalanceLeaf[] = [ + { + chainId: 42161, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("1")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [token1], + }, + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("1")], + runningBalances: [toBNWei("1")], + leafId: 0, 
+ l1Tokens: [token1], + }, + ]; + const result = await dataworkerInstance._executePoolRebalanceLeaves( + spokePoolClients, + leaves, + balanceAllocator, + buildPoolRebalanceLeafTree(leaves), + true + ); + expect(result).to.equal(2); + expect(await balanceAllocator.getUsed(hubPoolClient.chainId, token1, hubPoolClient.hubPool.address)).to.equal( + toBNWei("2") + ); + }); + it("Adds virtual balance to SpokePool for ethereum leaves", async function () { + const leaves: PoolRebalanceLeaf[] = [ + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("1")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [token1], + }, + ]; + const result = await dataworkerInstance._executePoolRebalanceLeaves( + spokePoolClients, + leaves, + balanceAllocator, + buildPoolRebalanceLeafTree(leaves), + true + ); + expect(result).to.equal(1); + expect( + await balanceAllocator.getUsed( + hubPoolClient.chainId, + token1, + spokePoolClients[hubPoolClient.chainId].spokePool.address + ) + ).to.equal(toBNWei("-1")); + }); + it("funds arbitrum leaf", async function () { + // Adds one fee per net send amount + one extra if groupIndex = 0 + const leaves: PoolRebalanceLeaf[] = [ + { + chainId: 42161, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + { + chainId: 42161, + groupIndex: 1, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ]; + // Should have a total of 2 + 1 + 2 = 5 fees. + const { amountWei, amountMultipleToFund } = ARBITRUM_ORBIT_L1L2_MESSAGE_FEE_DATA[CHAIN_IDs.ARBITRUM]; + const expectedFee = toBNWei(amountWei).mul(amountMultipleToFund); + const expectedFeeLeaf1 = expectedFee.mul(2).add(expectedFee); + const expectedFeeLeaf2 = expectedFee.mul(2); + const result = await dataworkerInstance._executePoolRebalanceLeaves( + spokePoolClients, + leaves, + balanceAllocator, + buildPoolRebalanceLeafTree(leaves), + true + ); + expect(result).to.equal(2); + + // Should submit two transactions to load ETH for each leaf plus pool rebalance leaf execution. 
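+ // (One loadEthForL2Calls transaction per leaf plus one executeRootBundle per leaf: 2 + 2 = 4.)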
+ expect(multiCallerClient.transactionCount()).to.equal(4); + const queuedTransactions = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(queuedTransactions[0].method).to.equal("loadEthForL2Calls"); + expect(queuedTransactions[0].value).to.equal(expectedFeeLeaf1); + expect(queuedTransactions[1].method).to.equal("loadEthForL2Calls"); + expect(queuedTransactions[1].value).to.equal(expectedFeeLeaf2); + expect(queuedTransactions[2].method).to.equal("executeRootBundle"); + expect(queuedTransactions[3].method).to.equal("executeRootBundle"); + }); + it("funds custom gas token orbit leaf", async function () { + // Replicate custom gas token setups: + const azero = await smock.fake(ERC20.abi, { + address: TOKEN_SYMBOLS_MAP.AZERO.addresses[CHAIN_IDs.MAINNET], + provider: hubPoolClient.hubPool.signer.provider, + }); + // Custom gas token funder for AZERO + const { amountWei, amountMultipleToFund, feePayer } = ARBITRUM_ORBIT_L1L2_MESSAGE_FEE_DATA[CHAIN_IDs.ALEPH_ZERO]; + assert(feePayer !== undefined); + const customGasTokenFunder = feePayer; + azero.balanceOf.whenCalledWith(customGasTokenFunder).returns(0); + expect(await balanceAllocator.getBalance(hubPoolClient.chainId, azero.address, customGasTokenFunder)).to.equal(0); + + // Adds one fee per net send amount + one extra if groupIndex = 0 + const leaves: PoolRebalanceLeaf[] = [ + { + chainId: 41455, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + { + chainId: 41455, + groupIndex: 1, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ]; + // Should have a total of 2 + 1 + 2 = 5 fees. + const expectedFee = toBNWei(amountWei).mul(amountMultipleToFund); + const expectedFeeLeaf1 = expectedFee.mul(2).add(expectedFee); + const expectedFeeLeaf2 = expectedFee.mul(2); + azero.balanceOf + .whenCalledWith(await hubPoolClient.hubPool.signer.getAddress()) + .returns(expectedFeeLeaf1.add(expectedFeeLeaf2)); + const result = await dataworkerInstance._executePoolRebalanceLeaves( + spokePoolClients, + leaves, + balanceAllocator, + buildPoolRebalanceLeafTree(leaves), + true + ); + expect(result).to.equal(2); + + // Should submit two transactions to load ETH for each leaf plus pool rebalance leaf execution. + expect(multiCallerClient.transactionCount()).to.equal(4); + const queuedTransactions = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(queuedTransactions[0].method).to.equal("transfer"); + expect(queuedTransactions[0].args).to.deep.equal([customGasTokenFunder, expectedFeeLeaf1]); + expect(queuedTransactions[1].method).to.equal("transfer"); + expect(queuedTransactions[1].args).to.deep.equal([customGasTokenFunder, expectedFeeLeaf2]); + expect(queuedTransactions[2].method).to.equal("executeRootBundle"); + expect(queuedTransactions[3].method).to.equal("executeRootBundle"); + }); + it("fails to fund custom gas token orbit leaf", async function () { + // Replicate custom gas token setups, but this time do not set a balance for the custom gas token funder. 
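+ // With nothing to draw from, funding the leaf should fail and execution should be skipped with an error log.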
+ const azero = await smock.fake(ERC20.abi, { + address: TOKEN_SYMBOLS_MAP.AZERO.addresses[CHAIN_IDs.MAINNET], + provider: hubPoolClient.hubPool.signer.provider, + }); + // Custom gas token funder for AZERO + const customGasTokenFunder = "0x0d57392895Db5aF3280e9223323e20F3951E81B1"; + azero.balanceOf.whenCalledWith(customGasTokenFunder).returns(0); + expect(await balanceAllocator.getBalance(hubPoolClient.chainId, azero.address, customGasTokenFunder)).to.equal(0); + + // Adds one fee per net send amount + one extra if groupIndex = 0 + const leaves: PoolRebalanceLeaf[] = [ + { + chainId: 41455, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ]; + // Should throw an error if caller doesn't have enough custom gas token to fund + // DonationBox. + const result = await dataworkerInstance._executePoolRebalanceLeaves( + spokePoolClients, + leaves, + balanceAllocator, + buildPoolRebalanceLeafTree(leaves), + true + ); + expect(result).to.equal(0); + expect(lastSpyLogLevel(spy)).to.equal("error"); + expect(lastSpyLogIncludes(spy, "Failed to fund")).to.be.true; + }); + it("Ignores leaves without sufficient reserves to execute", async function () { + // Should only be able to execute the first leaf + balanceAllocator.testSetBalance(hubPoolClient.chainId, token1, hubPoolClient.hubPool.address, toBNWei("1")); + + const leaves: PoolRebalanceLeaf[] = [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("1")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [token1], + }, + { + chainId: 137, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("1")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [token1], + }, + ]; + const result = await dataworkerInstance._executePoolRebalanceLeaves( + spokePoolClients, + leaves, + balanceAllocator, + buildPoolRebalanceLeafTree(leaves), + true + ); + expect(result).to.equal(1); + }); + }); + describe("_getExecutablePoolRebalanceLeaves", function () { + let token1: string, token2: string, balanceAllocator: BalanceAllocator; + beforeEach(function () { + token1 = randomAddress(); + token2 = randomAddress(); + balanceAllocator = getNewBalanceAllocator(); + }); + it("All l1 tokens on single leaf are executable", async function () { + balanceAllocator.testSetBalance(hubPoolClient.chainId, token1, hubPoolClient.hubPool.address, toBNWei("1")); + balanceAllocator.testSetBalance(hubPoolClient.chainId, token2, hubPoolClient.hubPool.address, toBNWei("1")); + const leaves = await dataworkerInstance._getExecutablePoolRebalanceLeaves( + [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ], + balanceAllocator + ); + expect(leaves.length).to.equal(1); + }); + it("Some l1 tokens on single leaf are not executable", async function () { + // Not enough to cover one net send amounts of 1 + balanceAllocator.testSetBalance(hubPoolClient.chainId, token1, hubPoolClient.hubPool.address, toBNWei("0")); + balanceAllocator.testSetBalance(hubPoolClient.chainId, token2, hubPoolClient.hubPool.address, toBNWei("1")); + const leaves = await dataworkerInstance._getExecutablePoolRebalanceLeaves( + [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + 
netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ], + balanceAllocator + ); + expect(leaves.length).to.equal(0); + const errorLogs = spy.getCalls().filter((call) => call.lastArg.level === "error"); + expect(errorLogs.length).to.equal(1); + expect(errorLogs[0].lastArg.message).to.contain("Not enough funds to execute"); + }); + it("All l1 tokens on multiple leaves are executable", async function () { + // Covers 2 leaves each with one net send amount of 1 + balanceAllocator.testSetBalance(hubPoolClient.chainId, token1, hubPoolClient.hubPool.address, toBNWei("2")); + balanceAllocator.testSetBalance(hubPoolClient.chainId, token2, hubPoolClient.hubPool.address, toBNWei("2")); + const leaves = await dataworkerInstance._getExecutablePoolRebalanceLeaves( + [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + { + chainId: 42161, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ], + balanceAllocator + ); + expect(leaves.length).to.equal(2); + }); + it("Some l1 tokens are not executable after first leaf is executed", async function () { + // 1 only covers the first leaf + balanceAllocator.testSetBalance(hubPoolClient.chainId, token1, hubPoolClient.hubPool.address, toBNWei("1")); + balanceAllocator.testSetBalance(hubPoolClient.chainId, token2, hubPoolClient.hubPool.address, toBNWei("2")); + + const leaves = await dataworkerInstance._getExecutablePoolRebalanceLeaves( + [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + { + chainId: 42161, + groupIndex: 0, + bundleLpFees: [toBNWei("1"), toBNWei("1")], + netSendAmounts: [toBNWei("1"), toBNWei("1")], + runningBalances: [toBNWei("1"), toBNWei("1")], + leafId: 0, + l1Tokens: [token1, token2], + }, + ], + balanceAllocator + ); + expect(leaves.length).to.equal(1); + expect(leaves[0].chainId).to.equal(10); + const errorLogs = spy.getCalls().filter((call) => call.lastArg.level === "error"); + expect(errorLogs.length).to.equal(1); + }); + }); +}); diff --git a/test/Dataworker.executePoolRebalances.ts b/test/Dataworker.executePoolRebalances.ts index 5a1472e0c..b6ffad369 100644 --- a/test/Dataworker.executePoolRebalances.ts +++ b/test/Dataworker.executePoolRebalances.ts @@ -1,5 +1,13 @@ -import { HubPoolClient, MultiCallerClient, SpokePoolClient } from "../src/clients"; -import { bnZero, getCurrentTime, MAX_UINT_VAL, toBNWei } from "../src/utils"; +import { ConfigStoreClient, HubPoolClient, MultiCallerClient, SpokePoolClient } from "../src/clients"; +import { + BaseContract, + bnZero, + getCurrentTime, + MAX_UINT_VAL, + MerkleTree, + RelayerRefundLeaf, + toBNWei, +} from "../src/utils"; import { MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, @@ -15,10 +23,10 @@ import { ethers, expect, fillV3, - lastSpyLogLevel, smock, sinon, lastSpyLogIncludes, + randomAddress, } from "./utils"; // Tested @@ -26,12 +34,13 @@ import { BalanceAllocator } from "../src/clients/BalanceAllocator"; import { spokePoolClientsToProviders } from 
"../src/common"; import { Dataworker } from "../src/dataworker/Dataworker"; import { MockHubPoolClient } from "./mocks/MockHubPoolClient"; +import { PoolRebalanceLeaf, SlowFillLeaf } from "../src/interfaces"; // Set to arbitrum to test that the dataworker sends ETH to the HubPool to test L1 --> Arbitrum message transfers. const destinationChainId = 42161; let spokePool_1: Contract, erc20_1: Contract, spokePool_2: Contract, erc20_2: Contract; -let l1Token_1: Contract, hubPool: Contract; +let l1Token_1: Contract, hubPool: Contract, spokePool_4: Contract; let depositor: SignerWithAddress, spy: sinon.SinonSpy; let hubPoolClient: HubPoolClient; @@ -41,10 +50,43 @@ let spokePoolClients: { [chainId: number]: SpokePoolClient }; let updateAllClients: () => Promise; describe("Dataworker: Execute pool rebalances", async function () { + function getNewBalanceAllocator(): BalanceAllocator { + const providers = { + ...spokePoolClientsToProviders(spokePoolClients), + [hubPoolClient.chainId]: hubPool.provider, + }; + return new BalanceAllocator(providers); + } + async function createMockHubPoolClient(): Promise<{ + mockHubPoolClient: MockHubPoolClient; + fakeHubPool: FakeContract; + }> { + const fakeHubPool = await smock.fake(hubPool.interface, { address: hubPool.address }); + const mockHubPoolClient = new MockHubPoolClient( + hubPoolClient.logger, + fakeHubPool as unknown as Contract, + hubPoolClient.configStoreClient as unknown as ConfigStoreClient + ); + mockHubPoolClient.chainId = hubPoolClient.chainId; + mockHubPoolClient.setTokenInfoToReturn({ address: l1Token_1.address, decimals: 18, symbol: "TEST" }); + mockHubPoolClient.setTokenMapping(l1Token_1.address, hubPoolClient.chainId, l1Token_1.address); + + // Sub in a dummy root bundle proposal for use in HubPoolClient update. 
+ const zero = "0x0000000000000000000000000000000000000000000000000000000000000000"; + fakeHubPool.multicall.returns([ + hubPool.interface.encodeFunctionResult("getCurrentTime", [getCurrentTime().toString()]), + hubPool.interface.encodeFunctionResult("rootBundleProposal", [zero, zero, zero, 0, ZERO_ADDRESS, 0, 0]), + ]); + return { + mockHubPoolClient, + fakeHubPool, + }; + } beforeEach(async function () { ({ hubPool, spokePool_1, + spokePool_4, erc20_1, erc20_2, spokePool_2, @@ -81,10 +123,10 @@ describe("Dataworker: Execute pool rebalances", async function () { await fillV3(spokePool_2, depositor, deposit, destinationChainId); await updateAllClients(); - const providers = { - ...spokePoolClientsToProviders(spokePoolClients), - [hubPoolClient.chainId]: hubPool.provider, - }; + // Executing leaves before there is a bundle should do nothing: + let leafCount = await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, getNewBalanceAllocator()); + expect(leafCount).to.equal(0); + expect(lastSpyLogIncludes(spy, "No pending proposal")).to.be.true; await dataworkerInstance.proposeRootBundle(spokePoolClients); @@ -92,13 +134,16 @@ describe("Dataworker: Execute pool rebalances", async function () { await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); await multiCallerClient.executeTxnQueues(); + // Executing leaves before bundle challenge period has passed should do nothing: + await updateAllClients(); + leafCount = await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, getNewBalanceAllocator()); + expect(leafCount).to.equal(0); + expect(lastSpyLogIncludes(spy, "Challenge period not passed")).to.be.true; + // Advance time and execute leaves: await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); - let leafCount = await dataworkerInstance.executePoolRebalanceLeaves( - spokePoolClients, - new BalanceAllocator(providers) - ); + leafCount = await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, getNewBalanceAllocator()); expect(leafCount).to.equal(2); // Should be 4 transactions: 1 for the to chain, 1 for the from chain, 1 for the extra ETH sent to cover @@ -107,6 +152,11 @@ describe("Dataworker: Execute pool rebalances", async function () { expect(multiCallerClient.transactionCount()).to.equal(4); await multiCallerClient.executeTxnQueues(); + // If we attempt execution again, the hub pool client should show them as already executed. + await updateAllClients(); + leafCount = await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, getNewBalanceAllocator()); + expect(leafCount).to.equal(0); + // TEST 3: // Submit another root bundle proposal and check bundle block range. There should be no leaves in the new range // yet. 
In the bundle block range, all chains should have increased their start block, including those without @@ -118,396 +168,497 @@ describe("Dataworker: Execute pool rebalances", async function () { // Advance time and execute leaves: await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); - leafCount = await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, new BalanceAllocator(providers)); + leafCount = await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, getNewBalanceAllocator()); expect(leafCount).to.equal(0); expect(multiCallerClient.transactionCount()).to.equal(0); }); - describe("update exchange rates", function () { - let mockHubPoolClient: MockHubPoolClient, fakeHubPool: FakeContract; + it("Executes mainnet leaves before non-mainnet leaves", async function () { + // Send deposit on SpokePool with same chain ID as hub chain. + // Fill it on a different spoke pool. + await updateAllClients(); + + // Mainnet deposit should produce a mainnet pool leaf. + const deposit = await depositV3( + spokePool_4, + destinationChainId, + depositor, + l1Token_1.address, + amountToDeposit, + erc20_2.address, + amountToDeposit + ); + await updateAllClients(); + // Fill and take repayment on a non-mainnet spoke pool. + await fillV3(spokePool_2, depositor, deposit, destinationChainId); + await updateAllClients(); + + const balanceAllocator = getNewBalanceAllocator(); + await dataworkerInstance.proposeRootBundle(spokePoolClients); + + // Execute queue and check that root bundle is pending: + await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); + await multiCallerClient.executeTxnQueues(); + + // Advance time and execute leaves: + await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); + await updateAllClients(); + const leafCount = await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, balanceAllocator); + expect(leafCount).to.equal(2); + + const leafExecutions = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId).map((tx, index) => { + return { + ...tx, + index, + }; + }); + const poolLeafExecutions = leafExecutions.filter((tx) => tx.method === "executeRootBundle"); + expect(poolLeafExecutions[0].args[0]).to.equal(hubPoolClient.chainId); + const refundLeafExecutions = leafExecutions.filter((tx) => tx.method === "executeRelayerRefundLeaf"); + expect(refundLeafExecutions.length).to.equal(1); + + // Hub chain relayer refund leaves should also execute before non-mainnet pool leaves + expect(refundLeafExecutions[0].index).to.be.greaterThan(poolLeafExecutions[0].index); + expect(refundLeafExecutions[0].index).to.be.lessThan(poolLeafExecutions[1].index); + expect(poolLeafExecutions[1].args[0]).to.equal(destinationChainId); + }); + describe("_executePoolLeavesAndSyncL1Tokens", function () { + let mockHubPoolClient: MockHubPoolClient, balanceAllocator: BalanceAllocator; beforeEach(async function () { - fakeHubPool = await smock.fake(hubPool.interface, { address: hubPool.address }); - mockHubPoolClient = new MockHubPoolClient(hubPoolClient.logger, fakeHubPool, hubPoolClient.configStoreClient); - mockHubPoolClient.setTokenInfoToReturn({ address: l1Token_1.address, decimals: 18, symbol: "TEST" }); + ({ mockHubPoolClient } = await createMockHubPoolClient()); dataworkerInstance.clients.hubPoolClient = mockHubPoolClient; - // Sub in a dummy root bundle proposal for use in HubPoolClient update. 
- const zero = "0x0000000000000000000000000000000000000000000000000000000000000000";
- fakeHubPool.multicall.returns([
- hubPool.interface.encodeFunctionResult("getCurrentTime", [getCurrentTime().toString()]),
- hubPool.interface.encodeFunctionResult("rootBundleProposal", [zero, zero, zero, 0, ZERO_ADDRESS, 0, 0]),
+ // Make sure post-sync reserves are greater than the net send amount.
+ balanceAllocator = getNewBalanceAllocator();
+ });
+ it("Should not double update an LP token", async function () {
+ // In this test, the HubPool client returns the liquid reserves as 0 for a token.
+
+ // So, executing the ethereum leaves results in an exchangeRate() update call.
+
+ // The subsequent call to execute non-ethereum leaves should not result in an extra exchange rate call
+ // if a sync was already included.
+
+ // Set LP reserves to 0 for the token.
+ mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, bnZero);
+
+ // Make sure post-sync reserves are greater than the net send amount.
+ balanceAllocator.testSetBalance(hubPoolClient.chainId, l1Token_1.address, hubPool.address, toBNWei("2"));
+
+ const poolRebalanceLeaves: PoolRebalanceLeaf[] = [
+ {
+ chainId: hubPoolClient.chainId,
+ groupIndex: 0,
+ bundleLpFees: [toBNWei("1")],
+ netSendAmounts: [toBNWei("1")],
+ runningBalances: [toBNWei("1")],
+ leafId: 0,
+ l1Tokens: [l1Token_1.address],
+ },
+ {
+ chainId: 10,
+ groupIndex: 0,
+ bundleLpFees: [toBNWei("1")],
+ netSendAmounts: [toBNWei("1")],
+ runningBalances: [toBNWei("1")],
+ leafId: 0,
+ l1Tokens: [l1Token_1.address],
+ },
+ ];
+
+ const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens(
+ spokePoolClients,
+ balanceAllocator,
+ poolRebalanceLeaves,
+ new MerkleTree(poolRebalanceLeaves, () => "test"),
+ [],
+ new MerkleTree([], () => "test"),
+ [],
+ new MerkleTree([], () => "test"),
+ true
+ );
+ expect(leafCount).to.equal(2);
+
+ // Should sync LP token for first leaf execution, but not for the second. This tests that latestLiquidReserves
+ // are passed correctly into _updateExchangeRatesBeforeExecutingNonHubChainLeaves so that currentReserves
+ // don't get set to the HubPool.pooledTokens.liquidReserves value. If this was done incorrectly then I would
+ // expect a second exchangeRateCurrent call before the second executeRootBundle call.
+ const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId);
+ expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal([
+ "exchangeRateCurrent",
+ "executeRootBundle",
+ "executeRootBundle",
+ ]);
+ });
+ it("Executing hub chain pool leaves should decrement available liquid reserves for subsequent executions", async function () {
+ // In this test, the HubPool client returns the liquid reserves as sufficient for
+ // executing Hub chain leaves for a token.
+
+ // The subsequent call to execute non-ethereum leaves should force an LP token update
+ // before executing the non hub chain leaves.
+
+ const netSendAmount = toBNWei("1");
+ const liquidReserves = toBNWei("1");
+ mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves);
+
+ // Make sure post-sync reserves are >= the net send amount.
+ const postUpdateLiquidReserves = toBNWei("2"); + balanceAllocator.testSetBalance( + hubPoolClient.chainId, + l1Token_1.address, + hubPool.address, + postUpdateLiquidReserves + ); + + const poolRebalanceLeaves: PoolRebalanceLeaf[] = [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [netSendAmount], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [netSendAmount], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + ]; + + const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens( + spokePoolClients, + balanceAllocator, + poolRebalanceLeaves, + new MerkleTree(poolRebalanceLeaves, () => "test"), + [], + new MerkleTree([], () => "test"), + [], + new MerkleTree([], () => "test"), + true + ); + expect(leafCount).to.equal(2); + + // The order should be: executeRootBundle, exchangeRateCurrent, execute + const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal([ + "executeRootBundle", + "exchangeRateCurrent", + "executeRootBundle", ]); - - await updateAllClients(); }); - describe("_updateExchangeRatesBeforeExecutingHubChainLeaves", function () { - it("exits early if net send amount is negative", async function () { - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( - { netSendAmounts: [toBNWei(-1)], l1Tokens: [l1Token_1.address] }, - true - ); - expect(Object.keys(updated)).to.have.length(0); - expect(multiCallerClient.transactionCount()).to.equal(0); - }); - it("exits early if current reserves are sufficient to pay for net send amounts", async function () { - const netSendAmount = toBNWei("1"); - - fakeHubPool.multicall.returns([ - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - netSendAmount, // liquid reserves - bnZero, // unaccumulated fees - ]), - ZERO_ADDRESS, // sync output - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - bnZero, // liquid reserves, post update. Doesn't matter for this test - // because we should be early exiting if current liquid reserves are sufficient. 
- bnZero, // unaccumulated fees - ]), - ]); - - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( - { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address] }, - true - ); - expect(Object.keys(updated)).to.have.length(0); - expect(multiCallerClient.transactionCount()).to.equal(0); - }); - it("logs error if updated liquid reserves aren't enough to execute leaf", async function () { - const netSendAmount = toBNWei("1"); - - fakeHubPool.multicall.returns([ - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - bnZero, // liquid reserves, set less than netSendAmount - bnZero, // unaccumulated fees - ]), - ZERO_ADDRESS, // sync output - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - bnZero, // liquid reserves, still less than net send amount - bnZero, // unaccumulated fees - ]), - ]); - - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( - { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address] }, - true - ); - expect(Object.keys(updated)).to.have.length(0); - expect(lastSpyLogLevel(spy)).to.equal("error"); - expect(lastSpyLogIncludes(spy, "Not enough funds to execute")).to.be.true; - expect(multiCallerClient.transactionCount()).to.equal(0); - }); - it("submits update", async function () { - const netSendAmount = toBNWei("1"); - const updatedLiquidReserves = netSendAmount.add(1); - - fakeHubPool.multicall.returns([ - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - bnZero, // liquid reserves, set less than netSendAmount - bnZero, // unaccumulated fees - ]), - ZERO_ADDRESS, // sync output - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - updatedLiquidReserves, // liquid reserves, >= than netSendAmount - bnZero, // unaccumulated fees - ]), - ]); - - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingHubChainLeaves( - { netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address] }, - true - ); - expect(Object.keys(updated)).to.have.length(1); - expect(updated[l1Token_1.address]).to.equal(updatedLiquidReserves); - expect(multiCallerClient.transactionCount()).to.equal(1); - }); + it("Executing hub chain refund leaves should increment available liquid reserves for subsequent executions", async function () { + // In this test, the refund leaf returns reserves to the hub chain, which gives enough post-sync liquid + // reserves to execute the non hub chain leaf. 
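+ // Expected ordering below: the hub chain pool leaf, then the hub chain refund leaf that returns funds,
+ // then an exchangeRateCurrent sync, then the chain 10 pool leaf.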
+ + const netSendAmount = toBNWei("1"); + const liquidReserves = toBNWei("0"); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + + const poolRebalanceLeaves: PoolRebalanceLeaf[] = [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [netSendAmount], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("0")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + ]; + + // Need to set a balance for the spoke pool to make the dataworker believe this leaf can be executed. + balanceAllocator.testSetBalance( + hubPoolClient.chainId, + l1Token_1.address, + spokePoolClients[hubPoolClient.chainId].spokePool.address, + netSendAmount + ); + + const relayerRefundLeaves: RelayerRefundLeaf[] = [ + { + chainId: hubPoolClient.chainId, + l2TokenAddress: l1Token_1.address, + amountToReturn: netSendAmount, + refundAddresses: [], + refundAmounts: [], + leafId: 0, + }, + ]; + + const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens( + spokePoolClients, + balanceAllocator, + poolRebalanceLeaves, + new MerkleTree(poolRebalanceLeaves, () => "test"), + relayerRefundLeaves, + new MerkleTree(relayerRefundLeaves, () => "test"), + [], + new MerkleTree([], () => "test"), + true + ); + expect(leafCount).to.equal(2); + + // Execute mainnet refund leaf after mainnet pool leaf. Then update exchange rates to execute non-mainnet pool leaf. + const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal([ + "executeRootBundle", + "executeRelayerRefundLeaf", + "exchangeRateCurrent", + "executeRootBundle", + ]); }); - describe("_updateExchangeRatesBeforeExecutingNonHubChainLeaves", function () { - let balanceAllocator; - beforeEach(async function () { - const providers = { - ...spokePoolClientsToProviders(spokePoolClients), - [hubPoolClient.chainId]: hubPool.provider, - }; - balanceAllocator = new BalanceAllocator(providers); - }); - it("exits early if net send amount is negative", async function () { - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - {}, - balanceAllocator, - [{ netSendAmounts: [toBNWei(-1)], l1Tokens: [l1Token_1.address], chainId: 1 }], - true - ); - expect(updated.size).to.equal(0); - expect(multiCallerClient.transactionCount()).to.equal(0); - }); - it("exits early if current liquid reserves are greater than net send amount", async function () { - const netSendAmount = toBNWei("1"); - const liquidReserves = toBNWei("2"); - // For this test, do not pass in a liquid reserves object and force dataworker to load - // from HubPoolClient - mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - {}, - balanceAllocator, - [{ netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 1 }], - true - ); - expect(updated.size).to.equal(0); - expect(multiCallerClient.transactionCount()).to.equal(0); - }); - it("exits early if passed in liquid reserves are greater than net send amount", async function () { - const netSendAmount = toBNWei("1"); - const liquidReserves = toBNWei("2"); - // For this test, pass in a liquid reserves object - const updated = await 
dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - { - [l1Token_1.address]: liquidReserves, - }, - balanceAllocator, - [{ netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 1 }], - true - ); - expect(updated.size).to.equal(0); - expect(multiCallerClient.transactionCount()).to.equal(0); - }); - it("logs error if updated liquid reserves aren't enough to execute leaf", async function () { - const netSendAmount = toBNWei("1"); - const liquidReserves = toBNWei("0"); - mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); - balanceAllocator.addUsed(hubPoolClient.chainId, l1Token_1.address, hubPool.address, toBNWei(0)); - - // Even after simulating sync, there are not enough liquid reserves. - fakeHubPool.multicall.returns([ - ZERO_ADDRESS, // sync output - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - liquidReserves, // liquid reserves, >= than netSendAmount - bnZero, // unaccumulated fees - ]), - ]); - - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - {}, - balanceAllocator, - [{ netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 1 }], - true - ); - expect(lastSpyLogLevel(spy)).to.equal("error"); - expect(lastSpyLogIncludes(spy, "will fail due to lack of funds to send")).to.be.true; - expect(updated.size).to.equal(0); - expect(multiCallerClient.transactionCount()).to.equal(0); - }); - it("submits update: liquid reserves post-sync are enough to execute leaf", async function () { - const netSendAmount = toBNWei("10"); - const liquidReserves = toBNWei("1"); - mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); - balanceAllocator.addUsed(hubPoolClient.chainId, l1Token_1.address, hubPool.address, toBNWei(1)); - - // At this point, passed in liquid reserves will be 1 and the balance allocator will add 1. - // This won't be enough. However, we should test that the dataworker simulates sync-ing the exchange - // rate and sees that the liquid reserves post-sync are enough to execute the leaf. - fakeHubPool.multicall.returns([ - ZERO_ADDRESS, // sync output - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - netSendAmount, // liquid reserves, >= than netSendAmount - bnZero, // unaccumulated fees - ]), - ]); - - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - { - [l1Token_1.address]: liquidReserves, - }, - balanceAllocator, - [{ netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 1 }], - true - ); - expect(updated.size).to.equal(1); - expect(updated.has(l1Token_1.address)).to.be.true; - expect(multiCallerClient.transactionCount()).to.equal(1); - }); - it("submits update: liquid reserves plus balanceAllocator.used are sufficient", async function () { - const netSendAmount = toBNWei("1"); - - // Liquid reserves are read from HubPoolClient. - // Liquid reserves are below net send amount, but virtual balance is above net send amount. 
- const liquidReserves = toBNWei("0"); - mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); - balanceAllocator.addUsed(1, l1Token_1.address, hubPool.address, netSendAmount.mul(-1)); - fakeHubPool.multicall.returns([ - ZERO_ADDRESS, // sync output - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - liquidReserves, // liquid reserves, >= than netSendAmount - bnZero, // unaccumulated fees - ]), - ]); - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - {}, - balanceAllocator, - [{ netSendAmounts: [netSendAmount], l1Tokens: [l1Token_1.address], chainId: 1 }], - true - ); - expect(updated.size).to.equal(1); - expect(updated.has(l1Token_1.address)).to.be.true; - expect(multiCallerClient.transactionCount()).to.equal(1); - }); - it("Skips duplicate L1 tokens", async function () { - const netSendAmount = toBNWei("1"); - - // Liquid reserves are passed as input. - // Liquid reserves are below net send amount, but virtual balance is above net send amount. - const liquidReserves = toBNWei("0"); - balanceAllocator.addUsed(1, l1Token_1.address, hubPool.address, netSendAmount.mul(-1)); - fakeHubPool.multicall.returns([ - ZERO_ADDRESS, // sync output - hubPool.interface.encodeFunctionResult("pooledTokens", [ - ZERO_ADDRESS, // lp token address - true, // enabled - 0, // last lp fee update - bnZero, // utilized reserves - netSendAmount, // liquid reserves, >= than netSendAmount - bnZero, // unaccumulated fees - ]), - ]); - const updated = await dataworkerInstance._updateExchangeRatesBeforeExecutingNonHubChainLeaves( - { - [l1Token_1.address]: liquidReserves, + it("Executes mainnet slow fill leaves", async function () { + // In this test, we verify slow fill leaves are executed after mainnet pool leaves. + + const slowFillAmount = toBNWei("1"); + const liquidReserves = toBNWei("0"); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + + const poolRebalanceLeaves: PoolRebalanceLeaf[] = [ + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("0")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + ]; + + // Need to set a balance for the spoke pool to make the dataworker believe this leaf can be executed. 
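+      // (Slow fill leaves are executed against the spoke pool, so it is the spoke pool's balance,
+      // not the hub pool's, that gates execution here.)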
+      balanceAllocator.testSetBalance(
+        hubPoolClient.chainId,
+        l1Token_1.address,
+        spokePoolClients[hubPoolClient.chainId].spokePool.address,
+        slowFillAmount
+      );
+      const slowFillLeaves: SlowFillLeaf[] = [
+        {
+          relayData: {
+            originChainId: 10,
+            depositor: randomAddress(),
+            recipient: randomAddress(),
+            depositId: 0,
+            inputToken: randomAddress(),
+            inputAmount: slowFillAmount,
+            outputToken: l1Token_1.address,
+            outputAmount: slowFillAmount,
+            message: "0x",
+            fillDeadline: 0,
+            exclusiveRelayer: randomAddress(),
+            exclusivityDeadline: 0,
+          },
+          chainId: hubPoolClient.chainId,
+          updatedOutputAmount: slowFillAmount,
+        },
+      ];
+
+      const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens(
+        spokePoolClients,
+        balanceAllocator,
+        poolRebalanceLeaves,
+        new MerkleTree(poolRebalanceLeaves, () => "test"),
+        [],
+        new MerkleTree([], () => "test"),
+        slowFillLeaves,
+        new MerkleTree(slowFillLeaves, () => "test"),
+        true
+      );
+      expect(leafCount).to.equal(1);
+
+      // Execute the mainnet slow fill leaf after the mainnet pool leaf.
+      const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId);
+      expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal(["executeRootBundle", "executeV3SlowRelayLeaf"]);
+    });
+    it("No mainnet leaves", async function () {
+      // In this test, check that if there are no mainnet leaves, then the dataworker should just execute
+      // non-mainnet leaves.
+      const netSendAmount = toBNWei("1");
+      const liquidReserves = toBNWei("1");
+      mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves);
+
+      const poolRebalanceLeaves: PoolRebalanceLeaf[] = [
+        {
+          chainId: 10,
+          groupIndex: 0,
+          bundleLpFees: [toBNWei("1")],
+          netSendAmounts: [netSendAmount],
+          runningBalances: [toBNWei("1")],
+          leafId: 0,
+          l1Tokens: [l1Token_1.address],
+        },
+      ];
+
+      const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens(
+        spokePoolClients,
+        balanceAllocator,
+        poolRebalanceLeaves,
+        new MerkleTree(poolRebalanceLeaves, () => "test"),
+        [],
+        new MerkleTree([], () => "test"),
+        [],
+        new MerkleTree([], () => "test"),
+        true
+      );
+      expect(leafCount).to.equal(1);
+
+      const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId);
+      expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal(["executeRootBundle"]);
+    });
+    it("Fails to execute mainnet leaf, still executes non-mainnet leaves", async function () {
+      // In this test, the hub pool leaf can't be funded using liquid reserves, but the
+      // dataworker should still try to execute the non-mainnet leaves.
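+      // (The hub chain leaf's netSendAmount of 10 exceeds the 1 unit of available liquid reserves,
+      // while the chain 10 leaf needs only 1, so a single executeRootBundle should be enqueued.)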
+ + const hubChainNetSendAmount = toBNWei("10"); + const nonHubChainNetSendAmount = toBNWei("1"); + const liquidReserves = toBNWei("1"); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + balanceAllocator.testSetBalance(hubPoolClient.chainId, l1Token_1.address, hubPool.address, liquidReserves); + + const poolRebalanceLeaves: PoolRebalanceLeaf[] = [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [nonHubChainNetSendAmount], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [hubChainNetSendAmount], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + ]; + + const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens( + spokePoolClients, + balanceAllocator, + poolRebalanceLeaves, + new MerkleTree(poolRebalanceLeaves, () => "test"), + [], + new MerkleTree([], () => "test"), + [], + new MerkleTree([], () => "test"), + true + ); + expect(leafCount).to.equal(1); + + const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal(["executeRootBundle"]); + }); + it("Fails to execute some non-mainnet leaves", async function () { + // In this test, there is a mainnet leaf that can be executed, but one of the non-mainnet leaves cannot + // be executed. + const netSendAmount = toBNWei("1"); + + // This liquid reserve is only sufficient to execute one of the non-mainnet leaves. + const liquidReserves = toBNWei("1"); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + balanceAllocator.testSetBalance(hubPoolClient.chainId, l1Token_1.address, hubPool.address, liquidReserves); + + const poolRebalanceLeaves: PoolRebalanceLeaf[] = [ + { + chainId: 10, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [netSendAmount], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + { + chainId: 137, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [netSendAmount], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("0")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + ]; + + const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens( + spokePoolClients, + balanceAllocator, + poolRebalanceLeaves, + new MerkleTree(poolRebalanceLeaves, () => "test"), + [], + new MerkleTree([], () => "test"), + [], + new MerkleTree([], () => "test"), + true + ); + expect(leafCount).to.equal(2); + + const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal(["executeRootBundle", "executeRootBundle"]); + + const errorLogs = spy.getCalls().filter((call) => call.lastArg.level === "error"); + expect(errorLogs.length).to.equal(1); + expect(errorLogs[0].lastArg.message).to.contain("Not enough funds to execute pool rebalance leaf for chain 137"); }); - describe("_updateOldExchangeRates", function () { - it("exits early if we recently synced l1 token", async function () { - mockHubPoolClient.currentTime = 10_000; - mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 10_000, toBNWei("0")); - await 
dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true);
-      expect(multiCallerClient.transactionCount()).to.equal(0);
-    });
-    it("exits early if liquid reserves wouldn't increase for token post-update", async function () {
-      // Last update was at time 0, current time is at 1_000_000, so definitely past the update threshold
-      mockHubPoolClient.currentTime = 1_000_000;
-      mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0);
-
-      // Hardcode multicall output such that it looks like liquid reserves stayed the same
-      fakeHubPool.multicall.returns([
-        hubPool.interface.encodeFunctionResult("pooledTokens", [
-          ZERO_ADDRESS, // lp token address
-          true, // enabled
-          0, // last lp fee update
-          bnZero, // utilized reserves
-          bnZero, // liquid reserves
-          bnZero, // unaccumulated fees
-        ]),
-        ZERO_ADDRESS, // sync output
-        hubPool.interface.encodeFunctionResult("pooledTokens", [
-          ZERO_ADDRESS, // lp token address
-          true, // enabled
-          0, // last lp fee update
-          bnZero, // utilized reserves
-          bnZero, // liquid reserves, equal to "current" reserves
-          bnZero, // unaccumulated fees
-        ]),
-      ]);
-
-      await dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true);
-      expect(multiCallerClient.transactionCount()).to.equal(0);
-
-      // Add test when liquid reserves decreases
-      fakeHubPool.multicall.returns([
-        hubPool.interface.encodeFunctionResult("pooledTokens", [
-          ZERO_ADDRESS, // lp token address
-          true, // enabled
-          0, // last lp fee update
-          bnZero, // utilized reserves
-          toBNWei(1), // liquid reserves
-          bnZero, // unaccumulated fees
-        ]),
-        ZERO_ADDRESS, // sync output
-        hubPool.interface.encodeFunctionResult("pooledTokens", [
-          ZERO_ADDRESS, // lp token address
-          true, // enabled
-          0, // last lp fee update
-          bnZero, // utilized reserves
-          toBNWei(1).sub(1), // liquid reserves, less than "current" reserves
-          bnZero, // unaccumulated fees
-        ]),
-      ]);
-
-      await dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true);
-      expect(multiCallerClient.transactionCount()).to.equal(0);
-    });
-    it("submits update if liquid reserves would increase for token post-update and last update was old enough", async function () {
-      // Last update was at time 0, current time is at 1_000_000, so definitely past the update threshold
-      mockHubPoolClient.currentTime = 1_000_000;
-      mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0);
-
-      // Hardcode multicall output such that it looks like liquid reserves increased
-      fakeHubPool.multicall.returns([
-        hubPool.interface.encodeFunctionResult("pooledTokens", [
-          ZERO_ADDRESS, // lp token address
-          true, // enabled
-          0, // last lp fee update
-          bnZero, // utilized reserves
-          toBNWei(1), // liquid reserves
-          bnZero, // unaccumulated fees
-        ]),
-        ZERO_ADDRESS,
-        hubPool.interface.encodeFunctionResult("pooledTokens", [
-          ZERO_ADDRESS, // lp token address
-          true, // enabled
-          0, // last lp fee update
-          bnZero, // utilized reserves
-          toBNWei(1).add(1), // liquid reserves, higher than "current" reserves
-          bnZero, // unaccumulated fees
-        ]),
-      ]);
-
-      await dataworkerInstance._updateOldExchangeRates([l1Token_1.address], true);
-      expect(multiCallerClient.transactionCount()).to.equal(1);
-    });
+    it("Only mainnet leaves", async function () {
+      // Should not throw if there are only mainnet leaves.
+ const liquidReserves = toBNWei("1"); + mockHubPoolClient.setLpTokenInfo(l1Token_1.address, 0, liquidReserves); + + const poolRebalanceLeaves: PoolRebalanceLeaf[] = [ + { + chainId: hubPoolClient.chainId, + groupIndex: 0, + bundleLpFees: [toBNWei("1")], + netSendAmounts: [toBNWei("0")], + runningBalances: [toBNWei("1")], + leafId: 0, + l1Tokens: [l1Token_1.address], + }, + ]; + + const leafCount = await dataworkerInstance._executePoolLeavesAndSyncL1Tokens( + spokePoolClients, + balanceAllocator, + poolRebalanceLeaves, + new MerkleTree(poolRebalanceLeaves, () => "test"), + [], + new MerkleTree([], () => "test"), + [], + new MerkleTree([], () => "test"), + true + ); + expect(leafCount).to.equal(1); + + const enqueuedTxns = multiCallerClient.getQueuedTransactions(hubPoolClient.chainId); + expect(enqueuedTxns.map((txn) => txn.method)).to.deep.equal(["executeRootBundle"]); }); }); }); diff --git a/test/Dataworker.executeRelayerRefunds.ts b/test/Dataworker.executeRelayerRefunds.ts index a25a5a367..a2939b9a5 100644 --- a/test/Dataworker.executeRelayerRefunds.ts +++ b/test/Dataworker.executeRelayerRefunds.ts @@ -1,5 +1,5 @@ import { BundleDataClient, HubPoolClient, MultiCallerClient, SpokePoolClient } from "../src/clients"; -import { MAX_UINT_VAL, toBN } from "../src/utils"; +import { buildRelayerRefundTree, MAX_UINT_VAL, RelayerRefundLeaf, toBN, toBNWei } from "../src/utils"; import { MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, @@ -16,7 +16,7 @@ import { spokePoolClientsToProviders } from "../src/common"; import { Dataworker } from "../src/dataworker/Dataworker"; let spokePool_1: Contract, erc20_1: Contract, spokePool_2: Contract, erc20_2: Contract; -let l1Token_1: Contract, hubPool: Contract, hubPoolClient: HubPoolClient; +let l1Token_1: Contract, hubPool: Contract, hubPoolClient: HubPoolClient, spokePool_4: Contract; let depositor: SignerWithAddress; let dataworkerInstance: Dataworker, multiCallerClient: MultiCallerClient; @@ -39,6 +39,7 @@ describe("Dataworker: Execute relayer refunds", async function () { spokePool_1, erc20_1, spokePool_2, + spokePool_4, erc20_2, l1Token_1, depositor, @@ -103,6 +104,35 @@ describe("Dataworker: Execute relayer refunds", async function () { await multiCallerClient.executeTxnQueues(); }); + it("Modifies BalanceAllocator when executing hub chain leaf", async function () { + const refundLeaves: RelayerRefundLeaf[] = [ + { + amountToReturn: toBNWei("1"), + chainId: hubPoolClient.chainId, + refundAmounts: [], + leafId: 0, + l2TokenAddress: l1Token_1.address, + refundAddresses: [], + }, + ]; + const relayerRefundTree = buildRelayerRefundTree(refundLeaves); + const balanceAllocator = await getNewBalanceAllocator(); + await spokePool_4.relayRootBundle( + relayerRefundTree.getHexRoot(), + "0x0000000000000000000000000000000000000000000000000000000000000000" + ); + await l1Token_1.mint(spokePool_4.address, amountToDeposit); + await updateAllClients(); + await dataworkerInstance._executeRelayerRefundLeaves( + refundLeaves, + balanceAllocator, + spokePoolClients[hubPoolClient.chainId], + relayerRefundTree, + true, + 0 + ); + expect(balanceAllocator.getUsed(hubPoolClient.chainId, l1Token_1.address, hubPool.address)).to.equal(toBNWei("-1")); + }); describe("Computing refunds for bundles", function () { let relayer: SignerWithAddress; let bundleDataClient: BundleDataClient; diff --git a/test/Dataworker.loadData.fill.ts b/test/Dataworker.loadData.fill.ts index d5e2a0a30..a78a704bc 100644 --- a/test/Dataworker.loadData.fill.ts +++ 
b/test/Dataworker.loadData.fill.ts @@ -18,6 +18,7 @@ import { expect, fillV3, getDefaultBlockRange, + getDisabledBlockRanges, randomAddress, sinon, smock, @@ -246,6 +247,28 @@ describe("Dataworker: Load data used in all functions", async function () { ).to.deep.equal(expiredDeposits.map((event) => event.args.depositId)); expect(data1.expiredDepositsToRefundV3[originChainId][erc20_1.address].length).to.equal(1); }); + + it("Ignores disabled chains", async function () { + const bundleBlockTimestamps = await dataworkerInstance.clients.bundleDataClient.getBundleBlockTimestamps( + [originChainId, destinationChainId], + getDefaultBlockRange(5), + spokePoolClients + ); + // Send unexpired deposit + generateV3Deposit(); + // Send expired deposit + generateV3Deposit({ fillDeadline: bundleBlockTimestamps[destinationChainId][1] - 1 }); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); + + // Returns no data if block range is undefined + const emptyData = await dataworkerInstance.clients.bundleDataClient.loadData( + getDisabledBlockRanges(), + spokePoolClients + ); + expect(emptyData.bundleDepositsV3).to.deep.equal({}); + expect(emptyData.expiredDepositsToRefundV3).to.deep.equal({}); + }); + it("Filters unexpired deposit out of block range", async function () { // Send deposit behind and after origin chain block range. Should not be included in bundleDeposits. // First generate mock deposit events with some block time between events. @@ -338,7 +361,29 @@ describe("Dataworker: Load data used in all functions", async function () { .div(fixedPointAdjustment), }); }); + it("Ignores disabled chains", async function () { + const depositV3Events: Event[] = []; + const fillV3Events: Event[] = []; + // Create three valid deposits + depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); + depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); + depositV3Events.push(generateV3Deposit({ outputToken: randomAddress() })); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); + const deposits = mockOriginSpokePoolClient.getDeposits(); + + // Fill deposits from different relayers + const relayer2 = randomAddress(); + fillV3Events.push(generateV3FillFromDeposit(deposits[0])); + fillV3Events.push(generateV3FillFromDeposit(deposits[1])); + fillV3Events.push(generateV3FillFromDeposit(deposits[2], {}, relayer2)); + await mockDestinationSpokePoolClient.update(["FilledV3Relay"]); + const emptyData = await dataworkerInstance.clients.bundleDataClient.loadData( + getDisabledBlockRanges(), + spokePoolClients + ); + expect(emptyData.bundleFillsV3).to.deep.equal({}); + }); it("Saves V3 fast fill under correct repayment chain and repayment token when dealing with lite chains", async function () { // Mock the config store client to include the lite chain index. mockConfigStore.updateGlobalConfig( diff --git a/test/Dataworker.loadData.slowFill.ts b/test/Dataworker.loadData.slowFill.ts index 926c9e873..39635325c 100644 --- a/test/Dataworker.loadData.slowFill.ts +++ b/test/Dataworker.loadData.slowFill.ts @@ -17,6 +17,7 @@ import { expect, fillV3, getDefaultBlockRange, + getDisabledBlockRanges, mineRandomBlocks, randomAddress, requestSlowFill, @@ -386,6 +387,27 @@ describe("BundleDataClient: Slow fill handling & validation", async function () ); }); + it("Ignores disabled chains", async function () { + // Only one deposit is eligible to be slow filled because its input and output tokens are equivalent. 
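+      // (The first deposit's random output token cannot be slow filled; only the second deposit,
+      // whose output token is the equivalent erc20_2, is a valid slow fill candidate.)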
+ generateV3Deposit({ outputToken: randomAddress() }); + generateV3Deposit({ outputToken: erc20_2.address }); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); + const deposits = mockOriginSpokePoolClient.getDeposits(); + + generateSlowFillRequestFromDeposit(deposits[0]); + generateSlowFillRequestFromDeposit(deposits[1]); + await mockDestinationSpokePoolClient.update(["RequestedV3SlowFill"]); + expect(mockDestinationSpokePoolClient.getSlowFillRequestsForOriginChain(originChainId).length).to.equal(2); + + const emptyData = await dataworkerInstance.clients.bundleDataClient.loadData( + getDisabledBlockRanges(), + spokePoolClients + ); + expect(emptyData.bundleDepositsV3).to.deep.equal({}); + expect(emptyData.expiredDepositsToRefundV3).to.deep.equal({}); + expect(emptyData.bundleSlowFillsV3).to.deep.equal({}); + }); + it("Slow fill requests cannot coincide with fill in same bundle", async function () { generateV3Deposit({ outputToken: erc20_2.address }); generateV3Deposit({ outputToken: erc20_2.address }); @@ -422,6 +444,24 @@ describe("BundleDataClient: Slow fill handling & validation", async function () expect(data1.unexecutableSlowFills).to.deep.equal({}); }); + it("Ignores disabled chains", async function () { + generateV3Deposit({ outputToken: erc20_2.address }); + await mockOriginSpokePoolClient.update(["V3FundsDeposited"]); + const deposits = mockOriginSpokePoolClient.getDeposits(); + + generateSlowFillRequestFromDeposit(deposits[0]); + generateV3FillFromDeposit(deposits[0], undefined, undefined, undefined, interfaces.FillType.ReplacedSlowFill); + await mockDestinationSpokePoolClient.update(["RequestedV3SlowFill", "FilledV3Relay"]); + + const emptyData = await dataworkerInstance.clients.bundleDataClient.loadData( + getDisabledBlockRanges(), + spokePoolClients + ); + expect(emptyData.unexecutableSlowFills).to.deep.equal({}); + expect(emptyData.bundleFillsV3).to.deep.equal({}); + expect(emptyData.bundleSlowFillsV3).to.deep.equal({}); + }); + it("Handles slow fill requests out of block range", async function () { generateV3Deposit({ outputToken: erc20_2.address }); generateV3Deposit({ outputToken: erc20_2.address }); diff --git a/test/Monitor.ts b/test/Monitor.ts index 26fed2661..ce7a2abf8 100644 --- a/test/Monitor.ts +++ b/test/Monitor.ts @@ -237,7 +237,7 @@ describe("Monitor", async function () { await monitorInstance.updateCurrentRelayerBalances(reports); expect(reports[depositor.address]["L1Token1"][ALL_CHAINS_NAME][BalanceType.CURRENT].toString()).to.be.equal( - "60000000000000000000000" + "75000000000000000000000" ); }); diff --git a/test/fixtures/Dataworker.Fixture.ts b/test/fixtures/Dataworker.Fixture.ts index 09b073e59..cba81c698 100644 --- a/test/fixtures/Dataworker.Fixture.ts +++ b/test/fixtures/Dataworker.Fixture.ts @@ -69,6 +69,7 @@ export async function setupDataworker( spokePool_1: Contract; erc20_1: Contract; spokePool_2: Contract; + spokePool_4: Contract; erc20_2: Contract; l1Token_1: Contract; l1Token_2: Contract; @@ -127,7 +128,7 @@ export async function setupDataworker( // Enable deposit routes for second L2 tokens so relays can be sent between spoke pool 1 <--> 2. 
await enableRoutes(spokePool_1, [{ originToken: erc20_2.address, destinationChainId: destinationChainId }]);
   await enableRoutes(spokePool_2, [{ originToken: erc20_1.address, destinationChainId: originChainId }]);
-
+  await enableRoutes(spokePool_4, [{ originToken: l1Token_1.address, destinationChainId: destinationChainId }]);
   // For each chain, enable routes to both erc20's so that we can fill relays
   await enableRoutesOnHubPool(hubPool, [
     { destinationChainId: originChainId, l1Token: l1Token_1, destinationToken: erc20_1 },
@@ -245,7 +246,7 @@
   // Give depositors the tokens they'll deposit into spoke pools:
   await setupTokensForWallet(spokePool_1, depositor, [erc20_1, erc20_2], undefined, 10);
   await setupTokensForWallet(spokePool_2, depositor, [erc20_2, erc20_1], undefined, 10);
-
+  await setupTokensForWallet(spokePool_4, depositor, [l1Token_1], undefined, 10);
   // Give relayers the tokens they'll need to relay on spoke pools:
   await setupTokensForWallet(spokePool_1, relayer, [erc20_1, erc20_2, l1Token_1, l1Token_2], undefined, 10);
   await setupTokensForWallet(spokePool_2, relayer, [erc20_1, erc20_2, l1Token_1, l1Token_2], undefined, 10);
@@ -254,12 +255,14 @@
   // "reasonable" block number based off the block time when looking at quote timestamps.
   await spokePool_1.setCurrentTime(await getLastBlockTime(spokePool_1.provider));
   await spokePool_2.setCurrentTime(await getLastBlockTime(spokePool_2.provider));
+  await spokePool_4.setCurrentTime(await getLastBlockTime(spokePool_4.provider));

   return {
     hubPool,
     spokePool_1,
     erc20_1,
     spokePool_2,
+    spokePool_4,
     erc20_2,
     l1Token_1,
     l1Token_2,
diff --git a/test/utils/utils.ts b/test/utils/utils.ts
index 409210a51..d07a63c90 100644
--- a/test/utils/utils.ts
+++ b/test/utils/utils.ts
@@ -481,6 +481,10 @@ export function getDefaultBlockRange(toBlockOffset: number): number[][] {
   return DEFAULT_BLOCK_RANGE_FOR_CHAIN.map((range) => [range[0], range[1] + toBlockOffset]);
 }

+export function getDisabledBlockRanges(): number[][] {
+  return DEFAULT_BLOCK_RANGE_FOR_CHAIN.map((range) => [range[0], range[0]]);
+}
+
 export function createRefunds(
   outputToken: string,
   refundAmount: BigNumber,

From 92775a0a2a6527c405ee065e1e9a2a1589f67812 Mon Sep 17 00:00:00 2001
From: nicholaspai <9457025+nicholaspai@users.noreply.github.com>
Date: Mon, 16 Dec 2024 20:09:15 -0500
Subject: [PATCH 40/44] improve(LineaFinalizer): Force RPC calls through custom provider (#1931)

* improve(LineaFinalizer): Force RPC calls through custom provider

The functions that we use in the Linea SDK make event queries from block 0
to "latest" by default, which means that some of our RPCs can't be used with
the Finalizer. Additionally, we're missing out on our custom provider retry
and caching logic.

This PR removes all instances of making RPC calls via the Linea SDK's
provider and replaces them with the same calls through our custom Provider.

The latest [Linea SDK](https://github.com/Consensys/linea-monorepo/blob/3fbe660683a318b6fa1b63ec518f948791536352/sdk/src/sdk/LineaSDK.ts#L56)
code looks like it'll support injecting custom providers, but it's not
released yet on NPM. So this code should be removed eventually once a later
SDK version is released.
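For illustration, the core of the workaround is to re-bind the SDK's contract
objects to our own provider and to query events over an explicit, bounded
block range. A minimal sketch using the helpers this PR adds to common.ts
(the search-config values are illustrative):

    // Detach the SDK contract from its hardcoded provider and attach our custom one,
    // which carries the retry and caching logic, then query with a bounded
    // EventSearchConfig instead of the SDK's default range of block 0 to "latest".
    const l2Contract = l1ClaimingService.l2Contract.contract.connect(l2Provider);
    const [messageEvent] = await paginatedEventQuery(
      l2Contract,
      l2Contract.filters.MessageSent(null, null, null, null, null, null, messageHash),
      { fromBlock, toBlock, maxBlockLookBack: 5_000 }
    );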
* Refactor * fix * fix * Update common.ts * Update l2ToL1.ts * Update l2ToL1.ts * Update arbStack.ts * Update arbStack.ts * Add fix for getFinalizationMessagingInfo * Floor arbStack calc * Use .connect(provider) syntax for brevity * Move all imports to single file * Update l1ToL2.ts * Infer lookback dynamically --- src/finalizer/utils/arbStack.ts | 4 + src/finalizer/utils/linea/common.ts | 108 ++++++++++-- src/finalizer/utils/linea/imports.ts | 23 +++ src/finalizer/utils/linea/l1ToL2.ts | 36 +++- src/finalizer/utils/linea/l2ToL1.ts | 238 +++++++++++++++++++++++---- 5 files changed, 353 insertions(+), 56 deletions(-) create mode 100644 src/finalizer/utils/linea/imports.ts diff --git a/src/finalizer/utils/arbStack.ts b/src/finalizer/utils/arbStack.ts index b38e34723..319334af4 100644 --- a/src/finalizer/utils/arbStack.ts +++ b/src/finalizer/utils/arbStack.ts @@ -110,6 +110,7 @@ export async function arbStackFinalizer( undefined, redis ); + const l2BlockTime = (await averageBlockTime(spokePoolClient.spokePool.provider)).average; logger.debug({ at: `Finalizer#${networkName}Finalizer`, message: `${networkName} TokensBridged event filter`, @@ -209,6 +210,9 @@ export async function arbStackFinalizer( logger.debug({ at: `Finalizer#${networkName}Finalizer`, message: `Withdrawal event for ${amountFromWei} of ${l1TokenInfo.symbol} is too recent to finalize`, + timeUntilFinalization: `${Math.floor( + ((event.blockNumber - latestBlockToFinalize) * l2BlockTime) / 60 / 60 + )}`, }); } } catch (err) { diff --git a/src/finalizer/utils/linea/common.ts b/src/finalizer/utils/linea/common.ts index 5de1ffe90..8c7cf4bd2 100644 --- a/src/finalizer/utils/linea/common.ts +++ b/src/finalizer/utils/linea/common.ts @@ -1,7 +1,4 @@ import { LineaSDK, Message, OnChainMessageStatus } from "@consensys/linea-sdk"; -import { L1MessageServiceContract, L2MessageServiceContract } from "@consensys/linea-sdk/dist/lib/contracts"; -import { L1ClaimingService } from "@consensys/linea-sdk/dist/lib/sdk/claiming/L1ClaimingService"; -import { MessageSentEvent } from "@consensys/linea-sdk/dist/typechain/L2MessageService"; import { Linea_Adapter__factory } from "@across-protocol/contracts"; import { BigNumber, @@ -16,11 +13,12 @@ import { getNodeUrlList, getRedisCache, paginatedEventQuery, - retryAsync, CHAIN_IDs, } from "../../../utils"; import { HubPoolClient } from "../../../clients"; import { CONTRACT_ADDRESSES } from "../../../common"; +import { Log } from "../../../interfaces"; +import { L1ClaimingService, L1MessageServiceContract, L2MessageServiceContract, MessageSentEvent } from "./imports"; export type MessageWithStatus = Message & { logIndex: number; @@ -40,8 +38,12 @@ export function initLineaSdk(l1ChainId: number, l2ChainId: number): LineaSDK { } export function makeGetMessagesWithStatusByTxHash( - srcMessageService: L1MessageServiceContract | L2MessageServiceContract, - dstClaimingService: L1ClaimingService | L2MessageServiceContract + l2Provider: ethers.providers.Provider, + l1Provider: ethers.providers.Provider, + l2MessageService: L2MessageServiceContract, + l1ClaimingService: L1ClaimingService, + l1SearchConfig: EventSearchConfig, + l2SearchConfig: EventSearchConfig ) { /** * Retrieves Linea's MessageSent events for a given transaction hash and enhances them with their status. @@ -51,18 +53,16 @@ export function makeGetMessagesWithStatusByTxHash( */ return async (txHashOrReceipt: string | TransactionReceipt): Promise => { const txReceipt = - typeof txHashOrReceipt === "string" - ? 
await srcMessageService.provider.getTransactionReceipt(txHashOrReceipt) - : txHashOrReceipt; + typeof txHashOrReceipt === "string" ? await l2Provider.getTransactionReceipt(txHashOrReceipt) : txHashOrReceipt; if (!txReceipt) { return []; } const messages = txReceipt.logs - .filter((log) => log.address === srcMessageService.contract.address) + .filter((log) => log.address === l2MessageService.contract.address) .flatMap((log) => { - const parsedLog = srcMessageService.contract.interface.parseLog(log); + const parsedLog = l2MessageService.contract.interface.parseLog(log); if (!parsedLog || parsedLog.name !== "MessageSent") { return []; @@ -83,11 +83,17 @@ export function makeGetMessagesWithStatusByTxHash( logIndex: log.logIndex, }; }); - - // The Linea SDK MessageServiceContract constructs its own Provider without our retry logic so we retry each call - // twice with a 1 second delay between in case of intermittent RPC failures. const messageStatus = await Promise.all( - messages.map((message) => retryAsync(() => dstClaimingService.getMessageStatus(message.messageHash), 2, 1)) + messages.map((message) => + getL2L1MessageStatusUsingCustomProvider( + l1ClaimingService, + message.messageHash, + l1Provider, + l1SearchConfig, + l2Provider, + l2SearchConfig + ) + ) ); return messages.map((message, index) => ({ ...message, @@ -96,6 +102,72 @@ export function makeGetMessagesWithStatusByTxHash( }; } +// Temporary re-implementation of the SDK's `L1ClaimingService.getMessageStatus` functions that allow us to use +// our custom provider, with retry and caching logic, to get around the SDK's hardcoded logic to query events +// from 0 to "latest" which will not work on all RPC's. +async function getL2L1MessageStatusUsingCustomProvider( + messageService: L1ClaimingService, + messageHash: string, + l1Provider: ethers.providers.Provider, + l1SearchConfig: EventSearchConfig, + l2Provider: ethers.providers.Provider, + l2SearchConfig: EventSearchConfig +): Promise { + const l2Contract = getL2MessageServiceContractFromL1ClaimingService(messageService, l2Provider); + const messageEvent = await getMessageSentEventForMessageHash(messageHash, l2Contract, l2SearchConfig); + const l1Contract = getL1MessageServiceContractFromL1ClaimingService(messageService, l1Provider); + const [l2MessagingBlockAnchoredEvents, isMessageClaimed] = await Promise.all([ + getL2MessagingBlockAnchoredFromMessageSentEvent(messageEvent, l1Contract, l1SearchConfig), + l1Contract.isMessageClaimed(messageEvent.args?._nonce), + ]); + if (isMessageClaimed) { + return OnChainMessageStatus.CLAIMED; + } + if (l2MessagingBlockAnchoredEvents.length > 0) { + return OnChainMessageStatus.CLAIMABLE; + } + return OnChainMessageStatus.UNKNOWN; +} +export function getL2MessageServiceContractFromL1ClaimingService( + l1ClaimingService: L1ClaimingService, + l2Provider: ethers.providers.Provider +): Contract { + return l1ClaimingService.l2Contract.contract.connect(l2Provider); +} +export function getL1MessageServiceContractFromL1ClaimingService( + l1ClaimingService: L1ClaimingService, + l1Provider: ethers.providers.Provider +): Contract { + return l1ClaimingService.l1Contract.contract.connect(l1Provider); +} +export async function getMessageSentEventForMessageHash( + messageHash: string, + l2MessageServiceContract: Contract, + l2SearchConfig: EventSearchConfig +): Promise { + const [messageEvent] = await paginatedEventQuery( + l2MessageServiceContract, + l2MessageServiceContract.filters.MessageSent(null, null, null, null, null, null, messageHash), + l2SearchConfig 
+  );
+  if (!messageEvent) {
+    throw new Error(`Message hash does not exist on L2. Message hash: ${messageHash}`);
+  }
+  return messageEvent;
+}
+export async function getL2MessagingBlockAnchoredFromMessageSentEvent(
+  messageSentEvent: Log,
+  l1MessageServiceContract: Contract,
+  l1SearchConfig: EventSearchConfig
+): Promise<Log[]> {
+  const l2MessagingBlockAnchoredEvents = await paginatedEventQuery(
+    l1MessageServiceContract,
+    l1MessageServiceContract.filters.L2MessagingBlockAnchored(messageSentEvent.blockNumber),
+    l1SearchConfig
+  );
+  return l2MessagingBlockAnchoredEvents;
+}
+
 export async function getBlockRangeByHoursOffsets(
   chainId: number,
   fromBlockHoursOffsetToNow: number,
@@ -188,13 +260,13 @@ export function determineMessageType(
 }

 export async function findMessageSentEvents(
-  contract: L1MessageServiceContract | L2MessageServiceContract,
+  contract: Contract,
   l1ToL2AddressesToFinalize: string[],
   searchConfig: EventSearchConfig
 ): Promise<MessageSentEvent[]> {
   return paginatedEventQuery(
-    contract.contract,
-    (contract.contract as Contract).filters.MessageSent(l1ToL2AddressesToFinalize, l1ToL2AddressesToFinalize),
+    contract,
+    contract.filters.MessageSent(l1ToL2AddressesToFinalize, l1ToL2AddressesToFinalize),
     searchConfig
   ) as Promise<MessageSentEvent[]>;
 }
diff --git a/src/finalizer/utils/linea/imports.ts b/src/finalizer/utils/linea/imports.ts
new file mode 100644
index 000000000..558e312fd
--- /dev/null
+++ b/src/finalizer/utils/linea/imports.ts
@@ -0,0 +1,23 @@
+// Normally we avoid importing directly from a node_modules' /dist package but we need access to some
+// of the internal classes and functions in order to replicate SDK logic so that we can bypass hardcoded
+// ethers.Provider instances and use our own custom provider instead.
+import { L1MessageServiceContract, L2MessageServiceContract } from "@consensys/linea-sdk/dist/lib/contracts";
+import { L1ClaimingService } from "@consensys/linea-sdk/dist/lib/sdk/claiming/L1ClaimingService";
+import { MessageSentEvent } from "@consensys/linea-sdk/dist/typechain/L2MessageService";
+import { SparseMerkleTreeFactory } from "@consensys/linea-sdk/dist/lib/sdk/merkleTree/MerkleTreeFactory";
+import {
+  DEFAULT_L2_MESSAGE_TREE_DEPTH,
+  L2_MERKLE_TREE_ADDED_EVENT_SIGNATURE,
+  L2_MESSAGING_BLOCK_ANCHORED_EVENT_SIGNATURE,
+} from "@consensys/linea-sdk/dist/lib/utils/constants";
+
+export {
+  L1ClaimingService,
+  L1MessageServiceContract,
+  L2MessageServiceContract,
+  MessageSentEvent,
+  SparseMerkleTreeFactory,
+  DEFAULT_L2_MESSAGE_TREE_DEPTH,
+  L2_MERKLE_TREE_ADDED_EVENT_SIGNATURE,
+  L2_MESSAGING_BLOCK_ANCHORED_EVENT_SIGNATURE,
+};
diff --git a/src/finalizer/utils/linea/l1ToL2.ts b/src/finalizer/utils/linea/l1ToL2.ts
index 1157c373d..93055e8e7 100644
--- a/src/finalizer/utils/linea/l1ToL2.ts
+++ b/src/finalizer/utils/linea/l1ToL2.ts
@@ -4,7 +4,7 @@ import { Contract } from "ethers";
 import { groupBy } from "lodash";
 import { HubPoolClient, SpokePoolClient } from "../../../clients";
 import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES } from "../../../common";
-import { EventSearchConfig, Signer, convertFromWei, retryAsync, winston, CHAIN_IDs } from "../../../utils";
+import { EventSearchConfig, Signer, convertFromWei, winston, CHAIN_IDs, ethers, BigNumber } from "../../../utils";
 import { CrossChainMessage, FinalizerPromise } from "../../types";
 import {
   determineMessageType,
@@ -12,8 +12,28 @@ import {
   findMessageFromUsdcBridge,
   findMessageSentEvents,
   getBlockRangeByHoursOffsets,
+  getL1MessageServiceContractFromL1ClaimingService,
   initLineaSdk,
 } from "./common";
+import
{ L2MessageServiceContract } from "./imports"; + +const L1L2MessageStatuses = { + 0: "UNKNOWN", + 1: "CLAIMABLE", + 2: "CLAIMED", +}; +// Temporary re-implementation of the SDK's `L2MessageServiceContract.getMessageStatus` functions that allow us to use +// our custom provider, with retry and caching logic, to get around the SDK's hardcoded logic to query events +// from 0 to "latest" which will not work on all RPC's. +async function getL1ToL2MessageStatusUsingCustomProvider( + messageService: L2MessageServiceContract, + messageHash: string, + l2Provider: ethers.providers.Provider +): Promise { + const l2Contract = messageService.contract.connect(l2Provider); + const status: BigNumber = await l2Contract.inboxL1L2MessageStatus(messageHash); + return L1L2MessageStatuses[status.toString()]; +} export async function lineaL1ToL2Finalizer( logger: winston.Logger, @@ -58,7 +78,11 @@ export async function lineaL1ToL2Finalizer( }; const [wethAndRelayEvents, tokenBridgeEvents, usdcBridgeEvents] = await Promise.all([ - findMessageSentEvents(l1MessageServiceContract, l1ToL2AddressesToFinalize, searchConfig), + findMessageSentEvents( + getL1MessageServiceContractFromL1ClaimingService(lineaSdk.getL1ClaimingService(), hubPoolClient.hubPool.provider), + l1ToL2AddressesToFinalize, + searchConfig + ), findMessageFromTokenBridge(l1TokenBridge, l1MessageServiceContract, l1ToL2AddressesToFinalize, searchConfig), findMessageFromUsdcBridge(l1UsdcBridge, l1MessageServiceContract, l1ToL2AddressesToFinalize, searchConfig), ]); @@ -73,9 +97,11 @@ export async function lineaL1ToL2Finalizer( // It's unlikely that our multicall will have multiple transactions to bridge to Linea // so we can grab the statuses individually. - // The Linea SDK MessageServiceContract constructs its own Provider without our retry logic so we retry each call - // twice with a 1 second delay between in case of intermittent RPC failures. 
-    const messageStatus = await retryAsync(() => l2MessageServiceContract.getMessageStatus(_messageHash), 2, 1);
+    const messageStatus = await getL1ToL2MessageStatusUsingCustomProvider(
+      l2MessageServiceContract,
+      _messageHash,
+      _spokePoolClient.spokePool.provider
+    );
     return {
       messageSender: _from,
       destination: _to,
diff --git a/src/finalizer/utils/linea/l2ToL1.ts b/src/finalizer/utils/linea/l2ToL1.ts
index 6649b7ba7..0261afcf0 100644
--- a/src/finalizer/utils/linea/l2ToL1.ts
+++ b/src/finalizer/utils/linea/l2ToL1.ts
@@ -3,7 +3,19 @@ import { Wallet } from "ethers";
 import { groupBy } from "lodash";

 import { HubPoolClient, SpokePoolClient } from "../../../clients";
-import { Signer, winston, convertFromWei, getL1TokenInfo } from "../../../utils";
+import {
+  Signer,
+  winston,
+  convertFromWei,
+  getL1TokenInfo,
+  getProvider,
+  EventSearchConfig,
+  ethers,
+  Contract,
+  paginatedEventQuery,
+  mapAsync,
+  BigNumber,
+} from "../../../utils";
 import { FinalizerPromise, CrossChainMessage } from "../../types";
 import { TokensBridged } from "../../../interfaces";
 import {
@@ -11,7 +23,137 @@ import {
   makeGetMessagesWithStatusByTxHash,
   MessageWithStatus,
   getBlockRangeByHoursOffsets,
+  getMessageSentEventForMessageHash,
+  getL1MessageServiceContractFromL1ClaimingService,
+  getL2MessageServiceContractFromL1ClaimingService,
+  getL2MessagingBlockAnchoredFromMessageSentEvent,
 } from "./common";
+import { CHAIN_MAX_BLOCK_LOOKBACK } from "../../../common";
+import { utils as sdkUtils } from "@across-protocol/sdk";
+import {
+  L1ClaimingService,
+  SparseMerkleTreeFactory,
+  DEFAULT_L2_MESSAGE_TREE_DEPTH,
+  L2_MERKLE_TREE_ADDED_EVENT_SIGNATURE,
+  L2_MESSAGING_BLOCK_ANCHORED_EVENT_SIGNATURE,
+} from "./imports";
+
+// Normally we avoid importing directly from a node_modules' /dist package but we need access to some
+// of the internal classes and functions in order to replicate SDK logic so that we can bypass hardcoded
+// ethers.Provider instances and use our own custom provider instead.
+
+// Ideally we could call this function through the LineaSDK but it's hardcoded to use an ethers.Provider instance
+// that doesn't have our custom caching logic or the ability for us to customize the block lookback. This means we
+// can't use the SDK on providers that have a maxBlockLookback constraint. So, we re-implement this function here.
+async function getMessageProof(
+  messageHash: string,
+  l1ClaimingService: L1ClaimingService,
+  l2Provider: ethers.providers.Provider,
+  l1Provider: ethers.providers.Provider,
+  l2SearchConfig: EventSearchConfig,
+  l1SearchConfig: EventSearchConfig
+) {
+  const l2Contract = getL2MessageServiceContractFromL1ClaimingService(l1ClaimingService, l2Provider);
+  const messageEvent = await getMessageSentEventForMessageHash(messageHash, l2Contract, l2SearchConfig);
+  const l1Contract = getL1MessageServiceContractFromL1ClaimingService(l1ClaimingService, l1Provider);
+  const [l2MessagingBlockAnchoredEvent] = await getL2MessagingBlockAnchoredFromMessageSentEvent(
+    messageEvent,
+    l1Contract,
+    l1SearchConfig
+  );
+  if (!l2MessagingBlockAnchoredEvent) {
+    throw new Error(`L2 block number ${messageEvent.blockNumber} has not been finalized on L1.`);
+  }
+  // getFinalizationMessagingInfo below re-implements the SDK's version of this step; it is complex but only
+  // makes a single l1Provider.getTransactionReceipt call, and we need to skip logs the SDK's ABI can't parse.
+ const finalizationInfo = await getFinalizationMessagingInfo( + l1ClaimingService, + l2MessagingBlockAnchoredEvent.transactionHash + ); + const l2MessageHashesInBlockRange = await getL2MessageHashesInBlockRange(l2Contract, { + fromBlock: finalizationInfo.l2MessagingBlocksRange.startingBlock, + toBlock: finalizationInfo.l2MessagingBlocksRange.endBlock, + maxBlockLookBack: l2SearchConfig.maxBlockLookBack, + }); + const l2Messages = l1ClaimingService.getMessageSiblings( + messageHash, + l2MessageHashesInBlockRange, + finalizationInfo.treeDepth + ); + // This part is really janky because the SDK doesn't expose any helper functions that use the + // merkle tree or the merkle tree class itself. + const merkleTreeFactory = new SparseMerkleTreeFactory(DEFAULT_L2_MESSAGE_TREE_DEPTH); + const tree = merkleTreeFactory.createAndAddLeaves(l2Messages); + if (!finalizationInfo.l2MerkleRoots.includes(tree.getRoot())) { + throw new Error("Merkle tree build failed."); + } + return tree.getProof(l2Messages.indexOf(messageHash)); +} + +async function getFinalizationMessagingInfo( + l1ClaimingService: L1ClaimingService, + transactionHash: string +): Promise<{ + l2MessagingBlocksRange: { startingBlock: number; endBlock: number }; + l2MerkleRoots: string[]; + treeDepth: number; +}> { + const l1Contract = l1ClaimingService.l1Contract; + const receipt = await l1Contract.provider.getTransactionReceipt(transactionHash); + if (!receipt || receipt.logs.length === 0) { + throw new Error(`Transaction does not exist or no logs found in this transaction: ${transactionHash}.`); + } + let treeDepth = 0; + const l2MerkleRoots = []; + const blocksNumber = []; + const filteredLogs = receipt.logs.filter((log) => log.address === l1Contract.contractAddress); + for (let i = 0; i < filteredLogs.length; i++) { + const log = filteredLogs[i]; + // This part changes from the SDK: remove any logs with topic hashes that don't exist in the current SDK's ABI, + // otherwise parseLog will fail. 
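+    // (ethers' Interface.parseLog throws on logs whose topic0 is not present in the ABI, so any
+    // unknown events must be skipped before parsing.)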
+ const topic = log.topics[0]; + if (topic !== L2_MERKLE_TREE_ADDED_EVENT_SIGNATURE && topic !== L2_MESSAGING_BLOCK_ANCHORED_EVENT_SIGNATURE) { + continue; + } + + const parsedLog = l1Contract.contract.interface.parseLog(log); + if (topic === L2_MERKLE_TREE_ADDED_EVENT_SIGNATURE) { + treeDepth = BigNumber.from(parsedLog.args.treeDepth).toNumber(); + l2MerkleRoots.push(parsedLog.args.l2MerkleRoot); + } else if (topic === L2_MESSAGING_BLOCK_ANCHORED_EVENT_SIGNATURE) { + blocksNumber.push(parsedLog.args.l2Block); + } + } + if (l2MerkleRoots.length === 0) { + throw new Error("No L2MerkleRootAdded events found in this transaction."); + } + if (blocksNumber.length === 0) { + throw new Error("No L2MessagingBlocksAnchored events found in this transaction."); + } + return { + l2MessagingBlocksRange: { + startingBlock: Math.min(...blocksNumber), + endBlock: Math.max(...blocksNumber), + }, + l2MerkleRoots, + treeDepth, + }; +} + +async function getL2MessageHashesInBlockRange( + l2MessageServiceContract: Contract, + l2SearchConfig: EventSearchConfig +): Promise { + const events = await paginatedEventQuery( + l2MessageServiceContract, + l2MessageServiceContract.filters.MessageSent(), + l2SearchConfig + ); + if (events.length === 0) { + throw new Error("No MessageSent events found in this block range on L2."); + } + return events.map((event) => event.args._messageHash); +} export async function lineaL2ToL1Finalizer( logger: winston.Logger, @@ -24,21 +166,44 @@ export async function lineaL2ToL1Finalizer( const l2Contract = lineaSdk.getL2Contract(); const l1Contract = lineaSdk.getL1Contract(); const l1ClaimingService = lineaSdk.getL1ClaimingService(l1Contract.contractAddress); - const getMessagesWithStatusByTxHash = makeGetMessagesWithStatusByTxHash(l2Contract, l1ClaimingService); - + // We need a longer lookback period for L1 to ensure we find all L1 events containing finalized + // L2 block heights. Use the max spoke pool client lookback for the hub chain. + const l1FromBlock = spokePoolClient[hubPoolClient.chainId].eventSearchConfig.fromBlock; + const l1ToBlock = spokePoolClient[hubPoolClient.chainId].latestBlockSearched; // Optimize block range for querying relevant source events on L2. 
// Linea L2->L1 messages are claimable after 6 - 32 hours - const { fromBlock, toBlock } = await getBlockRangeByHoursOffsets(l2ChainId, 24 * 8, 6); + const { fromBlock: l2FromBlock, toBlock: l2ToBlock } = await getBlockRangeByHoursOffsets(l2ChainId, 24 * 8, 6); + const l1SearchConfig = { + fromBlock: l1FromBlock, + toBlock: l1ToBlock, + maxBlockLookBack: CHAIN_MAX_BLOCK_LOOKBACK[l1ChainId] || 10_000, + }; + const l2SearchConfig = { + fromBlock: l2FromBlock, + toBlock: l2ToBlock, + maxBlockLookBack: CHAIN_MAX_BLOCK_LOOKBACK[l2ChainId] || 5_000, + }; + const l2Provider = await getProvider(l2ChainId); + const l1Provider = await getProvider(l1ChainId); + const getMessagesWithStatusByTxHash = makeGetMessagesWithStatusByTxHash( + l2Provider, + l1Provider, + l2Contract, + l1ClaimingService, + l1SearchConfig, + l2SearchConfig + ); + logger.debug({ at: "Finalizer#LineaL2ToL1Finalizer", message: "Linea TokensBridged event filter", - fromBlock, - toBlock, + l1SearchConfig, + l2SearchConfig, }); // Get src events const l2SrcEvents = spokePoolClient .getTokensBridged() - .filter(({ blockNumber }) => blockNumber >= fromBlock && blockNumber <= toBlock); + .filter(({ blockNumber }) => blockNumber >= l2FromBlock && blockNumber <= l2ToBlock); // Get Linea's MessageSent events for each src event const uniqueTxHashes = Array.from(new Set(l2SrcEvents.map((event) => event.transactionHash))); @@ -65,33 +230,28 @@ export async function lineaL2ToL1Finalizer( // Populate txns for claimable messages const populatedTxns = await Promise.all( claimable.map(async ({ message }) => { - const isProofNeeded = await l1ClaimingService.isClaimingNeedingProof(message.messageHash); - - if (isProofNeeded) { - const proof = await l1ClaimingService.getMessageProof(message.messageHash); - return l1ClaimingService.l1Contract.contract.populateTransaction.claimMessageWithProof({ - from: message.messageSender, - to: message.destination, - fee: message.fee, - value: message.value, - feeRecipient: (signer as Wallet).address, - data: message.calldata, - messageNumber: message.messageNonce, - proof: proof.proof, - leafIndex: proof.leafIndex, - merkleRoot: proof.root, - }); - } - - return l1ClaimingService.l1Contract.contract.populateTransaction.claimMessage( - message.messageSender, - message.destination, - message.fee, - message.value, - (signer as Wallet).address, - message.calldata, - message.messageNonce + // As of this upgrade, proofs are always needed to submit claims: + // https://lineascan.build/tx/0x01ef3ec3c09c4fe828ec2c0e67a3f3adf768d34026adf8948e05f7871abaa327 + const proof = await getMessageProof( + message.messageHash, + l1ClaimingService, + l2Provider, + l1Provider, + l2SearchConfig, + l1SearchConfig ); + return l1ClaimingService.l1Contract.contract.populateTransaction.claimMessageWithProof({ + from: message.messageSender, + to: message.destination, + fee: message.fee, + value: message.value, + feeRecipient: (signer as Wallet).address, + data: message.calldata, + messageNumber: message.messageNonce, + proof: proof.proof, + leafIndex: proof.leafIndex, + merkleRoot: proof.root, + }); }) ); const multicall3Call = populatedTxns.map((txn) => ({ @@ -115,14 +275,26 @@ export async function lineaL2ToL1Finalizer( return transfer; }); + const averageBlockTimeSeconds = await sdkUtils.averageBlockTime(spokePoolClient.spokePool.provider); logger.debug({ at: "Finalizer#LineaL2ToL1Finalizer", message: "Linea L2->L1 message statuses", + averageBlockTimeSeconds, + latestSpokePoolBlock: spokePoolClient.latestBlockSearched, statuses: { claimed: 
claimed.length,
         claimable: claimable.length,
         notReceived: unknown.length,
       },
+      notReceivedTxns: await mapAsync(unknown, async ({ message, tokensBridged }) => {
+        const withdrawalBlock = tokensBridged.blockNumber;
+        return {
+          txnHash: message.txHash,
+          withdrawalBlock,
+          maturedHours:
+            (averageBlockTimeSeconds.average * (spokePoolClient.latestBlockSearched - withdrawalBlock)) / 60 / 60,
+        };
+      }),
     });

     return { callData: multicall3Call, crossChainMessages: transfers };

From 5554920f40ee93d7fb33b7147a6a97b783860360 Mon Sep 17 00:00:00 2001
From: nicholaspai <9457025+nicholaspai@users.noreply.github.com>
Date: Mon, 16 Dec 2024 20:53:52 -0500
Subject: [PATCH 41/44] fix(LineaFinalizer): Fix undefined object (#1953)

* fix(LineaFinalizer): Fix spokepool client

* Update l2ToL1.ts

* Update l2ToL1.ts

* Update l2ToL1.ts

---
 src/finalizer/utils/linea/l2ToL1.ts | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/finalizer/utils/linea/l2ToL1.ts b/src/finalizer/utils/linea/l2ToL1.ts
index 0261afcf0..c25ce1621 100644
--- a/src/finalizer/utils/linea/l2ToL1.ts
+++ b/src/finalizer/utils/linea/l2ToL1.ts
@@ -167,9 +167,8 @@ export async function lineaL2ToL1Finalizer(
   const l1Contract = lineaSdk.getL1Contract();
   const l1ClaimingService = lineaSdk.getL1ClaimingService(l1Contract.contractAddress);
   // We need a longer lookback period for L1 to ensure we find all L1 events containing finalized
-  // L2 block heights. Use the max spoke pool client lookback for the hub chain.
-  const l1FromBlock = spokePoolClient[hubPoolClient.chainId].eventSearchConfig.fromBlock;
-  const l1ToBlock = spokePoolClient[hubPoolClient.chainId].latestBlockSearched;
+  // L2 block heights.
+  const { fromBlock: l1FromBlock, toBlock: l1ToBlock } = await getBlockRangeByHoursOffsets(l1ChainId, 24 * 14, 0);
   // Optimize block range for querying relevant source events on L2.
   // Linea L2->L1 messages are claimable after 6 - 32 hours
   const { fromBlock: l2FromBlock, toBlock: l2ToBlock } = await getBlockRangeByHoursOffsets(l2ChainId, 24 * 8, 6);

From 8f6617ed123a530f2827305d25a314b87912efc6 Mon Sep 17 00:00:00 2001
From: nicholaspai <9457025+nicholaspai@users.noreply.github.com>
Date: Tue, 17 Dec 2024 15:54:49 -0500
Subject: [PATCH 42/44] improve(Relayer): Add total gas price to `info` log when relayer sends fill (#1949)

* improve(Relayer): Add total gas price to `info` log when relayer sends fill

This will help with debugging and give us broad oversight into how accurate
our gas price estimates are. We should probably refactor the overall relay
fee calculation flow, but this PR uses the data we already have (native and
token gas costs) to derive the gas price easily.

This PR also adds a simple change to the ProfitClient's `updateGasCosts`
function to print out the gas price estimate per chain.
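For reference, the derivation relies on the identity tokenGasCost =
nativeGasCost * gasPrice, so the effective gas price falls out of a single
division (a sketch, assuming nonzero BigNumber costs as returned by the gas
estimator):

    // nativeGasCost is denominated in gas units and tokenGasCost in wei, so wei-per-gas is:
    const gasPrice = tokenGasCost.div(nativeGasCost);

(The fixup commits below instead read `gasPrice` directly off the SDK's
`TransactionCostEstimate`.)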
A more helpful fix might be to add the `gasPrice` field to the `TransactionCostEstimate` type in the SDK and force functions like `sdk.common.estimateTotalGasRequiredByUnsignedTransaction()` to return the priority fee and base fee broken down so the relayer can log it * Update ProfitClient.ts * revert relayer * Add gasPrice as required elem to profit client * add log by reading from profit client * lint * import from sdk * Update ProfitClient.ConsiderProfitability.ts * Update ProfitClient.ConsiderProfitability.ts * Update ProfitClient.ConsiderProfitability.ts * Update Relayer.ts --- package.json | 2 +- src/clients/ProfitClient.ts | 29 ++++++++--- src/relayer/Relayer.ts | 60 ++++++++++++++++++---- src/utils/SDKUtils.ts | 1 + test/ProfitClient.ConsiderProfitability.ts | 17 +++--- test/mocks/MockProfitClient.ts | 1 + yarn.lock | 8 +-- 7 files changed, 87 insertions(+), 31 deletions(-) diff --git a/package.json b/package.json index 3112de3fd..c15becff2 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "dependencies": { "@across-protocol/constants": "^3.1.22", "@across-protocol/contracts": "^3.0.18", - "@across-protocol/sdk": "^3.3.25", + "@across-protocol/sdk": "^3.3.26", "@arbitrum/sdk": "^4.0.2", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", diff --git a/src/clients/ProfitClient.ts b/src/clients/ProfitClient.ts index afbb31f30..af085aad9 100644 --- a/src/clients/ProfitClient.ts +++ b/src/clients/ProfitClient.ts @@ -28,6 +28,7 @@ import { TOKEN_SYMBOLS_MAP, TOKEN_EQUIVALENCE_REMAPPING, ZERO_ADDRESS, + formatGwei, } from "../utils"; import { Deposit, DepositWithBlock, L1Token, SpokePoolClientsByChain } from "../interfaces"; import { getAcrossHost } from "./AcrossAPIClient"; @@ -58,6 +59,7 @@ export type FillProfit = { grossRelayerFeeUsd: BigNumber; // USD value of the relay fee paid by the user. nativeGasCost: BigNumber; // Cost of completing the fill in the units of gas. tokenGasCost: BigNumber; // Cost of completing the fill in the relevant gas token. + gasPrice: BigNumber; // Gas price in wei. gasPadding: BigNumber; // Positive padding applied to nativeGasCost and tokenGasCost before profitability. gasMultiplier: BigNumber; // Gas multiplier applied to fill cost estimates before profitability. gasTokenPriceUsd: BigNumber; // Price paid per unit of gas the gas token in USD. @@ -213,7 +215,7 @@ export class ProfitClient { deposit, notificationPath: "across-unprofitable-fills", }); - return { nativeGasCost: uint256Max, tokenGasCost: uint256Max }; + return { nativeGasCost: uint256Max, tokenGasCost: uint256Max, gasPrice: uint256Max }; } } @@ -229,15 +231,21 @@ export class ProfitClient { return this._getTotalGasCost(deposit, this.relayerAddress); } + getGasCostsForChain(chainId: number): TransactionCostEstimate { + return this.totalGasCosts[chainId]; + } + // Estimate the gas cost of filling this relay. 
   async estimateFillCost(
     deposit: Deposit
-  ): Promise<Pick<FillProfit, "nativeGasCost" | "tokenGasCost" | "gasTokenPriceUsd" | "gasCostUsd">> {
+  ): Promise<Pick<FillProfit, "nativeGasCost" | "tokenGasCost" | "gasPrice" | "gasTokenPriceUsd" | "gasCostUsd">> {
     const { destinationChainId: chainId } = deposit;
     const gasToken = this.resolveGasToken(chainId);
     const gasTokenPriceUsd = this.getPriceOfToken(gasToken.symbol);
 
-    let { nativeGasCost, tokenGasCost } = await this.getTotalGasCost(deposit);
+    const totalGasCost = await this.getTotalGasCost(deposit);
+    let { nativeGasCost, tokenGasCost } = totalGasCost;
+    const gasPrice = totalGasCost.gasPrice;
 
     Object.entries({
       "gas consumption": nativeGasCost, // raw gas units
@@ -265,6 +273,7 @@
     return {
       nativeGasCost,
       tokenGasCost,
+      gasPrice,
       gasTokenPriceUsd,
       gasCostUsd,
     };
@@ -363,7 +372,9 @@
       : bnZero;
 
     // Estimate the gas cost of filling this relay.
-    const { nativeGasCost, tokenGasCost, gasTokenPriceUsd, gasCostUsd } = await this.estimateFillCost(deposit);
+    const { nativeGasCost, tokenGasCost, gasTokenPriceUsd, gasCostUsd, gasPrice } = await this.estimateFillCost(
+      deposit
+    );
 
     // Determine profitability. netRelayerFeePct effectively represents the capital cost to the relayer;
     // i.e. how much it pays out to the recipient vs. the net fee that it receives for doing so.
@@ -386,6 +397,7 @@
       grossRelayerFeeUsd,
       nativeGasCost,
       tokenGasCost,
+      gasPrice,
       gasPadding: this.gasPadding,
       gasMultiplier: this.resolveGasMultiplier(deposit),
       gasTokenPriceUsd,
@@ -434,7 +446,7 @@
 
     this.logger.debug({
       at: "ProfitClient#getFillProfitability",
-      message: `${l1Token.symbol} v3 deposit ${depositId} with repayment on ${repaymentChainId} is ${profitable}`,
+      message: `${l1Token.symbol} deposit ${depositId} with repayment on ${repaymentChainId} is ${profitable}`,
       deposit,
       inputTokenPriceUsd: formatEther(fill.inputTokenPriceUsd),
       inputTokenAmountUsd: formatEther(fill.inputAmountUsd),
@@ -445,6 +457,7 @@
       grossRelayerFeePct: `${formatFeePct(fill.grossRelayerFeePct)}%`,
       nativeGasCost: fill.nativeGasCost,
       tokenGasCost: formatEther(fill.tokenGasCost),
+      gasPrice: formatGwei(fill.gasPrice.toString()),
       gasPadding: this.gasPadding,
       gasMultiplier: formatEther(this.resolveGasMultiplier(deposit)),
       gasTokenPriceUsd: formatEther(fill.gasTokenPriceUsd),
@@ -465,13 +478,14 @@
     lpFeePct: BigNumber,
     l1Token: L1Token,
     repaymentChainId: number
-  ): Promise<Pick<FillProfit, "profitable" | "nativeGasCost" | "tokenGasCost" | "netRelayerFeePct">> {
+  ): Promise<Pick<FillProfit, "profitable" | "nativeGasCost" | "tokenGasCost" | "gasPrice" | "netRelayerFeePct">> {
     let profitable = false;
     let netRelayerFeePct = bnZero;
     let nativeGasCost = uint256Max;
     let tokenGasCost = uint256Max;
+    let gasPrice = uint256Max;
     try {
-      ({ profitable, netRelayerFeePct, nativeGasCost, tokenGasCost } = await this.getFillProfitability(
+      ({ profitable, netRelayerFeePct, nativeGasCost, tokenGasCost, gasPrice } = await this.getFillProfitability(
         deposit,
         lpFeePct,
         l1Token,
@@ -490,6 +504,7 @@
       profitable: profitable || (this.isTestnet && nativeGasCost.lt(uint256Max)),
       nativeGasCost,
       tokenGasCost,
+      gasPrice,
       netRelayerFeePct,
     };
   }
diff --git a/src/relayer/Relayer.ts b/src/relayer/Relayer.ts
index 3f992385e..ae2313c69 100644
--- a/src/relayer/Relayer.ts
+++ b/src/relayer/Relayer.ts
@@ -21,6 +21,7 @@ import {
   TransactionResponse,
   ZERO_ADDRESS,
   Profiler,
+  formatGwei,
 } from "../utils";
 import { RelayerClients } from "./RelayerClientHelper";
 import { RelayerConfig } from "./RelayerConfig";
@@ -36,6 +37,7 @@ type BatchLPFees = { [depositKey: string]: RepaymentFee[] };
 type RepaymentChainProfitability = {
   gasLimit: BigNumber;
   gasCost: BigNumber;
+  gasPrice: BigNumber;
   relayerFeePct: BigNumber;
   lpFeePct: BigNumber;
 };
@@ -673,7 +675,13 @@
       l1Token,
       lpFees
     );
-    const { relayerFeePct, gasCost, gasLimit: _gasLimit, lpFeePct: realizedLpFeePct } = repaymentChainProfitability;
+    const {
+      relayerFeePct,
+      gasCost,
+      gasLimit: _gasLimit,
+      lpFeePct: realizedLpFeePct,
+      gasPrice,
+    } = repaymentChainProfitability;
     if (!isDefined(repaymentChainId)) {
       profitClient.captureUnprofitableFill(deposit, realizedLpFeePct, relayerFeePct, gasCost);
     } else {
@@ -702,7 +710,7 @@
         tokenClient.decrementLocalBalance(destinationChainId, outputToken, outputAmount);
 
         const gasLimit = isMessageEmpty(resolveDepositMessage(deposit)) ? undefined : _gasLimit;
-        this.fillRelay(deposit, repaymentChainId, realizedLpFeePct, gasLimit);
+        this.fillRelay(deposit, repaymentChainId, realizedLpFeePct, gasPrice, gasLimit);
       }
     } else if (selfRelay) {
       // Prefer exiting early here to avoid fast filling any deposits we send. This approach assumes that we always
@@ -720,7 +728,14 @@
       // relayer is both the depositor and the recipient, because a deposit on a cheap SpokePool chain could cause
       // expensive fills on (for example) mainnet.
       const { lpFeePct } = lpFees.find((lpFee) => lpFee.paymentChainId === destinationChainId);
-      this.fillRelay(deposit, destinationChainId, lpFeePct);
+      // For self-relays, gas price is not a concern because we are bypassing profitability requirements, so
+      // we use the profit client's gas price.
+      this.fillRelay(
+        deposit,
+        destinationChainId,
+        lpFeePct,
+        this.clients.profitClient.getGasCostsForChain(destinationChainId).gasPrice
+      );
     } else {
       // TokenClient.getBalance returns that we don't have enough balance to submit the fast fill.
       // At this point, capture the shortfall so that the inventory manager can rebalance the token inventory.
@@ -973,7 +988,13 @@
     this.setFillStatus(deposit, FillStatus.RequestedSlowFill);
   }
 
-  fillRelay(deposit: Deposit, repaymentChainId: number, realizedLpFeePct: BigNumber, gasLimit?: BigNumber): void {
+  fillRelay(
+    deposit: Deposit,
+    repaymentChainId: number,
+    realizedLpFeePct: BigNumber,
+    gasPrice: BigNumber,
+    gasLimit?: BigNumber
+  ): void {
     const { spokePoolClients } = this.clients;
     this.logger.debug({
       at: "Relayer::fillRelay",
@@ -1005,7 +1026,7 @@
     ];
 
     const message = `Filled v3 deposit ${messageModifier}🚀`;
-    const mrkdwn = this.constructRelayFilledMrkdwn(deposit, repaymentChainId, realizedLpFeePct);
+    const mrkdwn = this.constructRelayFilledMrkdwn(deposit, repaymentChainId, realizedLpFeePct, gasPrice);
     const contract = spokePoolClients[deposit.destinationChainId].spokePool;
     const chainId = deposit.destinationChainId;
     const multiCallerClient = this.getMulticaller(chainId);
@@ -1056,6 +1077,7 @@
       repaymentChainProfitability: {
         gasLimit: bnZero,
         gasCost: bnUint256Max,
+        gasPrice: bnUint256Max,
         relayerFeePct: bnZero,
         lpFeePct: bnUint256Max,
       },
@@ -1086,17 +1108,25 @@
     const getRepaymentChainProfitability = async (
       preferredChainId: number,
       lpFeePct: BigNumber
-    ): Promise<{ profitable: boolean; gasLimit: BigNumber; gasCost: BigNumber; relayerFeePct: BigNumber }> => {
+    ): Promise<{
+      profitable: boolean;
+      gasLimit: BigNumber;
+      gasCost: BigNumber;
+      gasPrice: BigNumber;
+      relayerFeePct: BigNumber;
+    }> => {
       const {
         profitable,
         nativeGasCost: gasLimit,
         tokenGasCost: gasCost,
+        gasPrice,
         netRelayerFeePct: relayerFeePct, // net relayer fee is equal to total fee minus the lp fee.
} = await profitClient.isFillProfitable(deposit, lpFeePct, hubPoolToken, preferredChainId); return { profitable, gasLimit, gasCost, + gasPrice, relayerFeePct, }; }; @@ -1116,10 +1146,11 @@ export class Relayer { // @dev The following internal function should be the only one used to set `preferredChain` above. const getProfitabilityDataForPreferredChainIndex = (preferredChainIndex: number): RepaymentChainProfitability => { const lpFeePct = lpFeePcts[preferredChainIndex]; - const { gasLimit, gasCost, relayerFeePct } = repaymentChainProfitabilities[preferredChainIndex]; + const { gasLimit, gasCost, relayerFeePct, gasPrice } = repaymentChainProfitabilities[preferredChainIndex]; return { gasLimit, gasCost, + gasPrice, relayerFeePct, lpFeePct, }; @@ -1344,9 +1375,14 @@ export class Relayer { } } - private constructRelayFilledMrkdwn(deposit: Deposit, repaymentChainId: number, realizedLpFeePct: BigNumber): string { + private constructRelayFilledMrkdwn( + deposit: Deposit, + repaymentChainId: number, + realizedLpFeePct: BigNumber, + gasPrice: BigNumber + ): string { let mrkdwn = - this.constructBaseFillMarkdown(deposit, realizedLpFeePct) + + this.constructBaseFillMarkdown(deposit, realizedLpFeePct, gasPrice) + ` Relayer repayment: ${getNetworkName(repaymentChainId)}.`; if (isDepositSpedUp(deposit)) { @@ -1361,7 +1397,7 @@ export class Relayer { return mrkdwn; } - private constructBaseFillMarkdown(deposit: Deposit, _realizedLpFeePct: BigNumber): string { + private constructBaseFillMarkdown(deposit: Deposit, _realizedLpFeePct: BigNumber, _gasPriceGwei: BigNumber): string { const { symbol, decimals } = this.clients.hubPoolClient.getTokenInfoForDeposit(deposit); const srcChain = getNetworkName(deposit.originChainId); const dstChain = getNetworkName(deposit.destinationChainId); @@ -1380,7 +1416,9 @@ export class Relayer { const _outputAmount = createFormatFunction(2, 4, false, outputTokenDecimals)(deposit.outputAmount.toString()); msg += ` and output ${_outputAmount} ${outputTokenSymbol}, with depositor ${depositor}.` + - ` Realized LP fee: ${realizedLpFeePct}%, total fee: ${totalFeePct}%.`; + ` Realized LP fee: ${realizedLpFeePct}%, total fee: ${totalFeePct}%. Gas price used in profit calc: ${formatGwei( + _gasPriceGwei.toString() + )} Gwei.`; return msg; } diff --git a/src/utils/SDKUtils.ts b/src/utils/SDKUtils.ts index 9914e05bb..af2e474c7 100644 --- a/src/utils/SDKUtils.ts +++ b/src/utils/SDKUtils.ts @@ -40,6 +40,7 @@ export const { formatFeePct, shortenHexStrings, convertFromWei, + formatGwei, max, min, utf8ToHex, diff --git a/test/ProfitClient.ConsiderProfitability.ts b/test/ProfitClient.ConsiderProfitability.ts index e94e1d152..cc38639d8 100644 --- a/test/ProfitClient.ConsiderProfitability.ts +++ b/test/ProfitClient.ConsiderProfitability.ts @@ -78,12 +78,13 @@ describe("ProfitClient: Consider relay profit", () => { // Randomise the fillRelay cost in units of gas. 
const nativeGasCost = toBN(random(80_000, 100_000)); - const tokenGasCost = nativeGasCost.mul(toGWei(random(1, 100))).div(toBN(10).pow(9)); + const gasPrice = toGWei(random(1, 100)); + const tokenGasCost = nativeGasCost.mul(gasPrice).div(toBN(10).pow(9)); profitClient.setTokenPrice(gasToken.address, gasTokenPriceUsd); - profitClient.setGasCost(chainId, { nativeGasCost, tokenGasCost }); + profitClient.setGasCost(chainId, { nativeGasCost, tokenGasCost, gasPrice }); - return { nativeGasCost, tokenGasCost, gasTokenPriceUsd }; + return { nativeGasCost, tokenGasCost, gasPrice, gasTokenPriceUsd }; }; const tokens = Object.fromEntries( @@ -106,8 +107,9 @@ describe("ProfitClient: Consider relay profit", () => { chainIds.map((chainId) => { const nativeGasCost = toBN(100_000); // Assume 100k gas for a single fill const gasTokenPrice = toBN(chainId); - const tokenGasCost = nativeGasCost.mul(gasTokenPrice); - return [chainId, { nativeGasCost, tokenGasCost }]; + const gasPrice = gasTokenPrice; + const tokenGasCost = nativeGasCost.mul(gasPrice); + return [chainId, { nativeGasCost, tokenGasCost, gasPrice }]; }) ); @@ -296,12 +298,11 @@ describe("ProfitClient: Consider relay profit", () => { }); it("Considers gas cost when computing profitability", async () => { - const gasPrice = toGWei(10); const gasCostMultipliers = ["0.1", "0.5", "1", "2", "5", "10"].map((n) => toBNWei(n)); for (const originChainId of chainIds) { for (const destinationChainId of chainIds.filter((chainId) => chainId !== originChainId)) { - const { nativeGasCost: baseNativeGasCost } = gasCost[destinationChainId]; + const { nativeGasCost: baseNativeGasCost, gasPrice } = gasCost[destinationChainId]; for (const token of Object.values(tokens)) { const inputToken = randomAddress(); @@ -339,7 +340,7 @@ describe("ProfitClient: Consider relay profit", () => { const nativeGasCost = baseNativeGasCost.mul(gasCostMultiplier).div(fixedPoint); const tokenGasCost = nativeGasCost.mul(gasPrice); const gasCostUsd = tokenGasCost.mul(gasTokenPriceUsd).div(fixedPoint); - profitClient.setGasCost(destinationChainId, { nativeGasCost, tokenGasCost }); + profitClient.setGasCost(destinationChainId, { nativeGasCost, tokenGasCost, gasPrice }); const gasCostPct = gasCostUsd.mul(fixedPoint).div(outputAmountUsd); diff --git a/test/mocks/MockProfitClient.ts b/test/mocks/MockProfitClient.ts index e20a31a21..a2ae739f7 100644 --- a/test/mocks/MockProfitClient.ts +++ b/test/mocks/MockProfitClient.ts @@ -57,6 +57,7 @@ export class MockProfitClient extends ProfitClient { const defaultGasCost = { nativeGasCost: defaultFillCost, tokenGasCost: defaultGasPrice.mul(defaultFillCost), + gasPrice: defaultGasPrice, }; Object.values(spokePoolClients).map(({ chainId }) => { this.setGasCost(chainId, defaultGasCost); // gas/fill diff --git a/yarn.lock b/yarn.lock index 309ba67e0..8f22b6827 100644 --- a/yarn.lock +++ b/yarn.lock @@ -53,10 +53,10 @@ yargs "^17.7.2" zksync-web3 "^0.14.3" -"@across-protocol/sdk@^3.3.25": - version "3.3.25" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.25.tgz#6eec255fb7a1025050e0415b56f1bf8681936b1e" - integrity sha512-nBBrXY/kslvfsYnVd6kTNOuDSomlfRTw6v4uI40au/rEzPQ6G8X5d/F+DGN3iPfi3ltHY5BEiqE+E6s7AxHA8A== +"@across-protocol/sdk@^3.3.26": + version "3.3.26" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.26.tgz#b33aaf1545af9ff2969dd99db5ac39f3d785f689" + integrity sha512-RaEkwtme9k24NAQDKWTFaywrhQd7RqxRRitRwSXoiGm6Aw4iOXHoh/CjT3Z1wjcCBxHVeRozYUXESQlJZ+dOTw== dependencies: "@across-protocol/across-token" 
"^1.0.0" "@across-protocol/constants" "^3.1.22" From 1d753b643e6a50539e0fc924b1cf62e55f4b7fb3 Mon Sep 17 00:00:00 2001 From: nicholaspai <9457025+nicholaspai@users.noreply.github.com> Date: Wed, 18 Dec 2024 09:41:59 -0500 Subject: [PATCH 43/44] refactor: Remove queryHistoricalDepositForFill (#1954) This function is unused. --- src/utils/DepositUtils.ts | 14 -------------- src/utils/index.ts | 1 - 2 files changed, 15 deletions(-) delete mode 100644 src/utils/DepositUtils.ts diff --git a/src/utils/DepositUtils.ts b/src/utils/DepositUtils.ts deleted file mode 100644 index daa1b56e4..000000000 --- a/src/utils/DepositUtils.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { utils } from "@across-protocol/sdk"; -import { Fill, SlowFillRequest } from "../interfaces"; -import { SpokePoolClient } from "../clients"; -import { getRedisCache } from "./"; - -// Load a deposit for a fill if the fill's deposit ID is outside this client's search range. -// This can be used by the Dataworker to determine whether to give a relayer a refund for a fill -// of a deposit older or younger than its fixed lookback. -export async function queryHistoricalDepositForFill( - spokePoolClient: SpokePoolClient, - fill: Fill | SlowFillRequest -): Promise { - return utils.queryHistoricalDepositForFill(spokePoolClient, fill, await getRedisCache(spokePoolClient.logger)); -} diff --git a/src/utils/index.ts b/src/utils/index.ts index 05af53a05..855a67e1e 100644 --- a/src/utils/index.ts +++ b/src/utils/index.ts @@ -54,7 +54,6 @@ export * from "./chains"; export * from "./fsUtils"; export * from "./ProviderUtils"; export * from "./SignerUtils"; -export * from "./DepositUtils"; export * from "./BlockUtils"; export * from "./EventUtils"; export * from "./FillUtils"; From 96adc9d8ffd271cb50b9ddff635d3b47d923808d Mon Sep 17 00:00:00 2001 From: bmzig <57361391+bmzig@users.noreply.github.com> Date: Wed, 18 Dec 2024 09:12:18 -0600 Subject: [PATCH 44/44] improve: wrap finalization calldata generation in a try/catch block (#1928) * improve: wrap finalization calldata generation in a try/catch Signed-off-by: bennett --------- Signed-off-by: bennett Co-authored-by: Paul <108695806+pxrl@users.noreply.github.com> --- src/finalizer/index.ts | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/src/finalizer/index.ts b/src/finalizer/index.ts index 0e4f6fe3f..c9cee4e26 100644 --- a/src/finalizer/index.ts +++ b/src/finalizer/index.ts @@ -30,6 +30,7 @@ import { winston, CHAIN_IDs, Profiler, + stringifyThrownValue, } from "../utils"; import { ChainFinalizer, CrossChainMessage } from "./types"; import { @@ -227,21 +228,29 @@ export async function finalize( let totalDepositsForChain = 0; let totalMiscTxnsForChain = 0; await sdkUtils.mapAsync(chainSpecificFinalizers, async (finalizer) => { - const { callData, crossChainMessages } = await finalizer( - logger, - hubSigner, - hubPoolClient, - client, - l1ToL2AddressesToFinalize - ); + try { + const { callData, crossChainMessages } = await finalizer( + logger, + hubSigner, + hubPoolClient, + client, + l1ToL2AddressesToFinalize + ); - callData.forEach((txn, idx) => { - finalizationsToBatch.push({ txn, crossChainMessage: crossChainMessages[idx] }); - }); + callData.forEach((txn, idx) => { + finalizationsToBatch.push({ txn, crossChainMessage: crossChainMessages[idx] }); + }); - totalWithdrawalsForChain += crossChainMessages.filter(({ type }) => type === "withdrawal").length; - totalDepositsForChain += crossChainMessages.filter(({ type }) => type === 
"deposit").length; - totalMiscTxnsForChain += crossChainMessages.filter(({ type }) => type === "misc").length; + totalWithdrawalsForChain += crossChainMessages.filter(({ type }) => type === "withdrawal").length; + totalDepositsForChain += crossChainMessages.filter(({ type }) => type === "deposit").length; + totalMiscTxnsForChain += crossChainMessages.filter(({ type }) => type === "misc").length; + } catch (_e) { + logger.error({ + at: "finalizer", + message: `Something errored in a finalizer for chain ${client.chainId}`, + errorMsg: stringifyThrownValue(_e), + }); + } }); const totalTransfers = totalWithdrawalsForChain + totalDepositsForChain + totalMiscTxnsForChain; logger.debug({