diff --git a/src/common/ContractAddresses.ts b/src/common/ContractAddresses.ts
index fffee9f33..70729fa91 100644
--- a/src/common/ContractAddresses.ts
+++ b/src/common/ContractAddresses.ts
@@ -1179,6 +1179,9 @@ export const CONTRACT_ADDRESSES: {
         },
       ],
     },
+    eth: {
+      address: "0x0000000000000000000000000000000000000000",
+    },
   },
   // Testnets
   11155111: {
diff --git a/src/utils/ProviderUtils.ts b/src/utils/ProviderUtils.ts
index e89412f68..c6b218026 100644
--- a/src/utils/ProviderUtils.ts
+++ b/src/utils/ProviderUtils.ts
@@ -44,6 +44,7 @@ class RateLimitedProvider extends ethers.providers.StaticJsonRpcProvider {
   // of the list.
   constructor(
     maxConcurrency: number,
+    readonly pctRpcCallsLogged: number,
     ...cacheConstructorParams: ConstructorParameters<typeof ethers.providers.StaticJsonRpcProvider>
   ) {
     super(...cacheConstructorParams);
@@ -59,6 +60,45 @@ class RateLimitedProvider extends ethers.providers.StaticJsonRpcProvider {
     }, maxConcurrency);
   }
 
+  async wrapSendWithLog(method: string, params: Array<any>) {
+    if (this.pctRpcCallsLogged <= 0 || Math.random() > this.pctRpcCallsLogged / 100) {
+      // Non-sample path: no logging or timing, just issue the request.
+      return super.send(method, params);
+    } else {
+      const loggerArgs = {
+        at: "ProviderUtils",
+        message: "Provider response sample",
+        provider: getOriginFromURL(this.connection.url),
+        method,
+        params,
+      };
+
+      // In this path we log an RPC response sample.
+      // Note: use performance.now() to ensure a purely monotonic clock.
+      const startTime = performance.now();
+      try {
+        const result = await super.send(method, params);
+        const elapsedTimeS = (performance.now() - startTime) / 1000;
+        logger.debug({
+          ...loggerArgs,
+          success: true,
+          timeElapsed: elapsedTimeS,
+        });
+        return result;
+      } catch (error) {
+        // Log errors as well.
+        // For now, to keep logs light, don't log the error itself; just propagate it and let it be handled higher up.
+        const elapsedTimeS = (performance.now() - startTime) / 1000;
+        logger.debug({
+          ...loggerArgs,
+          success: false,
+          timeElapsed: elapsedTimeS,
+        });
+        throw error;
+      }
+    }
+  }
+
   override async send(method: string, params: Array<any>): Promise<any> {
     // This simply creates a promise and adds the arguments and resolve and reject handlers to the task.
     return new Promise((resolve, reject) => {
@@ -273,6 +313,7 @@ export class RetryProvider extends ethers.providers.StaticJsonRpcProvider {
     readonly delay: number,
     readonly maxConcurrency: number,
     providerCacheNamespace: string,
+    pctRpcCallsLogged: number,
     redisClient?: RedisClient,
     standardTtlBlockDistance?: number,
     noTtlBlockDistance?: number
@@ -288,6 +329,7 @@ export class RetryProvider extends ethers.providers.StaticJsonRpcProvider {
           standardTtlBlockDistance,
           noTtlBlockDistance,
           maxConcurrency,
+          pctRpcCallsLogged,
           ...inputs
         )
     );
@@ -555,6 +597,7 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC
     NODE_PROVIDER_CACHE_NAMESPACE,
     NODE_LOG_EVERY_N_RATE_LIMIT_ERRORS,
     NODE_DISABLE_INFINITE_TTL_PROVIDER_CACHING,
+    NODE_PCT_RPC_CALLS_LOGGED,
   } = process.env;
 
   const timeout = Number(process.env[`NODE_TIMEOUT_${chainId}`] || NODE_TIMEOUT || defaultTimeout);
@@ -604,6 +647,10 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC
 
   const logEveryNRateLimitErrors = Number(NODE_LOG_EVERY_N_RATE_LIMIT_ERRORS || "100");
 
+  const pctRpcCallsLogged = Number(
+    process.env[`NODE_PCT_RPC_CALLS_LOGGED_${chainId}`] || NODE_PCT_RPC_CALLS_LOGGED || "0"
+  );
+
   // Custom delay + logging for RPC rate-limiting.
   let rateLimitLogCounter = 0;
   const rpcRateLimited =
@@ -651,6 +698,7 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC
     retryDelay,
     nodeMaxConcurrency,
     providerCacheNamespace,
+    pctRpcCallsLogged,
     redisClient,
     disableProviderCache ? undefined : standardTtlBlockDistance,
     disableNoTtlCaching ? undefined : noTtlBlockDistance
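
A minimal sketch of how the sampling above behaves, assuming pctRpcCallsLogged is a percentage in [0, 100] resolved from NODE_PCT_RPC_CALLS_LOGGED_<chainId>, then NODE_PCT_RPC_CALLS_LOGGED, then "0" (disabled). The helper name and the chain ID used below are illustrative only and not part of this diff:

// Illustrative only: shouldLogRpcSample is a hypothetical helper mirroring the
// check at the top of wrapSendWithLog; it is not introduced by this PR.
function shouldLogRpcSample(pctRpcCallsLogged: number): boolean {
  // 0 (the default) never samples; 100 logs every RPC call.
  return pctRpcCallsLogged > 0 && Math.random() <= pctRpcCallsLogged / 100;
}

// Example configuration (values are illustrative):
//   NODE_PCT_RPC_CALLS_LOGGED=1      -> sample roughly 1% of RPC calls on all chains
//   NODE_PCT_RPC_CALLS_LOGGED_10=5   -> per-chain override: roughly 5% on chainId 10

Sampled calls emit a debug log with the provider origin, method, params, success flag, and elapsed time in seconds; unsampled calls skip timing and logging entirely.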