diff --git a/package.json b/package.json
index d8fb23d01..8f14ef3b4 100644
--- a/package.json
+++ b/package.json
@@ -12,7 +12,7 @@
   "dependencies": {
     "@across-protocol/constants": "^3.1.19",
     "@across-protocol/contracts": "^3.0.16",
-    "@across-protocol/sdk": "^3.3.18",
+    "@across-protocol/sdk": "^3.3.21",
     "@arbitrum/sdk": "^4.0.2",
     "@consensys/linea-sdk": "^0.2.1",
     "@defi-wonderland/smock": "^2.3.5",
diff --git a/src/clients/InventoryClient.ts b/src/clients/InventoryClient.ts
index 1e25e19aa..ad42aad66 100644
--- a/src/clients/InventoryClient.ts
+++ b/src/clients/InventoryClient.ts
@@ -25,6 +25,7 @@ import {
   assert,
   compareAddressesSimple,
   getUsdcSymbol,
+  Profiler,
   getNativeTokenSymbol,
 } from "../utils";
 import { HubPoolClient, TokenClient, BundleDataClient } from ".";
@@ -59,6 +60,7 @@ export class InventoryClient {
   private readonly formatWei: ReturnType<typeof createFormatFunction>;
   private bundleRefundsPromise: Promise<CombinedRefunds[]> = undefined;
   private excessRunningBalancePromises: { [l1Token: string]: Promise<{ [chainId: number]: BigNumber }> } = {};
+  private profiler: InstanceType<typeof Profiler>;

   constructor(
     readonly relayer: string,
@@ -75,6 +77,10 @@
   ) {
     this.scalar = sdkUtils.fixedPointAdjustment;
     this.formatWei = createFormatFunction(2, 4, false, 18);
+    this.profiler = new Profiler({
+      logger: this.logger,
+      at: "InventoryClient",
+    });
   }

   /**
@@ -299,13 +305,15 @@
   async getBundleRefunds(l1Token: string): Promise<{ [chainId: string]: BigNumber }> {
     let refundsToConsider: CombinedRefunds[] = [];
+    let mark: ReturnType<typeof this.profiler.start>;

     // Increase virtual balance by pending relayer refunds from the latest valid bundle and the
     // upcoming bundle. We can assume that all refunds from the second latest valid bundle have already
     // been executed.
-    let startTimer: number;
     if (!isDefined(this.bundleRefundsPromise)) {
-      startTimer = performance.now();
       // @dev Save this as a promise so that other parallel calls to this function don't make the same call.
+      mark = this.profiler.start("bundleRefunds", {
+        l1Token,
+      });
       this.bundleRefundsPromise = this.getAllBundleRefunds();
     }
     refundsToConsider = lodash.cloneDeep(await this.bundleRefundsPromise);
@@ -327,12 +335,12 @@
       },
       {}
     );
-    if (startTimer) {
-      this.log(`Time taken to get bundle refunds: ${Math.round((performance.now() - startTimer) / 1000)}s`, {
-        l1Token,
-        totalRefundsPerChain,
-      });
-    }
+
+    mark?.stop({
+      message: "Time to calculate total refunds per chain",
+      l1Token,
+    });
+
     return totalRefundsPerChain;
   }

@@ -618,7 +626,8 @@
   ): Promise<{ [chainId: number]: BigNumber }> {
     const { root: latestPoolRebalanceRoot, blockRanges } = await this.bundleDataClient.getLatestPoolRebalanceRoot();
     const chainIds = this.hubPoolClient.configStoreClient.getChainIdIndicesForBlock();
-    const start = performance.now();
+
+    const mark = this.profiler.start("getLatestRunningBalances");
     const runningBalances = Object.fromEntries(
       await sdkUtils.mapAsync(chainsToEvaluate, async (chainId) => {
         const chainIdIndex = chainIds.indexOf(chainId);
@@ -674,13 +683,10 @@
         ];
       })
     );
-    this.log(
-      `Approximated latest (abs. val) running balance for ORU chains for token ${l1Token} in ${
-        Math.round(performance.now() - start) / 1000
-      }s`,
-      { runningBalances }
-    );
-
+    mark.stop({
+      message: "Time to get running balances",
+      runningBalances,
+    });
     return Object.fromEntries(Object.entries(runningBalances).map(([k, v]) => [k, v.absLatestRunningBalance]));
   }

diff --git a/src/clients/TokenClient.ts b/src/clients/TokenClient.ts
index f2d2b68e8..613535260 100644
--- a/src/clients/TokenClient.ts
+++ b/src/clients/TokenClient.ts
@@ -12,8 +12,8 @@ import {
   MAX_UINT_VAL,
   assign,
   blockExplorerLink,
-  getCurrentTime,
   getNetworkName,
+  Profiler,
   runTransaction,
   toBN,
   winston,
@@ -27,6 +27,7 @@ type TokenShortfallType = {
 };

 export class TokenClient {
+  private profiler: InstanceType<typeof Profiler>;
   tokenData: TokenDataType = {};
   tokenShortfall: TokenShortfallType = {};

@@ -35,7 +36,9 @@
     readonly relayerAddress: string,
     readonly spokePoolClients: { [chainId: number]: SpokePoolClient },
     readonly hubPoolClient: HubPoolClient
-  ) {}
+  ) {
+    this.profiler = new Profiler({ at: "TokenClient", logger });
+  }

   getAllTokenData(): TokenDataType {
     return this.tokenData;
@@ -238,7 +241,7 @@
   }

   async update(): Promise<void> {
-    const start = getCurrentTime();
+    const mark = this.profiler.start("update");
     this.logger.debug({ at: "TokenBalanceClient", message: "Updating TokenBalance client" });

     const { hubPoolClient } = this;
@@ -272,11 +275,7 @@
       })
     );

-    this.logger.debug({
-      at: "TokenBalanceClient",
-      message: `Updated TokenBalance client in ${getCurrentTime() - start} seconds.`,
-      balanceData,
-    });
+    mark.stop({ message: "Updated TokenBalance client.", balanceData });
   }

   async fetchTokenData(
diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts
index 9e2736b45..3352c5a30 100644
--- a/src/dataworker/DataworkerUtils.ts
+++ b/src/dataworker/DataworkerUtils.ts
@@ -24,6 +24,7 @@ import {
   getTimestampsForBundleEndBlocks,
   isDefined,
   MerkleTree,
+  Profiler,
   TOKEN_SYMBOLS_MAP,
   winston,
 } from "../utils";
@@ -350,7 +351,13 @@
     Buffer.from(tag).length <= ARWEAVE_TAG_BYTE_LIMIT,
     `Arweave tag cannot exceed ${ARWEAVE_TAG_BYTE_LIMIT} bytes`
   );
-  const startTime = performance.now();
+
+  const profiler = new Profiler({
+    logger,
+    at: "DataworkerUtils#persistDataToArweave",
+  });
+  const mark = profiler.start("persistDataToArweave");
+
   // Check if data already exists on Arweave with the given tag.
   // If so, we don't need to persist it again.
   const [matchingTxns, address, balance] = await Promise.all([
@@ -397,10 +404,8 @@
       balance: formatWinston(balance),
       notificationPath: "across-arweave",
     });
-    const endTime = performance.now();
-    logger.debug({
-      at: "Dataworker#index",
-      message: `Time to persist data to Arweave: ${endTime - startTime}ms`,
+    mark.stop({
+      message: "Time to persist to Arweave",
     });
   }
 }
diff --git a/src/dataworker/index.ts b/src/dataworker/index.ts
index 5458f6443..1c1bd646c 100644
--- a/src/dataworker/index.ts
+++ b/src/dataworker/index.ts
@@ -6,6 +6,7 @@ import {
   Signer,
   disconnectRedisClients,
   isDefined,
+  Profiler,
 } from "../utils";
 import { spokePoolClientsToProviders } from "../common";
 import { Dataworker } from "./Dataworker";
@@ -52,21 +53,29 @@
     dataworker,
   };
 }
+
 export async function runDataworker(_logger: winston.Logger, baseSigner: Signer): Promise<void> {
-  logger = _logger;
-  let loopStart = performance.now();
-  const { clients, config, dataworker } = await createDataworker(logger, baseSigner);
-  logger.debug({
+  const profiler = new Profiler({
     at: "Dataworker#index",
-    message: `Time to update non-spoke clients: ${(performance.now() - loopStart) / 1000}s`,
+    logger: _logger,
   });
-  loopStart = performance.now();
+  logger = _logger;
+
+  const { clients, config, dataworker } = await profiler.measureAsync(
+    createDataworker(logger, baseSigner),
+    "createDataworker",
+    {
+      message: "Time to update non-spoke clients",
+    }
+  );
+
   let proposedBundleData: BundleData | undefined = undefined;
   let poolRebalanceLeafExecutionCount = 0;

   try {
     logger[startupLogLevel(config)]({ at: "Dataworker#index", message: "Dataworker started 👩‍🔬", config });
     for (;;) {
+      profiler.mark("loopStart");
       // Determine the spoke client's lookback:
       // 1. We initiate the spoke client event search windows based on a start bundle's bundle block end numbers and
       //    how many bundles we want to look back from the start bundle blocks.
@@ -108,7 +117,7 @@ export async function runDataworker(_logger: winston.Logger, baseSigner: Signer)
         fromBlocks,
         toBlocks
       );
-      const dataworkerFunctionLoopTimerStart = performance.now();
+      profiler.mark("dataworkerFunctionLoopTimerStart");
       // Validate and dispute pending proposal before proposing a new one
       if (config.disputerEnabled) {
         await dataworker.validatePendingRootBundle(
@@ -191,19 +200,23 @@
       } else {
         await clients.multiCallerClient.executeTxnQueues();
       }
-
-      const dataworkerFunctionLoopTimerEnd = performance.now();
-      logger.debug({
-        at: "Dataworker#index",
-        message: `Time to update spoke pool clients and run dataworker function: ${Math.round(
-          (dataworkerFunctionLoopTimerEnd - loopStart) / 1000
-        )}s`,
-        timeToLoadSpokes: Math.round((dataworkerFunctionLoopTimerStart - loopStart) / 1000),
-        timeToRunDataworkerFunctions: Math.round(
-          (dataworkerFunctionLoopTimerEnd - dataworkerFunctionLoopTimerStart) / 1000
-        ),
+      profiler.mark("dataworkerFunctionLoopTimerEnd");
+      profiler.measure("timeToLoadSpokes", {
+        message: "Time to load spokes in data worker loop",
+        from: "loopStart",
+        to: "dataworkerFunctionLoopTimerStart",
+      });
+      profiler.measure("timeToRunDataworkerFunctions", {
+        message: "Time to run data worker functions in data worker loop",
+        from: "dataworkerFunctionLoopTimerStart",
+        to: "dataworkerFunctionLoopTimerEnd",
+      });
+      // Do we need to add an additional log for the sum of the previous two measurements?
+      profiler.measure("dataWorkerTotal", {
+        message: "Total time taken for dataworker loop",
+        from: "loopStart",
+        to: "dataworkerFunctionLoopTimerEnd",
       });
-      loopStart = performance.now();

       if (await processEndPollingLoop(logger, "Dataworker", config.pollingDelay)) {
         break;
diff --git a/src/finalizer/index.ts b/src/finalizer/index.ts
index 8b648dcf9..0e4f6fe3f 100644
--- a/src/finalizer/index.ts
+++ b/src/finalizer/index.ts
@@ -29,6 +29,7 @@ import {
   startupLogLevel,
   winston,
   CHAIN_IDs,
+  Profiler,
 } from "../utils";
 import { ChainFinalizer, CrossChainMessage } from "./types";
 import {
@@ -471,17 +472,23 @@ export class FinalizerConfig extends DataworkerConfig {
 export async function runFinalizer(_logger: winston.Logger, baseSigner: Signer): Promise<void> {
   logger = _logger;
+  // Same config as Dataworker for now.
   const config = new FinalizerConfig(process.env);
+  const profiler = new Profiler({
+    logger,
+    at: "Finalizer#index",
+    config,
+  });
   logger[startupLogLevel(config)]({ at: "Finalizer#index", message: "Finalizer started 🏋🏿‍♀️", config });

   const { commonClients, spokePoolClients } = await constructFinalizerClients(logger, config, baseSigner);

   try {
     for (;;) {
-      const loopStart = performance.now();
+      profiler.mark("loopStart");
       await updateSpokePoolClients(spokePoolClients, ["TokensBridged"]);
-      const loopStartPostSpokePoolUpdates = performance.now();
+      profiler.mark("loopStartPostSpokePoolUpdates");

       if (config.finalizerEnabled) {
         const availableChains = commonClients.configStoreClient
@@ -501,13 +508,25 @@
       } else {
         logger[startupLogLevel(config)]({ at: "Dataworker#index", message: "Finalizer disabled" });
       }
-      const loopEndPostFinalizations = performance.now();
-      logger.debug({
-        at: "Finalizer#index",
-        message: `Time to loop: ${Math.round((loopEndPostFinalizations - loopStart) / 1000)}s`,
-        timeToUpdateSpokeClients: Math.round((loopStartPostSpokePoolUpdates - loopStart) / 1000),
-        timeToFinalize: Math.round((loopEndPostFinalizations - loopStartPostSpokePoolUpdates) / 1000),
+      profiler.mark("loopEndPostFinalizations");
+
+      profiler.measure("timeToUpdateSpokeClients", {
+        from: "loopStart",
+        to: "loopStartPostSpokePoolUpdates",
+        strategy: config.finalizationStrategy,
+      });
+
+      profiler.measure("timeToFinalize", {
+        from: "loopStartPostSpokePoolUpdates",
+        to: "loopEndPostFinalizations",
+        strategy: config.finalizationStrategy,
+      });
+
+      profiler.measure("loopTime", {
+        message: "Time to loop",
+        from: "loopStart",
+        to: "loopEndPostFinalizations",
         strategy: config.finalizationStrategy,
       });
diff --git a/src/libexec/util/evm/util.ts b/src/libexec/util/evm/util.ts
index c19a86199..ff0272566 100644
--- a/src/libexec/util/evm/util.ts
+++ b/src/libexec/util/evm/util.ts
@@ -1,6 +1,6 @@
 import assert from "assert";
 import { Contract, EventFilter } from "ethers";
-import { getNetworkName, isDefined, paginatedEventQuery, winston } from "../../../utils";
+import { getNetworkName, isDefined, paginatedEventQuery, Profiler, winston } from "../../../utils";
 import { Log, ScraperOpts } from "../../types";

 /**
@@ -45,8 +45,12 @@ export async function scrapeEvents(
   spokePool: Contract,
   eventName: string,
   opts: ScraperOpts & { toBlock: number },
-  logger: winston.Logger
+  logger?: winston.Logger
 ): Promise<Log[]> {
+  const profiler = new Profiler({
+    logger,
+    at: "scrapeEvents",
+  });
   const { lookback, deploymentBlock, filterArgs, maxBlockRange, toBlock } = opts;
   const { chainId } = await spokePool.provider.getNetwork();
   const chain = getNetworkName(chainId);
@@ -55,13 +59,14 @@
   assert(toBlock > fromBlock, `${toBlock} > ${fromBlock}`);
   const searchConfig = { fromBlock, toBlock, maxBlockLookBack: maxBlockRange };

-  const tStart = performance.now();
+  const mark = profiler.start("paginatedEventQuery");
   const filter = getEventFilter(spokePool, eventName, filterArgs[eventName]);
   const events = await paginatedEventQuery(spokePool, filter, searchConfig);
-  const tStop = performance.now();
-  logger.debug({
-    at: "scrapeEvents",
-    message: `Scraped ${events.length} ${chain} ${eventName} events in ${Math.round((tStop - tStart) / 1000)} seconds`,
+  mark.stop({
+    message: `Scraped ${events.length} ${chain} ${eventName} events.`,
+    numEvents: events.length,
+    chain,
+    eventName,
     searchConfig,
   });

diff --git a/src/relayer/Relayer.ts b/src/relayer/Relayer.ts
index 08d2b67a2..fa2428dd9 100644
--- a/src/relayer/Relayer.ts
+++ b/src/relayer/Relayer.ts
@@ -20,6 +20,7 @@ import {
   fixedPointAdjustment,
   TransactionResponse,
   ZERO_ADDRESS,
+  Profiler,
 } from "../utils";
 import { RelayerClients } from "./RelayerClientHelper";
 import { RelayerConfig } from "./RelayerConfig";
@@ -45,7 +46,7 @@
   private pendingTxnReceipts: { [chainId: number]: Promise<TransactionResponse[]> } = {};
   private lastLogTime = 0;
   private lastMaintenance = 0;
-
+  private profiler: InstanceType<typeof Profiler>;
   private hubPoolBlockBuffer: number;
   protected fillLimits: { [originChainId: number]: { fromBlock: number; limit: BigNumber }[] };
   protected inventoryChainIds: number[];
@@ -69,7 +70,10 @@
         ];
       }
     });
-
+    this.profiler = new Profiler({
+      at: "Relayer",
+      logger: this.logger,
+    });
     this.relayerAddress = getAddress(relayerAddress);
     this.inventoryChainIds =
       this.config.pollingDelay === 0 ? Object.values(clients.spokePoolClients).map(({ chainId }) => chainId) : [];
@@ -1034,7 +1038,7 @@
     const originChain = getNetworkName(originChainId);
     const destinationChain = getNetworkName(destinationChainId);

-    const start = performance.now();
+    const mark = this.profiler.start("resolveRepaymentChain");
     const preferredChainIds = await inventoryClient.determineRefundChainId(deposit, hubPoolToken.address);
     if (preferredChainIds.length === 0) {
       // @dev If the origin chain is a lite chain and there are no preferred repayment chains, then we can assume
@@ -1058,14 +1062,16 @@
       };
     }

-    this.logger.debug({
-      at: "Relayer::resolveRepaymentChain",
+    mark.stop({
       message: `Determined eligible repayment chains ${JSON.stringify(
         preferredChainIds
-      )} for deposit ${depositId} from ${originChain} to ${destinationChain} in ${
-        Math.round(performance.now() - start) / 1000
-      }s.`,
+      )} for deposit ${depositId} from ${originChain} to ${destinationChain}.`,
+      preferredChainIds,
+      depositId,
+      originChain,
+      destinationChain,
     });
+
     const _repaymentFees = preferredChainIds.map((chainId) =>
       repaymentFees.find(({ paymentChainId }) => paymentChainId === chainId)
     );
diff --git a/src/relayer/index.ts b/src/relayer/index.ts
index a0187748e..a8e9afda7 100644
--- a/src/relayer/index.ts
+++ b/src/relayer/index.ts
@@ -3,9 +3,9 @@ import {
   config,
   delay,
   disconnectRedisClients,
-  getCurrentTime,
   getNetworkName,
   getRedisCache,
+  Profiler,
   Signer,
   winston,
 } from "../utils";
@@ -17,11 +17,12 @@
 let logger: winston.Logger;

 const ACTIVE_RELAYER_EXPIRY = 600; // 10 minutes.
 const { RUN_IDENTIFIER: runIdentifier, BOT_IDENTIFIER: botIdentifier = "across-relayer" } = process.env;
-const randomNumber = () => Math.floor(Math.random() * 1_000_000);

 export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): Promise<void> {
-  const relayerRun = randomNumber();
-  const startTime = getCurrentTime();
+  const profiler = new Profiler({
+    at: "Relayer#run",
+    logger: _logger,
+  });
   logger = _logger;
   const config = new RelayerConfig(process.env);
@@ -42,7 +43,8 @@
   // Explicitly don't log ignoredAddresses because it can be huge and can overwhelm log transports.
   const { ignoredAddresses: _ignoredConfig, ...loggedConfig } = config;
-  logger.debug({ at: "Relayer#run", message: "Relayer started 🏃‍♂️", loggedConfig, relayerRun });
+  logger.debug({ at: "Relayer#run", message: "Relayer started 🏃‍♂️", loggedConfig });
+  const mark = profiler.start("relayer");
   const relayerClients = await constructRelayerClients(logger, config, baseSigner);
   const relayer = new Relayer(await baseSigner.getAddress(), logger, relayerClients, config);
   await relayer.init();
@@ -56,8 +58,7 @@
     if (loop) {
       logger.debug({ at: "relayer#run", message: `Starting relayer execution loop ${run}.` });
     }
-
-    const tLoopStart = performance.now();
+    const tLoopStart = profiler.start("Relayer execution loop");
     const ready = await relayer.update();
     const activeRelayer = redis ? await redis.get(botIdentifier) : undefined;
@@ -65,7 +66,7 @@
     // If this instance can't update, throw an error (for now).
     if (!ready && activeRelayer) {
       if (run * pollingDelay < 120) {
-        const runTime = Math.round((performance.now() - tLoopStart) / 1000);
+        const runTime = Math.round((performance.now() - tLoopStart.startTime) / 1000);
         const delta = pollingDelay - runTime;
         logger.debug({ at: "Relayer#run", message: `Not ready to relay, waiting ${delta} seconds.` });
         await delay(delta);
@@ -100,11 +101,11 @@
     if (!loop) {
       stop = true;
     } else {
-      const runTime = Math.round((performance.now() - tLoopStart) / 1000);
-      logger.debug({
-        at: "Relayer#run",
-        message: `Completed relayer execution loop ${run} in ${runTime} seconds.`,
+      const runTimeMilliseconds = tLoopStart.stop({
+        message: "Completed relayer execution loop.",
+        loopCount: run,
       });
+      const runTime = Math.round(runTimeMilliseconds / 1000);

       if (!stop && runTime < pollingDelay) {
         const delta = pollingDelay - runTime;
@@ -136,6 +137,5 @@
     }
   }

-  const runtime = getCurrentTime() - startTime;
-  logger.debug({ at: "Relayer#index", message: `Completed relayer run ${relayerRun} in ${runtime} seconds.` });
+  mark.stop({ message: "Relayer instance completed." });
 }
diff --git a/src/utils/SDKUtils.ts b/src/utils/SDKUtils.ts
index f8ccb5287..9914e05bb 100644
--- a/src/utils/SDKUtils.ts
+++ b/src/utils/SDKUtils.ts
@@ -53,6 +53,7 @@ export const {
   getTokenInfo,
   getL1TokenInfo,
   getUsdcSymbol,
+  Profiler,
 } = sdk.utils;

 export const {
diff --git a/yarn.lock b/yarn.lock
index ff63b9d57..1b90d9793 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -16,6 +16,11 @@
   resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.19.tgz#3c29b52ec5f2eece93a6abd50d580668b03dd7b3"
   integrity sha512-XOFF+o64TDn57xNfUB38kWy8lYyE9lB7PBdyoMOadsXx00HC3KMznFi/paLRKT1iZ50vDwHp00tNZbr7Z7umzA==

+"@across-protocol/constants@^3.1.20":
+  version "3.1.20"
+  resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.20.tgz#305bd41f5644b7db5d9fd12a6a6b4bbbbe2fd016"
+  integrity sha512-B5RsvuOQsZdFgLk0WcFZGmoivm6g6gv95a+YKVBydcxZkNxAsyP065UQEDAmvRXvPhqGyehhd52515Xa/3bzyg==
+
 "@across-protocol/contracts@^0.1.4":
   version "0.1.4"
   resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-0.1.4.tgz#64b3d91e639d2bb120ea94ddef3d160967047fa5"
@@ -53,13 +58,13 @@
     yargs "^17.7.2"
     zksync-web3 "^0.14.3"

-"@across-protocol/sdk@^3.3.18":
-  version "3.3.18"
-  resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.18.tgz#d39ef359f9f639921fb412a1355167354014a80f"
-  integrity sha512-Ea40yDPL94T3uc6HhqDj8X7vovPSyOVSmA6Z3C1uZmdwRdDKt8hlg8k7yxIg+8aR5aEJJ7hCZy6bHdI5XHpbFQ==
+"@across-protocol/sdk@^3.3.21":
+  version "3.3.21"
+  resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.3.21.tgz#f223a0d88b09c5f2335723b89e777a36df5255ca"
+  integrity sha512-N/0H5KwPS+iyMh8m1QvIPBNJuPFhHlRW1841AzcmhXdzwRtbarmSWXaXNDifeWDWMf3Fie8TN2WnSW4oQKd1HQ==
   dependencies:
     "@across-protocol/across-token" "^1.0.0"
-    "@across-protocol/constants" "^3.1.19"
+    "@across-protocol/constants" "^3.1.20"
     "@across-protocol/contracts" "^3.0.16"
     "@eth-optimism/sdk" "^3.3.1"
     "@ethersproject/bignumber" "^5.7.0"
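For reviewers unfamiliar with the SDK `Profiler`, the sketch below illustrates the usage pattern this diff adopts: `start()` returns a handle whose `stop()` logs elapsed time plus metadata, while `mark()`/`measure()` record named points and log the span between two of them, and `measureAsync()` wraps a pending promise. Only the call surface (`start`, `stop`, `mark`, `measure`, `measureAsync`, `startTime`) is taken from the call sites in this diff; the `ProfilerSketch` class, the `Logger` type, the log field names, and the `example()` function are hypothetical stand-ins, not the actual `@across-protocol/sdk` implementation re-exported through `src/utils/SDKUtils.ts`.

```ts
import { performance } from "node:perf_hooks";

type Logger = { debug: (entry: Record<string, unknown>) => void };

// Hypothetical stand-in for the SDK Profiler; the real implementation ships with @across-protocol/sdk.
class ProfilerSketch {
  private readonly marks = new Map<string, number>();

  constructor(private readonly opts: { at: string; logger?: Logger }) {}

  // start() hands back a handle; stop() logs the elapsed time plus any metadata and returns milliseconds.
  start(label: string, detail: Record<string, unknown> = {}) {
    const startTime = performance.now();
    return {
      startTime,
      stop: (stopDetail: Record<string, unknown> = {}): number => {
        const elapsedMs = performance.now() - startTime;
        this.opts.logger?.debug({ at: this.opts.at, label, elapsedMs, ...detail, ...stopDetail });
        return elapsedMs;
      },
    };
  }

  // mark()/measure() mirror the web performance API: record named points, then log the span between two of them.
  mark(name: string): void {
    this.marks.set(name, performance.now());
  }

  measure(name: string, detail: { from: string; to: string } & Record<string, unknown>): void {
    const { from, to, ...rest } = detail;
    const start = this.marks.get(from);
    const end = this.marks.get(to);
    if (start === undefined || end === undefined) {
      return; // One of the marks was never recorded; nothing to measure.
    }
    this.opts.logger?.debug({ at: this.opts.at, measure: name, elapsedMs: end - start, ...rest });
  }

  // measureAsync() times a pending promise and passes through its resolved value.
  async measureAsync<T>(pending: Promise<T>, label: string, detail: Record<string, unknown> = {}): Promise<T> {
    const handle = this.start(label, detail);
    try {
      return await pending;
    } finally {
      handle.stop();
    }
  }
}

// Usage mirroring the call sites touched by this diff.
async function example(logger: Logger): Promise<void> {
  const profiler = new ProfilerSketch({ at: "Example", logger });

  const mark = profiler.start("bundleRefunds", { l1Token: "0x1234" });
  await new Promise((resolve) => setTimeout(resolve, 25)); // stand-in for real work
  mark.stop({ message: "Time to calculate total refunds per chain" });

  profiler.mark("loopStart");
  await new Promise((resolve) => setTimeout(resolve, 25));
  profiler.mark("loopEnd");
  profiler.measure("loopTime", { message: "Time to loop", from: "loopStart", to: "loopEnd" });
}

void example({ debug: (entry) => console.log(entry) });
```

The appeal of the mark/measure form shows up in `src/dataworker/index.ts` and `src/finalizer/index.ts`: a single mark can anchor several overlapping measurements (spoke loading, dataworker functions, and the loop total) without juggling separate timer variables, and the structured fields replace string-interpolated durations in log messages.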