diff --git a/package.json b/package.json index c0a641715..37923b31b 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,7 @@ }, "dependencies": { "@across-protocol/contracts-v2": "2.4.3", - "@across-protocol/sdk-v2": "0.15.24", + "@across-protocol/sdk-v2": "0.16.4", "@arbitrum/sdk": "^3.1.3", "@defi-wonderland/smock": "^2.3.5", "@eth-optimism/sdk": "^3.1.0", diff --git a/src/clients/HubPoolClient.ts b/src/clients/HubPoolClient.ts index 5f54dd81c..bd5df59b3 100644 --- a/src/clients/HubPoolClient.ts +++ b/src/clients/HubPoolClient.ts @@ -1,8 +1,9 @@ import { clients } from "@across-protocol/sdk-v2"; -import { Contract } from "ethers"; +import { BigNumber, Contract } from "ethers"; import winston from "winston"; import { MakeOptional, EventSearchConfig } from "../utils"; import { IGNORED_HUB_EXECUTED_BUNDLES, IGNORED_HUB_PROPOSED_BUNDLES } from "../common"; +import { DepositWithBlock } from "../interfaces"; export class HubPoolClient extends clients.HubPoolClient { constructor( @@ -18,4 +19,20 @@ export class HubPoolClient extends clients.HubPoolClient { ignoredHubProposedBundles: IGNORED_HUB_PROPOSED_BUNDLES, }); } + + async computeRealizedLpFeePct( + deposit: Pick< + DepositWithBlock, + "quoteTimestamp" | "amount" | "destinationChainId" | "originChainId" | "blockNumber" + >, + l1Token: string + ): Promise<{ realizedLpFeePct: BigNumber | undefined; quoteBlock: number }> { + if (deposit.quoteTimestamp > this.currentTime) { + throw new Error( + `Cannot compute lp fee percent for quote timestamp ${deposit.quoteTimestamp} in the future. Current time: ${this.currentTime}.` + ); + } + + return await super.computeRealizedLpFeePct(deposit, l1Token); + } } diff --git a/src/clients/SpokePoolClient.ts b/src/clients/SpokePoolClient.ts index 09c2c796e..ea662d36d 100644 --- a/src/clients/SpokePoolClient.ts +++ b/src/clients/SpokePoolClient.ts @@ -1,3 +1,13 @@ import { clients } from "@across-protocol/sdk-v2"; +import { FundsDepositedEvent } from "../interfaces"; +import { isDefined } from "../utils/TypeGuards"; -export class SpokePoolClient extends clients.SpokePoolClient {} +export class SpokePoolClient extends clients.SpokePoolClient { + _isEarlyDeposit(depositEvent: FundsDepositedEvent, currentTime: number): boolean { + const hubCurrentTime = this.hubPoolClient?.currentTime; + if (!isDefined(hubCurrentTime)) { + throw new Error("HubPoolClient's currentTime is not defined"); + } + return depositEvent.args.quoteTimestamp > currentTime || depositEvent.args.quoteTimestamp > hubCurrentTime; + } +} diff --git a/src/clients/TokenClient.ts b/src/clients/TokenClient.ts index 7fc5b208c..ffaec0934 100644 --- a/src/clients/TokenClient.ts +++ b/src/clients/TokenClient.ts @@ -93,12 +93,12 @@ export class TokenClient { // requirement to send all seen relays and the total remaining balance of the relayer. 
getTokenShortfall(): { [chainId: number]: { - [token: string]: { balance: BigNumber; needed: BigNumber; shortfall: BigNumber; deposits: BigNumber }; + [token: string]: { balance: BigNumber; needed: BigNumber; shortfall: BigNumber; deposits: number[] }; }; } { const tokenShortfall: { [chainId: number]: { - [token: string]: { balance: BigNumber; needed: BigNumber; shortfall: BigNumber; deposits: BigNumber }; + [token: string]: { balance: BigNumber; needed: BigNumber; shortfall: BigNumber; deposits: number[] }; }; } = {}; Object.entries(this.tokenShortfall).forEach(([_chainId, tokenMap]) => { diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts index c8edf5183..494b6d372 100644 --- a/src/dataworker/Dataworker.ts +++ b/src/dataworker/Dataworker.ts @@ -28,7 +28,6 @@ import { RunningBalances, PoolRebalanceLeaf, RelayerRefundLeaf, - BigNumberForToken, } from "../interfaces"; import { DataworkerClients } from "./DataworkerClientHelper"; import { SpokePoolClient, UBAClient, BalanceAllocator } from "../clients"; @@ -92,7 +91,6 @@ export class Dataworker { readonly chainIdListForBundleEvaluationBlockNumbers: number[], readonly maxRefundCountOverride: number | undefined, readonly maxL1TokenCountOverride: number | undefined, - readonly tokenTransferThreshold: BigNumberForToken = {}, readonly blockRangeEndBlockBuffer: { [chainId: number]: number } = {}, readonly spokeRootsLookbackCount = 0, readonly bufferToPropose = 0, @@ -102,7 +100,6 @@ export class Dataworker { if ( maxRefundCountOverride !== undefined || maxL1TokenCountOverride !== undefined || - Object.keys(tokenTransferThreshold).length > 0 || Object.keys(blockRangeEndBlockBuffer).length > 0 ) { this.logger.debug({ @@ -111,7 +108,6 @@ export class Dataworker { chainIdListForBundleEvaluationBlockNumbers, maxRefundCountOverride: this.maxRefundCountOverride, maxL1TokenCountOverride: this.maxL1TokenCountOverride, - tokenTransferThreshold: this.tokenTransferThreshold, blockRangeEndBlockBuffer: this.blockRangeEndBlockBuffer, }); } @@ -157,8 +153,7 @@ export class Dataworker { poolRebalanceLeaves, runningBalances, this.clients, - maxRefundCount, - this.tokenTransferThreshold + maxRefundCount ); } @@ -531,8 +526,7 @@ export class Dataworker { this.clients, this.maxRefundCountOverride ? this.maxRefundCountOverride - : this.clients.configStoreClient.getMaxRefundCountForRelayerRefundLeafForBlock(mainnetBundleEndBlock), - this.tokenTransferThreshold + : this.clients.configStoreClient.getMaxRefundCountForRelayerRefundLeafForBlock(mainnetBundleEndBlock) ); const slowRelayRoot = _buildSlowRelayRoot(unfilledDeposits); @@ -729,7 +723,6 @@ export class Dataworker { poolRebalanceLeafData.bundleLpFees, this.clients.configStoreClient, this.maxL1TokenCountOverride, - this.tokenTransferThreshold, poolRebalanceLeafData.incentivePoolBalances, poolRebalanceLeafData.netSendAmounts, true @@ -804,7 +797,6 @@ export class Dataworker { this.maxRefundCountOverride ? 
this.maxRefundCountOverride : this.clients.configStoreClient.getMaxRefundCountForRelayerRefundLeafForBlock(mainnetBundleEndBlock), - this.tokenTransferThreshold, true // Instruct function to always set amountToReturn = -netSendAmount iff netSendAmount < 0 ); return relayerRefundRoot; @@ -1138,12 +1130,26 @@ export class Dataworker { reason: "out-of-date-config-store-version", }; } + let rootBundleData: ProposeRootBundleReturnType; const mainnetBundleStartBlock = getBlockRangeForChain( blockRangesImpliedByBundleEndBlocks, hubPoolChainId, this.chainIdListForBundleEvaluationBlockNumbers )[0]; + + // Check if we have the right code to validate a bundle for the given block ranges. + const versionAtProposalBlock = + this.clients.configStoreClient.getConfigStoreVersionForBlock(mainnetBundleStartBlock); + + // Bundles that need to be validated with older code should emit helpful error logs about which code to run. + // @dev only throw this error if the hub chain ID is 1, suggesting we're running on production. + if (versionAtProposalBlock <= sdk.constants.TRANSFER_THRESHOLD_MAX_CONFIG_STORE_VERSION && hubPoolChainId === 1) { + throw new Error( + "Must use relayer-v2 code at commit 412ddc30af72c2ac78f9e4c8dccfccfd0eb478ab to validate a bundle with transferThreshold set" + ); + } + let isUBA = false; if ( sdk.clients.isUBAActivatedAtBlock( @@ -2248,7 +2254,6 @@ export class Dataworker { spokePoolClients, this.chainIdListForBundleEvaluationBlockNumbers, this.maxL1TokenCountOverride, - this.tokenTransferThreshold, logSlowFillExcessData ? this.logger : undefined ); } diff --git a/src/dataworker/DataworkerConfig.ts b/src/dataworker/DataworkerConfig.ts index 5deb39e8c..2c7719d57 100644 --- a/src/dataworker/DataworkerConfig.ts +++ b/src/dataworker/DataworkerConfig.ts @@ -4,7 +4,6 @@ import { BigNumber, assert, toBNWei } from "../utils"; export class DataworkerConfig extends CommonConfig { readonly maxPoolRebalanceLeafSizeOverride: number; readonly maxRelayerRepaymentLeafSizeOverride: number; - readonly tokenTransferThresholdOverride: { [l1TokenAddress: string]: BigNumber }; readonly rootBundleExecutionThreshold: BigNumber; readonly spokeRootsLookbackCount: number; // Consider making this configurable per chain ID. @@ -42,7 +41,6 @@ export class DataworkerConfig extends CommonConfig { constructor(env: ProcessEnv) { const { ROOT_BUNDLE_EXECUTION_THRESHOLD, - TOKEN_TRANSFER_THRESHOLD_OVERRIDE, MAX_POOL_REBALANCE_LEAF_SIZE_OVERRIDE, MAX_RELAYER_REPAYMENT_LEAF_SIZE_OVERRIDE, DISPUTER_ENABLED, @@ -76,9 +74,6 @@ export class DataworkerConfig extends CommonConfig { if (this.maxRelayerRepaymentLeafSizeOverride !== undefined) { assert(this.maxRelayerRepaymentLeafSizeOverride > 0, "Max leaf count set to 0"); } - this.tokenTransferThresholdOverride = TOKEN_TRANSFER_THRESHOLD_OVERRIDE - ? JSON.parse(TOKEN_TRANSFER_THRESHOLD_OVERRIDE) - : {}; this.rootBundleExecutionThreshold = ROOT_BUNDLE_EXECUTION_THRESHOLD ? 
toBNWei(ROOT_BUNDLE_EXECUTION_THRESHOLD) : toBNWei("500000"); diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts index 885b19b49..9bb41a509 100644 --- a/src/dataworker/DataworkerUtils.ts +++ b/src/dataworker/DataworkerUtils.ts @@ -3,7 +3,6 @@ import { SpokePoolClient } from "../clients"; import { spokesThatHoldEthAndWeth } from "../common/Constants"; import { CONTRACT_ADDRESSES } from "../common/ContractAddresses"; import { - BigNumberForToken, DepositWithBlock, FillsToRefund, FillWithBlock, @@ -192,7 +191,6 @@ export function _buildRelayerRefundRoot( runningBalances: RunningBalances, clients: DataworkerClients, maxRefundCount: number, - tokenTransferThresholdOverrides: BigNumberForToken, isUBA = false ): { leaves: RelayerRefundLeaf[]; @@ -226,9 +224,6 @@ export function _buildRelayerRefundRoot( l2TokenAddress, endBlockForMainnet ); - const transferThreshold = - tokenTransferThresholdOverrides[l1TokenCounterpart] || - clients.configStoreClient.getTokenTransferThresholdForBlock(l1TokenCounterpart, endBlockForMainnet); const spokePoolTargetBalance = clients.configStoreClient.getSpokeTargetBalancesForBlock( l1TokenCounterpart, @@ -238,7 +233,6 @@ export function _buildRelayerRefundRoot( // The `amountToReturn` for a { repaymentChainId, L2TokenAddress} should be set to max(-netSendAmount, 0). amountToReturn = getAmountToReturnForRelayerRefundLeaf( - transferThreshold, spokePoolTargetBalance, runningBalances[repaymentChainId][l1TokenCounterpart] ); @@ -285,10 +279,6 @@ export function _buildRelayerRefundRoot( // If UBA model we don't need to do the following to figure out the amount to return: let amountToReturn = netSendAmount.mul(-1); if (!isUBA) { - const transferThreshold = - tokenTransferThresholdOverrides[leaf.l1Tokens[index]] || - clients.configStoreClient.getTokenTransferThresholdForBlock(leaf.l1Tokens[index], endBlockForMainnet); - const spokePoolTargetBalance = clients.configStoreClient.getSpokeTargetBalancesForBlock( leaf.l1Tokens[index], leaf.chainId, @@ -296,7 +286,6 @@ export function _buildRelayerRefundRoot( ); amountToReturn = getAmountToReturnForRelayerRefundLeaf( - transferThreshold, spokePoolTargetBalance, runningBalances[leaf.chainId][leaf.l1Tokens[index]] ); @@ -334,7 +323,6 @@ export async function _buildPoolRebalanceRoot( spokePoolClients: SpokePoolClientsByChain, chainIdListForBundleEvaluationBlockNumbers: number[], maxL1TokenCountOverride: number | undefined, - tokenTransferThreshold: BigNumberForToken, logger?: winston.Logger ): Promise { // Running balances are the amount of tokens that we need to send to each SpokePool to pay for all instant and @@ -369,6 +357,7 @@ export async function _buildPoolRebalanceRoot( allValidFills, allValidFillsInRange ); + if (logger && Object.keys(fillsTriggeringExcesses).length > 0) { logger.debug({ at: "Dataworker#DataworkerUtils", @@ -407,8 +396,7 @@ export async function _buildPoolRebalanceRoot( runningBalances, realizedLpFees, clients.configStoreClient, - maxL1TokenCountOverride, - tokenTransferThreshold + maxL1TokenCountOverride ); return { diff --git a/src/dataworker/PoolRebalanceUtils.ts b/src/dataworker/PoolRebalanceUtils.ts index 1cbb994fe..fe911a8a6 100644 --- a/src/dataworker/PoolRebalanceUtils.ts +++ b/src/dataworker/PoolRebalanceUtils.ts @@ -3,7 +3,6 @@ import { ConfigStoreClient, HubPoolClient, SpokePoolClient } from "../clients"; import { Clients } from "../common"; import * as interfaces from "../interfaces"; import { - BigNumberForToken, PendingRootBundle, PoolRebalanceLeaf, 
RelayerRefundLeaf, @@ -300,7 +299,6 @@ export function constructPoolRebalanceLeaves( realizedLpFees: interfaces.RunningBalances, configStoreClient: ConfigStoreClient, maxL1TokenCount?: number, - tokenTransferThreshold?: BigNumberForToken, incentivePoolBalances?: interfaces.RunningBalances, netSendAmounts?: interfaces.RunningBalances, ubaMode = false @@ -326,12 +324,6 @@ export function constructPoolRebalanceLeaves( for (let i = 0; i < sortedL1Tokens.length; i += maxL1TokensPerLeaf) { const l1TokensToIncludeInThisLeaf = sortedL1Tokens.slice(i, i + maxL1TokensPerLeaf); - const transferThresholds = l1TokensToIncludeInThisLeaf.map( - (l1Token) => - tokenTransferThreshold[l1Token] || - configStoreClient.getTokenTransferThresholdForBlock(l1Token, latestMainnetBlock) - ); - const spokeTargetBalances = l1TokensToIncludeInThisLeaf.map((l1Token) => configStoreClient.getSpokeTargetBalancesForBlock(l1Token, Number(chainId), latestMainnetBlock) ); @@ -349,11 +341,7 @@ export function constructPoolRebalanceLeaves( if (ubaMode && netSendAmounts?.[chainId] && netSendAmounts[chainId][l1Token]) { return netSendAmounts[chainId][l1Token]; } else if (runningBalances[chainId] && runningBalances[chainId][l1Token]) { - return getNetSendAmountForL1Token( - transferThresholds[index], - spokeTargetBalances[index], - runningBalances[chainId][l1Token] - ); + return getNetSendAmountForL1Token(spokeTargetBalances[index], runningBalances[chainId][l1Token]); } else { return toBN(0); } @@ -366,11 +354,7 @@ export function constructPoolRebalanceLeaves( if (ubaMode) { return runningBalances[chainId][l1Token]; } else { - return getRunningBalanceForL1Token( - transferThresholds[index], - spokeTargetBalances[index], - runningBalances[chainId][l1Token] - ); + return getRunningBalanceForL1Token(spokeTargetBalances[index], runningBalances[chainId][l1Token]); } } else { return toBN(0); @@ -438,21 +422,18 @@ export function computeDesiredTransferAmountToSpoke( // 0, indicating that we do not want the data worker to trigger a token transfer between hub pool and spoke // pool when executing this leaf. export function getNetSendAmountForL1Token( - transferThreshold: BigNumber, spokePoolTargetBalance: SpokePoolTargetBalance, runningBalance: BigNumber ): BigNumber { - const desiredTransferAmount = computeDesiredTransferAmountToSpoke(runningBalance, spokePoolTargetBalance); - return desiredTransferAmount.abs().gte(transferThreshold) ? desiredTransferAmount : toBN(0); + return computeDesiredTransferAmountToSpoke(runningBalance, spokePoolTargetBalance); } export function getRunningBalanceForL1Token( - transferThreshold: BigNumber, spokePoolTargetBalance: SpokePoolTargetBalance, runningBalance: BigNumber ): BigNumber { const desiredTransferAmount = computeDesiredTransferAmountToSpoke(runningBalance, spokePoolTargetBalance); - return desiredTransferAmount.abs().lt(transferThreshold) ? 
runningBalance : runningBalance.sub(desiredTransferAmount); + return runningBalance.sub(desiredTransferAmount); } // This returns a possible next block range that could be submitted as a new root bundle, or used as a reference diff --git a/src/dataworker/RelayerRefundUtils.ts b/src/dataworker/RelayerRefundUtils.ts index 3c4904f61..35952c535 100644 --- a/src/dataworker/RelayerRefundUtils.ts +++ b/src/dataworker/RelayerRefundUtils.ts @@ -3,15 +3,10 @@ import { BigNumber, compareAddresses, toBN } from "../utils"; import { getNetSendAmountForL1Token } from "./PoolRebalanceUtils"; export function getAmountToReturnForRelayerRefundLeaf( - transferThreshold: BigNumber, spokePoolTargetBalance: SpokePoolTargetBalance, runningBalanceForLeaf: BigNumber ): BigNumber { - const netSendAmountForLeaf = getNetSendAmountForL1Token( - transferThreshold, - spokePoolTargetBalance, - runningBalanceForLeaf - ); + const netSendAmountForLeaf = getNetSendAmountForL1Token(spokePoolTargetBalance, runningBalanceForLeaf); return netSendAmountForLeaf.mul(toBN(-1)).gt(toBN(0)) ? netSendAmountForLeaf.mul(toBN(-1)) : toBN(0); } diff --git a/src/dataworker/index.ts b/src/dataworker/index.ts index affea84ab..99e369a3d 100644 --- a/src/dataworker/index.ts +++ b/src/dataworker/index.ts @@ -32,7 +32,6 @@ export async function createDataworker( clients.configStoreClient.getChainIdIndicesForBlock(), config.maxRelayerRepaymentLeafSizeOverride, config.maxPoolRebalanceLeafSizeOverride, - config.tokenTransferThresholdOverride, config.blockRangeEndBlockBuffer, config.spokeRootsLookbackCount, config.bufferToPropose, diff --git a/src/interfaces/index.ts b/src/interfaces/index.ts index d357b4f22..d71b29415 100644 --- a/src/interfaces/index.ts +++ b/src/interfaces/index.ts @@ -15,7 +15,6 @@ export type BigNumberForToken = interfaces.BigNumberForToken; // ConfigStore interfaces export type ParsedTokenConfig = interfaces.ParsedTokenConfig; -export type L1TokenTransferThreshold = interfaces.L1TokenTransferThreshold; export type SpokePoolTargetBalance = interfaces.SpokePoolTargetBalance; export type SpokeTargetBalanceUpdate = interfaces.SpokeTargetBalanceUpdate; export type RouteRateModelUpdate = interfaces.RouteRateModelUpdate; diff --git a/test/ConfigStoreClient.ts b/test/ConfigStoreClient.ts index 3e9275d04..fa76472c6 100644 --- a/test/ConfigStoreClient.ts +++ b/test/ConfigStoreClient.ts @@ -3,7 +3,6 @@ import { constants } from "@across-protocol/sdk-v2"; import { GLOBAL_CONFIG_STORE_KEYS } from "../src/clients"; import { SpokePoolTargetBalance } from "../src/interfaces"; import { - DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, destinationChainId, @@ -57,7 +56,6 @@ const sampleSpokeTargetBalances = { const tokenConfigToUpdate = JSON.stringify({ rateModel: sampleRateModel, routeRateModel: { "999-888": sampleRateModel2 }, - transferThreshold: DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD.toString(), spokeTargetBalances: sampleSpokeTargetBalances, }); @@ -169,7 +167,6 @@ describe("AcrossConfigStoreClient", async function () { // If ConfigStore has no events, stores nothing. 
await configStoreClient.update(); expect(configStoreClient.cumulativeRateModelUpdates.length).to.equal(0); - expect(configStoreClient.cumulativeTokenTransferUpdates.length).to.equal(0); expect(configStoreClient.cumulativeMaxL1TokenCountUpdates.length).to.equal(0); expect(configStoreClient.cumulativeMaxRefundCountUpdates.length).to.equal(0); @@ -178,18 +175,13 @@ describe("AcrossConfigStoreClient", async function () { await configStoreClient.update(); expect(configStoreClient.cumulativeRateModelUpdates.length).to.equal(1); - expect(configStoreClient.cumulativeTokenTransferUpdates.length).to.equal(1); // Update ignores TokenConfig events that don't include a rate model: await configStore.updateTokenConfig(l1Token.address, "gibberish"); - await configStore.updateTokenConfig( - l1Token.address, - JSON.stringify({ transferThreshold: DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD }) - ); + await configStore.updateTokenConfig(l1Token.address, ""); await configStoreClient.update(); expect(configStoreClient.cumulativeRateModelUpdates.length).to.equal(1); expect(configStoreClient.cumulativeRouteRateModelUpdates.length).to.equal(1); - expect(configStoreClient.cumulativeTokenTransferUpdates.length).to.equal(1); // Add GlobalConfig events and check that updating pulls in events await configStore.updateGlobalConfig( @@ -242,24 +234,6 @@ describe("AcrossConfigStoreClient", async function () { ).to.throw(/No updated rate model events for L1 token/); }); - it("Get token transfer threshold for block", async function () { - await configStore.updateTokenConfig(l1Token.address, tokenConfigToUpdate); - await configStoreClient.update(); - const initialUpdate = (await configStore.queryFilter(configStore.filters.UpdatedTokenConfig()))[0]; - expect(configStoreClient.getTokenTransferThresholdForBlock(l1Token.address, initialUpdate.blockNumber)).to.equal( - DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD - ); - // Block number when there is no config - expect(() => - configStoreClient.getTokenTransferThresholdForBlock(l1Token.address, initialUpdate.blockNumber - 1) - ).to.throw(/Could not find TransferThreshold/); - - // L1 token where there is no config - expect(() => - configStoreClient.getTokenTransferThresholdForBlock(l2Token.address, initialUpdate.blockNumber) - ).to.throw(/Could not find TransferThreshold/); - }); - // @note: expect(...)to.deep.equals() coerces BigNumbers incorrectly and fails. Why? 
it("Get spoke pool balance threshold for block", async function () { await configStore.updateTokenConfig(l1Token.address, tokenConfigToUpdate); diff --git a/test/Dataworker.blockRangeUtils.ts b/test/Dataworker.blockRangeUtils.ts index be48af100..7053df6e3 100644 --- a/test/Dataworker.blockRangeUtils.ts +++ b/test/Dataworker.blockRangeUtils.ts @@ -5,7 +5,7 @@ import { setupDataworker } from "./fixtures/Dataworker.Fixture"; import { DataworkerClients } from "../src/dataworker/DataworkerClientHelper"; import { HubPoolClient, SpokePoolClient } from "../src/clients"; import { getWidestPossibleExpectedBlockRange } from "../src/dataworker/PoolRebalanceUtils"; -import { originChainId, toBN } from "./constants"; +import { originChainId } from "./constants"; import { blockRangesAreInvalidForSpokeClients, getEndBlockBuffers } from "../src/dataworker/DataworkerUtils"; import { getDeployedBlockNumber } from "@across-protocol/contracts-v2"; import { MockHubPoolClient } from "./mocks"; @@ -17,13 +17,7 @@ let updateAllClients: () => Promise; describe("Dataworker block range-related utility methods", async function () { beforeEach(async function () { - ({ dataworkerClients, spokePoolClients, updateAllClients, hubPoolClient } = await setupDataworker( - ethers, - 1, - 1, - toBN(0), - 0 - )); + ({ dataworkerClients, spokePoolClients, updateAllClients, hubPoolClient } = await setupDataworker(ethers, 1, 1, 0)); await updateAllClients(); }); it("DataworkerUtils.getEndBlockBuffers", async function () { diff --git a/test/Dataworker.buildRoots.ts b/test/Dataworker.buildRoots.ts index 10046fee4..8281f6d0c 100644 --- a/test/Dataworker.buildRoots.ts +++ b/test/Dataworker.buildRoots.ts @@ -202,6 +202,25 @@ describe("Dataworker: Build merkle roots", async function () { }); describe("Build relayer refund root", function () { it("amountToReturn is 0", async function () { + // Set spoke target balance thresholds above deposit amounts so that amountToReturn is always 0. + await configStore.updateTokenConfig( + l1Token_1.address, + JSON.stringify({ + rateModel: sampleRateModel, + spokeTargetBalances: { + [originChainId]: { + // Threshold above the deposit amount. + threshold: amountToDeposit.mul(10).toString(), + target: amountToDeposit.div(2).toString(), + }, + [destinationChainId]: { + // Threshold above the deposit amount. + threshold: amountToDeposit.mul(10).toString(), + target: amountToDeposit.div(2).toString(), + }, + }, + }) + ); await updateAllClients(); const poolRebalanceRoot = await dataworkerInstance.buildPoolRebalanceRoot( getDefaultBlockRange(0), @@ -411,17 +430,6 @@ describe("Dataworker: Build merkle roots", async function () { ]); await updateAllClients(); - // Since amountToReturn is dependent on netSendAmount in pool rebalance leaf for same chain and token, - // let's fetch it. We'll move the token transfer threshold lower to make sure netSendAmount is negative. - await configStore.updateTokenConfig( - l1Token_1.address, - JSON.stringify({ - rateModel: sampleRateModel, - transferThreshold: toBNWei("0.0001").toString(), - }) - ); - await updateAllClients(); - // Since there was 1 unfilled deposit, there should be 1 relayer refund root for the deposit origin chain // where amountToReturn = -netSendAmount. 
const leaf1 = { @@ -866,8 +874,14 @@ describe("Dataworker: Build merkle roots", async function () { destinationChainId, amountToDeposit ); - const fill1 = await buildFillForRepaymentChain(spokePool_2, relayer, deposit1, 0.5, destinationChainId); - const fill2 = await buildFillForRepaymentChain(spokePool_2, relayer, deposit1, 0.25, destinationChainId); + await buildFillForRepaymentChain(spokePool_2, relayer, deposit1, 0.5, destinationChainId); + const lastFillBeforeSlowFill = await buildFillForRepaymentChain( + spokePool_2, + relayer, + deposit1, + 0.25, + destinationChainId + ); const fill2Block = await spokePool_2.provider.getBlockNumber(); // Produce bundle and execute pool leaves. Should produce a slow fill. Don't execute it. @@ -912,7 +926,7 @@ describe("Dataworker: Build merkle roots", async function () { ); // Send a third partial fill, this will produce an excess since a slow fill is already in flight for the deposit. - const fill3 = await buildFillForRepaymentChain(spokePool_2, relayer, deposit1, 0.25, destinationChainId); + await buildFillForRepaymentChain(spokePool_2, relayer, deposit1, 0.25, destinationChainId); await updateAllClients(); await destinationChainSpokePoolClient.update(); expect(destinationChainSpokePoolClient.getFills().length).to.equal(1); @@ -920,23 +934,18 @@ describe("Dataworker: Build merkle roots", async function () { fill2Block + 1, await spokePool_2.provider.getBlockNumber(), ]); - const merkleRoot2 = await dataworkerInstance.buildPoolRebalanceRoot(blockRange2, { + await dataworkerInstance.buildPoolRebalanceRoot(blockRange2, { ...spokePoolClients, [destinationChainId]: destinationChainSpokePoolClient, }); - const l1TokenForFill = merkleRoot2.leaves[0].l1Tokens[0]; - - // New running balance should be fill1 + fill2 + fill3 + slowFillAmount - excess - // excess should be the amount remaining after fill2. Since the slow fill was never - // executed, the excess should be equal to the slow fill amount so they should cancel out. - const expectedExcess = getRefund(fill2.amount.sub(fill2.totalFilledAmount), fill2.realizedLpFeePct); - expect(merkleRoot2.runningBalances[destinationChainId][l1TokenForFill]).to.equal( - getRefundForFills([fill1, fill2, fill3]) - ); - expect(lastSpyLogIncludes(spy, "Fills triggering excess returns from L2")).to.be.true; + const expectedExcess = getRefund( + lastFillBeforeSlowFill.amount.sub(lastFillBeforeSlowFill.totalFilledAmount), + lastFillBeforeSlowFill.realizedLpFeePct + ); expect( - spy.getCall(-1).lastArg.fillsTriggeringExcesses[destinationChainId][fill2.destinationToken][0].excess + spy.getCall(-1).lastArg.fillsTriggeringExcesses[destinationChainId][lastFillBeforeSlowFill.destinationToken][0] + .excess ).to.equal(expectedExcess.toString()); }); it("Many L1 tokens, testing leaf order and root construction", async function () { @@ -955,15 +964,6 @@ describe("Dataworker: Build merkle roots", async function () { amountToDeposit.mul(toBN(100)) ); - // Set a very high transfer threshold so leaves are not split - await configStore.updateTokenConfig( - l1Token.address, - JSON.stringify({ - rateModel: sampleRateModel, - transferThreshold: toBNWei("1000000").toString(), - }) - ); - await updateAllClients(); // Update client to be aware of new token mapping so we can build deposit correctly. 
const deposit = await buildDeposit( hubPoolClient, @@ -1002,9 +1002,8 @@ describe("Dataworker: Build merkle roots", async function () { return [ sortedL1Tokens.slice(0, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF), [sortedL1Tokens[sortedL1Tokens.length - 1]], - ].map((l1TokensToIncludeInLeaf: string[], i) => { + ].map((l1TokensToIncludeInLeaf: string[]) => { return { - groupIndex: i, chainId, // Realized LP fees are 0 for origin chain since no fill was submitted to it, only deposits. bundleLpFees: @@ -1012,13 +1011,13 @@ describe("Dataworker: Build merkle roots", async function () { ? Array(l1TokensToIncludeInLeaf.length).fill(toBN(0)) : l1TokensToIncludeInLeaf.map((l1Token) => getRealizedLpFeeForFills([fillsForL1Token[l1Token]])), // Running balances are straightforward to compute because deposits are sent to origin chain and fills - // are sent to destination chain only. - runningBalances: + // are sent to destination chain only. The spoke pool threshold is 0 by default so running balances + // should be 0. + netSendAmounts: chainId === originChainId ? l1TokensToIncludeInLeaf.map((l1Token) => depositsForL1Token[l1Token].amount.mul(toBN(-1))) : l1TokensToIncludeInLeaf.map((l1Token) => getRefundForFills([fillsForL1Token[l1Token]])), - netSendAmounts: l1TokensToIncludeInLeaf.map(() => toBN(0)), // Should be 0 since running balances are - // under threshold + runningBalances: l1TokensToIncludeInLeaf.map(() => toBN(0)), l1Tokens: l1TokensToIncludeInLeaf, }; }); @@ -1027,9 +1026,10 @@ describe("Dataworker: Build merkle roots", async function () { .map((leaf, i) => { return { ...leaf, leafId: i }; }); - expect(merkleRoot1.leaves).to.deep.equal(expectedLeaves); + + expect(merkleRoot1.leaves).excludingEvery(["groupIndex"]).to.deep.equal(expectedLeaves); const expectedMerkleRoot = await buildPoolRebalanceLeafTree( - expectedLeaves.map((leaf) => { + merkleRoot1.leaves.map((leaf) => { return { ...leaf, chainId: toBN(leaf.chainId), groupIndex: toBN(leaf.groupIndex), leafId: toBN(leaf.leafId) }; }) ); @@ -1086,14 +1086,12 @@ describe("Dataworker: Build merkle roots", async function () { compareAddresses(addressA, addressB) ); const expectedLeaf = { - groupIndex: 0, chainId: originChainId, bundleLpFees: [ orderedL1Tokens[0] === l1Token_1.address ? toBN(0) : getRealizedLpFeeForFills([fillA]), orderedL1Tokens[0] === l1TokenNew.address ? toBN(0) : getRealizedLpFeeForFills([fillA]), ], - netSendAmounts: [toBN(0), toBN(0)], - runningBalances: [ + netSendAmounts: [ orderedL1Tokens[0] === l1Token_1.address ? depositB.amount.mul(toBN(-1)) : depositA.amount.sub(getRefundForFills([fillA])).mul(toBN(-1)), @@ -1101,66 +1099,10 @@ describe("Dataworker: Build merkle roots", async function () { ? 
depositB.amount.mul(toBN(-1)) : depositA.amount.sub(getRefundForFills([fillA])).mul(toBN(-1)), ], + runningBalances: [toBN(0), toBN(0)], l1Tokens: orderedL1Tokens, - leafId: 0, }; - expect(deepEqualsWithBigNumber(merkleRoot1.leaves, [expectedLeaf])).to.be.true; - }); - it("Token transfer exceeds threshold", async function () { - await updateAllClients(); - const deposit = await buildDeposit( - hubPoolClient, - spokePool_1, - erc20_1, - l1Token_1, - depositor, - destinationChainId, - amountToDeposit - ); - await updateAllClients(); - const fill = await buildFillForRepaymentChain(spokePool_2, depositor, deposit, 1, destinationChainId); - await updateAllClients(); - const merkleRoot1 = await dataworkerInstance.buildPoolRebalanceRoot(getDefaultBlockRange(0), spokePoolClients); - - const orderedChainIds = [originChainId, destinationChainId].sort((x, y) => x - y); - const expectedLeaves1 = orderedChainIds - .map((chainId) => { - return { - groupIndex: 0, - chainId, - bundleLpFees: chainId === originChainId ? [toBN(0)] : [getRealizedLpFeeForFills([fill])], - // Running balance is <<< token threshold, so running balance should be non-zero and net send amount - // should be 0. - runningBalances: chainId === originChainId ? [deposit.amount.mul(toBN(-1))] : [getRefundForFills([fill])], - netSendAmounts: [toBN(0)], - l1Tokens: [l1Token_1.address], - }; - }) - .map((leaf, i) => { - return { ...leaf, leafId: i }; - }); - expect(deepEqualsWithBigNumber(merkleRoot1.leaves, expectedLeaves1)).to.be.true; - - // Now set the threshold much lower than the running balance and check that running balances for all - // chains gets set to 0 and net send amount is equal to the running balance. This also tests that the - // dataworker is comparing the absolute value of the running balance with the threshold, not the signed value. - await configStore.updateTokenConfig( - l1Token_1.address, - JSON.stringify({ - rateModel: sampleRateModel, - transferThreshold: toBNWei(1).toString(), - }) - ); - await configStoreClient.update(); - const merkleRoot2 = await dataworkerInstance.buildPoolRebalanceRoot(getDefaultBlockRange(1), spokePoolClients); - const expectedLeaves2 = expectedLeaves1.map((leaf) => { - return { - ...leaf, - runningBalances: [toBN(0)], - netSendAmounts: leaf.runningBalances, - }; - }); - expect(deepEqualsWithBigNumber(merkleRoot2.leaves, expectedLeaves2)).to.be.true; + expect(merkleRoot1.leaves).excludingEvery(["groupIndex", "leafId"]).to.deep.equal([expectedLeaf]); }); it("Adds latest running balances to next", async function () { await updateAllClients(); @@ -1210,14 +1152,12 @@ describe("Dataworker: Build merkle roots", async function () { { chainId: originChainId, bundleLpFees: [toBN(0)], - netSendAmounts: [toBN(0)], - runningBalances: [startingRunningBalances.sub(amountToDeposit)], - groupIndex: 0, - leafId: 0, + netSendAmounts: [startingRunningBalances.sub(amountToDeposit)], + runningBalances: [toBN(0)], l1Tokens: [l1Token_1.address], }, ]; - expect(deepEqualsWithBigNumber(merkleRoot1.leaves, expectedLeaves1)).to.be.true; + expect(merkleRoot1.leaves).excludingEvery(["groupIndex", "leafId"]).to.deep.equal(expectedLeaves1); // Submit a partial fill on destination chain. This tests that the previous running balance is added to // running balances modified by repayments, slow fills, and deposits. 
@@ -1228,24 +1168,20 @@ describe("Dataworker: Build merkle roots", async function () { { chainId: originChainId, bundleLpFees: [toBN(0)], - netSendAmounts: [toBN(0)], - runningBalances: [startingRunningBalances.sub(amountToDeposit)], - groupIndex: 0, - leafId: 0, + netSendAmounts: [startingRunningBalances.sub(amountToDeposit)], + runningBalances: [toBN(0)], l1Tokens: [l1Token_1.address], }, { chainId: destinationChainId, bundleLpFees: [getRealizedLpFeeForFills([fill])], - netSendAmounts: [toBN(0)], - runningBalances: [startingRunningBalances.add(getRefundForFills([fill])).add(slowFillPayment)], - groupIndex: 0, - leafId: 1, + netSendAmounts: [startingRunningBalances.add(getRefundForFills([fill])).add(slowFillPayment)], + runningBalances: [toBN(0)], l1Tokens: [l1Token_1.address], }, ]; const merkleRoot2 = await dataworkerInstance.buildPoolRebalanceRoot(getDefaultBlockRange(1), spokePoolClients); - expect(deepEqualsWithBigNumber(merkleRoot2.leaves, expectedLeaves2)).to.be.true; + expect(merkleRoot2.leaves).excludingEvery(["groupIndex", "leafId"]).to.deep.equal(expectedLeaves2); }); it("Spoke pool balance threshold, above and below", async function () { await updateAllClients(); @@ -1265,7 +1201,6 @@ describe("Dataworker: Build merkle roots", async function () { l1Token_1.address, JSON.stringify({ rateModel: sampleRateModel, - transferThreshold: "0", spokeTargetBalances: { [originChainId]: { // Threshold above the deposit amount. @@ -1284,115 +1219,20 @@ describe("Dataworker: Build merkle roots", async function () { const merkleRoot1 = await dataworkerInstance.buildPoolRebalanceRoot(getDefaultBlockRange(0), spokePoolClients); const orderedChainIds = [originChainId, destinationChainId].sort((x, y) => x - y); - const expectedLeaves1 = orderedChainIds - .map((chainId) => { - return { - groupIndex: 0, - chainId, - bundleLpFees: chainId === originChainId ? [toBN(0)] : [getRealizedLpFeeForFills([fill])], - // Running balance is <<< spoke pool balance threshold, so running balance should be non-zero and net send - // amount should be 0 for the origin chain. This should _not affect_ the destination chain, since spoke - // pool balance thresholds only apply to funds being sent from spoke to hub. - runningBalances: chainId === originChainId ? [deposit.amount.mul(toBN(-1))] : [toBN(0)], - netSendAmounts: chainId === originChainId ? [toBN(0)] : [getRefundForFills([fill])], - l1Tokens: [l1Token_1.address], - }; - }) - .map((leaf, i) => { - return { ...leaf, leafId: i }; - }); - expect(deepEqualsWithBigNumber(merkleRoot1.leaves, expectedLeaves1)).to.be.true; - - // Now set the threshold much lower than the running balance and check that running balances for all - // chains gets set to 0 and net send amount is equal to the running balance. This also tests that the - // dataworker is comparing the absolute value of the running balance with the threshold, not the signed value. - await configStore.updateTokenConfig( - l1Token_1.address, - JSON.stringify({ - rateModel: sampleRateModel, - transferThreshold: "0", - spokeTargetBalances: { - [originChainId]: { - // Threshold is equal to the deposit amount. - threshold: amountToDeposit.toString(), - target: amountToDeposit.div(2).toString(), - }, - [destinationChainId]: { - // Threshold above the deposit amount. 
- threshold: amountToDeposit.mul(2).toString(), - target: amountToDeposit.div(2).toString(), - }, - }, - }) - ); - await configStoreClient.update(); - const merkleRoot2 = await dataworkerInstance.buildPoolRebalanceRoot(getDefaultBlockRange(1), spokePoolClients); - // We expect to have the target remaining on the spoke. - // We expect to transfer the total deposit amount minus the remaining spoke balance. - const expectedSpokeBalance = amountToDeposit.div(2); - const expectedTransferAmount = amountToDeposit.sub(expectedSpokeBalance); - const expectedLeaves2 = expectedLeaves1.map((leaf) => { + const expectedLeaves1 = orderedChainIds.map((chainId) => { return { - ...leaf, - runningBalances: leaf.chainId === originChainId ? [expectedSpokeBalance.mul(-1)] : leaf.runningBalances, - netSendAmounts: leaf.chainId === originChainId ? [expectedTransferAmount.mul(-1)] : leaf.netSendAmounts, + chainId, + bundleLpFees: chainId === originChainId ? [toBN(0)] : [getRealizedLpFeeForFills([fill])], + // Running balance is <<< spoke pool balance threshold, so running balance should be non-zero and net send + // amount should be 0 for the origin chain. This should _not affect_ the destination chain, since spoke + // pool balance thresholds only apply to funds being sent from spoke to hub. + runningBalances: chainId === originChainId ? [deposit.amount.mul(toBN(-1))] : [toBN(0)], + netSendAmounts: chainId === originChainId ? [toBN(0)] : [getRefundForFills([fill])], + l1Tokens: [l1Token_1.address], }; }); - expect(deepEqualsWithBigNumber(merkleRoot2.leaves, expectedLeaves2)).to.be.true; - }); - it("Spoke pool balance threshold, below transfer threshold", async function () { - await updateAllClients(); - const deposit = await buildDeposit( - hubPoolClient, - spokePool_1, - erc20_1, - l1Token_1, - depositor, - destinationChainId, - amountToDeposit - ); - await updateAllClients(); - const fill = await buildFillForRepaymentChain(spokePool_2, depositor, deposit, 1, destinationChainId); - await updateAllClients(); - await configStore.updateTokenConfig( - l1Token_1.address, - JSON.stringify({ - rateModel: sampleRateModel, - transferThreshold: amountToDeposit.add(1).toString(), - spokeTargetBalances: { - [originChainId]: { - // Threshold above the deposit amount. - threshold: amountToDeposit.toString(), - target: amountToDeposit.div(2).toString(), - }, - [destinationChainId]: { - // Threshold above the deposit amount. - threshold: amountToDeposit.mul(2).toString(), - target: amountToDeposit.div(2).toString(), - }, - }, - }) - ); - await configStoreClient.update(); - const merkleRoot1 = await dataworkerInstance.buildPoolRebalanceRoot(getDefaultBlockRange(0), spokePoolClients); + expect(merkleRoot1.leaves).excludingEvery(["groupIndex", "leafId"]).to.deep.equal(expectedLeaves1); - const orderedChainIds = [originChainId, destinationChainId].sort((x, y) => x - y); - const expectedLeaves1 = orderedChainIds - .map((chainId) => { - return { - groupIndex: 0, - chainId, - bundleLpFees: chainId === originChainId ? [toBN(0)] : [getRealizedLpFeeForFills([fill])], - // Running balance is below the transfer threshold, so the spoke balance threshold should have no impact. - runningBalances: chainId === originChainId ? 
[deposit.amount.mul(toBN(-1))] : [getRefundForFills([fill])], - netSendAmounts: [toBN(0)], - l1Tokens: [l1Token_1.address], - }; - }) - .map((leaf, i) => { - return { ...leaf, leafId: i }; - }); - expect(deepEqualsWithBigNumber(merkleRoot1.leaves, expectedLeaves1)).to.be.true; // Now set the threshold much lower than the running balance and check that running balances for all // chains gets set to 0 and net send amount is equal to the running balance. This also tests that the // dataworker is comparing the absolute value of the running balance with the threshold, not the signed value. @@ -1400,7 +1240,6 @@ describe("Dataworker: Build merkle roots", async function () { l1Token_1.address, JSON.stringify({ rateModel: sampleRateModel, - transferThreshold: "0", spokeTargetBalances: { [originChainId]: { // Threshold is equal to the deposit amount. @@ -1424,12 +1263,11 @@ describe("Dataworker: Build merkle roots", async function () { const expectedLeaves2 = expectedLeaves1.map((leaf) => { return { ...leaf, - runningBalances: leaf.chainId === originChainId ? [expectedSpokeBalance.mul(-1)] : [toBN(0)], - netSendAmounts: - leaf.chainId === originChainId ? [expectedTransferAmount.mul(-1)] : [getRefundForFills([fill])], + runningBalances: leaf.chainId === originChainId ? [expectedSpokeBalance.mul(-1)] : leaf.runningBalances, + netSendAmounts: leaf.chainId === originChainId ? [expectedTransferAmount.mul(-1)] : leaf.netSendAmounts, }; }); - expect(deepEqualsWithBigNumber(merkleRoot2.leaves, expectedLeaves2)).to.be.true; + expect(merkleRoot2.leaves).excludingEvery(["groupIndex", "leafId"]).to.deep.equal(expectedLeaves2); }); }); describe("UBA Root Bundles", function () { @@ -1510,6 +1348,25 @@ describe("Dataworker: Build merkle roots", async function () { }); describe("Build relayer refund root", function () { it("amountToReturn is 0", async function () { + // Set spoke target balance thresholds above deposit amounts so that amountToReturn is always 0. + await configStore.updateTokenConfig( + l1Token_1.address, + JSON.stringify({ + rateModel: sampleRateModel, + spokeTargetBalances: { + [originChainId]: { + // Threshold above the deposit amount. + threshold: amountToDeposit.mul(10).toString(), + target: amountToDeposit.div(2).toString(), + }, + [destinationChainId]: { + // Threshold above the deposit amount. + threshold: amountToDeposit.mul(10).toString(), + target: amountToDeposit.div(2).toString(), + }, + }, + }) + ); await updateAllClients(); // No UBA flows in this test so all amounts to return will be 0 const { poolRebalanceLeaves } = dataworkerInstance._UBA_buildPoolRebalanceLeaves( @@ -1563,17 +1420,18 @@ describe("Dataworker: Build merkle roots", async function () { const depositorBeforeRelayer = toBN(depositor.address).lt(toBN(relayer.address)); const leaf1 = { - chainId: destinationChainId, amountToReturn: toBN(0), + chainId: destinationChainId, + refundAmounts: [ + getRefund(deposit1.amount, deposit1.realizedLpFeePct), + getRefund(deposit2.amount, deposit2.realizedLpFeePct), + ], // Refund amounts should aggregate across all fills. + leafId: 0, l2TokenAddress: erc20_2.address, refundAddresses: [ depositorBeforeRelayer ? depositor.address : relayer.address, depositorBeforeRelayer ? relayer.address : depositor.address, ], // Sorted ascending alphabetically - refundAmounts: [ - getRefund(deposit1.amount, deposit1.realizedLpFeePct), - getRefund(deposit2.amount, deposit2.realizedLpFeePct), - ], // Refund amounts should aggregate across all fills. 
}; await updateAllClients(); @@ -1590,7 +1448,10 @@ describe("Dataworker: Build merkle roots", async function () { ubaClient ); expect(relayerRefundLeaves1.leaves.length).to.equal(1); - deepEqualsWithBigNumber(relayerRefundLeaves1.leaves[0], { ...leaf1, leafId: 0 }); + expect(relayerRefundLeaves1.leaves[0].amountToReturn).to.deep.equal(leaf1.amountToReturn); + relayerRefundLeaves1.leaves[0].refundAmounts.forEach((refundAmount, i) => { + expect(refundAmount).to.equal(leaf1.refundAmounts[i]); + }); // Splits leaf into multiple leaves if refunds > MAX_REFUNDS_PER_RELAYER_REFUND_LEAF. const deposit4 = await buildDeposit( @@ -1651,8 +1512,14 @@ describe("Dataworker: Build merkle roots", async function () { getRefund(deposit4.amount, deposit4.realizedLpFeePct).mul(toBNWei("0.01")).div(toBNWei("1")), ], }; - deepEqualsWithBigNumber(relayerRefundLeaves3.leaves[0], { ...leaf1, leafId: 0 }); - deepEqualsWithBigNumber(relayerRefundLeaves3.leaves[1], { ...leaf3, leafId: 1 }); + expect(relayerRefundLeaves3.leaves[0].amountToReturn).to.deep.equal(leaf1.amountToReturn); + relayerRefundLeaves3.leaves[0].refundAmounts.forEach((refundAmount, i) => { + expect(refundAmount).to.equal(leaf1.refundAmounts[i]); + }); + expect(relayerRefundLeaves3.leaves[1].amountToReturn).to.deep.equal(leaf3.amountToReturn); + relayerRefundLeaves3.leaves[1].refundAmounts.forEach((refundAmount, i) => { + expect(refundAmount).to.equal(leaf3.refundAmounts[i]); + }); }); it("amountToReturn is non 0", async function () { await updateAllClients(); @@ -1681,16 +1548,6 @@ describe("Dataworker: Build merkle roots", async function () { ubaClient ); - // For the UBA, the token transfer threshold shouldn't matter so set it absurdly high. - await configStore.updateTokenConfig( - l1Token_1.address, - JSON.stringify({ - rateModel: sampleRateModel, - transferThreshold: toBNWei("1000000").toString(), - }) - ); - await updateAllClients(); - // This leaf's amountToReturn should be non zero since the UBA client was injected with a flow // with a negative netRunningBalanceAdjustment const leaf1 = { @@ -1765,8 +1622,14 @@ describe("Dataworker: Build merkle roots", async function () { ubaClient ); expect(relayerRefundLeaves2.leaves.length).to.equal(2); - deepEqualsWithBigNumber(relayerRefundLeaves2.leaves[0], { ...newLeaf1, leafId: 0 }); - deepEqualsWithBigNumber(relayerRefundLeaves2.leaves[1], { ...leaf2, leafId: 1 }); + expect(relayerRefundLeaves2.leaves[0].amountToReturn).to.deep.equal(newLeaf1.amountToReturn); + relayerRefundLeaves2.leaves[0].refundAmounts.forEach((refundAmount, i) => { + expect(refundAmount).to.equal(newLeaf1.refundAmounts[i]); + }); + expect(relayerRefundLeaves2.leaves[1].amountToReturn).to.deep.equal(leaf2.amountToReturn); + relayerRefundLeaves2.leaves[1].refundAmounts.forEach((refundAmount, i) => { + expect(refundAmount).to.equal(leaf2.refundAmounts[i]); + }); }); it("Refunds are included in UBA mode", async function () { await updateAllClients(); @@ -1837,7 +1700,7 @@ describe("Dataworker: Build merkle roots", async function () { refundAddresses: [relayer.address], refundAmounts: [getRefund(deposit1.amount, ubaRealizedLpFeePct)], }; - deepEqualsWithBigNumber(relayerRefundLeaves2.leaves[0], { ...leaf1, leafId: 0 }); + expect(relayerRefundLeaves2.leaves[0]).excludingEvery(["amountToReturn", "leafId"]).to.deep.equal(leaf1); }); it("Relayer balancing fees are added to refunded amounts to relayers", async function () { // Submit 1 deposit and 1 fill on same chain: @@ -1890,15 +1753,6 @@ describe("Dataworker: Build merkle roots", 
async function () { [originChainId, destinationChainId], ubaClient ); - - // For the UBA, the token transfer threshold shouldn't matter so set it absurdly high. - await configStore.updateTokenConfig( - l1Token_1.address, - JSON.stringify({ - rateModel: sampleRateModel, - transferThreshold: toBNWei("1000000").toString(), - }) - ); await updateAllClients(); // Can pass in a fills to refund object that is empty for the refund chain and token, or @@ -1913,15 +1767,14 @@ describe("Dataworker: Build merkle roots", async function () { // Balancing fee for refund above let expectedRefundAmount = toBNWei("0.2"); expect(relayerRefundLeaves1.leaves.length).to.equal(1); - deepEqualsWithBigNumber(relayerRefundLeaves1.leaves[0], { - amountToReturn: ethers.constants.Zero, - chainId: originChainId, - leafId: 0, - refundAmounts: [expectedRefundAmount], - l2TokenAddress: erc20_1.address, - refundAddresses: [relayer.address], - }); - + expect(relayerRefundLeaves1.leaves[0]) + .excludingEvery(["amountToReturn", "leafId"]) + .to.deep.equal({ + chainId: originChainId, + refundAmounts: [expectedRefundAmount], + l2TokenAddress: erc20_1.address, + refundAddresses: [relayer.address], + }); // Try again while passing in an already populated fills to refund object. const relayerRefundLeaves2 = await dataworkerInstance._UBA_buildRelayerRefundLeaves( { @@ -1944,14 +1797,14 @@ describe("Dataworker: Build merkle roots", async function () { // Expected refund amount is now 1 + new balancing fee. expectedRefundAmount = expectedRefundAmount.add(toBNWei("1")); expect(relayerRefundLeaves2.leaves.length).to.equal(1); - deepEqualsWithBigNumber(relayerRefundLeaves2.leaves[0], { - amountToReturn: ethers.constants.Zero, - chainId: originChainId, - leafId: 0, - refundAmounts: [expectedRefundAmount], - l2TokenAddress: erc20_1.address, - refundAddresses: [relayer.address], - }); + expect(relayerRefundLeaves2.leaves[0]) + .excludingEvery(["amountToReturn", "leafId"]) + .to.deep.equal({ + chainId: originChainId, + refundAmounts: [expectedRefundAmount], + l2TokenAddress: erc20_1.address, + refundAddresses: [relayer.address], + }); // Add a refund, not a fill, to the flows object. Check that refund // is sent to repayment chain. @@ -1982,14 +1835,14 @@ describe("Dataworker: Build merkle roots", async function () { // Expected refund amount is now 1 + new balancing fee. 
expectedRefundAmount = toBNWei("0.1"); expect(relayerRefundLeaves3.leaves.length).to.equal(1); - deepEqualsWithBigNumber(relayerRefundLeaves3.leaves[0], { - amountToReturn: ethers.constants.Zero, - chainId: refundRequest.repaymentChainId, - leafId: 0, - refundAmounts: [expectedRefundAmount], - l2TokenAddress: refundRequest.refundToken, - refundAddresses: [refundRequest.relayer], - }); + expect(relayerRefundLeaves3.leaves[0]) + .excludingEvery(["amountToReturn", "leafId"]) + .to.deep.equal({ + chainId: refundRequest.repaymentChainId, + refundAmounts: [expectedRefundAmount], + l2TokenAddress: refundRequest.refundToken, + refundAddresses: [refundRequest.relayer], + }); }); }); describe("Build slow relay root", function () { diff --git a/test/Dataworker.executePoolRebalances.ts b/test/Dataworker.executePoolRebalances.ts index a44d26707..57b154c75 100644 --- a/test/Dataworker.executePoolRebalances.ts +++ b/test/Dataworker.executePoolRebalances.ts @@ -1,7 +1,6 @@ import { HubPoolClient, MultiCallerClient, SpokePoolClient } from "../src/clients"; import { MAX_UINT_VAL } from "../src/utils"; import { - DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, amountToDeposit, @@ -45,7 +44,6 @@ describe("Dataworker: Execute pool rebalances", async function () { ethers, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, - DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, 0, destinationChainId )); diff --git a/test/Dataworker.executeRelayerRefunds.ts b/test/Dataworker.executeRelayerRefunds.ts index 0152195d7..695b478a5 100644 --- a/test/Dataworker.executeRelayerRefunds.ts +++ b/test/Dataworker.executeRelayerRefunds.ts @@ -39,13 +39,7 @@ describe("Dataworker: Execute relayer refunds", async function () { multiCallerClient, updateAllClients, spokePoolClients, - } = await setupDataworker( - ethers, - MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, - MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, - ethers.BigNumber.from(0), - 0 - )); + } = await setupDataworker(ethers, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, 0)); }); it("Simple lifecycle", async function () { await updateAllClients(); diff --git a/test/Dataworker.executeSlowRelay.ts b/test/Dataworker.executeSlowRelay.ts index c7bcd52af..52fcb8cd9 100644 --- a/test/Dataworker.executeSlowRelay.ts +++ b/test/Dataworker.executeSlowRelay.ts @@ -39,13 +39,7 @@ describe("Dataworker: Execute slow relays", async function () { multiCallerClient, updateAllClients, spokePoolClients, - } = await setupDataworker( - ethers, - MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, - MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, - ethers.BigNumber.from(0), - 0 - )); + } = await setupDataworker(ethers, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, 0)); }); it("Simple lifecycle", async function () { await updateAllClients(); diff --git a/test/Dataworker.loadData.ts b/test/Dataworker.loadData.ts index 7cb4c347b..91ef65950 100644 --- a/test/Dataworker.loadData.ts +++ b/test/Dataworker.loadData.ts @@ -77,7 +77,7 @@ describe("Dataworker: Load data used in all functions", async function () { spokePoolClients, updateAllClients, spy, - } = await setupDataworker(ethers, 25, 25, toBN(0), 0)); + } = await setupDataworker(ethers, 25, 25, 0)); bundleDataClient = dataworkerInstance.clients.bundleDataClient; multiCallerClient = dataworkerInstance.clients.multiCallerClient; }); diff --git a/test/Dataworker.validateRootBundle.ts b/test/Dataworker.validateRootBundle.ts index 
e9dd617ba..81c5f611a 100644 --- a/test/Dataworker.validateRootBundle.ts +++ b/test/Dataworker.validateRootBundle.ts @@ -3,7 +3,6 @@ import { HubPoolClient, MultiCallerClient, SpokePoolClient } from "../src/client import { EMPTY_MERKLE_ROOT, MAX_UINT_VAL, utf8ToHex } from "../src/utils"; import { BUNDLE_END_BLOCK_BUFFER, - DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, amountToDeposit, @@ -61,7 +60,6 @@ describe("Dataworker: Validate pending root bundle", async function () { ethers, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, - DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, BUNDLE_END_BLOCK_BUFFER )); }); diff --git a/test/HubPoolClient.RootBundleEvents.ts b/test/HubPoolClient.RootBundleEvents.ts index 6c9fb7075..b07addb9a 100644 --- a/test/HubPoolClient.RootBundleEvents.ts +++ b/test/HubPoolClient.RootBundleEvents.ts @@ -52,7 +52,6 @@ describe("HubPoolClient: RootBundle Events", async function () { ethers, constants.MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, constants.MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, - constants.DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, 0 )); diff --git a/test/HubPoolClient.Utilization.ts b/test/HubPoolClient.Utilization.ts index bc5539187..0f92ada21 100644 --- a/test/HubPoolClient.Utilization.ts +++ b/test/HubPoolClient.Utilization.ts @@ -1,6 +1,5 @@ -import { HubPoolClient } from "../src/clients"; +import { clients } from "@across-protocol/sdk-v2"; import { - DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, amountToLp, destinationChainId, mockTreeRoot, @@ -30,7 +29,7 @@ import { let configStore: Contract, hubPool: Contract; let l1Token: Contract, l2Token: Contract, timer: Contract, weth: Contract; -let configStoreClient: MockConfigStoreClient, hubPoolClient: HubPoolClient; +let configStoreClient: MockConfigStoreClient, hubPoolClient: clients.HubPoolClient; let owner: SignerWithAddress; // Same rate model used for across-v1 tests: @@ -62,7 +61,6 @@ const sampleSpokeTargetBalances = { const tokenConfigToUpdate = JSON.stringify({ rateModel: sampleRateModel, routeRateModel: { "999-888": sampleRateModel2 }, - transferThreshold: DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD.toString(), spokeTargetBalances: sampleSpokeTargetBalances, }); @@ -98,7 +96,11 @@ describe("HubPool Utilization", async function () { await configStoreClient.update(); - hubPoolClient = new HubPoolClient(createSpyLogger().spyLogger, hubPool, configStoreClient); + hubPoolClient = new clients.HubPoolClient( + createSpyLogger().spyLogger, + hubPool, + configStoreClient as unknown as clients.AcrossConfigStoreClient + ); await configStoreClient.update(); await hubPoolClient.update(); }); diff --git a/test/Monitor.ts b/test/Monitor.ts index e553e02ac..906838cea 100644 --- a/test/Monitor.ts +++ b/test/Monitor.ts @@ -76,7 +76,6 @@ describe("Monitor", async function () { ethers, constants.MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, constants.MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, - constants.DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD, 0 )); diff --git a/test/Relayer.BasicFill.ts b/test/Relayer.BasicFill.ts index d33c06899..ecad12c18 100644 --- a/test/Relayer.BasicFill.ts +++ b/test/Relayer.BasicFill.ts @@ -1,12 +1,5 @@ import { random } from "lodash"; -import { - AcrossApiClient, - ConfigStoreClient, - HubPoolClient, - MultiCallerClient, - SpokePoolClient, - TokenClient, -} from "../src/clients"; +import { AcrossApiClient, ConfigStoreClient, MultiCallerClient, TokenClient } from "../src/clients"; import { 
CONFIG_STORE_VERSION, UBA_MIN_CONFIG_STORE_VERSION } from "../src/common";
 import { Deposit } from "../src/interfaces";
 import { Relayer } from "../src/relayer/Relayer";
@@ -45,15 +38,16 @@ import {
   winston,
 } from "./utils";
 import { generateNoOpSpokePoolClientsForDefaultChainIndices } from "./utils/UBAUtils";
+import { clients } from "@across-protocol/sdk-v2";

 let spokePool_1: Contract, erc20_1: Contract, spokePool_2: Contract, erc20_2: Contract;
 let hubPool: Contract, configStore: Contract, l1Token: Contract;
 let owner: SignerWithAddress, depositor: SignerWithAddress, relayer: SignerWithAddress;
 let spy: sinon.SinonSpy, spyLogger: winston.Logger;
-let spokePoolClient_1: SpokePoolClient, spokePoolClient_2: SpokePoolClient;
-let spokePoolClients: { [chainId: number]: SpokePoolClient };
-let configStoreClient: ConfigStoreClient, hubPoolClient: HubPoolClient, tokenClient: TokenClient;
+let spokePoolClient_1: clients.SpokePoolClient, spokePoolClient_2: clients.SpokePoolClient;
+let spokePoolClients: { [chainId: number]: clients.SpokePoolClient };
+let configStoreClient: ConfigStoreClient, hubPoolClient: clients.HubPoolClient, tokenClient: TokenClient;
 let relayerInstance: Relayer;
 let multiCallerClient: MultiCallerClient, profitClient: MockProfitClient;
 let spokePool1DeploymentBlock: number, spokePool2DeploymentBlock: number;
@@ -99,19 +93,19 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () {
     ) as unknown as ConfigStoreClient;
     await configStoreClient.update();

-    hubPoolClient = new HubPoolClient(spyLogger, hubPool, configStoreClient);
+    hubPoolClient = new clients.HubPoolClient(spyLogger, hubPool, configStoreClient);
     await hubPoolClient.update();

     multiCallerClient = new MockedMultiCallerClient(spyLogger);

-    spokePoolClient_1 = new SpokePoolClient(
+    spokePoolClient_1 = new clients.SpokePoolClient(
       spyLogger,
       spokePool_1.connect(relayer),
       hubPoolClient,
       originChainId,
       spokePool1DeploymentBlock
     );
-    spokePoolClient_2 = new SpokePoolClient(
+    spokePoolClient_2 = new clients.SpokePoolClient(
       spyLogger,
       spokePool_2.connect(relayer),
       hubPoolClient,
@@ -477,7 +471,11 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () {
     const version = UBA_MIN_CONFIG_STORE_VERSION;
     configStoreClient = new ConfigStoreClient(spyLogger, configStore, { fromBlock: 0 }, version);
     await configStoreClient.update();
-    hubPoolClient = new HubPoolClient(spyLogger, hubPool, configStoreClient);
+    hubPoolClient = new clients.HubPoolClient(
+      spyLogger,
+      hubPool,
+      configStoreClient as unknown as clients.AcrossConfigStoreClient
+    );
     relayerInstance = new Relayer(
       relayer.address,
       spyLogger,
diff --git a/test/Relayer.UnfilledDeposits.ts b/test/Relayer.UnfilledDeposits.ts
index ae7958382..077d5881b 100644
--- a/test/Relayer.UnfilledDeposits.ts
+++ b/test/Relayer.UnfilledDeposits.ts
@@ -1,12 +1,4 @@
-import {
-  AcrossApiClient,
-  ConfigStoreClient,
-  HubPoolClient,
-  MultiCallerClient,
-  SpokePoolClient,
-  TokenClient,
-  UBAClient,
-} from "../src/clients";
+import { AcrossApiClient, ConfigStoreClient, MultiCallerClient, TokenClient, UBAClient } from "../src/clients";
 import {
   CHAIN_ID_TEST_LIST,
   amountToLp,
@@ -35,6 +27,7 @@ import {
   simpleDeposit,
   toBNWei,
 } from "./utils";
+import { clients } from "@across-protocol/sdk-v2";

 // Tested
 import { Relayer } from "../src/relayer/Relayer";
@@ -46,8 +39,8 @@ let hubPool: Contract, l1Token: Contract, configStore: Contract;
 let owner: SignerWithAddress, depositor: SignerWithAddress, relayer: SignerWithAddress;
 const { spy, spyLogger } = createSpyLogger();
-let spokePoolClient_1: SpokePoolClient, spokePoolClient_2: SpokePoolClient;
-let configStoreClient: MockConfigStoreClient, hubPoolClient: HubPoolClient;
+let spokePoolClient_1: clients.SpokePoolClient, spokePoolClient_2: clients.SpokePoolClient;
+let configStoreClient: MockConfigStoreClient, hubPoolClient: clients.HubPoolClient;
 let multiCallerClient: MultiCallerClient, tokenClient: TokenClient;
 let profitClient: MockProfitClient;
 let spokePool1DeploymentBlock: number, spokePool2DeploymentBlock: number;
@@ -93,17 +86,17 @@ describe("Relayer: Unfilled Deposits", async function () {
     configStoreClient = new MockConfigStoreClient(spyLogger, configStore, undefined, undefined, CHAIN_ID_TEST_LIST);
     await configStoreClient.update();
-    hubPoolClient = new HubPoolClient(spyLogger, hubPool, configStoreClient);
+    hubPoolClient = new clients.HubPoolClient(spyLogger, hubPool, configStoreClient);
     await hubPoolClient.update();

-    spokePoolClient_1 = new SpokePoolClient(
+    spokePoolClient_1 = new clients.SpokePoolClient(
       spyLogger,
       spokePool_1,
       hubPoolClient,
       originChainId,
       spokePool1DeploymentBlock
     );
-    spokePoolClient_2 = new SpokePoolClient(
+    spokePoolClient_2 = new clients.SpokePoolClient(
       spyLogger,
       spokePool_2,
       hubPoolClient,
diff --git a/test/SpokePoolClient.SpeedUp.ts b/test/SpokePoolClient.SpeedUp.ts
index c808aa483..45afdc31b 100644
--- a/test/SpokePoolClient.SpeedUp.ts
+++ b/test/SpokePoolClient.SpeedUp.ts
@@ -14,14 +14,14 @@ import {
   toBNWei,
 } from "./utils";

-import { SpokePoolClient } from "../src/clients";
+import { clients } from "@across-protocol/sdk-v2";
 import { DepositWithBlock } from "../src/interfaces";

 let spokePool: Contract, erc20: Contract, destErc20: Contract, weth: Contract;
 let depositor: SignerWithAddress, deploymentBlock: number;
 const destinationChainId2 = destinationChainId + 1;

-let spokePoolClient: SpokePoolClient;
+let spokePoolClient: clients.SpokePoolClient;

 describe("SpokePoolClient: SpeedUp", async function () {
   const ignoredFields = [
@@ -37,7 +37,13 @@ describe("SpokePoolClient: SpeedUp", async function () {
     [, depositor] = await ethers.getSigners();
     ({ spokePool, erc20, destErc20, weth, deploymentBlock } = await deploySpokePoolWithToken(originChainId));
     await enableRoutes(spokePool, [{ originToken: erc20.address, destinationChainId: destinationChainId2 }]);
-    spokePoolClient = new SpokePoolClient(createSpyLogger().spyLogger, spokePool, null, originChainId, deploymentBlock);
+    spokePoolClient = new clients.SpokePoolClient(
+      createSpyLogger().spyLogger,
+      spokePool,
+      null,
+      originChainId,
+      deploymentBlock
+    );

     await setupTokensForWallet(spokePool, depositor, [erc20, destErc20], weth, 10);
   });
diff --git a/test/SpokePoolClient.ValidateFill.ts b/test/SpokePoolClient.ValidateFill.ts
index a2b1a9ec5..83ca7dbbd 100644
--- a/test/SpokePoolClient.ValidateFill.ts
+++ b/test/SpokePoolClient.ValidateFill.ts
@@ -27,10 +27,10 @@ import {
   sinon,
 } from "./utils";

-import { ConfigStoreClient, HubPoolClient, SpokePoolClient } from "../src/clients";
+import { ConfigStoreClient } from "../src/clients";
 import { queryHistoricalDepositForFill } from "../src/utils";
 import { MockConfigStoreClient, MockSpokePoolClient } from "./mocks";
-import { utils } from "@across-protocol/sdk-v2";
+import { utils, clients } from "@across-protocol/sdk-v2";
 import { CHAIN_ID_TEST_LIST, repaymentChainId } from "./constants";

 const { validateFillForDeposit } = utils;
@@ -40,8 +40,8 @@ let spokePool1DeploymentBlock: number, spokePool2DeploymentBlock: number;
 let l1Token: Contract, configStore: Contract;
 let spy: sinon.SinonSpy, spyLogger: winston.Logger;
-let spokePoolClient2: SpokePoolClient, hubPoolClient: HubPoolClient;
-let spokePoolClient1: SpokePoolClient, configStoreClient: ConfigStoreClient;
+let spokePoolClient2: clients.SpokePoolClient, hubPoolClient: clients.HubPoolClient;
+let spokePoolClient1: clients.SpokePoolClient, configStoreClient: ConfigStoreClient;

 describe("SpokePoolClient: Fill Validation", async function () {
   beforeEach(async function () {
@@ -81,17 +81,21 @@ describe("SpokePoolClient: Fill Validation", async function () {
     ) as unknown as ConfigStoreClient;
     await configStoreClient.update();

-    hubPoolClient = new HubPoolClient(spyLogger, hubPool, configStoreClient);
+    hubPoolClient = new clients.HubPoolClient(
+      spyLogger,
+      hubPool,
+      configStoreClient as unknown as clients.AcrossConfigStoreClient
+    );
     await hubPoolClient.update();

-    spokePoolClient1 = new SpokePoolClient(
+    spokePoolClient1 = new clients.SpokePoolClient(
       spyLogger,
       spokePool_1,
       hubPoolClient,
       originChainId,
       spokePool1DeploymentBlock
     );
-    spokePoolClient2 = new SpokePoolClient(
+    spokePoolClient2 = new clients.SpokePoolClient(
       createSpyLogger().spyLogger,
       spokePool_2,
       null,
@@ -132,7 +136,7 @@ describe("SpokePoolClient: Fill Validation", async function () {
   it("Returns deposit matched with fill", async function () {
     const deposit_1 = await buildDeposit(hubPoolClient, spokePool_1, erc20_1, l1Token, depositor, destinationChainId);
     const fill_1 = await buildFill(spokePool_2, erc20_2, depositor, relayer, deposit_1, 0.5);
-    const spokePoolClientForDestinationChain = new SpokePoolClient(
+    const spokePoolClientForDestinationChain = new clients.SpokePoolClient(
       createSpyLogger().spyLogger,
       spokePool_1,
       null,
@@ -488,7 +492,7 @@ describe("SpokePoolClient: Fill Validation", async function () {
     const fill_1 = await buildFill(spokePool_2, erc20_2, depositor, relayer, expectedDeposit, 0.2);
     const fill_2 = await buildModifiedFill(spokePool_2, depositor, relayer, fill_1, 2, 0.2, relayer.address, "0x12"); // Fill same % of deposit with 2x larger relayer fee pct.
-    const spokePoolClientForDestinationChain = new SpokePoolClient(
+    const spokePoolClientForDestinationChain = new clients.SpokePoolClient(
       createSpyLogger().spyLogger,
       spokePool_1,
       null,
diff --git a/test/SpokePoolClient.deposits.ts b/test/SpokePoolClient.deposits.ts
index ae493e27b..299628476 100644
--- a/test/SpokePoolClient.deposits.ts
+++ b/test/SpokePoolClient.deposits.ts
@@ -1,4 +1,4 @@
-import { SpokePoolClient } from "../src/clients";
+import { clients } from "@across-protocol/sdk-v2";
 import {
   Contract,
   SignerWithAddress,
@@ -18,14 +18,20 @@ let depositor1: SignerWithAddress, depositor2: SignerWithAddress;
 let deploymentBlock: number;
 const destinationChainId2 = destinationChainId + 1;

-let spokePoolClient: SpokePoolClient;
+let spokePoolClient: clients.SpokePoolClient;

 describe("SpokePoolClient: Deposits", async function () {
   beforeEach(async function () {
     [, depositor1, depositor2] = await ethers.getSigners();
     ({ spokePool, erc20, destErc20, weth, deploymentBlock } = await deploySpokePoolWithToken(originChainId));
     await enableRoutes(spokePool, [{ originToken: erc20.address, destinationChainId: destinationChainId2 }]);
-    spokePoolClient = new SpokePoolClient(createSpyLogger().spyLogger, spokePool, null, originChainId, deploymentBlock);
+    spokePoolClient = new clients.SpokePoolClient(
+      createSpyLogger().spyLogger,
+      spokePool,
+      null,
+      originChainId,
+      deploymentBlock
+    );

     await setupTokensForWallet(spokePool, depositor1, [erc20, destErc20], weth, 10);
     await setupTokensForWallet(spokePool, depositor2, [erc20, destErc20], weth, 10);
diff --git a/test/SpokePoolClient.fills.ts b/test/SpokePoolClient.fills.ts
index a97b48629..e947a7979 100644
--- a/test/SpokePoolClient.fills.ts
+++ b/test/SpokePoolClient.fills.ts
@@ -58,12 +58,9 @@ describe("SpokePoolClient: Fills", async function () {
       destinationToken: destErc20.address,
       message: "0x",
     };
-    const fill1 = await buildFill(spokePool, destErc20, depositor, relayer1, deposit, 1);
-    const fill2 = await buildFill(spokePool, destErc20, depositor, relayer1, { ...deposit, depositId: 1 }, 1);
+    await buildFill(spokePool, destErc20, depositor, relayer1, deposit, 1);
+    await buildFill(spokePool, destErc20, depositor, relayer1, { ...deposit, depositId: 1 }, 1);
     await spokePoolClient.update();
-
-    expect(spokePoolClient.getFills()[0]).to.deep.contains(fill1);
-    expect(spokePoolClient.getFills()[1]).to.deep.contains(fill2);
     expect(spokePoolClient.getFills().length).to.equal(2);
   });
   it("Correctly fetches deposit data multiple fills, multiple chains", async function () {
@@ -84,67 +81,25 @@ describe("SpokePoolClient: Fills", async function () {
     // Do 6 deposits. 2 for the first depositor on chain1, 1 for the first depositor on chain2, 1 for the second
     // depositor on chain1, and 2 for the second depositor on chain2.
-    const relayer1Chain1_1 = await buildFill(spokePool, destErc20, depositor, relayer1, deposit, 0.1);
-    const relayer1Chain1_2 = await buildFill(spokePool, destErc20, depositor, relayer1, deposit, 0.1);
-    const relayer1Chain2_1 = await buildFill(
-      spokePool,
-      destErc20,
-      depositor,
-      relayer1,
-      { ...deposit, originChainId: originChainId2 },
-      0.1
-    );
+    await buildFill(spokePool, destErc20, depositor, relayer1, deposit, 0.1);
+    await buildFill(spokePool, destErc20, depositor, relayer1, deposit, 0.1);
+    await buildFill(spokePool, destErc20, depositor, relayer1, { ...deposit, originChainId: originChainId2 }, 0.1);

-    const relayer2Chain1_1 = await buildFill(spokePool, destErc20, depositor, relayer2, deposit, 0.1);
-    const relayer2Chain2_1 = await buildFill(
-      spokePool,
-      destErc20,
-      depositor,
-      relayer2,
-      { ...deposit, originChainId: originChainId2 },
-      0.1
-    );
-    const relayer2Chain2_2 = await buildFill(
-      spokePool,
-      destErc20,
-      depositor,
-      relayer2,
-      { ...deposit, originChainId: originChainId2 },
-      0.1
-    );
+    await buildFill(spokePool, destErc20, depositor, relayer2, deposit, 0.1);
+    await buildFill(spokePool, destErc20, depositor, relayer2, { ...deposit, originChainId: originChainId2 }, 0.1);
+    await buildFill(spokePool, destErc20, depositor, relayer2, { ...deposit, originChainId: originChainId2 }, 0.1);

     await spokePoolClient.update();

     // Validate associated ChainId Events are correctly returned.
-    expect(spokePoolClient.getFills()[0]).to.deep.contains(relayer1Chain1_1);
-    expect(spokePoolClient.getFills()[1]).to.deep.contains(relayer1Chain1_2);
-    expect(spokePoolClient.getFills()[2]).to.deep.contains(relayer1Chain2_1);
-    expect(spokePoolClient.getFills()[3]).to.deep.contains(relayer2Chain1_1);
-    expect(spokePoolClient.getFills()[4]).to.deep.contains(relayer2Chain2_1);
-    expect(spokePoolClient.getFills()[5]).to.deep.contains(relayer2Chain2_2);
     expect(spokePoolClient.getFills().length).to.equal(6);

     // TODO: Add `getFillsForRepaymentChainId` tests once we update the `fillRelay` method from contracts-v2 to allow
     // an overridable `repaymentChainId`
-    expect(spokePoolClient.getFillsForOriginChain(originChainId)[0]).to.deep.contains(relayer1Chain1_1);
-    expect(spokePoolClient.getFillsForOriginChain(originChainId)[1]).to.deep.contains(relayer1Chain1_2);
-    expect(spokePoolClient.getFillsForOriginChain(originChainId)[2]).to.deep.contains(relayer2Chain1_1);
     expect(spokePoolClient.getFillsForOriginChain(originChainId).length).to.equal(3);
-
-    expect(spokePoolClient.getFillsForOriginChain(originChainId2)[0]).to.deep.contains(relayer1Chain2_1);
-    expect(spokePoolClient.getFillsForOriginChain(originChainId2)[1]).to.deep.contains(relayer2Chain2_1);
-    expect(spokePoolClient.getFillsForOriginChain(originChainId2)[2]).to.deep.contains(relayer2Chain2_2);
     expect(spokePoolClient.getFillsForOriginChain(originChainId2).length).to.equal(3);
-
-    expect(spokePoolClient.getFillsForRelayer(relayer1.address)[0]).to.deep.contains(relayer1Chain1_1);
-    expect(spokePoolClient.getFillsForRelayer(relayer1.address)[1]).to.deep.contains(relayer1Chain1_2);
-    expect(spokePoolClient.getFillsForRelayer(relayer1.address)[2]).to.deep.contains(relayer1Chain2_1);
     expect(spokePoolClient.getFillsForRelayer(relayer1.address).length).to.equal(3);
-
-    expect(spokePoolClient.getFillsForRelayer(relayer2.address)[0]).to.deep.contains(relayer2Chain1_1);
-    expect(spokePoolClient.getFillsForRelayer(relayer2.address)[1]).to.deep.contains(relayer2Chain2_1);
-    expect(spokePoolClient.getFillsForRelayer(relayer2.address)[2]).to.deep.contains(relayer2Chain2_2);
     expect(spokePoolClient.getFillsForRelayer(relayer2.address).length).to.equal(3);
   });
 });
diff --git a/test/TokenClient.TokenShortfall.ts b/test/TokenClient.TokenShortfall.ts
index 9287f81d0..9091db5f2 100644
--- a/test/TokenClient.TokenShortfall.ts
+++ b/test/TokenClient.TokenShortfall.ts
@@ -4,7 +4,6 @@ import {
   Contract,
   SignerWithAddress,
   createSpyLogger,
-  deepEqualsWithBigNumber,
   deployAndConfigureHubPool,
   deployConfigStore,
   deploySpokePoolWithToken,
@@ -76,10 +75,11 @@ describe("TokenClient: Token shortfall", async function () {
     let needed = toBNWei(420);
     let shortfall = needed.sub(balance);
     tokenClient.captureTokenShortfall(destinationChainId, erc20_2.address, depositId, toBNWei(420));
-    const expectedData = {
-      [destinationChainId]: { [erc20_2.address]: { deposits: [depositId], balance, needed, shortfall } },
-    };
-    expect(deepEqualsWithBigNumber(tokenClient.getTokenShortfall(), expectedData)).to.be.true;
+    const tokenShortFallData = tokenClient.getTokenShortfall()[destinationChainId][erc20_2.address];
+    expect(tokenShortFallData.balance).to.equal(balance);
+    expect(tokenShortFallData.needed).to.equal(needed);
+    expect(tokenShortFallData.shortfall).to.equal(shortfall);
+    expect(tokenShortFallData.deposits).to.deep.equal([depositId]);

     // A subsequent shortfall deposit of 42 should add to the token shortfall and append the deposit id as 351+42 = 393.
     const depositId2 = 2;
@@ -87,14 +87,15 @@ describe("TokenClient: Token shortfall", async function () {
     tokenClient.captureTokenShortfall(destinationChainId, erc20_2.address, depositId2, toBNWei(42));
     needed = needed.add(toBNWei(42));
     shortfall = needed.sub(balance);
-    const expectedData2 = {
-      [destinationChainId]: { [erc20_2.address]: { deposits: [depositId, depositId2], balance, needed, shortfall } },
-    };
-    expect(deepEqualsWithBigNumber(tokenClient.getTokenShortfall(), expectedData2)).to.be.true;
+    const tokenShortFallData2 = tokenClient.getTokenShortfall()[destinationChainId][erc20_2.address];
+    expect(tokenShortFallData2.balance).to.equal(balance);
+    expect(tokenShortFallData2.needed).to.equal(needed);
+    expect(tokenShortFallData2.shortfall).to.equal(shortfall);
+    expect(tokenShortFallData2.deposits).to.deep.equal([depositId, depositId2]);

     // Updating the client should not impact anything.
     await updateAllClients();
-    expect(deepEqualsWithBigNumber(tokenClient.getTokenShortfall(), expectedData2)).to.be.true;
+    expect(tokenShortFallData2).to.deep.equal(tokenClient.getTokenShortfall()[destinationChainId][erc20_2.address]);
   });
 });
diff --git a/test/constants.ts b/test/constants.ts
index 54a096591..f7a99751b 100644
--- a/test/constants.ts
+++ b/test/constants.ts
@@ -15,10 +15,6 @@ export const MAX_REFUNDS_PER_RELAYER_REFUND_LEAF = 3;
 // Max number of L1 tokens for a chain ID in a pool rebalance leaf.
 export const MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF = 3;

-// Once running balances hits this number for an L1 token, net send amount should be set to running
-// balances to transfer tokens to the spoke pool.
-export const DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD = toWei(1000);
-
 export const BUNDLE_END_BLOCK_BUFFER = 5;

 // DAI's Rate model.
@@ -31,7 +27,6 @@ export const sampleRateModel = {
 export const defaultTokenConfig = JSON.stringify({
   rateModel: sampleRateModel,
-  transferThreshold: DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD.toString(),
 });

 // Add Mainnet chain ID 1 to the chain ID list because the dataworker uses this chain to look up latest GlobalConfig
diff --git a/test/fixtures/Dataworker.Fixture.ts b/test/fixtures/Dataworker.Fixture.ts
index b3898064c..718062a55 100644
--- a/test/fixtures/Dataworker.Fixture.ts
+++ b/test/fixtures/Dataworker.Fixture.ts
@@ -3,7 +3,6 @@ import {
   deploySpokePoolWithToken,
   enableRoutesOnHubPool,
   Contract,
-  BigNumber,
   enableRoutes,
   sampleRateModel,
   createSpyLogger,
@@ -23,7 +22,6 @@ import {
   repaymentChainId,
   MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF,
   MAX_REFUNDS_PER_RELAYER_REFUND_LEAF,
-  DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD,
 } from "../constants";
 import { Dataworker } from "../../src/dataworker/Dataworker"; // Tested
@@ -31,20 +29,21 @@ import { BundleDataClient, TokenClient } from "../../src/clients";
 import { DataworkerClients } from "../../src/dataworker/DataworkerClientHelper";
 import { MockConfigStoreClient, MockedMultiCallerClient } from "../mocks";
 import { EthersTestLibrary } from "../types";
+import { clients as sdkClients } from "@across-protocol/sdk-v2";

 async function _constructSpokePoolClientsWithLookback(
   spokePools: Contract[],
   spokePoolChains: number[],
   spyLogger: winston.Logger,
   signer: SignerWithAddress,
-  hubPoolClient: clients.HubPoolClient,
+  hubPoolClient: sdkClients.HubPoolClient,
   lookbackForAllChains?: number,
   deploymentBlocks?: { [chainId: number]: number }
 ) {
   await hubPoolClient.update();
   const latestBlocks = await Promise.all(spokePools.map((x) => x.provider.getBlockNumber()));
   return spokePools.map((pool, i) => {
-    return new clients.SpokePoolClient(
+    return new sdkClients.SpokePoolClient(
       spyLogger,
       pool.connect(signer),
       hubPoolClient,
@@ -60,7 +59,6 @@ export async function setupDataworker(
   ethers: EthersTestLibrary,
   maxRefundPerRelayerRefundLeaf: number,
   maxL1TokensPerPoolRebalanceLeaf: number,
-  defaultPoolRebalanceTokenTransferThreshold: BigNumber,
   defaultEndBlockBuffer: number,
   destinationChainId = defaultDestinationChainId,
   originChainId = defaultOriginChainId,
@@ -75,14 +73,14 @@ export async function setupDataworker(
   l1Token_2: Contract;
   configStore: Contract;
   timer: Contract;
-  spokePoolClient_1: clients.SpokePoolClient;
-  spokePoolClient_2: clients.SpokePoolClient;
-  spokePoolClient_3: clients.SpokePoolClient;
-  spokePoolClient_4: clients.SpokePoolClient;
-  spokePoolClients: { [chainId: number]: clients.SpokePoolClient };
+  spokePoolClient_1: sdkClients.SpokePoolClient;
+  spokePoolClient_2: sdkClients.SpokePoolClient;
+  spokePoolClient_3: sdkClients.SpokePoolClient;
+  spokePoolClient_4: sdkClients.SpokePoolClient;
+  spokePoolClients: { [chainId: number]: sdkClients.SpokePoolClient };
   mockedConfigStoreClient: MockConfigStoreClient;
-  configStoreClient: clients.ConfigStoreClient;
-  hubPoolClient: clients.HubPoolClient;
+  configStoreClient: sdkClients.AcrossConfigStoreClient;
+  hubPoolClient: sdkClients.HubPoolClient;
   dataworkerInstance: Dataworker;
   spyLogger: winston.Logger;
   spy: sinon.SinonSpy;
@@ -163,8 +161,7 @@ export async function setupDataworker(
     [l1Token_1, l1Token_2],
     maxL1TokensPerPoolRebalanceLeaf,
     maxRefundPerRelayerRefundLeaf,
-    sampleRateModel,
-    defaultPoolRebalanceTokenTransferThreshold
+    sampleRateModel
   );

   const configStoreClient = new MockConfigStoreClient(spyLogger, configStore);
@@ -172,7 +169,7 @@ export async function setupDataworker(
   await configStoreClient.update();

-  const hubPoolClient = new clients.HubPoolClient(
+  const hubPoolClient = new sdkClients.HubPoolClient(
     spyLogger,
     hubPool,
     configStoreClient,
@@ -206,7 +203,7 @@ export async function setupDataworker(
   const bundleDataClient = new BundleDataClient(
     spyLogger,
     {
-      configStoreClient: configStoreClient as unknown as clients.ConfigStoreClient,
+      configStoreClient: configStoreClient as unknown as sdkClients.AcrossConfigStoreClient,
       multiCallerClient,
       hubPoolClient,
     },
@@ -219,7 +216,7 @@ export async function setupDataworker(
     tokenClient,
     hubPoolClient,
     multiCallerClient,
-    configStoreClient: configStoreClient as unknown as clients.ConfigStoreClient,
+    configStoreClient: configStoreClient as unknown as sdkClients.AcrossConfigStoreClient,
     profitClient,
   };
   const dataworkerInstance = new Dataworker(
@@ -228,7 +225,6 @@ export async function setupDataworker(
     testChainIdList,
     maxRefundPerRelayerRefundLeaf,
     maxL1TokensPerPoolRebalanceLeaf,
-    Object.fromEntries(testChainIdList.map((chainId) => [chainId, defaultPoolRebalanceTokenTransferThreshold])),
     Object.fromEntries(testChainIdList.map((chainId) => [chainId, defaultEndBlockBuffer]))
   );

@@ -267,7 +263,7 @@ export async function setupDataworker(
     spokePoolClient_3,
     spokePoolClient_4,
     spokePoolClients,
-    configStoreClient: configStoreClient as unknown as clients.ConfigStoreClient,
+    configStoreClient: configStoreClient as unknown as sdkClients.AcrossConfigStoreClient,
     mockedConfigStoreClient: configStoreClient,
     hubPoolClient,
     dataworkerInstance,
@@ -306,14 +302,14 @@ export async function setupFastDataworker(
   l1Token_2: Contract;
   configStore: Contract;
   timer: Contract;
-  spokePoolClient_1: clients.SpokePoolClient;
-  spokePoolClient_2: clients.SpokePoolClient;
-  spokePoolClient_3: clients.SpokePoolClient;
-  spokePoolClient_4: clients.SpokePoolClient;
-  spokePoolClients: { [chainId: number]: clients.SpokePoolClient };
+  spokePoolClient_1: sdkClients.SpokePoolClient;
+  spokePoolClient_2: sdkClients.SpokePoolClient;
+  spokePoolClient_3: sdkClients.SpokePoolClient;
+  spokePoolClient_4: sdkClients.SpokePoolClient;
+  spokePoolClients: { [chainId: number]: sdkClients.SpokePoolClient };
   mockedConfigStoreClient: MockConfigStoreClient;
-  configStoreClient: clients.ConfigStoreClient;
-  hubPoolClient: clients.HubPoolClient;
+  configStoreClient: sdkClients.AcrossConfigStoreClient;
+  hubPoolClient: sdkClients.HubPoolClient;
   dataworkerInstance: Dataworker;
   spyLogger: winston.Logger;
   spy: sinon.SinonSpy;
@@ -330,7 +326,6 @@ export async function setupFastDataworker(
     ethers,
     MAX_REFUNDS_PER_RELAYER_REFUND_LEAF,
     MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF,
-    DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD,
     0,
     defaultDestinationChainId,
     defaultOriginChainId,
diff --git a/test/utils/utils.ts b/test/utils/utils.ts
index dd0db7cd3..c4b73e707 100644
--- a/test/utils/utils.ts
+++ b/test/utils/utils.ts
@@ -8,12 +8,10 @@ import { Deposit, Fill, FillWithBlock, RelayerRefundLeaf, RunningBalances } from
 import { TransactionResponse, buildRelayerRefundTree, toBN, toBNWei, utf8ToHex } from "../../src/utils";
 import {
   DEFAULT_BLOCK_RANGE_FOR_CHAIN,
-  DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD,
   MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF,
   MAX_REFUNDS_PER_RELAYER_REFUND_LEAF,
   amountToDeposit,
   depositRelayerFeePct,
-  l1TokenTransferThreshold,
   sampleRateModel,
   zeroAddress,
 } from "../constants";
@@ -125,7 +123,6 @@ export async function deployConfigStore(
   maxL1TokensPerPoolRebalanceLeaf: number = MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF,
   maxRefundPerRelayerRefundLeaf: number = MAX_REFUNDS_PER_RELAYER_REFUND_LEAF,
   rateModel: unknown = sampleRateModel,
-  transferThreshold: BigNumber = DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD,
   additionalChainIdIndices?: number[]
 ): Promise<{ configStore: AcrossConfigStore; deploymentBlock: number }> {
   const configStore = (await (
@@ -138,7 +135,6 @@ export async function deployConfigStore(
       token.address,
       JSON.stringify({
         rateModel: rateModel,
-        transferThreshold: transferThreshold.toString(),
       })
     );
   }
@@ -230,10 +226,7 @@ export async function deployNewTokenMapping(
     { destinationChainId: spokePoolChainId, l1Token, destinationToken: l2Token },
     { destinationChainId: spokePoolDestinationChainId, l1Token, destinationToken: l2TokenDestination },
   ]);
-  await configStore.updateTokenConfig(
-    l1Token.address,
-    JSON.stringify({ rateModel: sampleRateModel, transferThreshold: l1TokenTransferThreshold.toString() })
-  );
+  await configStore.updateTokenConfig(l1Token.address, JSON.stringify({ rateModel: sampleRateModel }));

   // Give signer initial balance and approve hub pool and spoke pool to pull funds from it
   await addLiquidity(l1TokenHolder, hubPool, l1Token, amountToSeedLpPool);
diff --git a/yarn.lock b/yarn.lock
index 1a2055975..a2568d0f5 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -37,10 +37,10 @@
     "@openzeppelin/contracts" "4.1.0"
     "@uma/core" "^2.18.0"

-"@across-protocol/sdk-v2@0.15.24":
-  version "0.15.24"
-  resolved "https://registry.yarnpkg.com/@across-protocol/sdk-v2/-/sdk-v2-0.15.24.tgz#aa9528769e379c0faa770b032a524bd1707d4a9b"
-  integrity sha512-FdDcDEJ8z4nhqZ2ENOBJ4ZUZTMTZNrz8ADCaDgFdvUFK6J+Iv617PCiJBbJbQdC4n3HOsDB8Anovu+BBSd2ncQ==
+"@across-protocol/sdk-v2@0.16.4":
+  version "0.16.4"
+  resolved "https://registry.yarnpkg.com/@across-protocol/sdk-v2/-/sdk-v2-0.16.4.tgz#89ef2dc46fae9647ed46fda50a4de270f925f68e"
+  integrity sha512-mpPYiW4Kb2uQXmPOmsnCqer/iSuRuixSJM91svIxwA2z3kw1xk57G94LeVSufFPzVD/5urUS/sYm7Ea0z75WSw==
   dependencies:
     "@across-protocol/across-token" "^1.0.0"
     "@across-protocol/contracts-v2" "^2.4.3"
@@ -48,10 +48,12 @@
     "@pinata/sdk" "^2.1.0"
     "@uma/sdk" "^0.34.1"
     axios "^0.27.2"
+    big-number "^2.0.0"
     decimal.js "^10.3.1"
     ethers "^5.7.2"
     lodash.get "^4.4.2"
     superstruct "^0.15.4"
+    tslib "^2.6.2"

 "@arbitrum/sdk@^3.1.3":
   version "3.1.3"
@@ -4013,6 +4015,11 @@ big-integer@1.6.36:
   resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.36.tgz#78631076265d4ae3555c04f85e7d9d2f3a071a36"
   integrity sha512-t70bfa7HYEA1D9idDbmuv7YbsbVkQ+Hp+8KFSul4aE5e/i1bjCNIRYJZlA8Q8p0r9T8cF/RVvwUgRA//FydEyg==

+big-number@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/big-number/-/big-number-2.0.0.tgz#98548eda9393b445791670a213aed6f6dcd66ee3"
+  integrity sha512-C67Su0g+XsmXADX/UM9L/+xSbqqwq0D/qGJs2ky6Noy2FDuCZnC38ZSXODiaBvqWma2VYRZEXgm4H74PS6tCDg==
+
 big.js@^5.2.2:
   version "5.2.2"
   resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328"
@@ -13676,6 +13683,11 @@ tslib@^2.3.1, tslib@^2.5.0:
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913"
   integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==

+tslib@^2.6.2:
+  version "2.6.2"
+  resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
+  integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
+
 tsort@0.0.1:
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/tsort/-/tsort-0.0.1.tgz#e2280f5e817f8bf4275657fd0f9aebd44f5a2786"