From 8461f0af4c7ea33a83fe1dc6ad60881e284c2d87 Mon Sep 17 00:00:00 2001
From: Wil Wade
Date: Fri, 3 Nov 2023 14:27:17 -0400
Subject: [PATCH 1/6] Run Mode improvements for e2e tests and more (#1755)

# Goal
The goal of this PR is a small set of improvements:
- Update e2e readme
- Add additional debug logging defaults to the init.sh script for instant and interval sealing
- Add an interval param to the interval sealing init.sh runner
- Add serial run mode for e2e tests

Part of https://github.com/LibertyDSNP/frequency/issues/1731
---
 Makefile                 |  7 +++++--
 e2e/README.md            | 17 +++++++++++------
 e2e/package.json         |  1 +
 scripts/init.sh          | 39 ++++++++++-----------------------------
 scripts/run_e2e_tests.sh |  5 +++++
 5 files changed, 32 insertions(+), 37 deletions(-)

diff --git a/Makefile b/Makefile
index c2688e1e69..a70d849750 100644
--- a/Makefile
+++ b/Makefile
@@ -26,8 +26,8 @@ start-manual:
 start-interval:
 	./scripts/init.sh start-frequency-interval

-start-interval-1:
-	./scripts/init.sh start-frequency-interval-1
+start-interval-short:
+	./scripts/init.sh start-frequency-interval 1

 .PHONY: stop
 stop-relay:
@@ -236,6 +236,9 @@ test:
 e2e-tests:
 	./scripts/run_e2e_tests.sh

+e2e-tests-serial:
+	./scripts/run_e2e_tests.sh -c serial
+
 e2e-tests-only:
 	./scripts/run_e2e_tests.sh -s

diff --git a/e2e/README.md b/e2e/README.md
index 15850e69fc..75e8de807f 100644
--- a/e2e/README.md
+++ b/e2e/README.md
@@ -13,7 +13,7 @@ To run an individual test (after starting up a Frequency node):

 Note: this is for the "createMsa" tests

-`npm run test -- --grep createMsa`
+`npm run test:serial -- --grep createMsa`

 See below for running load tests.

@@ -58,18 +58,23 @@ with the following methods:
     }
     ```
 7. Extrinsic helper methods & returned events
-Many of the extrinsic helper methods pass an event type to the underlying Extrincic object that can be used to parse the targeted event type
-from the resulting stream and return it specifically. The `Extrinsic::signAndSend()` method returns an array of `[targetEvent, eventMap]` where
-`targetEvent` will be the parsed target event *if present*. The `eventMap` is a map of with the keys being `paletteName.eventName`.
+Many of the extrinsic helper methods pass an event type to the underlying Extrinsic object that can be used to parse the targeted event type
+from the resulting stream and return it specifically. The `Extrinsic::signAndSend()` method returns an object of `{ target, eventMap }` where
+`target` will be the parsed target event *if present*. The `eventMap` is a map with keys of the form `palletName.eventName`.
 A special key "defaultEvent" is added to also contain the target event, if present.
 Events may be used with type guards to access the event-specific structure.
Event types are in the `ApiRx.events..*` structure, and can be accessed like so: ``` const extrinsic = ExtrinsicHelper.createMsa(keypair); - const [targetEvent, eventMap] = await extrinsic.fundAndSend(); - if (targetEvent && ExtrinsicHelper.api.events.msa.MsaCreated.is(targetEvent)) { + const { target: targetEvent, eventMap } = await extrinsic.fundAndSend(); + if (targetEvent) { const msaId = targetEvent.data.msaId; } + // OR null coalescing + const maybeMsaId = targetEvent?.data.msaId; + + // OR Throw unless defined + const throwIfNotMsaId = targetEvent!.data.msaId; ``` Load Testing diff --git a/e2e/package.json b/e2e/package.json index 697efb6738..2e4004c00e 100644 --- a/e2e/package.json +++ b/e2e/package.json @@ -7,6 +7,7 @@ "clean": "rm -Rf dist", "build": "tsc -p ./tsconfig.json", "test": "mocha", + "test:serial": "mocha --parallel=false", "test:relay": "mocha --config .relay-chain.mocharc.json", "test:load": "mocha --config .load-test.mocharc.json", "format": "tsc --noEmit --pretty", diff --git a/scripts/init.sh b/scripts/init.sh index 8ffef84423..6c14f1df69 100755 --- a/scripts/init.sh +++ b/scripts/init.sh @@ -80,6 +80,9 @@ start-frequency-instant) ./target/debug/frequency \ --dev \ + --state-pruning archive \ + -lbasic-authorship=debug \ + -ltxpool=debug \ -lruntime=debug \ --sealing=instant \ --wasm-execution=compiled \ @@ -94,7 +97,9 @@ start-frequency-instant) ;; start-frequency-interval) - printf "\nBuilding Frequency without relay. Running with interval sealing ...\n" + defaultInterval=12 + interval=${2-$defaultInterval} + printf "\nBuilding Frequency without relay. Running with interval sealing with interval of $interval seconds...\n" cargo build --features frequency-no-relay parachain_dir=$base_dir/parachain/${para_id} @@ -107,36 +112,12 @@ start-frequency-interval) ./target/debug/frequency \ --dev \ + --state-pruning archive \ + -lbasic-authorship=debug \ + -ltxpool=debug \ -lruntime=debug \ --sealing=interval \ - --wasm-execution=compiled \ - --no-telemetry \ - --no-prometheus \ - --port $((30333)) \ - --rpc-port $((9944)) \ - --rpc-external \ - --rpc-cors all \ - --rpc-methods=Unsafe \ - --tmp - ;; - - start-frequency-interval-1) - printf "\nBuilding Frequency without relay. Running with interval sealing 1 second blocktime...\n" - cargo build --features frequency-no-relay - - parachain_dir=$base_dir/parachain/${para_id} - mkdir -p $parachain_dir; - - if [ "$2" == "purge" ]; then - echo "purging parachain..." 
- rm -rf $parachain_dir - fi - - ./target/debug/frequency \ - --dev \ - -lruntime=debug \ - --sealing=interval \ - --sealing-interval=1 \ + --sealing-interval=${interval} \ --wasm-execution=compiled \ --no-telemetry \ --no-prometheus \ diff --git a/scripts/run_e2e_tests.sh b/scripts/run_e2e_tests.sh index d937386206..2b20636828 100755 --- a/scripts/run_e2e_tests.sh +++ b/scripts/run_e2e_tests.sh @@ -65,6 +65,11 @@ case "${CHAIN}" in LOCAL_NODE_BLOCK_SEALING="manual" fi ;; + "serial") + PROVIDER_URL="ws://127.0.0.1:9944" + NPM_RUN_COMMAND="test:serial" + CHAIN_ENVIRONMENT="dev" + ;; "rococo_local") PROVIDER_URL="ws://127.0.0.1:9944" NPM_RUN_COMMAND="test:relay" From fef5364469e3d35b726df3b6207b22d6b1436d55 Mon Sep 17 00:00:00 2001 From: Wil Wade Date: Mon, 6 Nov 2023 10:59:51 -0500 Subject: [PATCH 2/6] More e2e Cleanups and Improvements (#1756) # Goal The goal of this PR is another pile of small e2e fixes and cleanups Part of #1731 # Discussion - Correctly name time release test - Move some msa tests into a separate file - Staking calls should always use `signAndSend` - Test handles for > 90 runs by using a random number generator - createMsa helper and using it for creating un-permissioned MSAs - Replace the rxjs with the apiPromise for rpc and state queries because they are faster and more reliable - getCapacity and DrainCapacity Improvements and cleanups - Serialize BigInts for Mocha (as sometimes it needs to serialize it) --- e2e/capacity/replenishment.test.ts | 43 +-- e2e/capacity/staking.test.ts | 54 ++-- e2e/capacity/transactions.test.ts | 34 +-- e2e/handles/handles.test.ts | 9 +- e2e/load-tests/signatureRegistry.test.ts | 3 +- e2e/messages/addIPFSMessage.test.ts | 10 +- e2e/msa/createMsa.test.ts | 212 +-------------- e2e/msa/msaKeyManagement.test.ts | 250 ++++++++++++++++++ e2e/scaffolding/extrinsicHelpers.ts | 23 +- e2e/scaffolding/funding.ts | 2 +- e2e/scaffolding/helpers.ts | 44 ++- e2e/scaffolding/rootHooks.ts | 5 + e2e/scenarios/grantDelegation.test.ts | 7 +- .../handleItemized.test.ts | 15 +- .../handlePaginated.test.ts | 13 +- e2e/sudo/sudo.test.ts | 5 +- .../{timeRelease.ts => timeRelease.test.ts} | 4 +- 17 files changed, 391 insertions(+), 342 deletions(-) create mode 100644 e2e/msa/msaKeyManagement.test.ts rename e2e/time-release/{timeRelease.ts => timeRelease.test.ts} (90%) diff --git a/e2e/capacity/replenishment.test.ts b/e2e/capacity/replenishment.test.ts index 01775c1b44..9caf23f539 100644 --- a/e2e/capacity/replenishment.test.ts +++ b/e2e/capacity/replenishment.test.ts @@ -14,7 +14,7 @@ import { DOLLARS, TokenPerCapacity, assertEvent, - getRemainingCapacity, + getCapacity, getNonce, } from "../scaffolding/helpers"; import { getFundingSource } from "../scaffolding/funding"; @@ -48,26 +48,27 @@ describe("Capacity Replenishment Testing: ", function () { const [stakeKeys, stakeProviderId] = await createAndStakeProvider("ReplFirst", totalStaked); const payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }) const call = ExtrinsicHelper.addOnChainMessage(stakeKeys, schemaId, payload); + let nonce = await getNonce(stakeKeys); // confirm that we start with a full tank await ExtrinsicHelper.runToBlock(await getNextEpochBlock()); - let remainingCapacity = (await getRemainingCapacity(stakeProviderId)).toBigInt(); + let remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); assert.equal(expectedCapacity, remainingCapacity, "Our expected capacity from staking is wrong"); - await call.payWithCapacity(-1); - remainingCapacity = (await 
getRemainingCapacity(stakeProviderId)).toBigInt(); + await call.payWithCapacity(nonce++); + remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); assert(expectedCapacity > remainingCapacity, "Our remaining capacity is much higher than expected."); const capacityPerCall = expectedCapacity - remainingCapacity; assert(remainingCapacity > capacityPerCall, "We don't have enough to make a second call"); // one more txn to deplete capacity more so this current remaining is different from when // we submitted the first message. - await call.payWithCapacity(-1); + await call.payWithCapacity(nonce++); await ExtrinsicHelper.runToBlock(await getNextEpochBlock()); // this should cause capacity to be refilled and then deducted by the cost of one message. - await call.payWithCapacity(-1); - let newRemainingCapacity = (await getRemainingCapacity(stakeProviderId)).toBigInt(); + await call.payWithCapacity(nonce++); + const newRemainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); // this should be the same as after sending the first message, since it is the first message after // the epoch. @@ -87,7 +88,7 @@ describe("Capacity Replenishment Testing: ", function () { let call = ExtrinsicHelper.addOnChainMessage(stakeKeys, schemaId, payload); // run until we can't afford to send another message. - await drainCapacity(call, stakeProviderId, stakeKeys); + await drainCapacity(call, stakeProviderId); await assert_capacity_call_fails_with_balance_too_low(call); }); @@ -103,17 +104,17 @@ describe("Capacity Replenishment Testing: ", function () { // new user/msa stakes to provider const userKeys = createKeys("userKeys"); await fundKeypair(fundingSource, userKeys, 5n * DOLLARS); - const { eventMap } = await ExtrinsicHelper.stake(userKeys, stakeProviderId, userStakeAmt).fundAndSend(fundingSource); + const { eventMap } = await ExtrinsicHelper.stake(userKeys, stakeProviderId, userStakeAmt).signAndSend(); assertEvent(eventMap, 'system.ExtrinsicSuccess'); const payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }) const call = ExtrinsicHelper.addOnChainMessage(stakeKeys, schemaId, payload); let expectedCapacity = (providerStakeAmt + userStakeAmt) / TokenPerCapacity; - const totalCapacity = (await getRemainingCapacity(stakeProviderId)).toBigInt(); + const totalCapacity = (await getCapacity(stakeProviderId)).totalCapacityIssued.toBigInt(); assert.equal(expectedCapacity, totalCapacity, `expected ${expectedCapacity} capacity, got ${totalCapacity}`); - const callCapacityCost = await drainCapacity(call, stakeProviderId, stakeKeys); + const callCapacityCost = await drainCapacity(call, stakeProviderId); // ensure provider can't send a message; they are out of capacity await assert_capacity_call_fails_with_balance_too_low(call); @@ -122,19 +123,19 @@ describe("Capacity Replenishment Testing: ", function () { let nextEpochBlock = await getNextEpochBlock(); await ExtrinsicHelper.runToBlock(nextEpochBlock); - let remainingCapacity = (await getRemainingCapacity(stakeProviderId)).toBigInt(); + let remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); // double check we still do not have enough to send another message assert(remainingCapacity < callCapacityCost); // user stakes tiny additional amount - const { eventMap: hasStaked } = await ExtrinsicHelper.stake(userKeys, stakeProviderId, userIncrementAmt).fundAndSend(fundingSource); + const { eventMap: hasStaked } = await ExtrinsicHelper.stake(userKeys, stakeProviderId, 
userIncrementAmt).signAndSend(); assertEvent(hasStaked, 'capacity.Staked'); // provider can now send a message const { eventMap: hasCapacityWithdrawn } = await call.payWithCapacity(-1); assertEvent(hasCapacityWithdrawn, 'capacity.CapacityWithdrawn'); - remainingCapacity = (await getRemainingCapacity(stakeProviderId)).toBigInt(); + remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); // show that capacity was replenished and then fee deducted. let approxExpected = providerStakeAmt + userStakeAmt + userIncrementAmt - callCapacityCost; assert(remainingCapacity <= approxExpected, `remainingCapacity = ${remainingCapacity.toString()}`); @@ -142,17 +143,17 @@ describe("Capacity Replenishment Testing: ", function () { }); }); -async function drainCapacity(call, stakeProviderId: u64, stakeKeys: KeyringPair): Promise { - const totalCapacity = (await getRemainingCapacity(stakeProviderId)).toBigInt(); +async function drainCapacity(call, stakeProviderId: u64): Promise { + const totalCapacity = (await getCapacity(stakeProviderId)).totalCapacityIssued.toBigInt(); + let nonce = await getNonce(call.keys); // Figure out the cost per call in Capacity - await call.payWithCapacity(-1); - let remainingCapacity = (await getRemainingCapacity(stakeProviderId)).toBigInt(); - const callCapacityCost = totalCapacity - remainingCapacity; + const { eventMap } = await call.payWithCapacity(nonce++); - // Run them out of funds, but don't flake just because it landed near an epoch boundary. + const callCapacityCost = eventMap["capacity.CapacityWithdrawn"].data.amount.toBigInt(); + + // // Run them out of funds, but don't flake just because it landed near an epoch boundary. await ExtrinsicHelper.runToBlock(await getNextEpochBlock()); const callsBeforeEmpty = Math.floor(Number(totalCapacity) / Number(callCapacityCost)); - const nonce = await getNonce(stakeKeys); await Promise.all(Array.from({ length: callsBeforeEmpty }, (_, k) => call.payWithCapacity(nonce + k))); return callCapacityCost; } diff --git a/e2e/capacity/staking.test.ts b/e2e/capacity/staking.test.ts index e043b16aa2..7b0da81838 100644 --- a/e2e/capacity/staking.test.ts +++ b/e2e/capacity/staking.test.ts @@ -7,10 +7,8 @@ import { createKeys, createMsaAndProvider, stakeToProvider, getNextEpochBlock, TEST_EPOCH_LENGTH, - CENTS, DOLLARS, createAndFundKeypair -} - from "../scaffolding/helpers"; -import { firstValueFrom } from "rxjs"; + CENTS, DOLLARS, createAndFundKeypair, getCapacity, createMsa +} from "../scaffolding/helpers"; import { isDev } from "../scaffolding/env"; import { getFundingSource } from "../scaffolding/funding"; @@ -45,7 +43,7 @@ describe("Capacity Staking Tests", function () { assert.equal(stakedAcctInfo.data.frozen, tokenMinStake, `expected ${tokenMinStake} frozen balance, got ${stakedAcctInfo.data.frozen}`) // Confirm that the capacity was added to the stakeProviderId using the query API - const capacityStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(stakeProviderId))).unwrap(); + const capacityStaked = await getCapacity(stakeProviderId); assert.equal(capacityStaked.remainingCapacity, capacityMin, `expected capacityLedger.remainingCapacity = 1CENT, got ${capacityStaked.remainingCapacity}`); assert.equal(capacityStaked.totalTokensStaked, tokenMinStake, `expected capacityLedger.totalTokensStaked = 1CENT, got ${capacityStaked.totalTokensStaked}`); assert.equal(capacityStaked.totalCapacityIssued, capacityMin, `expected capacityLedger.totalCapacityIssued = 1CENT, got 
${capacityStaked.totalCapacityIssued}`); @@ -54,12 +52,12 @@ describe("Capacity Staking Tests", function () { it("successfully unstakes the minimum amount", async function () { const stakeObj = ExtrinsicHelper.unstake(stakeKeys, stakeProviderId, tokenMinStake); - const { target: unStakeEvent } = await stakeObj.fundAndSend(fundingSource); + const { target: unStakeEvent } = await stakeObj.signAndSend(); assert.notEqual(unStakeEvent, undefined, "should return an UnStaked event"); assert.equal(unStakeEvent?.data.capacity, capacityMin, "should return an UnStaked event with 1 CENT reduced capacity"); // Confirm that the tokens were unstaked in the stakeProviderId account using the query API - const capacityStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(stakeProviderId))).unwrap(); + const capacityStaked = await getCapacity(stakeProviderId); assert.equal(capacityStaked.remainingCapacity, 0, "should return a capacityLedger with 0 remainingCapacity"); assert.equal(capacityStaked.totalTokensStaked, 0, "should return a capacityLedger with 0 total tokens staked"); assert.equal(capacityStaked.totalCapacityIssued, 0, "should return a capacityLedger with 0 capacity issued"); @@ -76,7 +74,7 @@ describe("Capacity Staking Tests", function () { await ExtrinsicHelper.runToBlock(newEpochBlock + TEST_EPOCH_LENGTH + 1); const withdrawObj = ExtrinsicHelper.withdrawUnstaked(stakeKeys); - const { target: withdrawEvent } = await withdrawObj.fundAndSend(fundingSource); + const { target: withdrawEvent } = await withdrawObj.signAndSend(); assert.notEqual(withdrawEvent, undefined, "should return a StakeWithdrawn event"); const amount = withdrawEvent!.data.amount; @@ -87,7 +85,7 @@ describe("Capacity Staking Tests", function () { assert.equal(unStakedAcctInfo.data.frozen, 0, "should return an account with 0 frozen balance") // Confirm that the staked capacity was removed from the stakeProviderId account using the query API - const capacityStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(stakeProviderId))).unwrap(); + const capacityStaked = await getCapacity(stakeProviderId); assert.equal(capacityStaked.remainingCapacity, 0, "should return a capacityLedger with 0 remainingCapacity"); assert.equal(capacityStaked.totalTokensStaked, 0, "should return a capacityLedger with 0 total tokens staked"); assert.equal(capacityStaked.totalCapacityIssued, 0, "should return a capacityLedger with 0 capacity issued"); @@ -99,7 +97,7 @@ describe("Capacity Staking Tests", function () { it("successfully increases the amount that was targeted to provider", async function () { await assert.doesNotReject(stakeToProvider(fundingSource, stakeKeys, stakeProviderId, tokenMinStake)); - const capacityStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(stakeProviderId))).unwrap(); + const capacityStaked = await getCapacity(stakeProviderId); assert.equal(capacityStaked.remainingCapacity, capacityMin, `expected capacityStaked.remainingCapacity = ${capacityMin}, got ${capacityStaked.remainingCapacity}`); assert.equal(capacityStaked.totalTokensStaked, tokenMinStake, @@ -135,13 +133,13 @@ describe("Capacity Staking Tests", function () { // Confirm that the staked capacity of the original stakeProviderId account is unchanged // stakeProviderId should still have 1M from first test case in this describe. 
// otherProvider should now have 1M - const origStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(stakeProviderId))).unwrap(); + const origStaked = await getCapacity(stakeProviderId); assert.equal(origStaked.remainingCapacity, expectedCapacity, `expected 1/50 CENT remaining capacity, got ${origStaked.remainingCapacity}`); assert.equal(origStaked.totalTokensStaked, 1n * CENTS, `expected 1 CENT staked, got ${origStaked.totalTokensStaked}`); assert.equal(origStaked.totalCapacityIssued, expectedCapacity, `expected 1/50 CENT capacity issued, got ${origStaked.totalCapacityIssued}`); // Confirm that the staked capacity was added to the otherProviderId account using the query API - const capacityStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(otherProviderId))).unwrap(); + const capacityStaked = await getCapacity(otherProviderId); assert.equal(capacityStaked.remainingCapacity, expectedCapacity, "should return a capacityLedger with 1/50M remainingCapacity"); assert.equal(capacityStaked.totalTokensStaked, 1n * CENTS, "should return a capacityLedger with 1M total tokens staked"); assert.equal(capacityStaked.totalCapacityIssued, expectedCapacity, "should return a capacityLedger with 1/50M capacity issued"); @@ -153,11 +151,11 @@ describe("Capacity Staking Tests", function () { // get the current account info let currentAcctInfo = await ExtrinsicHelper.getAccountInfo(additionalKeys.address); - const currentStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(stakeProviderId))).unwrap(); + const currentStaked = await getCapacity(stakeProviderId); await assert.doesNotReject(stakeToProvider(fundingSource, additionalKeys, stakeProviderId, tokenMinStake)); - const capacityStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(stakeProviderId))).unwrap(); + const capacityStaked = await getCapacity(stakeProviderId); assert.equal( capacityStaked.remainingCapacity, currentStaked.remainingCapacity.toBigInt() + capacityMin, @@ -189,13 +187,13 @@ describe("Capacity Staking Tests", function () { describe("when staking and targeting an InvalidTarget", async function () { it("fails to stake", async function () { - const maxMsaId = (await ExtrinsicHelper.getCurrentMsaIdentifierMaximum()).toNumber(); - const stakeAmount = 10n * CENTS; - const stakeKeys = await createAndFundKeypair(fundingSource, stakeAmount + 1n, "StakeKeys"); + const stakeKeys = await createAndFundKeypair(fundingSource, accountBalance, "StakeKeys"); + + const [notProviderMsaId] = await createMsa(fundingSource); - const failStakeObj = ExtrinsicHelper.stake(stakeKeys, maxMsaId + 1, stakeAmount); - await assert.rejects(failStakeObj.fundAndSend(fundingSource), { name: "InvalidTarget" }); + const failStakeObj = ExtrinsicHelper.stake(stakeKeys, notProviderMsaId, stakeAmount); + await assert.rejects(failStakeObj.signAndSend(), { name: "InvalidTarget" }); }); }); @@ -206,28 +204,28 @@ describe("Capacity Staking Tests", function () { let stakeAmount = 1500n; const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, stakeAmount); - await assert.rejects(failStakeObj.fundAndSend(fundingSource), { name: "InsufficientStakingAmount" }); + await assert.rejects(failStakeObj.signAndSend(), { name: "InsufficientStakingAmount" }); }); }); describe("when attempting to stake a zero amount", async function () { it("fails to stake and errors ZeroAmountNotAllowed", async function () { let stakingKeys = createKeys("stakingKeys"); - let providerId = 
await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", ); + let providerId = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", 10n * CENTS); const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, 0); - await assert.rejects(failStakeObj.fundAndSend(fundingSource), { name: "ZeroAmountNotAllowed" }); + await assert.rejects(failStakeObj.signAndSend(), { name: "ZeroAmountNotAllowed" }); }); }); describe("when staking an amount and account balance is too low", async function () { it("fails to stake and errors BalanceTooLowtoStake", async function () { let stakingKeys = createKeys("stakingKeys"); - let providerId = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys"); + let providerId = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", 10n * CENTS); let stakingAmount = 1n * DOLLARS; const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, stakingAmount); - await assert.rejects(failStakeObj.fundAndSend(fundingSource), { name: "BalanceTooLowtoStake" }); + await assert.rejects(failStakeObj.signAndSend(), { name: "BalanceTooLowtoStake" }); }); }); @@ -245,14 +243,14 @@ describe("Capacity Staking Tests", function () { describe("when attempting to unstake a Zero amount", async function () { it("errors with UnstakedAmountIsZero", async function () { const failUnstakeObj = ExtrinsicHelper.unstake(unstakeKeys, providerId, 0); - await assert.rejects(failUnstakeObj.fundAndSend(fundingSource), { name: "UnstakedAmountIsZero" }); + await assert.rejects(failUnstakeObj.signAndSend(), { name: "UnstakedAmountIsZero" }); }); }) describe("when account has not staked", async function () { it("errors with StakingAccountNotFound", async function () { const failUnstakeObj = ExtrinsicHelper.unstake(unstakeKeys, providerId, tokenMinStake); - await assert.rejects(failUnstakeObj.fundAndSend(fundingSource), { name: "NotAStakingAccount" }); + await assert.rejects(failUnstakeObj.signAndSend(), { name: "NotAStakingAccount" }); }); }); }); @@ -264,11 +262,11 @@ describe("Capacity Staking Tests", function () { let providerId: u64 = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", accountBalance); const stakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, tokenMinStake); - const { target: stakeEvent } = await stakeObj.fundAndSend(fundingSource); + const { target: stakeEvent } = await stakeObj.signAndSend(); assert.notEqual(stakeEvent, undefined, "should return a Stake event"); const withdrawObj = ExtrinsicHelper.withdrawUnstaked(stakingKeys); - await assert.rejects(withdrawObj.fundAndSend(fundingSource), { name: "NoUnstakedTokensAvailable" }); + await assert.rejects(withdrawObj.signAndSend(), { name: "NoUnstakedTokensAvailable" }); }); }) }); diff --git a/e2e/capacity/transactions.test.ts b/e2e/capacity/transactions.test.ts index eb9cce64a1..daeea303d9 100644 --- a/e2e/capacity/transactions.test.ts +++ b/e2e/capacity/transactions.test.ts @@ -7,7 +7,6 @@ import { u8aWrapBytes } from "@polkadot/util"; import assert from "assert"; import { AddKeyData, AddProviderPayload, EventMap, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { base64 } from 'multiformats/bases/base64'; -import { firstValueFrom } from "rxjs"; import { SchemaId, MessageResponse } from "@frequency-chain/api-augment/interfaces"; import { createKeys, @@ -37,7 +36,9 @@ import { getOrCreateAvroChatMessagePaginatedSchema, generateItemizedSignaturePayloadV2, generatePaginatedUpsertSignaturePayloadV2, - 
generatePaginatedDeleteSignaturePayloadV2 + generatePaginatedDeleteSignaturePayloadV2, + getCapacity, + getTestHandle } from "../scaffolding/helpers"; import { FeeDetails } from "@polkadot/types/interfaces"; import { ipfsCid } from "../messages/ipfs"; @@ -92,7 +93,7 @@ describe("Capacity Transactions", function () { await assertAddNewKey(capacityKeys, addKeyPayload, newControlKeypair); // attempt a capacity transaction using the new unfunded key: claimHandle - const handle = "test_handle"; + const handle = getTestHandle(); const expiration = (await getBlockNumber()) + 10; const handle_vec = new Bytes(ExtrinsicHelper.api.registry, handle); const handlePayload = { @@ -176,12 +177,10 @@ describe("Capacity Transactions", function () { const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, capacityKeys, signPayloadSr25519(delegatorKeys, addProviderData), payload); - const capacityStakedInitial = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(capacityProvider))).unwrap(); - ({ eventMap } = await grantDelegationOp.payWithCapacity()); // Check for remaining capacity to be reduced - const capacityStaked = (await firstValueFrom(ExtrinsicHelper.api.query.capacity.capacityLedger(capacityProvider))).unwrap(); + const capacityStaked = await getCapacity(capacityProvider); assertEvent(eventMap, "system.ExtrinsicSuccess"); assertEvent(eventMap, "capacity.CapacityWithdrawn"); @@ -209,7 +208,7 @@ describe("Capacity Transactions", function () { }) beforeEach(async function () { - starting_block = (await firstValueFrom(ExtrinsicHelper.api.rpc.chain.getHeader())).number.toNumber(); + starting_block = (await ExtrinsicHelper.apiPromise.rpc.chain.getHeader()).number.toNumber(); await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); }); @@ -232,7 +231,7 @@ describe("Capacity Transactions", function () { const { eventMap } = await call.payWithCapacity(); assertEvent(eventMap, "capacity.CapacityWithdrawn"); assertEvent(eventMap, "messages.MessagesStored"); - const get = await firstValueFrom(ExtrinsicHelper.api.rpc.messages.getBySchemaId( + const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId( dummySchemaId, { from_block: starting_block, @@ -240,7 +239,7 @@ describe("Capacity Transactions", function () { to_block: starting_block + 999, page_size: 999 } - )); + ); const response: MessageResponse = get.content[get.content.length - 1]; assert.equal(response.payload, "0xdeadbeef", "payload should be 0xdeadbeef"); }); @@ -485,7 +484,7 @@ describe("Capacity Transactions", function () { it("successfully pays with Capacity for eligible transaction - claimHandle", async function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); - const handle = "test_handle"; + const handle = getTestHandle(); const expiration = (await getBlockNumber()) + 10; const handle_vec = new Bytes(ExtrinsicHelper.api.registry, handle); const handlePayload = { @@ -668,7 +667,7 @@ describe("Capacity Transactions", function () { addProviderPayload ); - const handle = "test_handle"; + const handle = getTestHandle(); const handle_vec = new Bytes(ExtrinsicHelper.api.registry, handle); const expiration = (await getBlockNumber()) + 5; const handlePayload = { @@ -772,10 +771,10 @@ describe("Capacity Transactions", function () { ); // Actual weights and fee - const { weight: { refTime, proofSize }, } = await firstValueFrom(ExtrinsicHelper.api.call.transactionPaymentApi.queryInfo(call.toHex(), 
0)); - const weightFee = await firstValueFrom(ExtrinsicHelper.api.call.transactionPaymentApi.queryWeightToFee({refTime, proofSize})); + const { weight: { refTime, proofSize }, } = await ExtrinsicHelper.apiPromise.call.transactionPaymentApi.queryInfo(call.toHex(), 0); + const weightFee = await ExtrinsicHelper.apiPromise.call.transactionPaymentApi.queryWeightToFee({refTime, proofSize}); - const feeDetails: FeeDetails = await firstValueFrom(ExtrinsicHelper.api.rpc.frequencyTxPayment.computeCapacityFeeDetails(call.toHex(), null)); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(call.toHex(), null); assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); assert.notEqual(feeDetails.inclusionFee, undefined, "should have returned a partialFee"); assert(feeDetails.inclusionFee.isSome, "should have returned a partialFee"); @@ -796,7 +795,7 @@ describe("Capacity Transactions", function () { addProviderPayload ); const tx = ExtrinsicHelper.api.tx.frequencyTxPayment.payWithCapacity(insideTx); - const feeDetails: FeeDetails = await firstValueFrom(ExtrinsicHelper.api.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null)); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null); assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); assert.notEqual(feeDetails.inclusionFee, undefined, "should have returned a partialFee"); assert(feeDetails.inclusionFee.isSome, "should have returned a partialFee"); @@ -810,7 +809,7 @@ describe("Capacity Transactions", function () { it("Returns nothing when requesting capacity cost of a non-capacity transaction", async function () { const tx = ExtrinsicHelper.api.tx.msa.retireMsa(); - const feeDetails: FeeDetails = await firstValueFrom(ExtrinsicHelper.api.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null)); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null); assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); assert(feeDetails.inclusionFee.isNone, "should have returned something for the inclusionFee"); }); @@ -818,7 +817,8 @@ describe("Capacity Transactions", function () { it("Returns nothing when requesting pay with capacity call with a non-capacity transaction", async function () { const insideTx = ExtrinsicHelper.api.tx.msa.retireMsa(); const tx = ExtrinsicHelper.api.tx.frequencyTxPayment.payWithCapacity(insideTx); - const feeDetails: FeeDetails = await firstValueFrom(ExtrinsicHelper.api.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null)); assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null); + assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); assert(feeDetails.inclusionFee.isNone, "should have returned something for the inclusionFee"); }); }); diff --git a/e2e/handles/handles.test.ts b/e2e/handles/handles.test.ts index 95d92400c9..5b8ce0033d 100644 --- a/e2e/handles/handles.test.ts +++ b/e2e/handles/handles.test.ts @@ -1,7 +1,7 @@ // Handles test suite import "@frequency-chain/api-augment"; import assert from "assert"; -import { createDelegator } from "../scaffolding/helpers"; +import { createDelegator, getTestHandle } from "../scaffolding/helpers"; import { 
KeyringPair } from "@polkadot/keyring/types"; import { MessageSourceId } from "@frequency-chain/api-augment/interfaces"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; @@ -27,7 +27,7 @@ describe("🤝 Handles", () => { describe("@Claim Handle", () => { it("should be able to claim a handle", async function () { - let handle = "test_handle"; + const handle = getTestHandle(); let currentBlock = await getBlockNumber(); const handle_vec = new Bytes(ExtrinsicHelper.api.registry, handle); const payload = { @@ -38,8 +38,7 @@ describe("🤝 Handles", () => { const claimHandle = ExtrinsicHelper.claimHandle(msaOwnerKeys, claimHandlePayload); const { target: event } = await claimHandle.fundAndSend(fundingSource); assert.notEqual(event, undefined, "claimHandle should return an event"); - handle = event!.data.handle.toString(); - assert.notEqual(handle, "", "claimHandle should emit a handle"); + assert.notEqual(event!.data.handle.toString(), "", "claimHandle should emit a handle"); }); }); @@ -70,7 +69,7 @@ describe("🤝 Handles", () => { /// Check chain to getNextSuffixesForHandle it("should be able to claim a handle and check suffix (=suffix_assumed if available on chain)", async function () { - const handle = "test-" + Math.random().toFixed(10).toString().replaceAll("0.", ""); + const handle = getTestHandle(); let handle_bytes = new Bytes(ExtrinsicHelper.api.registry, handle); /// Get presumptive suffix from chain (rpc) let suffixes_response = await ExtrinsicHelper.getNextSuffixesForHandle(handle, 10); diff --git a/e2e/load-tests/signatureRegistry.test.ts b/e2e/load-tests/signatureRegistry.test.ts index 0af2ae6247..496a82a16e 100644 --- a/e2e/load-tests/signatureRegistry.test.ts +++ b/e2e/load-tests/signatureRegistry.test.ts @@ -3,7 +3,6 @@ import assert from "assert"; import { createKeys, signPayloadSr25519, getBlockNumber, generateAddKeyPayload, createAndFundKeypair, getNonce, getExistentialDeposit } from "../scaffolding/helpers"; import { KeyringPair } from "@polkadot/keyring/types"; import { AddKeyData, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { firstValueFrom } from "rxjs"; import { u64, Option } from "@polkadot/types"; import { getFundingSource } from "../scaffolding/funding"; @@ -121,7 +120,7 @@ async function generateMsas(count: number = 1): Promise { async function createBlock(wait: number = 300) { // Wait ms before creating the block to give the chain time to process the transaction pool await new Promise(r => setTimeout(r, wait)); - return firstValueFrom(ExtrinsicHelper.api.rpc.engine.createBlock(true, true)); + return ExtrinsicHelper.apiPromise.rpc.engine.createBlock(true, true); } function getMsaFromKey(keys: KeyringPair): Promise> { diff --git a/e2e/messages/addIPFSMessage.test.ts b/e2e/messages/addIPFSMessage.test.ts index a77e85d331..1f8eb9339d 100644 --- a/e2e/messages/addIPFSMessage.test.ts +++ b/e2e/messages/addIPFSMessage.test.ts @@ -8,7 +8,6 @@ import assert from "assert"; import { createAndFundKeypair } from "../scaffolding/helpers"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { u16 } from "@polkadot/types"; -import { firstValueFrom } from "rxjs"; import { MessageResponse } from "@frequency-chain/api-augment/interfaces"; import { ipfsCid } from "./ipfs"; import { getFundingSource } from "../scaffolding/funding"; @@ -27,7 +26,7 @@ describe("Add Offchain Message", function () { let starting_block: number; before(async function () { - starting_block = (await 
firstValueFrom(ExtrinsicHelper.api.rpc.chain.getHeader())).number.toNumber(); + starting_block = (await ExtrinsicHelper.apiPromise.rpc.chain.getHeader()).number.toNumber(); const cid = await ipfsCid(ipfs_payload_data, './e2e_test.txt'); ipfs_cid_64 = cid.toString(base64); @@ -52,6 +51,7 @@ describe("Add Offchain Message", function () { it('should fail if insufficient funds', async function () { await assert.rejects(ExtrinsicHelper.addIPFSMessage(keys, schemaId, ipfs_cid_64, ipfs_payload_len).signAndSend(), { + name: 'RpcError', message: /Inability to pay some fees/, }); }); @@ -101,7 +101,7 @@ describe("Add Offchain Message", function () { }); it("should successfully retrieve added message and returned CID should have Base32 encoding", async function () { - const f = await firstValueFrom(ExtrinsicHelper.api.rpc.messages.getBySchemaId(schemaId, { from_block: starting_block, from_index: 0, to_block: starting_block + 999, page_size: 999 })); + const f = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(schemaId, { from_block: starting_block, from_index: 0, to_block: starting_block + 999, page_size: 999 }); const response: MessageResponse = f.content[f.content.length - 1]; const cid = Buffer.from(response.cid.unwrap()).toString(); assert.equal(cid, ipfs_cid_32, 'returned CID should match base32-encoded CID'); @@ -116,14 +116,14 @@ describe("Add Offchain Message", function () { assert.deepEqual(event?.data.schemaId, dummySchemaId, 'schema ids should be equal'); assert.notEqual(event?.data.blockNumber, undefined, 'should have a block number'); - const get = await firstValueFrom(ExtrinsicHelper.api.rpc.messages.getBySchemaId( + const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId( dummySchemaId, { from_block: starting_block, from_index: 0, to_block: starting_block + 999, page_size: 999 } - )); + ); const response: MessageResponse = get.content[get.content.length - 1]; assert.equal(response.payload, "0xdeadbeef", "payload should be 0xdeadbeef"); }); diff --git a/e2e/msa/createMsa.test.ts b/e2e/msa/createMsa.test.ts index b33816097e..900093eef2 100644 --- a/e2e/msa/createMsa.test.ts +++ b/e2e/msa/createMsa.test.ts @@ -11,8 +11,6 @@ describe("Create Accounts", function () { const fundingSource = getFundingSource("msa-create-msa"); let keys: KeyringPair; - let msaId: u64; - let authorizedKeys: KeyringPair[] = []; before(async function () { keys = await createAndFundKeypair(fundingSource, 50_000_000n); @@ -28,7 +26,7 @@ describe("Create Accounts", function () { assert.notEqual(msaCreatedEvent, undefined, "should have returned an MsaCreated event"); assert.notEqual(chainEvents["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - msaId = msaCreatedEvent!.data.msaId; + assert.notEqual(msaCreatedEvent?.data.msaId, undefined, "Failed to get the msaId from the event"); }); it("should fail to create an MSA for a keypair already associated with an MSA", async function () { @@ -38,212 +36,4 @@ describe("Create Accounts", function () { }); }); }); - - describe("addPublicKeyToMsa", function () { - let badKeys: KeyringPair; - let defaultPayload: AddKeyData = {}; - let payload: AddKeyData; - let ownerSig: Sr25519Signature; - let newSig: Sr25519Signature; - let badSig: Sr25519Signature; - let addKeyData: Codec; - - before(async function () { - authorizedKeys.push(await createAndFundKeypair(fundingSource, 50_000_000n)); - badKeys = createKeys(); - defaultPayload.msaId = msaId; - defaultPayload.newPublicKey = 
authorizedKeys[0].publicKey; - }); - - beforeEach(async function () { - payload = await generateAddKeyPayload(defaultPayload); - }); - - it("should fail to add public key if origin is not one of the signers of the payload (MsaOwnershipInvalidSignature)", async function () { - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - badSig = signPayloadSr25519(badKeys, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, badSig, newSig, payload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'MsaOwnershipInvalidSignature', - }); - }) - - it("should fail to add public key if new keypair is not one of the signers of the payload (NewKeyOwnershipInvalidSignature)", async function () { - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - ownerSig = signPayloadSr25519(keys, addKeyData); - badSig = signPayloadSr25519(badKeys, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, badSig, payload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'NewKeyOwnershipInvalidSignature', - }); - }); - - it("should fail to add public key if origin does not have an MSA (NoKeyExists)", async function () { - const newOriginKeys = await createAndFundKeypair(fundingSource, 50_000_000n); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - ownerSig = signPayloadSr25519(newOriginKeys, addKeyData); - newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(newOriginKeys, ownerSig, newSig, payload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'NoKeyExists', - }); - }) - - it("should fail to add public key if origin does not own MSA (NotMsaOwner)", async function () { - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - msaId: new u64(ExtrinsicHelper.api.registry, 999), // If we create more than 999 MSAs in our test suites, this will fail - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'NotMsaOwner', - }); - }); - - it("should fail if expiration has passed (ProofHasExpired)", async function () { - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber(), - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'ProofHasExpired', - }); - }) - - it("should fail if expiration is not yet valid (ProofNotYetValid)", async function () { - const maxMortality = ExtrinsicHelper.api.consts.msa.mortalityWindowSize.toNumber(); - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber() + maxMortality + 999, - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = 
signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'ProofNotYetValid', - }); - }) - - it("should successfully add a new public key to an existing MSA & disallow duplicate signed payload submission (SignatureAlreadySubmitted)", async function () { - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - - ownerSig = signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); - - const { target: publicKeyEvents } = await addPublicKeyOp.fundAndSend(fundingSource); - - assert.notEqual(publicKeyEvents, undefined, 'should have added public key'); - - await assert.rejects(addPublicKeyOp.fundAndSend(fundingSource), "should reject sending the same signed payload twice"); - }); - - it("should fail if attempting to add the same key more than once (KeyAlreadyRegistered)", async function () { - const addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - - const ownerSig = signPayloadSr25519(keys, addKeyData); - const newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); - - await assert.rejects(addPublicKeyOp.fundAndSend(fundingSource), { - name: 'KeyAlreadyRegistered', - }) - }); - - it("should allow new keypair to act for/on MSA", async function () { - const additionalKeys = createKeys(); - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - newPublicKey: additionalKeys.publicKey, - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = signPayloadSr25519(authorizedKeys[0], addKeyData); - newSig = signPayloadSr25519(additionalKeys, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(authorizedKeys[0], ownerSig, newSig, newPayload); - const { target: event } = await op.fundAndSend(fundingSource); - assert.notEqual(event, undefined, 'should have added public key'); - authorizedKeys.push(additionalKeys); - }); - }); - - describe("retire/delete keys", function () { - let providerKeys: KeyringPair; - - before(async function () { - providerKeys = await createAndFundKeypair(fundingSource, 50_000_000n); - const createMsaOp = ExtrinsicHelper.createMsa(providerKeys); - await createMsaOp.fundAndSend(fundingSource); - - const createProviderOp = ExtrinsicHelper.createProvider(providerKeys, 'Test Provider'); - await createProviderOp.fundAndSend(fundingSource); - }); - - it("should disallow retiring MSA belonging to a provider", async function () { - const retireOp = ExtrinsicHelper.retireMsa(providerKeys); - await assert.rejects(retireOp.fundAndSend(fundingSource), { - name: 'RpcError', - message: /Custom error: 2/, - }); - }); - - it("should disallow retiring an MSA with more than one key authorized", async function () { - const retireOp = ExtrinsicHelper.retireMsa(keys); - await assert.rejects(retireOp.fundAndSend(fundingSource), { - name: 'RpcError', - message: /Custom error: 3/, - }); - }); - - it("should fail to delete public key for self", async function () { - const op = ExtrinsicHelper.deletePublicKey(keys, keys.publicKey); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'RpcError', - message: /Custom error: 4/, - }); - }); 
- - it("should fail to delete key if not authorized for key's MSA", async function () { - const op = ExtrinsicHelper.deletePublicKey(providerKeys, keys.publicKey); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'RpcError', - message: /Custom error: 5/, - }); - }); - - it("should test for 'NoKeyExists' error", async function () { - const key = createKeys("nothing key"); - const op = ExtrinsicHelper.deletePublicKey(keys, key.publicKey); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'RpcError', - message: /Custom error: 1/, - }); - }); - - it("should delete all other authorized keys", async function () { - for (const key of authorizedKeys) { - const op = ExtrinsicHelper.deletePublicKey(keys, key.publicKey); - const { target: event } = await op.fundAndSend(fundingSource); - assert.notEqual(event, undefined, "should have returned PublicKeyDeleted event"); - }; - authorizedKeys = []; - }); - - it("should allow retiring MSA after additional keys have been deleted", async function () { - const retireMsaOp = ExtrinsicHelper.retireMsa(keys); - const { target: event, eventMap } = await retireMsaOp.fundAndSend(fundingSource); - - assert.notEqual(eventMap["msa.PublicKeyDeleted"], undefined, 'should have deleted public key (retired)'); - assert.notEqual(event, undefined, 'should have retired msa'); - }); - - }); }) diff --git a/e2e/msa/msaKeyManagement.test.ts b/e2e/msa/msaKeyManagement.test.ts new file mode 100644 index 0000000000..84972d1beb --- /dev/null +++ b/e2e/msa/msaKeyManagement.test.ts @@ -0,0 +1,250 @@ +import "@frequency-chain/api-augment"; +import assert from "assert"; +import { createKeys, createAndFundKeypair, signPayloadSr25519, Sr25519Signature, generateAddKeyPayload, createProviderKeysAndId, CENTS } from "../scaffolding/helpers"; +import { KeyringPair } from "@polkadot/keyring/types"; +import { AddKeyData, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; +import { u64 } from "@polkadot/types"; +import { Codec } from "@polkadot/types/types"; +import { getFundingSource } from "../scaffolding/funding"; + +describe("MSA Key management", function () { + const fundingSource = getFundingSource("msa-key-management"); + + describe("addPublicKeyToMsa", function () { + let keys: KeyringPair; + let msaId: u64; + let secondaryKey: KeyringPair; + let defaultPayload: AddKeyData = {}; + let payload: AddKeyData; + let ownerSig: Sr25519Signature; + let newSig: Sr25519Signature; + let badSig: Sr25519Signature; + let addKeyData: Codec; + + before(async function () { + // Setup an MSA with one key and a secondary funded key + keys = await createAndFundKeypair(fundingSource, 5n * CENTS); + const { target } = await ExtrinsicHelper.createMsa(keys).signAndSend(); + assert.notEqual(target?.data.msaId, undefined, "MSA Id not in expected event"); + msaId = target!.data.msaId; + + secondaryKey = await createAndFundKeypair(fundingSource, 5n * CENTS); + + // Default payload making it easier to test `addPublicKeyToMsa` + defaultPayload.msaId = msaId; + defaultPayload.newPublicKey = secondaryKey.publicKey; + }); + + beforeEach(async function () { + payload = await generateAddKeyPayload(defaultPayload); + }); + + it("should fail to add public key if origin is not one of the signers of the payload (MsaOwnershipInvalidSignature)", async function () { + const badKeys: KeyringPair = createKeys(); + addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + badSig = signPayloadSr25519(badKeys, 
addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, badSig, newSig, payload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'MsaOwnershipInvalidSignature', + }); + }) + + it("should fail to add public key if new keypair is not one of the signers of the payload (NewKeyOwnershipInvalidSignature)", async function () { + const badKeys: KeyringPair = createKeys() + addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); + ownerSig = signPayloadSr25519(keys, addKeyData); + badSig = signPayloadSr25519(badKeys, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, badSig, payload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'NewKeyOwnershipInvalidSignature', + }); + }); + + it("should fail to add public key if origin does not have an MSA (NoKeyExists)", async function () { + const newOriginKeys = await createAndFundKeypair(fundingSource, 50_000_000n); + addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); + ownerSig = signPayloadSr25519(newOriginKeys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(newOriginKeys, ownerSig, newSig, payload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'NoKeyExists', + }); + }) + + it("should fail to add public key if origin does not own MSA (NotMsaOwner)", async function () { + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + msaId: new u64(ExtrinsicHelper.api.registry, 999), // If we create more than 999 MSAs in our test suites, this will fail + }); + addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); + ownerSig = signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'NotMsaOwner', + }); + }); + + it("should fail if expiration has passed (ProofHasExpired)", async function () { + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber(), + }); + addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); + ownerSig = signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'ProofHasExpired', + }); + }) + + it("should fail if expiration is not yet valid (ProofNotYetValid)", async function () { + const maxMortality = ExtrinsicHelper.api.consts.msa.mortalityWindowSize.toNumber(); + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber() + maxMortality + 999, + }); + addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); + ownerSig = signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'ProofNotYetValid', + }); + }) + + it("should successfully add a new public key to an existing MSA & disallow duplicate signed payload submission (SignatureAlreadySubmitted)", async function () { + 
addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); + + ownerSig = signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); + + const { target: publicKeyEvents } = await addPublicKeyOp.fundAndSend(fundingSource); + + assert.notEqual(publicKeyEvents, undefined, 'should have added public key'); + + await assert.rejects(addPublicKeyOp.fundAndSend(fundingSource), "should reject sending the same signed payload twice"); + }); + + it("should fail if attempting to add the same key more than once (KeyAlreadyRegistered)", async function () { + const addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); + + const ownerSig = signPayloadSr25519(keys, addKeyData); + const newSig = signPayloadSr25519(secondaryKey, addKeyData); + const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); + + await assert.rejects(addPublicKeyOp.fundAndSend(fundingSource), { + name: 'KeyAlreadyRegistered', + }) + }); + + it("should allow new keypair to act for/on MSA", async function () { + const thirdKey = createKeys(); + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + newPublicKey: thirdKey.publicKey, + }); + addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); + ownerSig = signPayloadSr25519(secondaryKey, addKeyData); + newSig = signPayloadSr25519(thirdKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(secondaryKey, ownerSig, newSig, newPayload); + const { target: event } = await op.fundAndSend(fundingSource); + assert.notEqual(event, undefined, 'should have added public key'); + + // Cleanup + await assert.doesNotReject(ExtrinsicHelper.deletePublicKey(keys, thirdKey.publicKey).signAndSend()); + }); + }); + + describe("provider msa", function () { + + it("should disallow retiring MSA belonging to a provider", async function () { + const [providerKeys] = await createProviderKeysAndId(fundingSource); + const retireOp = ExtrinsicHelper.retireMsa(providerKeys); + await assert.rejects(retireOp.signAndSend(), { + name: 'RpcError', + message: /Custom error: 2/, + }); + }); + }); + + describe("delete keys and retire", function () { + let keys: KeyringPair; + let secondaryKey: KeyringPair; + let msaId: u64; + + before(async function () { + // Generates a msa with two control keys + keys = await createAndFundKeypair(fundingSource, 50_000_000n); + secondaryKey = await createAndFundKeypair(fundingSource, 50_000_000n); + + const { target } = await ExtrinsicHelper.createMsa(keys).signAndSend(); + assert.notEqual(target?.data.msaId, undefined, "MSA Id not in expected event"); + msaId = target!.data.msaId; + + const payload = await generateAddKeyPayload({ + msaId, + newPublicKey: secondaryKey.publicKey, + }); + const payloadData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); + const ownerSig = signPayloadSr25519(keys, payloadData); + const newSig = signPayloadSr25519(secondaryKey, payloadData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); + const { target: event } = await op.signAndSend(); + assert.notEqual(event, undefined, 'should have added public key'); + }); + + it("should disallow retiring an MSA with more than one key authorized", async function () { + const retireOp = ExtrinsicHelper.retireMsa(keys); + await assert.rejects(retireOp.signAndSend(), { + name: 'RpcError', 
+ message: /Custom error: 3/, + }); + }); + + it("should fail to delete public key for self", async function () { + const op = ExtrinsicHelper.deletePublicKey(keys, keys.publicKey); + await assert.rejects(op.signAndSend(), { + name: 'RpcError', + message: /Custom error: 4/, + }); + }); + + it("should fail to delete key if not authorized for key's MSA", async function () { + const [providerKeys] = await createProviderKeysAndId(fundingSource); + + const op = ExtrinsicHelper.deletePublicKey(providerKeys, keys.publicKey); + await assert.rejects(op.signAndSend(), { + name: 'RpcError', + message: /Custom error: 5/, + }); + }); + + it("should test for 'NoKeyExists' error", async function () { + const key = createKeys("nothing key"); + const op = ExtrinsicHelper.deletePublicKey(keys, key.publicKey); + await assert.rejects(op.signAndSend(), { + name: 'RpcError', + message: /Custom error: 1/, + }); + }); + + it("should delete secondary key", async function () { + const op = ExtrinsicHelper.deletePublicKey(keys, secondaryKey.publicKey); + const { target: event } = await op.signAndSend(); + assert.notEqual(event, undefined, "should have returned PublicKeyDeleted event"); + }); + + it("should allow retiring MSA after additional keys have been deleted", async function () { + const retireMsaOp = ExtrinsicHelper.retireMsa(keys); + const { target: event, eventMap } = await retireMsaOp.signAndSend(); + + assert.notEqual(eventMap["msa.PublicKeyDeleted"], undefined, 'should have deleted public key (retired)'); + assert.notEqual(event, undefined, 'should have retired msa'); + }); + + }); +}) diff --git a/e2e/scaffolding/extrinsicHelpers.ts b/e2e/scaffolding/extrinsicHelpers.ts index fc8f4a9ebb..2a653349bf 100644 --- a/e2e/scaffolding/extrinsicHelpers.ts +++ b/e2e/scaffolding/extrinsicHelpers.ts @@ -209,7 +209,7 @@ export class ExtrinsicHelper { } public static getLastBlock(): Promise { - return firstValueFrom(ExtrinsicHelper.api.rpc.chain.getBlock()); + return ExtrinsicHelper.apiPromise.rpc.chain.getBlock(); } /** Query Extrinsics */ @@ -221,10 +221,6 @@ export class ExtrinsicHelper { return ExtrinsicHelper.apiPromise.query.schemas.governanceSchemaModelMaxBytes(); } - public static getCurrentMsaIdentifierMaximum() { - return ExtrinsicHelper.apiPromise.query.msa.currentMsaIdentifierMaximum(); - } - /** Balance Extrinsics */ public static transferFunds(source: KeyringPair, dest: KeyringPair, amount: Compact | AnyNumber) { return new Extrinsic(() => ExtrinsicHelper.api.tx.balances.transferKeepAlive(dest.address, amount), source, ExtrinsicHelper.api.events.balances.Transfer); @@ -251,7 +247,7 @@ export class ExtrinsicHelper { /** Get Schema RPC */ public static getSchema(schemaId: u16): Promise> { - return firstValueFrom(ExtrinsicHelper.api.rpc.schemas.getBySchemaId(schemaId)); + return ExtrinsicHelper.apiPromise.rpc.schemas.getBySchemaId(schemaId); } /** MSA Extrinsics */ @@ -343,11 +339,11 @@ export class ExtrinsicHelper { } public static getItemizedStorage(msa_id: MessageSourceId, schemaId: any): Promise { - return firstValueFrom(ExtrinsicHelper.api.rpc.statefulStorage.getItemizedStorage(msa_id, schemaId)); + return ExtrinsicHelper.apiPromise.rpc.statefulStorage.getItemizedStorage(msa_id, schemaId); } public static getPaginatedStorage(msa_id: MessageSourceId, schemaId: any): Promise> { - return firstValueFrom(ExtrinsicHelper.api.rpc.statefulStorage.getPaginatedStorage(msa_id, schemaId)); + return ExtrinsicHelper.apiPromise.rpc.statefulStorage.getPaginatedStorage(msa_id, schemaId); } public static 
timeReleaseTransfer(keys: KeyringPair, who: KeyringPair, schedule: ReleaseSchedule) { @@ -364,8 +360,7 @@ export class ExtrinsicHelper { } public static getHandleForMSA(msa_id: MessageSourceId): Promise> { - let handle_response = ExtrinsicHelper.api.rpc.handles.getHandleForMsa(msa_id); - return firstValueFrom(handle_response); + return ExtrinsicHelper.apiPromise.rpc.handles.getHandleForMsa(msa_id); } public static getMsaForHandle(handle: string): Promise> { @@ -373,13 +368,11 @@ export class ExtrinsicHelper { } public static getNextSuffixesForHandle(base_handle: string, count: number): Promise { - let suffixes = ExtrinsicHelper.api.rpc.handles.getNextSuffixes(base_handle, count); - return firstValueFrom(suffixes); + return ExtrinsicHelper.apiPromise.rpc.handles.getNextSuffixes(base_handle, count); } public static validateHandle(base_handle: string): Promise { - let validationResult = ExtrinsicHelper.api.rpc.handles.validateHandle(base_handle); - return firstValueFrom(validationResult); + return ExtrinsicHelper.apiPromise.rpc.handles.validateHandle(base_handle); } public static addOnChainMessage(keys: KeyringPair, schemaId: any, payload: string) { @@ -425,7 +418,7 @@ export class ExtrinsicHelper { if (hasRelayChain()) { await new Promise((r) => setTimeout(r, 4_000)); } else { - await firstValueFrom(ExtrinsicHelper.api.rpc.engine.createBlock(true, true)); + await ExtrinsicHelper.apiPromise.rpc.engine.createBlock(true, true); } currentBlock = await getBlockNumber(); } diff --git a/e2e/scaffolding/funding.ts b/e2e/scaffolding/funding.ts index 934d9c3108..1238100299 100644 --- a/e2e/scaffolding/funding.ts +++ b/e2e/scaffolding/funding.ts @@ -1,7 +1,6 @@ import "@frequency-chain/api-augment"; import { Keyring } from "@polkadot/api"; import { isTestnet } from "./env"; -import { createKeys } from "./helpers"; const coreFundingSourcesSeed = "salt glare message absent guess transfer oblige refuse keen current lunar pilot"; const keyring = new Keyring({ type: 'sr25519' }); @@ -23,6 +22,7 @@ export const fundingSources = [ "scenarios-grant-delegation", "stateful-storage-handle-sig-req", "msa-create-msa", + "msa-key-management", "stateful-storage-handle-paginated", "stateful-storage-handle-itemized", ] as const; diff --git a/e2e/scaffolding/helpers.ts b/e2e/scaffolding/helpers.ts index d24059bd64..64c3887147 100644 --- a/e2e/scaffolding/helpers.ts +++ b/e2e/scaffolding/helpers.ts @@ -1,6 +1,7 @@ import { Keyring } from "@polkadot/api"; import { KeyringPair } from "@polkadot/keyring/types"; -import { u16, u32, u64, Option, u128 } from "@polkadot/types"; +import { u16, u32, u64, Option } from "@polkadot/types"; +import type { PalletCapacityCapacityDetails } from "@polkadot/types/lookup"; import { Codec } from "@polkadot/types/types"; import { u8aToHex, u8aWrapBytes } from "@polkadot/util"; import { mnemonicGenerate } from '@polkadot/util-crypto'; @@ -15,7 +16,6 @@ import { } from "./extrinsicHelpers"; import { HandleResponse, MessageSourceId, PageHash } from "@frequency-chain/api-augment/interfaces"; import assert from "assert"; -import { firstValueFrom } from "rxjs"; import { AVRO_GRAPH_CHANGE } from "../schemas/fixtures/avroGraphChangeSchemaType"; import { PARQUET_BROADCAST } from "../schemas/fixtures/parquetBroadcastSchemaType"; import { AVRO_CHAT_MESSAGE } from "../stateful-pallet-storage/fixtures/itemizedSchemaType"; @@ -32,6 +32,10 @@ export const CENTS = 1000000n; export const DOLLARS = 100n * CENTS; export const STARTING_BALANCE = 6n * CENTS + DOLLARS; +export function getTestHandle(prefix = 
"test-") { + return prefix + Math.random().toFixed(10).toString().replaceAll("0.", ""); +} + export function signPayloadSr25519(keys: KeyringPair, data: Codec): Sr25519Signature { return { Sr25519: u8aToHex(keys.sign(u8aWrapBytes(data.toU8a()))) } } @@ -178,7 +182,7 @@ export async function fundKeypair(source: KeyringPair, dest: KeyringPair, amount await ExtrinsicHelper.transferFunds(source, dest, amount).signAndSend(nonce); } -export async function createAndFundKeypair(source: KeyringPair, amount: bigint | undefined = undefined, keyName?: string, nonce?: number): Promise { +export async function createAndFundKeypair(source: KeyringPair, amount?: bigint, keyName?: string, nonce?: number): Promise { const keypair = createKeys(keyName); await fundKeypair(source, keypair, amount || await getExistentialDeposit(), nonce); @@ -261,6 +265,19 @@ export async function getHandleForMsa(msa_id: MessageSourceId): Promise { + + const keys = await createAndFundKeypair(source, amount); + const createMsaOp = ExtrinsicHelper.createMsa(keys); + const { target } = await createMsaOp.fundAndSend(source); + assert.notEqual(target, undefined, 'createMsa: should have returned MsaCreated event'); + + return [target!.data.msaId, keys]; +} + // Creates an MSA and a provider for the given keys // Returns the MSA Id of the provider export async function createMsaAndProvider(source: KeyringPair, keys: KeyringPair, providerName: string, amount: bigint | undefined = undefined): @@ -302,8 +319,8 @@ export async function stakeToProvider(source: KeyringPair, keys: KeyringPair, pr } export async function getNextEpochBlock() { - const epochInfo = await firstValueFrom(ExtrinsicHelper.api.query.capacity.currentEpochInfo()); - const actualEpochLength = await firstValueFrom(ExtrinsicHelper.api.query.capacity.epochLength()); + const epochInfo = await ExtrinsicHelper.apiPromise.query.capacity.currentEpochInfo(); + const actualEpochLength = await ExtrinsicHelper.apiPromise.query.capacity.epochLength(); return actualEpochLength.toNumber() + epochInfo.epochStart.toNumber() + 1; } @@ -313,7 +330,7 @@ export async function setEpochLength(keys: KeyringPair, epochLength: number): Pr if (setEpochLengthEvent) { const epochLength = setEpochLengthEvent.data.blocks; assert.equal(epochLength.toNumber(), TEST_EPOCH_LENGTH, "should set epoch length to TEST_EPOCH_LENGTH blocks"); - const actualEpochLength = await firstValueFrom(ExtrinsicHelper.api.query.capacity.epochLength()); + const actualEpochLength = await ExtrinsicHelper.apiPromise.query.capacity.epochLength(); assert.equal(actualEpochLength, TEST_EPOCH_LENGTH, `should have set epoch length to TEST_EPOCH_LENGTH blocks, but it's ${actualEpochLength}`); } else { @@ -407,20 +424,19 @@ export async function getOrCreateAvroChatMessageItemizedSchema(source: KeyringPa export const TokenPerCapacity = 50n; -export function assertEvent(events: EventMap, eventName: string) { - assert(events.hasOwnProperty(eventName)); -} - -export async function getRemainingCapacity(providerId: u64): Promise { - const capacityStaked = (await ExtrinsicHelper.apiPromise.query.capacity.capacityLedger(providerId)).unwrap(); - return capacityStaked.remainingCapacity; +export async function getCapacity(providerId: u64): Promise { + return (await ExtrinsicHelper.apiPromise.query.capacity.capacityLedger(providerId)).unwrap(); } export async function getNonce(keys: KeyringPair): Promise { - const nonce = await firstValueFrom(ExtrinsicHelper.api.call.accountNonceApi.accountNonce(keys.address)); + const nonce = await 
ExtrinsicHelper.apiPromise.call.accountNonceApi.accountNonce(keys.address); return nonce.toNumber(); } +export function assertEvent(events: EventMap, eventName: string) { + assert(events.hasOwnProperty(eventName)); +} + export function assertExtrinsicSuccess(eventMap: EventMap) { assert.notEqual(eventMap["system.ExtrinsicSuccess"], undefined); } diff --git a/e2e/scaffolding/rootHooks.ts b/e2e/scaffolding/rootHooks.ts index 47026983ba..e838a94b41 100644 --- a/e2e/scaffolding/rootHooks.ts +++ b/e2e/scaffolding/rootHooks.ts @@ -5,6 +5,11 @@ import { ExtrinsicHelper } from "./extrinsicHelpers"; import { drainFundedKeys } from "./helpers"; import { getRootFundingSource } from "./funding"; +// Make sure that we can serialize BigInts for Mocha +(BigInt.prototype as any).toJSON = function () { + return this.toString(); +}; + export const mochaHooks = { async beforeAll() { try { diff --git a/e2e/scenarios/grantDelegation.test.ts b/e2e/scenarios/grantDelegation.test.ts index 47692c5d33..2905bd4430 100644 --- a/e2e/scenarios/grantDelegation.test.ts +++ b/e2e/scenarios/grantDelegation.test.ts @@ -5,7 +5,6 @@ import assert from "assert"; import { AddProviderPayload, Extrinsic, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { createAndFundKeypair, createAndFundKeypairs, createKeys, generateDelegationPayload, signPayloadSr25519 } from "../scaffolding/helpers"; import { SchemaGrantResponse, SchemaId } from "@frequency-chain/api-augment/interfaces"; -import { firstValueFrom } from "rxjs"; import { getFundingSource } from "../scaffolding/funding"; describe("Delegation Scenario Tests", function () { @@ -151,7 +150,7 @@ describe("Delegation Scenario Tests", function () { }); it('initial granted schemas should be correct', async function () { - let schemaGrants = await firstValueFrom(ExtrinsicHelper.api.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId)); + let schemaGrants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); assert.equal(schemaGrants.isSome, true); const schemaIds = schemaGrants.unwrap().filter((grant) => grant.revoked_at.toBigInt() === 0n).map((grant) => grant.schema_id.toNumber()); const expectedSchemaIds = [schemaId.toNumber()]; @@ -170,7 +169,7 @@ describe("Delegation Scenario Tests", function () { assert.notEqual(grantDelegationEvent, undefined, "should have returned DelegationGranted event"); assert.deepEqual(grantDelegationEvent?.data.providerId, providerId, 'provider IDs should match'); assert.deepEqual(grantDelegationEvent?.data.delegatorId, msaId, 'delegator IDs should match'); - let grants = await firstValueFrom(ExtrinsicHelper.api.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId)); + let grants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); const grantedSchemaIds = grants.unwrap().filter((grant) => grant.revoked_at.toBigInt() === 0n).map((grant) => grant.schema_id.toNumber()); const expectedSchemaIds = [schemaId.toNumber(), schemaId2.toNumber()]; assert.deepStrictEqual(grantedSchemaIds, expectedSchemaIds); @@ -209,7 +208,7 @@ describe("Delegation Scenario Tests", function () { it("should successfully revoke granted schema", async function () { const op = ExtrinsicHelper.revokeSchemaPermissions(keys, providerId, [schemaId2]); await assert.doesNotReject(op.fundAndSend(fundingSource)); - let grants = await firstValueFrom(ExtrinsicHelper.api.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId)); + let grants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); 
const grantedSchemaIds = grants.unwrap().filter((grant) => grant.revoked_at.toBigInt() === 0n).map((grant) => grant.schema_id.toNumber()); assert.deepEqual(grantedSchemaIds, [schemaId.toNumber()], "granted schema permissions should include only non-revoked schema permission"); }); diff --git a/e2e/stateful-pallet-storage/handleItemized.test.ts b/e2e/stateful-pallet-storage/handleItemized.test.ts index c69b507fd4..47bffdf091 100644 --- a/e2e/stateful-pallet-storage/handleItemized.test.ts +++ b/e2e/stateful-pallet-storage/handleItemized.test.ts @@ -1,7 +1,7 @@ // E2E tests for pallets/stateful-pallet-storage/handleItemized.ts import "@frequency-chain/api-augment"; import assert from "assert"; -import {createDelegatorAndDelegation, createProviderKeysAndId, getCurrentItemizedHash} from "../scaffolding/helpers"; +import {DOLLARS, createDelegatorAndDelegation, createMsa, createProviderKeysAndId, getCurrentItemizedHash} from "../scaffolding/helpers"; import { KeyringPair } from "@polkadot/keyring/types"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { AVRO_CHAT_MESSAGE } from "../stateful-pallet-storage/fixtures/itemizedSchemaType"; @@ -16,9 +16,9 @@ describe("📗 Stateful Pallet Storage", () => { let msa_id: MessageSourceId; let providerId: MessageSourceId; let providerKeys: KeyringPair; + let badMsaId: u64; before(async function () { - // Create a provider for the MSA, the provider will be used to grant delegation [providerKeys, providerId] = await createProviderKeysAndId(fundingSource); assert.notEqual(providerId, undefined, "setup should populate providerId"); @@ -28,16 +28,16 @@ describe("📗 Stateful Pallet Storage", () => { const createSchemaDeletable = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); const { target: eventDeletable } = await createSchemaDeletable.fundAndSend(fundingSource); schemaId_deletable = eventDeletable!.data.schemaId; - // Create non supported schema const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "OnChain"); const { target: event2 } = await createSchema2.fundAndSend(fundingSource); assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); schemaId_unsupported = event2!.data.schemaId; - // Create a MSA for the delegator and delegate to the provider [, msa_id] = await createDelegatorAndDelegation(fundingSource, schemaId_deletable, providerId, providerKeys); assert.notEqual(msa_id, undefined, "setup should populate msa_id"); + // Create an MSA that is not a provider to be used for testing failure cases + [badMsaId] = await createMsa(fundingSource); }); describe("Itemized Storage Tests 😊/😥", () => { @@ -103,9 +103,8 @@ describe("📗 Stateful Pallet Storage", () => { "Add": payload_1 } let add_actions = [add_action]; - let bad_msa_id = new u64(ExtrinsicHelper.api.registry, 4_294_967_295) - let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, bad_msa_id, add_actions, 0); + let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, badMsaId, add_actions, 0); await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { name: 'UnauthorizedDelegate', section: 'statefulStorage', @@ -187,8 +186,8 @@ describe("📗 Stateful Pallet Storage", () => { "Delete": idx_1, } let remove_actions = [remove_action_1]; - let bad_msa_id = new u64(ExtrinsicHelper.api.registry, 4_294_967_295); - let itemized_remove_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, 
schemaId_deletable, bad_msa_id, remove_actions, 0); + + let itemized_remove_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, badMsaId, remove_actions, 0); await assert.rejects(itemized_remove_result_1.fundAndSend(fundingSource), { name: 'UnauthorizedDelegate', section: 'statefulStorage', diff --git a/e2e/stateful-pallet-storage/handlePaginated.test.ts b/e2e/stateful-pallet-storage/handlePaginated.test.ts index cf1e68d230..9d4a3b9bde 100644 --- a/e2e/stateful-pallet-storage/handlePaginated.test.ts +++ b/e2e/stateful-pallet-storage/handlePaginated.test.ts @@ -1,7 +1,7 @@ // E2E tests for pallets/stateful-pallet-storage/handlePaginated.ts import "@frequency-chain/api-augment"; import assert from "assert"; -import {createProviderKeysAndId, createDelegatorAndDelegation, getCurrentPaginatedHash} from "../scaffolding/helpers"; +import {createProviderKeysAndId, createDelegatorAndDelegation, getCurrentPaginatedHash, createMsa} from "../scaffolding/helpers"; import { KeyringPair } from "@polkadot/keyring/types"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { AVRO_CHAT_MESSAGE } from "./fixtures/itemizedSchemaType"; @@ -17,6 +17,7 @@ describe("📗 Stateful Pallet Storage", () => { let msa_id: MessageSourceId; let providerId: MessageSourceId; let providerKeys: KeyringPair; + let badMsaId: u64; before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation @@ -38,6 +39,9 @@ describe("📗 Stateful Pallet Storage", () => { // Create a MSA for the delegator and delegate to the provider [, msa_id] = await createDelegatorAndDelegation(fundingSource, schemaId, providerId, providerKeys); assert.notEqual(msa_id, undefined, "setup should populate msa_id"); + + // Create an MSA that is not a provider to be used for testing failure cases + [badMsaId] = await createMsa(fundingSource); }); describe("Paginated Storage Upsert/Remove Tests 😊/😥", () => { @@ -101,10 +105,9 @@ describe("📗 Stateful Pallet Storage", () => { let page_id = 0; let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - let bad_msa_id = new u64(ExtrinsicHelper.api.registry, 999) let target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id) - let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, schemaId, bad_msa_id, page_id, payload_1, target_hash); + let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, schemaId, badMsaId, page_id, payload_1, target_hash); await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { name: 'UnauthorizedDelegate', section: 'statefulStorage', @@ -146,9 +149,7 @@ describe("📗 Stateful Pallet Storage", () => { }); it("🛑 should fail call to remove page for un-delegated attempts", async function () { - let bad_msa_id = new u64(ExtrinsicHelper.api.registry, 999) - - let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId, bad_msa_id, 0, 0); + let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId, badMsaId, 0, 0); await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { name: 'UnauthorizedDelegate', section: 'statefulStorage', diff --git a/e2e/sudo/sudo.test.ts b/e2e/sudo/sudo.test.ts index af3ed656b6..d2cb36b5c2 100644 --- a/e2e/sudo/sudo.test.ts +++ b/e2e/sudo/sudo.test.ts @@ -4,7 +4,6 @@ import "@frequency-chain/api-augment"; import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfaces"; import { KeyringPair } from "@polkadot/keyring/types"; import 
assert from "assert"; -import { firstValueFrom } from "rxjs"; import { Extrinsic, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { isTestnet } from "../scaffolding/env"; import { getSudo, getFundingSource } from "../scaffolding/funding"; @@ -40,11 +39,11 @@ describe("Sudo required", function () { }) it("should successfully set the max schema size", async function () { - const size = (await firstValueFrom(ExtrinsicHelper.api.query.schemas.governanceSchemaModelMaxBytes())).toBigInt(); + const size = (await ExtrinsicHelper.apiPromise.query.schemas.governanceSchemaModelMaxBytes()).toBigInt(); const op = new Extrinsic(() => ExtrinsicHelper.api.tx.schemas.setMaxSchemaModelBytes(size + 1n), sudoKey); await op.sudoSignAndSend(); assert.equal(true, true, 'operation should not have thrown error'); - const newSize = (await firstValueFrom(ExtrinsicHelper.api.query.schemas.governanceSchemaModelMaxBytes())).toBigInt(); + const newSize = (await ExtrinsicHelper.apiPromise.query.schemas.governanceSchemaModelMaxBytes()).toBigInt(); assert.equal(size + 1n, newSize, 'new size should have been set'); }); }); diff --git a/e2e/time-release/timeRelease.ts b/e2e/time-release/timeRelease.test.ts similarity index 90% rename from e2e/time-release/timeRelease.ts rename to e2e/time-release/timeRelease.test.ts index b289235239..9efb40d0a9 100644 --- a/e2e/time-release/timeRelease.ts +++ b/e2e/time-release/timeRelease.test.ts @@ -7,7 +7,7 @@ import { getFundingSource } from "../scaffolding/funding"; const DOLLARS: number = 100000000; // 100_000_000 -export function getBlocksInMonthPeriod(blockTime, periodInMonths) { +export function getBlocksInMonthPeriod(blockTime: number, periodInMonths: number) { const secondsPerMonth = 2592000; // Assuming 30 days in a month // Calculate the number of blocks in the given period @@ -45,7 +45,7 @@ describe("TimeRelease", function () { let schedule: ReleaseSchedule = calculateReleaseSchedule(amount); const vestedTransferTx = ExtrinsicHelper.timeReleaseTransfer(sourceKey, vesterKeys, schedule); - const [event, eventMap] = await vestedTransferTx.signAndSend(); + const { target: event, eventMap } = await vestedTransferTx.signAndSend(); assert.notEqual(event, undefined, "should have returned ReleaseScheduleAdded event"); }); }); From ef28664464a0429a5a23019c25d37f8779e57c66 Mon Sep 17 00:00:00 2001 From: Wil Wade Date: Mon, 6 Nov 2023 15:53:12 -0500 Subject: [PATCH 3/6] Testing e2e add autononce feature (#1760) # Goal The goal of this PR is to make nonce management easy inside of e2e tests. This does it via a module that caches and then keeps a local state up. 
Part of #1731 # Discussion - RPC errors will NOT increment nonces, so those must use the `current` option - Also split up some of the handles tests to make this easier - Made sure everything was using `function ()` instead of `() =>` for mocha --- e2e/.mocharc.json | 2 +- e2e/capacity/replenishment.test.ts | 4 +- e2e/capacity/transactions.test.ts | 14 +- e2e/handles/handles.test.ts | 170 +++++++++--------- e2e/messages/addIPFSMessage.test.ts | 2 +- e2e/msa/msaKeyManagement.test.ts | 14 +- e2e/scaffolding/autoNonce.ts | 56 ++++++ e2e/scaffolding/extrinsicHelpers.ts | 52 ++++-- e2e/scenarios/grantDelegation.test.ts | 68 ++++--- e2e/schemas/createSchema.test.ts | 4 +- .../handleItemized.test.ts | 8 +- .../handlePaginated.test.ts | 8 +- .../handleSignatureRequired.test.ts | 6 +- e2e/sudo/sudo.test.ts | 4 +- 14 files changed, 250 insertions(+), 162 deletions(-) create mode 100644 e2e/scaffolding/autoNonce.ts diff --git a/e2e/.mocharc.json b/e2e/.mocharc.json index 89b3fd4aad..e306b8687b 100644 --- a/e2e/.mocharc.json +++ b/e2e/.mocharc.json @@ -9,5 +9,5 @@ ], "loader": "ts-node/esm", "spec": ["./{,!(node_modules|load-tests)/**}/*.test.ts"], - "timeout": 10000 + "timeout": 20000 } diff --git a/e2e/capacity/replenishment.test.ts b/e2e/capacity/replenishment.test.ts index 9caf23f539..8d373c22b7 100644 --- a/e2e/capacity/replenishment.test.ts +++ b/e2e/capacity/replenishment.test.ts @@ -78,7 +78,7 @@ describe("Capacity Replenishment Testing: ", function () { function assert_capacity_call_fails_with_balance_too_low(call: Extrinsic) { return assert.rejects( - call.payWithCapacity(-1), { name: "RpcError", message: /1010.+account balance too low/ }); + call.payWithCapacity('current'), { name: "RpcError", message: /1010.+account balance too low/ }); } describe("Capacity is not replenished", function () { @@ -132,7 +132,7 @@ describe("Capacity Replenishment Testing: ", function () { assertEvent(hasStaked, 'capacity.Staked'); // provider can now send a message - const { eventMap: hasCapacityWithdrawn } = await call.payWithCapacity(-1); + const { eventMap: hasCapacityWithdrawn } = await call.payWithCapacity(); assertEvent(hasCapacityWithdrawn, 'capacity.CapacityWithdrawn'); remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); diff --git a/e2e/capacity/transactions.test.ts b/e2e/capacity/transactions.test.ts index daeea303d9..b5859070fc 100644 --- a/e2e/capacity/transactions.test.ts +++ b/e2e/capacity/transactions.test.ts @@ -65,7 +65,7 @@ describe("Capacity Transactions", function () { const ownerSig: Sr25519Signature = signPayloadSr25519(capacityKeys, addKeyPayloadCodec); const newSig: Sr25519Signature = signPayloadSr25519(newControlKeypair, addKeyPayloadCodec); const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, addKeyPayload); - const { eventMap } = await addPublicKeyOp.signAndSend(-1); + const { eventMap } = await addPublicKeyOp.signAndSend(); assertEvent(eventMap, "system.ExtrinsicSuccess"); assertEvent(eventMap, "msa.PublicKeyAdded"); } @@ -102,7 +102,7 @@ describe("Capacity Transactions", function () { }; const claimHandlePayload = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", handlePayload); const claimHandle = ExtrinsicHelper.claimHandle(newControlKeypair, claimHandlePayload); - await assert.rejects(claimHandle.payWithCapacity(), { + await assert.rejects(claimHandle.payWithCapacity('current'), { name: "RpcError", message: "1010: Invalid Transaction: Inability to pay some fees , e.g. 
account balance too low" }); @@ -506,7 +506,7 @@ describe("Capacity Transactions", function () { const capacityKeys = createKeys("CapacityKeys"); const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); const nonCapacityTxn = ExtrinsicHelper.stake(capacityKeys, capacityProvider, 1n * CENTS); - await assert.rejects(nonCapacityTxn.payWithCapacity(), { + await assert.rejects(nonCapacityTxn.payWithCapacity('current'), { name: "RpcError", message: "1010: Invalid Transaction: Custom error: 0" }); @@ -531,7 +531,7 @@ describe("Capacity Transactions", function () { const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, noCapacityKeys, signPayloadSr25519(noCapacityKeys, addProviderData), payload); - await assert.rejects(grantDelegationOp.payWithCapacity(), { + await assert.rejects(grantDelegationOp.payWithCapacity('current'), { name: "RpcError", message: "1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low" }); @@ -571,7 +571,7 @@ describe("Capacity Transactions", function () { const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, noTokensKeys, signPayloadSr25519(delegatorKeys, addProviderData), payload); - await assert.rejects(grantDelegationOp.payWithCapacity(), { + await assert.rejects(grantDelegationOp.payWithCapacity('current'), { name: 'RpcError', message: "1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low", }) @@ -611,7 +611,7 @@ describe("Capacity Transactions", function () { const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, noProviderKeys, signPayloadSr25519(delegatorKeys, addProviderData), payload); - await assert.rejects(grantDelegationOp.payWithCapacity(), { + await assert.rejects(grantDelegationOp.payWithCapacity('current'), { name: "RpcError", message: "1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low" }); @@ -630,7 +630,7 @@ describe("Capacity Transactions", function () { const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, emptyKeys, signPayloadSr25519(delegatorKeys, addProviderData), payload); - await assert.rejects(grantDelegationOp.payWithCapacity(), { + await assert.rejects(grantDelegationOp.payWithCapacity('current'), { name: "RpcError", message: "1010: Invalid Transaction: Custom error: 1" }); diff --git a/e2e/handles/handles.test.ts b/e2e/handles/handles.test.ts index 5b8ce0033d..792003e9dd 100644 --- a/e2e/handles/handles.test.ts +++ b/e2e/handles/handles.test.ts @@ -10,22 +10,21 @@ import { getBlockNumber } from "../scaffolding/helpers"; import { hasRelayChain } from "../scaffolding/env"; import { getFundingSource } from "../scaffolding/funding"; -describe("🤝 Handles", () => { +describe("🤝 Handles", function () { const fundingSource = getFundingSource("handles"); + const expirationOffset = hasRelayChain() ? 10 : 100; - let msa_id: MessageSourceId; - let msaOwnerKeys: KeyringPair; + describe("Claim and Retire", function () { + let msa_id: MessageSourceId; + let msaOwnerKeys: KeyringPair; - let expirationOffset = hasRelayChain() ? 
10 : 100; - - before(async function () { - // Create a MSA for the delegator - [msaOwnerKeys, msa_id] = await createDelegator(fundingSource); - assert.notEqual(msaOwnerKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(msa_id, undefined, "setup should populate msa_id"); - }); + before(async function () { + // Create a MSA for the delegator + [msaOwnerKeys, msa_id] = await createDelegator(fundingSource); + assert.notEqual(msaOwnerKeys, undefined, "setup should populate delegator_key"); + assert.notEqual(msa_id, undefined, "setup should populate msa_id"); + }); - describe("@Claim Handle", () => { it("should be able to claim a handle", async function () { const handle = getTestHandle(); let currentBlock = await getBlockNumber(); @@ -40,9 +39,7 @@ describe("🤝 Handles", () => { assert.notEqual(event, undefined, "claimHandle should return an event"); assert.notEqual(event!.data.handle.toString(), "", "claimHandle should emit a handle"); }); - }); - describe("@Retire Handle", () => { it("should be able to retire a handle", async function () { let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); if (!handle_response.isSome) { @@ -58,84 +55,95 @@ describe("🤝 Handles", () => { await ExtrinsicHelper.runToBlock(currentBlock + expirationOffset + 1); // Must be at least 1 > original expiration const retireHandle = ExtrinsicHelper.retireHandle(msaOwnerKeys); - const { target: event } = await retireHandle.fundAndSend(fundingSource); + const { target: event } = await retireHandle.signAndSend('current'); assert.notEqual(event, undefined, "retireHandle should return an event"); const handle = event!.data.handle.toString(); assert.notEqual(handle, "", "retireHandle should return the correct handle"); }); }); - describe("@Alt Path: Claim Handle with possible presumptive suffix/RPC test", () => { - /// Check chain to getNextSuffixesForHandle - - it("should be able to claim a handle and check suffix (=suffix_assumed if available on chain)", async function () { - const handle = getTestHandle(); - let handle_bytes = new Bytes(ExtrinsicHelper.api.registry, handle); - /// Get presumptive suffix from chain (rpc) - let suffixes_response = await ExtrinsicHelper.getNextSuffixesForHandle(handle, 10); - let resp_base_handle = suffixes_response.base_handle.toString(); - assert.equal(resp_base_handle, handle, "resp_base_handle should be equal to handle"); - let suffix_assumed = suffixes_response.suffixes[0]; - assert.notEqual(suffix_assumed, 0, "suffix_assumed should not be 0"); - - let currentBlock = await getBlockNumber(); - /// Claim handle (extrinsic) - const payload_ext = { - baseHandle: handle_bytes, - expiration: currentBlock + expirationOffset, - }; - const claimHandlePayload = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", payload_ext); - const claimHandle = ExtrinsicHelper.claimHandle(msaOwnerKeys, claimHandlePayload); - const { target: event } = await claimHandle.fundAndSend(fundingSource); - assert.notEqual(event, undefined, "claimHandle should return an event"); - const displayHandle = event!.data.handle.toUtf8(); - assert.notEqual(displayHandle, "", "claimHandle should emit a handle"); + describe("Claim and Retire Alt", function () { + let msa_id: MessageSourceId; + let msaOwnerKeys: KeyringPair; - // get handle using msa (rpc) - let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); - if (!handle_response.isSome) { - throw new Error("handle_response should be Some"); - } - let full_handle_state = 
handle_response.unwrap(); - let suffix_from_state = full_handle_state.suffix; - let suffix = suffix_from_state.toNumber(); - assert.notEqual(suffix, 0, "suffix should not be 0"); - assert.equal(suffix, suffix_assumed, "suffix should be equal to suffix_assumed"); - - /// Get MSA from full display handle (rpc) - const msaOption = await ExtrinsicHelper.getMsaForHandle(displayHandle); - assert(msaOption.isSome, "msaOption should be Some"); - const msaFromHandle = msaOption.unwrap(); - assert.equal(msaFromHandle.toString(), msa_id.toString(), "msaFromHandle should be equal to msa_id"); + before(async function () { + // Create a MSA for the delegator + [msaOwnerKeys, msa_id] = await createDelegator(fundingSource); + assert.notEqual(msaOwnerKeys, undefined, "setup should populate delegator_key"); + assert.notEqual(msa_id, undefined, "setup should populate msa_id"); }); - }); - - describe("👇 Negative Test: Early retire handle", () => { - it("should not be able to retire a handle before expiration", async function () { - let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); - if (!handle_response.isSome) { - throw new Error("handle_response should be Some"); - } - let full_handle_state = handle_response.unwrap(); - let suffix_from_state = full_handle_state.suffix; - let suffix = suffix_from_state.toNumber(); - assert.notEqual(suffix, 0, "suffix should not be 0"); + describe("Claim Handle with possible presumptive suffix/RPC test", function () { + + it("should be able to claim a handle and check suffix (=suffix_assumed if available on chain)", async function () { + const handle = getTestHandle(); + let handle_bytes = new Bytes(ExtrinsicHelper.api.registry, handle); + /// Get presumptive suffix from chain (rpc) + let suffixes_response = await ExtrinsicHelper.getNextSuffixesForHandle(handle, 10); + let resp_base_handle = suffixes_response.base_handle.toString(); + assert.equal(resp_base_handle, handle, "resp_base_handle should be equal to handle"); + let suffix_assumed = suffixes_response.suffixes[0]; + assert.notEqual(suffix_assumed, 0, "suffix_assumed should not be 0"); + + let currentBlock = await getBlockNumber(); + /// Claim handle (extrinsic) + const payload_ext = { + baseHandle: handle_bytes, + expiration: currentBlock + expirationOffset, + }; + const claimHandlePayload = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", payload_ext); + const claimHandle = ExtrinsicHelper.claimHandle(msaOwnerKeys, claimHandlePayload); + const { target: event } = await claimHandle.fundAndSend(fundingSource); + assert.notEqual(event, undefined, "claimHandle should return an event"); + const displayHandle = event!.data.handle.toUtf8(); + assert.notEqual(displayHandle, "", "claimHandle should emit a handle"); + + // get handle using msa (rpc) + let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); + if (!handle_response.isSome) { + throw new Error("handle_response should be Some"); + } + let full_handle_state = handle_response.unwrap(); + let suffix_from_state = full_handle_state.suffix; + let suffix = suffix_from_state.toNumber(); + assert.notEqual(suffix, 0, "suffix should not be 0"); + assert.equal(suffix, suffix_assumed, "suffix should be equal to suffix_assumed"); + + /// Get MSA from full display handle (rpc) + const msaOption = await ExtrinsicHelper.getMsaForHandle(displayHandle); + assert(msaOption.isSome, "msaOption should be Some"); + const msaFromHandle = msaOption.unwrap(); + assert.equal(msaFromHandle.toString(), msa_id.toString(), 
"msaFromHandle should be equal to msa_id"); + }); + }); - let currentBlock = await getBlockNumber(); - await ExtrinsicHelper.runToBlock(currentBlock + expirationOffset + 1); - try { - const retireHandle = ExtrinsicHelper.retireHandle(msaOwnerKeys); - const { target: event } = await retireHandle.fundAndSend(fundingSource); - assert.equal(event, undefined, "retireHandle should not return an event"); - } - catch (e) { - assert.notEqual(e, undefined, "retireHandle should throw an error"); - } + describe("👇 Negative Test: Early retire handle", function () { + it("should not be able to retire a handle before expiration", async function () { + let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); + if (!handle_response.isSome) { + throw new Error("handle_response should be Some"); + } + + let full_handle_state = handle_response.unwrap(); + let suffix_from_state = full_handle_state.suffix; + let suffix = suffix_from_state.toNumber(); + assert.notEqual(suffix, 0, "suffix should not be 0"); + + let currentBlock = await getBlockNumber(); + await ExtrinsicHelper.runToBlock(currentBlock + expirationOffset + 1); + try { + const retireHandle = ExtrinsicHelper.retireHandle(msaOwnerKeys); + const { target: event } = await retireHandle.fundAndSend(fundingSource); + assert.equal(event, undefined, "retireHandle should not return an event"); + } + catch (e) { + assert.notEqual(e, undefined, "retireHandle should throw an error"); + } + }); }); }); - describe("Suffixes Integrity Check", () => { + describe("Suffixes Integrity Check", function () { it("should return same suffixes for `abcdefg` from chain as hardcoded", async function () { let suffixes = await ExtrinsicHelper.getNextSuffixesForHandle("abcdefg", 10); let suffixes_expected = [23, 65, 16, 53, 25, 75, 29, 26, 10, 87]; @@ -144,8 +152,8 @@ describe("🤝 Handles", () => { }); }); - describe("validateHandle basic test", () => { - it('returns true for good handle, and false for bad handle', async () => { + describe("validateHandle basic test", function () { + it('returns true for good handle, and false for bad handle', async function () { let res = await ExtrinsicHelper.validateHandle("Robert`DROP TABLE STUDENTS;--"); assert.equal(res.toHuman(), false); res = await ExtrinsicHelper.validateHandle("Little Bobby Tables") diff --git a/e2e/messages/addIPFSMessage.test.ts b/e2e/messages/addIPFSMessage.test.ts index 1f8eb9339d..0a44ee4fae 100644 --- a/e2e/messages/addIPFSMessage.test.ts +++ b/e2e/messages/addIPFSMessage.test.ts @@ -50,7 +50,7 @@ describe("Add Offchain Message", function () { }); it('should fail if insufficient funds', async function () { - await assert.rejects(ExtrinsicHelper.addIPFSMessage(keys, schemaId, ipfs_cid_64, ipfs_payload_len).signAndSend(), { + await assert.rejects(ExtrinsicHelper.addIPFSMessage(keys, schemaId, ipfs_cid_64, ipfs_payload_len).signAndSend('current'), { name: 'RpcError', message: /Inability to pay some fees/, }); diff --git a/e2e/msa/msaKeyManagement.test.ts b/e2e/msa/msaKeyManagement.test.ts index 84972d1beb..10cad0bb8e 100644 --- a/e2e/msa/msaKeyManagement.test.ts +++ b/e2e/msa/msaKeyManagement.test.ts @@ -164,7 +164,7 @@ describe("MSA Key management", function () { it("should disallow retiring MSA belonging to a provider", async function () { const [providerKeys] = await createProviderKeysAndId(fundingSource); const retireOp = ExtrinsicHelper.retireMsa(providerKeys); - await assert.rejects(retireOp.signAndSend(), { + await assert.rejects(retireOp.signAndSend('current'), { name: 'RpcError', message: /Custom 
error: 2/, }); @@ -199,7 +199,7 @@ describe("MSA Key management", function () { it("should disallow retiring an MSA with more than one key authorized", async function () { const retireOp = ExtrinsicHelper.retireMsa(keys); - await assert.rejects(retireOp.signAndSend(), { + await assert.rejects(retireOp.signAndSend('current'), { name: 'RpcError', message: /Custom error: 3/, }); @@ -207,7 +207,7 @@ describe("MSA Key management", function () { it("should fail to delete public key for self", async function () { const op = ExtrinsicHelper.deletePublicKey(keys, keys.publicKey); - await assert.rejects(op.signAndSend(), { + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 4/, }); @@ -217,7 +217,7 @@ describe("MSA Key management", function () { const [providerKeys] = await createProviderKeysAndId(fundingSource); const op = ExtrinsicHelper.deletePublicKey(providerKeys, keys.publicKey); - await assert.rejects(op.signAndSend(), { + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 5/, }); @@ -226,7 +226,7 @@ describe("MSA Key management", function () { it("should test for 'NoKeyExists' error", async function () { const key = createKeys("nothing key"); const op = ExtrinsicHelper.deletePublicKey(keys, key.publicKey); - await assert.rejects(op.signAndSend(), { + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 1/, }); @@ -234,13 +234,13 @@ describe("MSA Key management", function () { it("should delete secondary key", async function () { const op = ExtrinsicHelper.deletePublicKey(keys, secondaryKey.publicKey); - const { target: event } = await op.signAndSend(); + const { target: event } = await op.signAndSend('current'); assert.notEqual(event, undefined, "should have returned PublicKeyDeleted event"); }); it("should allow retiring MSA after additional keys have been deleted", async function () { const retireMsaOp = ExtrinsicHelper.retireMsa(keys); - const { target: event, eventMap } = await retireMsaOp.signAndSend(); + const { target: event, eventMap } = await retireMsaOp.signAndSend('current'); assert.notEqual(eventMap["msa.PublicKeyDeleted"], undefined, 'should have deleted public key (retired)'); assert.notEqual(event, undefined, 'should have retired msa'); diff --git a/e2e/scaffolding/autoNonce.ts b/e2e/scaffolding/autoNonce.ts new file mode 100644 index 0000000000..0600bd1928 --- /dev/null +++ b/e2e/scaffolding/autoNonce.ts @@ -0,0 +1,56 @@ + +/** + * The AutoNonce Module keeps track of nonces used by tests + * Because all keys in the tests are managed by the tests, the nonces are determined by: + * 1. Prior transaction count + * 2. 
Not counting transactions that had RPC failures + */ + +import type { KeyringPair } from "@polkadot/keyring/types"; +import { ExtrinsicHelper } from "./extrinsicHelpers"; + +export type AutoNonce = number | 'auto' | 'current'; + +const nonceCache = new Map(); + +const getNonce = async (keys: KeyringPair) => { + return (await ExtrinsicHelper.getAccountInfo(keys.address)).nonce.toNumber(); +} + +const reset = (keys: KeyringPair) => { + nonceCache.delete(keys.address); +} + +const current = async (keys: KeyringPair): Promise => { + return nonceCache.get(keys.address) || await getNonce(keys); +} + +const increment = async (keys: KeyringPair) => { + const nonce = await current(keys); + nonceCache.set(keys.address, nonce + 1); + return nonce; +} + +/** + * Use the auto nonce system + * @param keys The KeyringPair that will be used for sending a transaction + * @param inputNonce + * "auto" (default) for using the auto system + * "current" for just getting the current one instead of incrementing + * for just using a specific number (also sets it to increments for the future) + * @returns + */ +const auto = (keys: KeyringPair, inputNonce: AutoNonce = 'auto'): Promise => { + switch(inputNonce) { + case 'auto': return increment(keys); + case 'current': return current(keys); + default: + nonceCache.set(keys.address, inputNonce + 1); + return Promise.resolve(inputNonce); + } +} + +export default { + auto, + reset, +} diff --git a/e2e/scaffolding/extrinsicHelpers.ts b/e2e/scaffolding/extrinsicHelpers.ts index 2a653349bf..bbdcb617b2 100644 --- a/e2e/scaffolding/extrinsicHelpers.ts +++ b/e2e/scaffolding/extrinsicHelpers.ts @@ -3,9 +3,10 @@ import { ApiTypes, AugmentedEvent, SubmittableExtrinsic } from "@polkadot/api/ty import { KeyringPair } from "@polkadot/keyring/types"; import { Compact, u128, u16, u32, u64, Vec, Option, Bool } from "@polkadot/types"; import { FrameSystemAccountInfo, SpRuntimeDispatchError } from "@polkadot/types/lookup"; -import { AnyNumber, AnyTuple, Codec, IEvent, ISubmittableResult } from "@polkadot/types/types"; +import { AnyJson, AnyNumber, AnyTuple, Codec, IEvent, ISubmittableResult } from "@polkadot/types/types"; import { firstValueFrom, filter, map, pipe, tap } from "rxjs"; import { getBlockNumber, getExistentialDeposit, log, Sr25519Signature } from "./helpers"; +import autoNonce, { AutoNonce } from "./autoNonce"; import { connect, connectPromise } from "./apiConnection"; import { DispatchError, Event, SignedBlock } from "@polkadot/types/interfaces"; import { IsEvent } from "@polkadot/types/metadata/decorate/types"; @@ -59,6 +60,17 @@ export class EventError extends Error { } } +class CallError extends Error { + message: string; + result: AnyJson; + + constructor(submittable: ISubmittableResult, msg?: string) { + super(); + this.result = submittable.toHuman(); + this.message = msg ?? "Call Error"; + } +} + export type EventMap = Record; function eventKey(event: Event): string { @@ -112,22 +124,42 @@ export class Extrinsic status.isInBlock || status.isFinalized), - this.parseResult(this.event), - )) + // This uses automatic nonce management by default. 
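+  // Pass 'current' to read the nonce without bumping the local cache (use it for calls that are
+  // expected to fail at the RPC layer, since RPC errors do not consume a nonce), or pass a number
+  // to pin the nonce for this call.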
+ public async signAndSend(inputNonce?: AutoNonce) { + const nonce = await autoNonce.auto(this.keys, inputNonce); + + try { + const op = this.extrinsic(); + return await firstValueFrom(op.signAndSend(this.keys, { nonce }).pipe( + tap((result) => { + // If we learn a transaction has an error status (this does NOT include RPC errors) + // Then throw an error + if (result.isError) { + throw new CallError(result, `Failed Transaction for ${this.event?.meta.name || "unknown"}`); + } + }), + filter(({ status }) => status.isInBlock || status.isFinalized), + this.parseResult(this.event), + )); + } catch (e) { + if ((e as any).name === "RpcError" && inputNonce === 'auto') { + console.error("WARNING: Unexpected RPC Error! If it is expected, use 'current' for the nonce."); + } + throw e; + } } - public sudoSignAndSend() { - return firstValueFrom(this.api.tx.sudo.sudo(this.extrinsic()).signAndSend(this.keys).pipe( + public async sudoSignAndSend() { + const nonce = await autoNonce.auto(this.keys); + return await firstValueFrom(this.api.tx.sudo.sudo(this.extrinsic()).signAndSend(this.keys, { nonce }).pipe( filter(({ status }) => status.isInBlock || status.isFinalized), this.parseResult(this.event), )) } - public payWithCapacity(nonce?: number) { - return firstValueFrom(this.api.tx.frequencyTxPayment.payWithCapacity(this.extrinsic()).signAndSend(this.keys, { nonce }).pipe( + public async payWithCapacity(inputNonce?: AutoNonce) { + const nonce = await autoNonce.auto(this.keys, inputNonce); + return await firstValueFrom(this.api.tx.frequencyTxPayment.payWithCapacity(this.extrinsic()).signAndSend(this.keys, { nonce }).pipe( filter(({ status }) => status.isInBlock || status.isFinalized), this.parseResult(this.event), )) diff --git a/e2e/scenarios/grantDelegation.test.ts b/e2e/scenarios/grantDelegation.test.ts index 2905bd4430..7d46006ad6 100644 --- a/e2e/scenarios/grantDelegation.test.ts +++ b/e2e/scenarios/grantDelegation.test.ts @@ -3,7 +3,7 @@ import { KeyringPair } from "@polkadot/keyring/types"; import { u16, u64 } from "@polkadot/types"; import assert from "assert"; import { AddProviderPayload, Extrinsic, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { createAndFundKeypair, createAndFundKeypairs, createKeys, generateDelegationPayload, signPayloadSr25519 } from "../scaffolding/helpers"; +import { DOLLARS, createAndFundKeypair, createAndFundKeypairs, createKeys, generateDelegationPayload, getBlockNumber, signPayloadSr25519 } from "../scaffolding/helpers"; import { SchemaGrantResponse, SchemaId } from "@frequency-chain/api-augment/interfaces"; import { getFundingSource } from "../scaffolding/funding"; @@ -24,26 +24,25 @@ describe("Delegation Scenario Tests", function () { before(async function () { // Fund all the different keys - [noMsaKeys, keys, otherMsaKeys, providerKeys, otherProviderKeys] = await createAndFundKeypairs(fundingSource, ["noMsaKeys", "keys", "otherMsaKeys", "providerKeys", "otherProviderKeys"]); + [noMsaKeys, keys, otherMsaKeys, providerKeys, otherProviderKeys] = await createAndFundKeypairs(fundingSource, ["noMsaKeys", "keys", "otherMsaKeys", "providerKeys", "otherProviderKeys"], 1n * DOLLARS); - const createMsaOp = ExtrinsicHelper.createMsa(keys); - let { target: msaCreatedEvent } = await createMsaOp.fundAndSend(fundingSource); - msaId = msaCreatedEvent!.data.msaId; + const { target: msaCreatedEvent1 } = await ExtrinsicHelper.createMsa(keys).signAndSend(); + msaId = msaCreatedEvent1!.data.msaId; - ({ target: msaCreatedEvent } = await 
ExtrinsicHelper.createMsa(otherMsaKeys).fundAndSend(fundingSource)); - otherMsaId = msaCreatedEvent!.data.msaId; + const { target: msaCreatedEvent2 } = await ExtrinsicHelper.createMsa(otherMsaKeys).signAndSend(); + otherMsaId = msaCreatedEvent2!.data.msaId; let createProviderMsaOp = ExtrinsicHelper.createMsa(providerKeys); - await createProviderMsaOp.fundAndSend(fundingSource); + await createProviderMsaOp.signAndSend(); let createProviderOp = ExtrinsicHelper.createProvider(providerKeys, "MyPoster"); - let { target: providerEvent } = await createProviderOp.fundAndSend(fundingSource); + let { target: providerEvent } = await createProviderOp.signAndSend(); assert.notEqual(providerEvent, undefined, "setup should return a ProviderCreated event"); providerId = providerEvent!.data.providerId; createProviderMsaOp = ExtrinsicHelper.createMsa(otherProviderKeys); - await createProviderMsaOp.fundAndSend(fundingSource); + await createProviderMsaOp.signAndSend(); createProviderOp = ExtrinsicHelper.createProvider(otherProviderKeys, "MyPoster"); - ({ target: providerEvent } = await createProviderOp.fundAndSend(fundingSource)); + ({ target: providerEvent } = await createProviderOp.signAndSend()); assert.notEqual(providerEvent, undefined, "setup should return a ProviderCreated event"); otherProviderId = providerEvent!.data.providerId; @@ -79,12 +78,12 @@ describe("Delegation Scenario Tests", function () { }, ] }, "AvroBinary", "OnChain"); - let { target: createSchemaEvent } = await createSchemaOp.fundAndSend(fundingSource); + let { target: createSchemaEvent } = await createSchemaOp.signAndSend(); assert.notEqual(createSchemaEvent, undefined, "setup should return SchemaCreated event"); schemaId = createSchemaEvent!.data.schemaId; // Create a second schema - ({ target: createSchemaEvent } = await createSchemaOp.fundAndSend(fundingSource)); + ({ target: createSchemaEvent } = await createSchemaOp.signAndSend()); assert.notEqual(createSchemaEvent, undefined, "setup should return SchemaCreated event"); schemaId2 = createSchemaEvent!.data.schemaId; }); @@ -218,12 +217,12 @@ describe("Delegation Scenario Tests", function () { it("should fail to revoke a delegation if no MSA exists (InvalidMsaKey)", async function () { const nonMsaKeys = await createAndFundKeypair(fundingSource); const op = ExtrinsicHelper.revokeDelegationByDelegator(nonMsaKeys, providerId); - await assert.rejects(op.fundAndSend(fundingSource), { name: 'RpcError', message: /Custom error: 1$/ }); + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 1$/ }); }); it("should revoke a delegation by delegator", async function () { const revokeDelegationOp = ExtrinsicHelper.revokeDelegationByDelegator(keys, providerId); - const { target: revokeDelegationEvent } = await revokeDelegationOp.fundAndSend(fundingSource); + const { target: revokeDelegationEvent } = await revokeDelegationOp.signAndSend('current'); assert.notEqual(revokeDelegationEvent, undefined, "should have returned DelegationRevoked event"); assert.deepEqual(revokeDelegationEvent?.data.providerId, providerId, 'provider ids should be equal'); assert.deepEqual(revokeDelegationEvent?.data.delegatorId, msaId, 'delegator ids should be equal'); @@ -231,28 +230,20 @@ describe("Delegation Scenario Tests", function () { it("should fail to revoke a delegation that has already been revoked (InvalidDelegation)", async function () { const op = ExtrinsicHelper.revokeDelegationByDelegator(keys, providerId); - await assert.rejects(op.fundAndSend(fundingSource), { name: 
'RpcError', message: /Custom error: 0$/ }); + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 0$/ }); }); it("should fail to revoke delegation where no delegation exists (DelegationNotFound)", async function () { const op = ExtrinsicHelper.revokeDelegationByDelegator(keys, otherProviderId); - await assert.rejects(op.fundAndSend(fundingSource), { name: 'RpcError', message: /Custom error: 0$/ }); + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 0$/ }); }); - describe('Successful revocation', () => { + describe('Successful revocation', function () { let newKeys: KeyringPair; let msaId: u64 | undefined; - let revokedAtBlock: bigint; - async function revokeDelegationByProvider (myMsaId: u64 | undefined) { - const op = ExtrinsicHelper.revokeDelegationByProvider(myMsaId as u64, providerKeys); - const { target: revokeEvent } = await op.fundAndSend(fundingSource); - assert.notEqual(revokeEvent, undefined, "should have returned a DelegationRevoked event"); - assert.deepEqual(revokeEvent?.data.delegatorId, myMsaId, 'delegator ids should match'); - assert.deepEqual(revokeEvent?.data.providerId, providerId, 'provider ids should match'); - revokedAtBlock = (await ExtrinsicHelper.apiPromise.rpc.chain.getBlock()).block.header.number.toBigInt() - } + let revokedAtBlock: number; - before(async () => { + before(async function () { newKeys = createKeys(); const payload = await generateDelegationPayload({ authorizedMsaId: providerId, schemaIds: [schemaId] }); const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); @@ -262,7 +253,7 @@ describe("Delegation Scenario Tests", function () { assert.notEqual(msaId, undefined, 'should have returned an MSA'); }); - it("schema permissions revoked block of delegation should be zero", async () => { + it("schema permissions revoked block of delegation should be zero", async function () { const delegationsResponse = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); assert(delegationsResponse.isSome); const delegations: SchemaGrantResponse[] = delegationsResponse.unwrap().toArray(); @@ -273,24 +264,24 @@ describe("Delegation Scenario Tests", function () { it("should revoke a delegation by provider", async function () { const op = ExtrinsicHelper.revokeDelegationByProvider(msaId as u64, providerKeys); - const { target: revokeEvent } = await op.fundAndSend(fundingSource); + const { target: revokeEvent } = await op.signAndSend('current'); assert.notEqual(revokeEvent, undefined, "should have returned a DelegationRevoked event"); assert.deepEqual(revokeEvent?.data.delegatorId, msaId, 'delegator ids should match'); assert.deepEqual(revokeEvent?.data.providerId, providerId, 'provider ids should match'); - revokedAtBlock = (await ExtrinsicHelper.apiPromise.rpc.chain.getBlock()).block.header.number.toBigInt() + revokedAtBlock = await getBlockNumber(); }); - it("revoked delegation should be reflected in all previously-granted schema permissions", async () => { + it("revoked delegation should be reflected in all previously-granted schema permissions", async function () { // Make a block first to make sure the state has rolled to the next block - const currentBlock = (await ExtrinsicHelper.apiPromise.rpc.chain.getBlock()).block.header.number.toNumber(); + const currentBlock = await getBlockNumber(); ExtrinsicHelper.runToBlock(currentBlock + 1); const delegationsResponse = await 
ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); assert(delegationsResponse.isSome); const delegations: SchemaGrantResponse[] = delegationsResponse.unwrap().toArray(); delegations.forEach((delegation) => { - const diff = delegation.revoked_at.toBigInt() - revokedAtBlock; + const diff = delegation.revoked_at.toNumber() - revokedAtBlock; // Due to parallelization, this could be off by a few blocks - assert(Math.abs(Number(diff.toString())) < 5); + assert(Math.abs(Number(diff.toString())) < 20); }) }) @@ -302,9 +293,10 @@ describe("Delegation Scenario Tests", function () { const { target: msaEvent } = await op.fundAndSend(fundingSource); const newMsaId = msaEvent?.data.msaId; assert.notEqual(newMsaId, undefined, 'should have returned an MSA'); - await revokeDelegationByProvider(newMsaId); + await assert.doesNotReject(ExtrinsicHelper.revokeDelegationByProvider(newMsaId!, providerKeys).signAndSend('current')); + const retireMsaOp = ExtrinsicHelper.retireMsa(delegatorKeys); - const { target: retireMsaEvent } = await retireMsaOp.fundAndSend(fundingSource); + const { target: retireMsaEvent } = await retireMsaOp.signAndSend('current'); assert.notEqual(retireMsaEvent, undefined, "should have returned MsaRetired event"); assert.deepEqual(retireMsaEvent?.data.msaId, newMsaId, 'msaId should be equal'); }); @@ -318,7 +310,7 @@ describe("Delegation Scenario Tests", function () { const newMsaId = msaEvent?.data.msaId; assert.notEqual(newMsaId, undefined, 'should have returned an MSA'); const retireMsaOp = ExtrinsicHelper.retireMsa(delegatorKeys); - await assert.rejects(retireMsaOp.fundAndSend(fundingSource), { name: 'RpcError', message: /Custom error: 6$/ }); + await assert.rejects(retireMsaOp.signAndSend('current'), { name: 'RpcError', message: /Custom error: 6$/ }); }); }); }); diff --git a/e2e/schemas/createSchema.test.ts b/e2e/schemas/createSchema.test.ts index e27d82d9d1..d3592aaac3 100644 --- a/e2e/schemas/createSchema.test.ts +++ b/e2e/schemas/createSchema.test.ts @@ -20,7 +20,7 @@ describe("#createSchema", function () { it("should fail if account does not have enough tokens", async function () { - await assert.rejects(ExtrinsicHelper.createSchema(accountWithNoFunds, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain").signAndSend(), { + await assert.rejects(ExtrinsicHelper.createSchema(accountWithNoFunds, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain").signAndSend('current'), { name: 'RpcError', message: /Inability to pay some fees/, }); @@ -28,7 +28,7 @@ describe("#createSchema", function () { it("should fail if account does not have enough tokens v2", async function () { - await assert.rejects(ExtrinsicHelper.createSchemaV2(accountWithNoFunds, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain",[]).signAndSend(), { + await assert.rejects(ExtrinsicHelper.createSchemaV2(accountWithNoFunds, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain",[]).signAndSend('current'), { name: 'RpcError', message: /Inability to pay some fees/, }); diff --git a/e2e/stateful-pallet-storage/handleItemized.test.ts b/e2e/stateful-pallet-storage/handleItemized.test.ts index 47bffdf091..41bb45c939 100644 --- a/e2e/stateful-pallet-storage/handleItemized.test.ts +++ b/e2e/stateful-pallet-storage/handleItemized.test.ts @@ -9,7 +9,7 @@ import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfac import { Bytes, u16, u64 } from "@polkadot/types"; import { getFundingSource } from "../scaffolding/funding"; -describe("📗 Stateful Pallet Storage", () => { +describe("📗 Stateful Pallet Storage", function () { const 
fundingSource = getFundingSource("stateful-storage-handle-itemized"); let schemaId_deletable: SchemaId; let schemaId_unsupported: SchemaId; @@ -40,7 +40,7 @@ describe("📗 Stateful Pallet Storage", () => { [badMsaId] = await createMsa(fundingSource); }); - describe("Itemized Storage Tests 😊/😥", () => { + describe("Itemized Storage Tests 😊/😥", function () { it("✅ should be able to call applyItemizedAction and apply actions", async function () { @@ -132,7 +132,7 @@ describe("📗 Stateful Pallet Storage", () => { }); }); - describe("Itemized Storage Remove Action Tests", () => { + describe("Itemized Storage Remove Action Tests", function () { it("✅ should be able to call applyItemizedAction and apply remove actions", async function () { let target_hash = await getCurrentItemizedHash(msa_id, schemaId_deletable); @@ -205,7 +205,7 @@ describe("📗 Stateful Pallet Storage", () => { }); }); - describe("Itemized Storage RPC Tests", () => { + describe("Itemized Storage RPC Tests", function () { it("✅ should be able to call getItemizedStorage and get data for itemized schema", async function () { const result = await ExtrinsicHelper.getItemizedStorage(msa_id, schemaId_deletable); assert.notEqual(result.hash, undefined, "should have returned a hash"); diff --git a/e2e/stateful-pallet-storage/handlePaginated.test.ts b/e2e/stateful-pallet-storage/handlePaginated.test.ts index 9d4a3b9bde..7b3930b8dd 100644 --- a/e2e/stateful-pallet-storage/handlePaginated.test.ts +++ b/e2e/stateful-pallet-storage/handlePaginated.test.ts @@ -9,7 +9,7 @@ import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfac import { Bytes, u16, u64 } from "@polkadot/types"; import { getFundingSource } from "../scaffolding/funding"; -describe("📗 Stateful Pallet Storage", () => { +describe("📗 Stateful Pallet Storage", function () { const fundingSource = getFundingSource("stateful-storage-handle-paginated"); let schemaId: SchemaId; @@ -44,7 +44,7 @@ describe("📗 Stateful Pallet Storage", () => { [badMsaId] = await createMsa(fundingSource); }); - describe("Paginated Storage Upsert/Remove Tests 😊/😥", () => { + describe("Paginated Storage Upsert/Remove Tests 😊/😥", function () { it("✅ should be able to call upsert page and add a page and remove a page via id", async function () { let page_id = 0; @@ -127,7 +127,7 @@ describe("📗 Stateful Pallet Storage", () => { }); }); - describe("Paginated Storage Removal Negative Tests 😊/😥", () => { + describe("Paginated Storage Removal Negative Tests 😊/😥", function () { it("🛑 should fail call to remove page with invalid schemaId", async function () { let fake_schema_id = 999; @@ -165,7 +165,7 @@ describe("📗 Stateful Pallet Storage", () => { }); }); - describe("Paginated Storage RPC Tests", () => { + describe("Paginated Storage RPC Tests", function () { it("✅ should be able to call get_paginated_storage and get paginated data", async function () { const result = await ExtrinsicHelper.getPaginatedStorage(msa_id, schemaId); assert.notEqual(result, undefined, "should have returned a valid response"); diff --git a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts index 52ddd4aaed..5006dd155a 100644 --- a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts +++ b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts @@ -19,7 +19,7 @@ import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfac import { Bytes, u16 } from "@polkadot/types"; import { getFundingSource } from 
"../scaffolding/funding"; -describe("📗 Stateful Pallet Storage Signature Required", () => { +describe("📗 Stateful Pallet Storage Signature Required", function () { const fundingSource = getFundingSource("stateful-storage-handle-sig-req"); @@ -54,7 +54,7 @@ describe("📗 Stateful Pallet Storage Signature Required", () => { assert.notEqual(msa_id, undefined, "setup should populate msa_id"); }); - describe("Itemized With Signature Storage Tests", () => { + describe("Itemized With Signature Storage Tests", function () { it("should be able to call applyItemizedActionWithSignature and apply actions", async function () { @@ -120,7 +120,7 @@ describe("📗 Stateful Pallet Storage Signature Required", () => { }); }); - describe("Paginated With Signature Storage Tests", () => { + describe("Paginated With Signature Storage Tests", function () { it("should be able to call upsert a page and delete it successfully", async function () { let page_id = new u16(ExtrinsicHelper.api.registry, 1); diff --git a/e2e/sudo/sudo.test.ts b/e2e/sudo/sudo.test.ts index d2cb36b5c2..28b5503dfe 100644 --- a/e2e/sudo/sudo.test.ts +++ b/e2e/sudo/sudo.test.ts @@ -75,7 +75,7 @@ describe("Sudo required", function () { }); - describe("📗 Stateful Pallet Storage AppendOnly Schemas", () => { + describe("📗 Stateful Pallet Storage AppendOnly Schemas", function () { // This requires schema creation abilities let itemizedSchemaId: SchemaId; @@ -99,7 +99,7 @@ describe("Sudo required", function () { assert.notEqual(msa_id, undefined, "setup should populate msa_id"); }); - describe("Itemized With AppendOnly Storage Tests", () => { + describe("Itemized With AppendOnly Storage Tests", function () { it("should not be able to call delete action", async function () { From 298c7e087ebacd2b7cf28772ceaea3ced323fb35 Mon Sep 17 00:00:00 2001 From: Shannon Wells Date: Tue, 7 Nov 2023 22:18:11 +0000 Subject: [PATCH 4/6] Fix bug with StakingAccountDetails#withdraw (#1769) # Goal The goal of this PR is to fix the bug with calling `StakingAccountDetails#withdraw` with more than what is in `self.active`. 
Closes #1768 --- .../src/tests/staking_account_details_tests.rs | 8 ++++++++ pallets/capacity/src/types.rs | 12 +++++++----- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/pallets/capacity/src/tests/staking_account_details_tests.rs b/pallets/capacity/src/tests/staking_account_details_tests.rs index c89884aed4..e49ed9082b 100644 --- a/pallets/capacity/src/tests/staking_account_details_tests.rs +++ b/pallets/capacity/src/tests/staking_account_details_tests.rs @@ -39,6 +39,14 @@ fn staking_account_details_withdraw_goes_to_zero_when_result_below_minimum() { assert_eq!(Ok(10u64), staking_account_details.withdraw(6, 3)); assert_eq!(0u64, staking_account_details.active); assert_eq!(10u64, staking_account_details.total); + + staking_account_details.deposit(10); + assert_eq!(Ok(10u64), staking_account_details.withdraw(9, 3)); + assert_eq!(0u64, staking_account_details.active); + + staking_account_details.deposit(10); + assert_eq!(Ok(10u64), staking_account_details.withdraw(11, 3)); + assert_eq!(0u64, staking_account_details.active); } #[test] diff --git a/pallets/capacity/src/types.rs b/pallets/capacity/src/types.rs index 0d9491c72e..9b76809180 100644 --- a/pallets/capacity/src/types.rs +++ b/pallets/capacity/src/types.rs @@ -105,12 +105,14 @@ impl StakingAccountDetails { self.unlocking.len() < T::MaxUnlockingChunks::get() as usize, Error::::MaxUnlockingChunksExceeded ); - let mut active = self.active.saturating_sub(amount); + + let current_active = self.active; + let mut new_active = self.active.saturating_sub(amount); let mut actual_unstaked: BalanceOf = amount; - if active.le(&T::MinimumStakingAmount::get()) { - actual_unstaked = amount.saturating_add(active); - active = Zero::zero(); + if new_active.le(&T::MinimumStakingAmount::get()) { + actual_unstaked = current_active; + new_active = Zero::zero(); } let unlock_chunk = UnlockChunk { value: actual_unstaked, thaw_at }; @@ -119,7 +121,7 @@ impl StakingAccountDetails { .try_push(unlock_chunk) .map_err(|_| Error::::MaxUnlockingChunksExceeded)?; - self.active = active; + self.active = new_active; Ok(actual_unstaked) } } From 8c8384f79074872198310012da70ea537f492bf8 Mon Sep 17 00:00:00 2001 From: Wil Wade Date: Wed, 8 Nov 2023 09:02:08 -0500 Subject: [PATCH 5/6] E2E Testing fixes and cleanup (#1765) # Goal The goal of this PR is the final misc pieces of getting the e2e tests less flaky. Does this fix all of it? No. Most? Yes. 
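Most of the changes follow two recurring patterns: extrinsics that are expected to fail are asserted explicitly (via `assert.rejects` or an `assert.fail` inside the `try` block), and message queries use the new `assertHasMessage` helper rather than comparing against the last entry of the page. A minimal sketch of the latter, with `schemaId` and `starting_block` assumed from the surrounding test setup:

```
// assertHasMessage is added to e2e/scaffolding/helpers.ts in this diff.
const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(schemaId, {
  from_block: starting_block,
  from_index: 0,
  to_block: starting_block + 999,
  page_size: 999,
});
// Passes if any message in the page matches, instead of assuming the last entry is ours.
assertHasMessage(get, (x) => x.payload.isSome && x.payload.toString() === '0xdeadbeef');
```
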
Closes #1731 # Discussion - Correct batch error asserting - Ids used as "bad" should use close to the max - Use Immortal Eras due to issues with speed and [AncientBirthBlock](https://substrate.stackexchange.com/questions/10411/fast-block-production-causes-mortality-checking-failing-with-ancientbirthblock) - Cleanup before funding - Move assertAddNewKey - Add new assertHasMessage --- e2e/capacity/transactions.test.ts | 30 +++++----- e2e/messages/addIPFSMessage.test.ts | 12 ++-- e2e/miscellaneous/utilityBatch.test.ts | 38 +++++------- e2e/msa/msaKeyManagement.test.ts | 6 +- e2e/scaffolding/extrinsicHelpers.ts | 9 ++- e2e/scaffolding/helpers.ts | 58 ++++++++++++++----- e2e/scaffolding/rootHooks.ts | 13 +++-- .../handleItemized.test.ts | 6 +- .../handlePaginated.test.ts | 15 ++--- .../handleSignatureRequired.test.ts | 7 ++- e2e/sudo/sudo.test.ts | 3 +- 11 files changed, 112 insertions(+), 85 deletions(-) diff --git a/e2e/capacity/transactions.test.ts b/e2e/capacity/transactions.test.ts index b5859070fc..a5314b71aa 100644 --- a/e2e/capacity/transactions.test.ts +++ b/e2e/capacity/transactions.test.ts @@ -38,7 +38,9 @@ import { generatePaginatedUpsertSignaturePayloadV2, generatePaginatedDeleteSignaturePayloadV2, getCapacity, - getTestHandle + getTestHandle, + assertHasMessage, + assertAddNewKey } from "../scaffolding/helpers"; import { FeeDetails } from "@polkadot/types/interfaces"; import { ipfsCid } from "../messages/ipfs"; @@ -60,16 +62,6 @@ describe("Capacity Transactions", function () { assert.notEqual(schemaId, undefined, "setup should populate schemaId"); }); - async function assertAddNewKey(capacityKeys: KeyringPair, addKeyPayload: AddKeyData, newControlKeypair: KeyringPair) { - const addKeyPayloadCodec: Codec = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", addKeyPayload); - const ownerSig: Sr25519Signature = signPayloadSr25519(capacityKeys, addKeyPayloadCodec); - const newSig: Sr25519Signature = signPayloadSr25519(newControlKeypair, addKeyPayloadCodec); - const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, addKeyPayload); - const { eventMap } = await addPublicKeyOp.signAndSend(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "msa.PublicKeyAdded"); - } - function getCapacityFee(chainEvents: EventMap): bigint { if (chainEvents["capacity.CapacityWithdrawn"] && ExtrinsicHelper.api.events.capacity.CapacityWithdrawn.is(chainEvents["capacity.CapacityWithdrawn"])) { @@ -118,6 +110,7 @@ describe("Capacity Transactions", function () { before(async function () { capacityKeys = createKeys("CapacityKeys"); capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); + // Stake enough for all transactions await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, stakedForMsa)); }) @@ -186,11 +179,11 @@ describe("Capacity Transactions", function () { assertEvent(eventMap, "capacity.CapacityWithdrawn"); assertEvent(eventMap, "msa.DelegationGranted"); - let fee = getCapacityFee(eventMap); + const fee = getCapacityFee(eventMap); // assuming no other txns charged against capacity (b/c of async tests), this should be the maximum amount left. 
const maximumExpectedRemaining = stakedForMsa / TokenPerCapacity - fee - let remaining = capacityStaked.remainingCapacity.toBigInt(); + const remaining = capacityStaked.remainingCapacity.toBigInt(); assert(remaining <= maximumExpectedRemaining, `expected ${remaining} to be <= ${maximumExpectedRemaining}`); assert.equal(capacityStaked.totalTokensStaked.toBigInt(), stakedForMsa); assert.equal(capacityStaked.totalCapacityIssued.toBigInt(), stakedForMsa / TokenPerCapacity); @@ -209,6 +202,7 @@ describe("Capacity Transactions", function () { beforeEach(async function () { starting_block = (await ExtrinsicHelper.apiPromise.rpc.chain.getHeader()).number.toNumber(); + // Stake each time so that we always have enough capacity to do the call await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); }); @@ -240,8 +234,7 @@ describe("Capacity Transactions", function () { page_size: 999 } ); - const response: MessageResponse = get.content[get.content.length - 1]; - assert.equal(response.payload, "0xdeadbeef", "payload should be 0xdeadbeef"); + assertHasMessage(get, x => x.payload.isSome && x.payload.toString() === "0xdeadbeef"); }); }); @@ -254,8 +247,10 @@ describe("Capacity Transactions", function () { before(async function () { capacityKeys = createKeys("CapacityKeys"); capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); - }) + }); + beforeEach(async function () { + // Stake each time so that we always have enough capacity to do the call await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); }); @@ -480,7 +475,8 @@ describe("Capacity Transactions", function () { before(async function () { capacityKeys = createKeys("CapacityKeys"); capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); - }) + }); + it("successfully pays with Capacity for eligible transaction - claimHandle", async function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); diff --git a/e2e/messages/addIPFSMessage.test.ts b/e2e/messages/addIPFSMessage.test.ts index 0a44ee4fae..08b66c4f9a 100644 --- a/e2e/messages/addIPFSMessage.test.ts +++ b/e2e/messages/addIPFSMessage.test.ts @@ -5,7 +5,7 @@ import { base32 } from 'multiformats/bases/base32'; import { CID } from 'multiformats/cid' import { PARQUET_BROADCAST } from "../schemas/fixtures/parquetBroadcastSchemaType"; import assert from "assert"; -import { createAndFundKeypair } from "../scaffolding/helpers"; +import { assertHasMessage, createAndFundKeypair } from "../scaffolding/helpers"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { u16 } from "@polkadot/types"; import { MessageResponse } from "@frequency-chain/api-augment/interfaces"; @@ -102,9 +102,10 @@ describe("Add Offchain Message", function () { it("should successfully retrieve added message and returned CID should have Base32 encoding", async function () { const f = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(schemaId, { from_block: starting_block, from_index: 0, to_block: starting_block + 999, page_size: 999 }); - const response: MessageResponse = f.content[f.content.length - 1]; - const cid = Buffer.from(response.cid.unwrap()).toString(); - assert.equal(cid, ipfs_cid_32, 'returned CID should match base32-encoded CID'); + assertHasMessage(f, x => { + const cid = x.cid.isSome && Buffer.from(x.cid.unwrap()).toString(); 
+ return cid === ipfs_cid_32; + }); }) describe("Add OnChain Message and successfully retrieve it", function () { @@ -124,8 +125,7 @@ describe("Add Offchain Message", function () { page_size: 999 } ); - const response: MessageResponse = get.content[get.content.length - 1]; - assert.equal(response.payload, "0xdeadbeef", "payload should be 0xdeadbeef"); + assertHasMessage(get, x => x.payload.isSome && x.payload.toString() === "0xdeadbeef"); }); }); }); diff --git a/e2e/miscellaneous/utilityBatch.test.ts b/e2e/miscellaneous/utilityBatch.test.ts index a8fe66de2f..d61a6c9e98 100644 --- a/e2e/miscellaneous/utilityBatch.test.ts +++ b/e2e/miscellaneous/utilityBatch.test.ts @@ -1,7 +1,7 @@ import assert from "assert"; import { KeyringPair } from "@polkadot/keyring/types"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { createAndFundKeypair, getNonce } from "../scaffolding/helpers"; +import { DOLLARS, createAndFundKeypair } from "../scaffolding/helpers"; import { ApiTypes, SubmittableExtrinsic } from "@polkadot/api/types"; import { getFundingSource } from "../scaffolding/funding"; @@ -11,10 +11,9 @@ describe("Utility Batch Filtering", function () { const fundingSource = getFundingSource("misc-util-batch"); - before(async function () { - let nonce = await getNonce(fundingSource); - sender = await createAndFundKeypair(fundingSource, 50_000_000n, "utility-sender", nonce++); - recipient = await createAndFundKeypair(fundingSource, 50_000_000n, "utility-recipient", nonce++); + beforeEach(async function () { + sender = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-sender'); + recipient = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-recipient'); }); it("should successfully execute ✅ batch with allowed calls", async function () { @@ -44,11 +43,10 @@ describe("Utility Batch Filtering", function () { const batchAll = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); try { await batchAll.fundAndSend(fundingSource); + assert.fail("batchAll should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, " batchAll should return an error"); + assert.notEqual(err, undefined, " batchAll should return an error"); } - assert.notEqual(error, undefined, " batchAll should return an error"); }); it("should fail to execute ❌ batch with disallowed calls", async function () { @@ -88,14 +86,12 @@ describe("Utility Batch Filtering", function () { const badBatch: SubmittableExtrinsic[] = []; badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); it("should fail to execute ❌ batch with `Pays::Yes` `create_provider`call blocked by Frequency", async function () { @@ -103,14 +99,12 @@ describe("Utility Batch Filtering", function () { const badBatch: SubmittableExtrinsic[] = []; badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider("I am a ba(tch)d provider")) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should 
return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); it("should fail to execute ❌ batch with `Pays::Yes` `create_schema` call blocked by Frequency", async function () { @@ -118,14 +112,12 @@ describe("Utility Batch Filtering", function () { const badBatch: SubmittableExtrinsic[] = []; badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider("I am a ba(tch)d provider")) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); it("should fail to execute ❌ batch with nested batch", async function () { @@ -136,13 +128,11 @@ describe("Utility Batch Filtering", function () { innerBatch.push(ExtrinsicHelper.api.tx.system.remark("Hello From Batch")) nestedBatch.push(ExtrinsicHelper.api.tx.utility.batch(innerBatch)) const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, nestedBatch); - let error: any; try { await batch.fundAndSend(fundingSource); + assert.fail("batch should have caused an error"); } catch (err) { - error = err; - assert.notEqual(error, undefined, "should return an error"); + assert.notEqual(err, undefined, "should return an error"); } - assert.notEqual(error, undefined, "should return an error"); }); }); diff --git a/e2e/msa/msaKeyManagement.test.ts b/e2e/msa/msaKeyManagement.test.ts index 10cad0bb8e..ec22792fa7 100644 --- a/e2e/msa/msaKeyManagement.test.ts +++ b/e2e/msa/msaKeyManagement.test.ts @@ -7,6 +7,8 @@ import { u64 } from "@polkadot/types"; import { Codec } from "@polkadot/types/types"; import { getFundingSource } from "../scaffolding/funding"; +const maxU64 = 18_446_744_073_709_551_615n; + describe("MSA Key management", function () { const fundingSource = getFundingSource("msa-key-management"); @@ -75,7 +77,7 @@ describe("MSA Key management", function () { it("should fail to add public key if origin does not own MSA (NotMsaOwner)", async function () { const newPayload = await generateAddKeyPayload({ ...defaultPayload, - msaId: new u64(ExtrinsicHelper.api.registry, 999), // If we create more than 999 MSAs in our test suites, this will fail + msaId: new u64(ExtrinsicHelper.api.registry, maxU64), }); addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); ownerSig = signPayloadSr25519(keys, addKeyData); @@ -155,7 +157,7 @@ describe("MSA Key management", function () { assert.notEqual(event, undefined, 'should have added public key'); // Cleanup - await assert.doesNotReject(ExtrinsicHelper.deletePublicKey(keys, thirdKey.publicKey).signAndSend()); + await assert.doesNotReject(ExtrinsicHelper.deletePublicKey(keys, thirdKey.publicKey).signAndSend('current')); }); }); diff --git a/e2e/scaffolding/extrinsicHelpers.ts b/e2e/scaffolding/extrinsicHelpers.ts index bbdcb617b2..039d8aac78 100644 --- a/e2e/scaffolding/extrinsicHelpers.ts +++ b/e2e/scaffolding/extrinsicHelpers.ts @@ -130,7 +130,8 @@ export class Extrinsic { // If we learn a transaction has an error status (this does NOT include RPC errors) // Then throw an error @@ -151,7 +152,8 @@ export class Extrinsic status.isInBlock || status.isFinalized), this.parseResult(this.event), )) @@ -159,7 +161,8 @@ export class Extrinsic status.isInBlock || 
status.isFinalized), this.parseResult(this.event), )) diff --git a/e2e/scaffolding/helpers.ts b/e2e/scaffolding/helpers.ts index 64c3887147..89b38f2bd8 100644 --- a/e2e/scaffolding/helpers.ts +++ b/e2e/scaffolding/helpers.ts @@ -1,7 +1,7 @@ import { Keyring } from "@polkadot/api"; import { KeyringPair } from "@polkadot/keyring/types"; import { u16, u32, u64, Option } from "@polkadot/types"; -import type { PalletCapacityCapacityDetails } from "@polkadot/types/lookup"; +import type { FrameSystemAccountInfo, PalletCapacityCapacityDetails } from "@polkadot/types/lookup"; import { Codec } from "@polkadot/types/types"; import { u8aToHex, u8aWrapBytes } from "@polkadot/util"; import { mnemonicGenerate } from '@polkadot/util-crypto'; @@ -14,7 +14,7 @@ import { ItemizedSignaturePayload, ItemizedSignaturePayloadV2, PaginatedDeleteSignaturePayload, PaginatedDeleteSignaturePayloadV2, PaginatedUpsertSignaturePayload, PaginatedUpsertSignaturePayloadV2 } from "./extrinsicHelpers"; -import { HandleResponse, MessageSourceId, PageHash } from "@frequency-chain/api-augment/interfaces"; +import { BlockPaginationResponseMessage, HandleResponse, MessageResponse, MessageSourceId, PageHash } from "@frequency-chain/api-augment/interfaces"; import assert from "assert"; import { AVRO_GRAPH_CHANGE } from "../schemas/fixtures/avroGraphChangeSchemaType"; import { PARQUET_BROADCAST } from "../schemas/fixtures/parquetBroadcastSchemaType"; @@ -165,16 +165,24 @@ export function createKeys(name: string = 'first pair'): KeyringPair { return keypair; } +function canDrainAccount(info: FrameSystemAccountInfo): Boolean { + return !info.isEmpty + && info.data.free.toNumber() > 1_500_000 // ~Cost to do the transfer + && info.data.reserved.toNumber() < 1 + && info.data.frozen.toNumber() < 1; +} + export async function drainKeys(keyPairs: KeyringPair[], dest: string) { try { - await Promise.allSettled(keyPairs.map(async (keypair) => { - const info = await ExtrinsicHelper.getAccountInfo(keypair.address); - if (!info.isEmpty && info.data.free.toNumber() > 0) { - await ExtrinsicHelper.emptyAccount(keypair, dest).signAndSend(); - } - })); + await Promise.all( + keyPairs.map(async (keypair) => { + const info = await ExtrinsicHelper.getAccountInfo(keypair.address); + // Only drain keys that can be + if (canDrainAccount(info)) await ExtrinsicHelper.emptyAccount(keypair, dest).signAndSend(); + }) + ); } catch (e) { - console.log("Error draining accounts: ", e); + console.log('Error draining accounts: ', e); } } @@ -210,8 +218,8 @@ export function log(...args: any[]) { } } -export async function createProviderKeysAndId(source: KeyringPair): Promise<[KeyringPair, u64]> { - const providerKeys = await createAndFundKeypair(source); +export async function createProviderKeysAndId(source: KeyringPair, amount?: bigint): Promise<[KeyringPair, u64]> { + const providerKeys = await createAndFundKeypair(source, amount); await ExtrinsicHelper.createMsa(providerKeys).fundAndSend(source); const createProviderOp = ExtrinsicHelper.createProvider(providerKeys, "PrivateProvider"); const { target: providerEvent } = await createProviderOp.fundAndSend(source); @@ -219,8 +227,8 @@ export async function createProviderKeysAndId(source: KeyringPair): Promise<[Key return [providerKeys, providerId]; } -export async function createDelegator(source: KeyringPair): Promise<[KeyringPair, u64]> { - let keys = await createAndFundKeypair(source); +export async function createDelegator(source: KeyringPair, amount?: bigint): Promise<[KeyringPair, u64]> { + let keys = await 
createAndFundKeypair(source, amount); const createMsa = ExtrinsicHelper.createMsa(keys); const { target: msaCreatedEvent } = await createMsa.fundAndSend(source); const delegatorMsaId = msaCreatedEvent?.data.msaId || new u64(ExtrinsicHelper.api.registry, 0); @@ -440,3 +448,27 @@ export function assertEvent(events: EventMap, eventName: string) { export function assertExtrinsicSuccess(eventMap: EventMap) { assert.notEqual(eventMap["system.ExtrinsicSuccess"], undefined); } + +export function assertHasMessage(response: BlockPaginationResponseMessage, testFn: (x: MessageResponse) => Boolean) { + const messages = response.content; + assert(messages.length > 0, "Expected some messages, but found none."); + + const found = messages.find(testFn); + + if (found) { + assert.notEqual(found, undefined); + } else { + const allPayloads = messages.map(x => x.payload.toString()); + assert.fail(`Unable to find message in response (length: ${messages.length}, Payloads: ${allPayloads.join(", ")})`); + } +} + +export async function assertAddNewKey(capacityKeys: KeyringPair, addKeyPayload: AddKeyData, newControlKeypair: KeyringPair) { + const addKeyPayloadCodec: Codec = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", addKeyPayload); + const ownerSig: Sr25519Signature = signPayloadSr25519(capacityKeys, addKeyPayloadCodec); + const newSig: Sr25519Signature = signPayloadSr25519(newControlKeypair, addKeyPayloadCodec); + const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, addKeyPayload); + const { eventMap } = await addPublicKeyOp.signAndSend(); + assertEvent(eventMap, "system.ExtrinsicSuccess"); + assertEvent(eventMap, "msa.PublicKeyAdded"); +} diff --git a/e2e/scaffolding/rootHooks.ts b/e2e/scaffolding/rootHooks.ts index e838a94b41..3bfbc83753 100644 --- a/e2e/scaffolding/rootHooks.ts +++ b/e2e/scaffolding/rootHooks.ts @@ -15,20 +15,21 @@ export const mochaHooks = { try { await cryptoWaitReady(); await ExtrinsicHelper.initialize(); - } catch(e) { - console.error("Failed to run beforeAll root hook: ", e); + } catch (e) { + console.error('Failed to run beforeAll root hook: ', this.test.parent.suites[0].title, e); } }, async afterAll() { + const testSuite = this.test.parent.suites[0].title; + console.log("Starting ROOT hook shutdown", testSuite) try { // Any key created using helpers `createKeys` is kept in the module // then any value remaining is drained here at the end const rootAddress = getRootFundingSource().keys.address; await drainFundedKeys(rootAddress); - await ExtrinsicHelper.api.disconnect(); - await ExtrinsicHelper.apiPromise.disconnect(); - } catch(e) { - console.error("Failed to run afterAll root hook: ", e); + console.log("ENDING ROOT hook shutdown", testSuite) + } catch (e) { + console.error('Failed to run afterAll root hook: ', testSuite, e); } } } diff --git a/e2e/stateful-pallet-storage/handleItemized.test.ts b/e2e/stateful-pallet-storage/handleItemized.test.ts index 41bb45c939..d2873297a0 100644 --- a/e2e/stateful-pallet-storage/handleItemized.test.ts +++ b/e2e/stateful-pallet-storage/handleItemized.test.ts @@ -20,17 +20,17 @@ describe("📗 Stateful Pallet Storage", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource); + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); assert.notEqual(providerId, undefined, "setup should populate providerId"); 
assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); // Create a schema to allow delete actions const createSchemaDeletable = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); - const { target: eventDeletable } = await createSchemaDeletable.fundAndSend(fundingSource); + const { target: eventDeletable } = await createSchemaDeletable.signAndSend(); schemaId_deletable = eventDeletable!.data.schemaId; // Create non supported schema const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "OnChain"); - const { target: event2 } = await createSchema2.fundAndSend(fundingSource); + const { target: event2 } = await createSchema2.signAndSend(); assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); schemaId_unsupported = event2!.data.schemaId; // Create a MSA for the delegator and delegate to the provider diff --git a/e2e/stateful-pallet-storage/handlePaginated.test.ts b/e2e/stateful-pallet-storage/handlePaginated.test.ts index 7b3930b8dd..69cf897a3d 100644 --- a/e2e/stateful-pallet-storage/handlePaginated.test.ts +++ b/e2e/stateful-pallet-storage/handlePaginated.test.ts @@ -1,7 +1,7 @@ // E2E tests for pallets/stateful-pallet-storage/handlePaginated.ts import "@frequency-chain/api-augment"; import assert from "assert"; -import {createProviderKeysAndId, createDelegatorAndDelegation, getCurrentPaginatedHash, createMsa} from "../scaffolding/helpers"; +import {createProviderKeysAndId, createDelegatorAndDelegation, getCurrentPaginatedHash, createMsa, DOLLARS} from "../scaffolding/helpers"; import { KeyringPair } from "@polkadot/keyring/types"; import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; import { AVRO_CHAT_MESSAGE } from "./fixtures/itemizedSchemaType"; @@ -9,6 +9,8 @@ import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfac import { Bytes, u16, u64 } from "@polkadot/types"; import { getFundingSource } from "../scaffolding/funding"; +const badSchemaId = 65_534; + describe("📗 Stateful Pallet Storage", function () { const fundingSource = getFundingSource("stateful-storage-handle-paginated"); @@ -21,18 +23,18 @@ describe("📗 Stateful Pallet Storage", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource); + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); assert.notEqual(providerId, undefined, "setup should populate providerId"); assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); // Create a schema for Paginated PayloadLocation const createSchema = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Paginated"); - const { target: event } = await createSchema.fundAndSend(fundingSource); + const { target: event } = await createSchema.signAndSend(); schemaId = event!.data.schemaId; // Create non supported schema const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "OnChain"); - const { target: event2 } = await createSchema2.fundAndSend(fundingSource); + const { target: event2 } = await createSchema2.signAndSend(); assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); schemaId_unsupported = event2!.data.schemaId; @@ -81,7 +83,7 @@ describe("📗 Stateful Pallet Storage", function () { let page_id = 0; let target_hash = await 
getCurrentPaginatedHash(msa_id, schemaId, page_id) let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - let fake_schema_id = new u16(ExtrinsicHelper.api.registry, 999); + let fake_schema_id = new u16(ExtrinsicHelper.api.registry, badSchemaId); let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, fake_schema_id, msa_id, page_id, payload_1, target_hash); await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { name: 'InvalidSchemaId', @@ -130,9 +132,8 @@ describe("📗 Stateful Pallet Storage", function () { describe("Paginated Storage Removal Negative Tests 😊/😥", function () { it("🛑 should fail call to remove page with invalid schemaId", async function () { - let fake_schema_id = 999; let page_id = 0; - let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, fake_schema_id, msa_id, page_id, 0); + let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, badSchemaId, msa_id, page_id, 0); await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { name: 'InvalidSchemaId', section: 'statefulStorage', diff --git a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts index 5006dd155a..1bb759bbb4 100644 --- a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts +++ b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts @@ -2,6 +2,7 @@ import "@frequency-chain/api-augment"; import assert from "assert"; import { + DOLLARS, createDelegator, createProviderKeysAndId, generateItemizedSignaturePayload, @@ -33,18 +34,18 @@ describe("📗 Stateful Pallet Storage Signature Required", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource); + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); assert.notEqual(providerId, undefined, "setup should populate providerId"); assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); // Create a schema for Itemized PayloadLocation const createSchema = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); - const { target: event } = await createSchema.fundAndSend(fundingSource); + const { target: event } = await createSchema.signAndSend(); itemizedSchemaId = event!.data.schemaId; // Create a schema for Paginated PayloadLocation const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Paginated"); - const { target: event2 } = await createSchema2.fundAndSend(fundingSource); + const { target: event2 } = await createSchema2.signAndSend(); assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); paginatedSchemaId = event2!.data.schemaId; diff --git a/e2e/sudo/sudo.test.ts b/e2e/sudo/sudo.test.ts index 28b5503dfe..55cc2b0dde 100644 --- a/e2e/sudo/sudo.test.ts +++ b/e2e/sudo/sudo.test.ts @@ -10,6 +10,7 @@ import { getSudo, getFundingSource } from "../scaffolding/funding"; import { AVRO_GRAPH_CHANGE } from "../schemas/fixtures/avroGraphChangeSchemaType"; import { Bytes, u16 } from "@polkadot/types"; import { + DOLLARS, createDelegatorAndDelegation, createProviderKeysAndId, getCurrentItemizedHash, @@ -85,7 +86,7 @@ describe("Sudo required", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await 
createProviderKeysAndId(fundingSource); + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); assert.notEqual(providerId, undefined, "setup should populate providerId"); assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); From 8f55a11ae91aebe6a3ea1fc42cc50fc9c3f1db4f Mon Sep 17 00:00:00 2001 From: Wil Wade Date: Wed, 8 Nov 2023 14:56:30 -0500 Subject: [PATCH 6/6] E2E Testing add Prettier and Eslint (#1767) # Goal The goal of this PR is to get e2e tests under formatting and eslint control. Closes #1764 --- .github/workflows/verify-pr-commit.yml | 3 + e2e/.eslintrc.cjs | 24 + e2e/.load-test.mocharc.json | 18 +- e2e/.mocharc.json | 18 +- e2e/.prettierignore | 3 + e2e/.prettierrc.json | 9 +- e2e/.relay-chain.mocharc.json | 22 +- e2e/capacity/replenishment.test.ts | 79 +- e2e/capacity/staking.test.ts | 527 ++++---- e2e/capacity/transactions.test.ts | 771 +++++++----- e2e/handles/handles.test.ts | 318 ++--- e2e/load-tests/signatureRegistry.test.ts | 237 ++-- e2e/messages/addIPFSMessage.test.ts | 263 ++-- e2e/messages/ipfs.ts | 2 +- e2e/miscellaneous/utilityBatch.test.ts | 249 ++-- e2e/msa/createMsa.test.ts | 77 +- e2e/msa/msaKeyManagement.test.ts | 506 ++++---- e2e/package-lock.json | 281 +++-- e2e/package.json | 10 +- e2e/scaffolding/apiConnection.ts | 22 +- e2e/scaffolding/autoNonce.ts | 27 +- e2e/scaffolding/env.ts | 22 +- e2e/scaffolding/extrinsicHelpers.ts | 1096 +++++++++++------ e2e/scaffolding/funding.ts | 56 +- e2e/scaffolding/globalHooks.ts | 22 +- e2e/scaffolding/helpers.ts | 342 ++--- e2e/scaffolding/rootHooks.ts | 16 +- e2e/scenarios/grantDelegation.test.ts | 851 +++++++------ e2e/schemas/createSchema.test.ts | 102 +- .../fixtures/avroGraphChangeSchemaType.ts | 48 +- .../fixtures/parquetBroadcastSchemaType.ts | 66 +- .../fixtures/itemizedSchemaType.ts | 52 +- .../handleItemized.test.ts | 493 ++++---- .../handlePaginated.test.ts | 404 +++--- .../handleSignatureRequired.test.ts | 290 +++-- e2e/sudo/sudo.test.ts | 119 +- e2e/time-release/timeRelease.test.ts | 77 +- e2e/tsconfig.json | 48 +- 38 files changed, 4304 insertions(+), 3266 deletions(-) create mode 100644 e2e/.eslintrc.cjs diff --git a/.github/workflows/verify-pr-commit.yml b/.github/workflows/verify-pr-commit.yml index 174f1d0012..78b50488de 100644 --- a/.github/workflows/verify-pr-commit.yml +++ b/.github/workflows/verify-pr-commit.yml @@ -694,6 +694,9 @@ jobs: - name: Install NPM Modules run: npm ci working-directory: e2e + - name: Lint + run: npm run lint + working-directory: e2e - name: Run E2E Tests working-directory: e2e env: diff --git a/e2e/.eslintrc.cjs b/e2e/.eslintrc.cjs new file mode 100644 index 0000000000..9b70938099 --- /dev/null +++ b/e2e/.eslintrc.cjs @@ -0,0 +1,24 @@ +/* eslint-env node */ +module.exports = { + ignorePatterns: ['dist'], + extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'plugin:mocha/recommended'], + parser: '@typescript-eslint/parser', + plugins: ['@typescript-eslint', 'mocha'], + root: true, + env: { + mocha: true, + node: true, + }, + rules: { + '@typescript-eslint/no-explicit-any': 'off', // Don't worry about an any in a test + '@typescript-eslint/no-unused-vars': 'warn', + '@typescript-eslint/no-unused-vars': [ + 'error', + { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + caughtErrorsIgnorePattern: '^_', + }, + ], + }, +}; diff --git a/e2e/.load-test.mocharc.json b/e2e/.load-test.mocharc.json index 4eaa0cc4dc..7e6e4525ff 100644 --- a/e2e/.load-test.mocharc.json +++ 
b/e2e/.load-test.mocharc.json @@ -1,13 +1,9 @@ { - "exit": true, - "extension": ["ts"], - "parallel": false, - "require": [ - "scaffolding/globalHooks.ts", - "scaffolding/rootHooks.ts", - "scaffolding/extrinsicHelpers.ts" - ], - "loader": "ts-node/esm", - "spec": ["./load-tests/**/*.test.ts"], - "timeout": 60000 + "exit": true, + "extension": ["ts"], + "parallel": false, + "require": ["scaffolding/globalHooks.ts", "scaffolding/rootHooks.ts", "scaffolding/extrinsicHelpers.ts"], + "loader": "ts-node/esm", + "spec": ["./load-tests/**/*.test.ts"], + "timeout": 60000 } diff --git a/e2e/.mocharc.json b/e2e/.mocharc.json index e306b8687b..e1149d4aaf 100644 --- a/e2e/.mocharc.json +++ b/e2e/.mocharc.json @@ -1,13 +1,9 @@ { - "exit": true, - "extension": ["ts"], - "parallel": true, - "require": [ - "scaffolding/globalHooks.ts", - "scaffolding/rootHooks.ts", - "scaffolding/extrinsicHelpers.ts" - ], - "loader": "ts-node/esm", - "spec": ["./{,!(node_modules|load-tests)/**}/*.test.ts"], - "timeout": 20000 + "exit": true, + "extension": ["ts"], + "parallel": true, + "require": ["scaffolding/globalHooks.ts", "scaffolding/rootHooks.ts", "scaffolding/extrinsicHelpers.ts"], + "loader": "ts-node/esm", + "spec": ["./{,!(node_modules|load-tests)/**}/*.test.ts"], + "timeout": 20000 } diff --git a/e2e/.prettierignore b/e2e/.prettierignore index 5b52988c94..e00e777008 100644 --- a/e2e/.prettierignore +++ b/e2e/.prettierignore @@ -1,8 +1,11 @@ # package.json is formatted by package managers, so we ignore it here package.json +package-lock.json build/* +dist/* coverage/* multimodules/* /*.d.ts /*.map /*.js +**/*.md diff --git a/e2e/.prettierrc.json b/e2e/.prettierrc.json index 0967ef424b..cd87a91cb1 100644 --- a/e2e/.prettierrc.json +++ b/e2e/.prettierrc.json @@ -1 +1,8 @@ -{} +{ + "trailingComma": "es5", + "tabWidth": 2, + "semi": true, + "singleQuote": true, + "useTabs": false, + "printWidth": 120 +} diff --git a/e2e/.relay-chain.mocharc.json b/e2e/.relay-chain.mocharc.json index 66984d6f1a..e8b6d63c77 100644 --- a/e2e/.relay-chain.mocharc.json +++ b/e2e/.relay-chain.mocharc.json @@ -1,17 +1,9 @@ { - "exit": true, - "extension": [ - "ts" - ], - "parallel": true, - "require": [ - "scaffolding/globalHooks.ts", - "scaffolding/rootHooks.ts", - "scaffolding/extrinsicHelpers.ts" - ], - "loader": "ts-node/esm", - "spec": [ - "./{,!(node_modules|load-tests)/**}/*.test.ts" - ], - "timeout": 600000 + "exit": true, + "extension": ["ts"], + "parallel": true, + "require": ["scaffolding/globalHooks.ts", "scaffolding/rootHooks.ts", "scaffolding/extrinsicHelpers.ts"], + "loader": "ts-node/esm", + "spec": ["./{,!(node_modules|load-tests)/**}/*.test.ts"], + "timeout": 600000 } diff --git a/e2e/capacity/replenishment.test.ts b/e2e/capacity/replenishment.test.ts index 8d373c22b7..2e09843595 100644 --- a/e2e/capacity/replenishment.test.ts +++ b/e2e/capacity/replenishment.test.ts @@ -1,8 +1,8 @@ -import "@frequency-chain/api-augment"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { u16, u64 } from "@polkadot/types" -import assert from "assert"; -import { Extrinsic, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; +import '@frequency-chain/api-augment'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { u16, u64 } from '@polkadot/types'; +import assert from 'assert'; +import { Extrinsic, ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; import { createKeys, createMsaAndProvider, @@ -16,24 +16,23 @@ import { assertEvent, getCapacity, getNonce, -} from "../scaffolding/helpers"; -import { 
getFundingSource } from "../scaffolding/funding"; -import { isTestnet } from "../scaffolding/env"; +} from '../scaffolding/helpers'; +import { getFundingSource } from '../scaffolding/funding'; +import { isTestnet } from '../scaffolding/env'; -describe("Capacity Replenishment Testing: ", function () { - let schemaId: u16; - const fundingSource = getFundingSource("capacity-replenishment"); +const fundingSource = getFundingSource('capacity-replenishment'); +describe('Capacity Replenishment Testing: ', function () { + let schemaId: u16; async function createAndStakeProvider(name: string, stakingAmount: bigint): Promise<[KeyringPair, u64]> { const stakeKeys = createKeys(name); - const stakeProviderId = await createMsaAndProvider(fundingSource, stakeKeys, "ReplProv", 50n * DOLLARS); - assert.notEqual(stakeProviderId, 0, "stakeProviderId should not be zero"); + const stakeProviderId = await createMsaAndProvider(fundingSource, stakeKeys, 'ReplProv', 50n * DOLLARS); + assert.notEqual(stakeProviderId, 0, 'stakeProviderId should not be zero'); await stakeToProvider(fundingSource, stakeKeys, stakeProviderId, stakingAmount); return [stakeKeys, stakeProviderId]; } - before(async function () { // Replenishment requires the epoch length to be shorter than testnet (set in globalHooks) if (isTestnet()) this.skip(); @@ -41,23 +40,23 @@ describe("Capacity Replenishment Testing: ", function () { schemaId = await getOrCreateGraphChangeSchema(fundingSource); }); - describe("Capacity is replenished", function () { - it("after new epoch", async function () { + describe('Capacity is replenished', function () { + it('after new epoch', async function () { const totalStaked = 3n * DOLLARS; - let expectedCapacity = totalStaked / TokenPerCapacity; - const [stakeKeys, stakeProviderId] = await createAndStakeProvider("ReplFirst", totalStaked); - const payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }) + const expectedCapacity = totalStaked / TokenPerCapacity; + const [stakeKeys, stakeProviderId] = await createAndStakeProvider('ReplFirst', totalStaked); + const payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }); const call = ExtrinsicHelper.addOnChainMessage(stakeKeys, schemaId, payload); let nonce = await getNonce(stakeKeys); // confirm that we start with a full tank await ExtrinsicHelper.runToBlock(await getNextEpochBlock()); let remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); - assert.equal(expectedCapacity, remainingCapacity, "Our expected capacity from staking is wrong"); + assert.equal(expectedCapacity, remainingCapacity, 'Our expected capacity from staking is wrong'); await call.payWithCapacity(nonce++); remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); - assert(expectedCapacity > remainingCapacity, "Our remaining capacity is much higher than expected."); + assert(expectedCapacity > remainingCapacity, 'Our remaining capacity is much higher than expected.'); const capacityPerCall = expectedCapacity - remainingCapacity; assert(remainingCapacity > capacityPerCall, "We don't have enough to make a second call"); @@ -77,15 +76,17 @@ describe("Capacity Replenishment Testing: ", function () { }); function assert_capacity_call_fails_with_balance_too_low(call: Extrinsic) { - return assert.rejects( - call.payWithCapacity('current'), { name: "RpcError", message: /1010.+account balance too low/ }); + return assert.rejects(call.payWithCapacity('current'), { + name: 'RpcError', + message: /1010.+account balance too low/, + 
}); } - describe("Capacity is not replenished", function () { - it("if out of capacity and last_replenished_at is <= current epoch", async function () { - let [stakeKeys, stakeProviderId] = await createAndStakeProvider("NoSend", 150n * CENTS); - let payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }) - let call = ExtrinsicHelper.addOnChainMessage(stakeKeys, schemaId, payload); + describe('Capacity is not replenished', function () { + it('if out of capacity and last_replenished_at is <= current epoch', async function () { + const [stakeKeys, stakeProviderId] = await createAndStakeProvider('NoSend', 150n * CENTS); + const payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }); + const call = ExtrinsicHelper.addOnChainMessage(stakeKeys, schemaId, payload); // run until we can't afford to send another message. await drainCapacity(call, stakeProviderId); @@ -95,22 +96,22 @@ describe("Capacity Replenishment Testing: ", function () { }); describe("Regression test: when user attempts to stake tiny amounts before provider's first message of an epoch,", function () { - it("provider is still replenished and can send a message", async function () { + it('provider is still replenished and can send a message', async function () { const providerStakeAmt = 3n * DOLLARS; const userStakeAmt = 100n * CENTS; const userIncrementAmt = 1n * CENTS; - const [stakeKeys, stakeProviderId] = await createAndStakeProvider("TinyStake", providerStakeAmt); + const [stakeKeys, stakeProviderId] = await createAndStakeProvider('TinyStake', providerStakeAmt); // new user/msa stakes to provider - const userKeys = createKeys("userKeys"); + const userKeys = createKeys('userKeys'); await fundKeypair(fundingSource, userKeys, 5n * DOLLARS); const { eventMap } = await ExtrinsicHelper.stake(userKeys, stakeProviderId, userStakeAmt).signAndSend(); assertEvent(eventMap, 'system.ExtrinsicSuccess'); - const payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }) + const payload = JSON.stringify({ changeType: 1, fromId: 1, objectId: 2 }); const call = ExtrinsicHelper.addOnChainMessage(stakeKeys, schemaId, payload); - let expectedCapacity = (providerStakeAmt + userStakeAmt) / TokenPerCapacity; + const expectedCapacity = (providerStakeAmt + userStakeAmt) / TokenPerCapacity; const totalCapacity = (await getCapacity(stakeProviderId)).totalCapacityIssued.toBigInt(); assert.equal(expectedCapacity, totalCapacity, `expected ${expectedCapacity} capacity, got ${totalCapacity}`); @@ -120,7 +121,7 @@ describe("Capacity Replenishment Testing: ", function () { await assert_capacity_call_fails_with_balance_too_low(call); // go to next epoch - let nextEpochBlock = await getNextEpochBlock(); + const nextEpochBlock = await getNextEpochBlock(); await ExtrinsicHelper.runToBlock(nextEpochBlock); let remainingCapacity = (await getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); @@ -128,7 +129,11 @@ describe("Capacity Replenishment Testing: ", function () { assert(remainingCapacity < callCapacityCost); // user stakes tiny additional amount - const { eventMap: hasStaked } = await ExtrinsicHelper.stake(userKeys, stakeProviderId, userIncrementAmt).signAndSend(); + const { eventMap: hasStaked } = await ExtrinsicHelper.stake( + userKeys, + stakeProviderId, + userIncrementAmt + ).signAndSend(); assertEvent(hasStaked, 'capacity.Staked'); // provider can now send a message @@ -137,7 +142,7 @@ describe("Capacity Replenishment Testing: ", function () { remainingCapacity = (await 
getCapacity(stakeProviderId)).remainingCapacity.toBigInt(); // show that capacity was replenished and then fee deducted. - let approxExpected = providerStakeAmt + userStakeAmt + userIncrementAmt - callCapacityCost; + const approxExpected = providerStakeAmt + userStakeAmt + userIncrementAmt - callCapacityCost; assert(remainingCapacity <= approxExpected, `remainingCapacity = ${remainingCapacity.toString()}`); }); }); @@ -149,7 +154,7 @@ async function drainCapacity(call, stakeProviderId: u64): Promise { // Figure out the cost per call in Capacity const { eventMap } = await call.payWithCapacity(nonce++); - const callCapacityCost = eventMap["capacity.CapacityWithdrawn"].data.amount.toBigInt(); + const callCapacityCost = eventMap['capacity.CapacityWithdrawn'].data.amount.toBigInt(); // // Run them out of funds, but don't flake just because it landed near an epoch boundary. await ExtrinsicHelper.runToBlock(await getNextEpochBlock()); diff --git a/e2e/capacity/staking.test.ts b/e2e/capacity/staking.test.ts index 7b0da81838..9766ae576c 100644 --- a/e2e/capacity/staking.test.ts +++ b/e2e/capacity/staking.test.ts @@ -1,273 +1,328 @@ -import "@frequency-chain/api-augment"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { u64, } from "@polkadot/types"; -import assert from "assert"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; +import '@frequency-chain/api-augment'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { u64 } from '@polkadot/types'; +import assert from 'assert'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; import { - createKeys, createMsaAndProvider, - stakeToProvider, - getNextEpochBlock, TEST_EPOCH_LENGTH, - CENTS, DOLLARS, createAndFundKeypair, getCapacity, createMsa -} from "../scaffolding/helpers"; -import { isDev } from "../scaffolding/env"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("Capacity Staking Tests", function () { - const accountBalance: bigint = 2n * DOLLARS; - const tokenMinStake: bigint = 1n * CENTS; - let capacityMin: bigint = tokenMinStake / 50n; - const fundingSource = getFundingSource("capacity-staking"); - - // The frozen balance is initialized and tracked throughout the staking end to end tests - // to accommodate for the fact that withdrawing unstaked token tests are not executed - // against a relay chain. Since the length of time to wait for an epoch period to roll over could - // potentially be hours / days, that test is skipped. Therefore, we must track the frozen balance - // so that tests against it can pass regardless if withdrawing (the frozen balance decreasing) - // has happened or not. 
- let trackedFrozenBalance: bigint = 0n; - - describe("scenario: user staking, unstaking, and withdraw-unstaked", function () { - let stakeKeys: KeyringPair; - let stakeProviderId: u64; - before(async function () { - stakeKeys = createKeys("StakeKeys"); - stakeProviderId = await createMsaAndProvider(fundingSource, stakeKeys, "StakeProvider", accountBalance); - }); + createKeys, + createMsaAndProvider, + stakeToProvider, + getNextEpochBlock, + TEST_EPOCH_LENGTH, + CENTS, + DOLLARS, + createAndFundKeypair, + getCapacity, + createMsa, +} from '../scaffolding/helpers'; +import { isDev } from '../scaffolding/env'; +import { getFundingSource } from '../scaffolding/funding'; + +const accountBalance: bigint = 2n * DOLLARS; +const tokenMinStake: bigint = 1n * CENTS; +const capacityMin: bigint = tokenMinStake / 50n; +const fundingSource = getFundingSource('capacity-staking'); + +describe('Capacity Staking Tests', function () { + // The frozen balance is initialized and tracked throughout the staking end to end tests + // to accommodate for the fact that withdrawing unstaked token tests are not executed + // against a relay chain. Since the length of time to wait for an epoch period to roll over could + // potentially be hours / days, that test is skipped. Therefore, we must track the frozen balance + // so that tests against it can pass regardless if withdrawing (the frozen balance decreasing) + // has happened or not. + let trackedFrozenBalance: bigint = 0n; + + describe('scenario: user staking, unstaking, and withdraw-unstaked', function () { + let stakeKeys: KeyringPair; + let stakeProviderId: u64; + before(async function () { + stakeKeys = createKeys('StakeKeys'); + stakeProviderId = await createMsaAndProvider(fundingSource, stakeKeys, 'StakeProvider', accountBalance); + }); - it("successfully stakes the minimum amount", async function () { + it('successfully stakes the minimum amount', async function () { + await assert.doesNotReject(stakeToProvider(fundingSource, stakeKeys, stakeProviderId, tokenMinStake)); + + // Confirm that the tokens were locked in the stakeKeys account using the query API + const stakedAcctInfo = await ExtrinsicHelper.getAccountInfo(stakeKeys.address); + assert.equal( + stakedAcctInfo.data.frozen, + tokenMinStake, + `expected ${tokenMinStake} frozen balance, got ${stakedAcctInfo.data.frozen}` + ); + + // Confirm that the capacity was added to the stakeProviderId using the query API + const capacityStaked = await getCapacity(stakeProviderId); + assert.equal( + capacityStaked.remainingCapacity, + capacityMin, + `expected capacityLedger.remainingCapacity = 1CENT, got ${capacityStaked.remainingCapacity}` + ); + assert.equal( + capacityStaked.totalTokensStaked, + tokenMinStake, + `expected capacityLedger.totalTokensStaked = 1CENT, got ${capacityStaked.totalTokensStaked}` + ); + assert.equal( + capacityStaked.totalCapacityIssued, + capacityMin, + `expected capacityLedger.totalCapacityIssued = 1CENT, got ${capacityStaked.totalCapacityIssued}` + ); + trackedFrozenBalance += tokenMinStake; + }); - await assert.doesNotReject(stakeToProvider(fundingSource, stakeKeys, stakeProviderId, tokenMinStake)); + it('successfully unstakes the minimum amount', async function () { + const stakeObj = ExtrinsicHelper.unstake(stakeKeys, stakeProviderId, tokenMinStake); + const { target: unStakeEvent } = await stakeObj.signAndSend(); + assert.notEqual(unStakeEvent, undefined, 'should return an UnStaked event'); + assert.equal( + unStakeEvent?.data.capacity, + capacityMin, + 'should return an 
UnStaked event with 1 CENT reduced capacity' + ); + + // Confirm that the tokens were unstaked in the stakeProviderId account using the query API + const capacityStaked = await getCapacity(stakeProviderId); + assert.equal(capacityStaked.remainingCapacity, 0, 'should return a capacityLedger with 0 remainingCapacity'); + assert.equal(capacityStaked.totalTokensStaked, 0, 'should return a capacityLedger with 0 total tokens staked'); + assert.equal(capacityStaked.totalCapacityIssued, 0, 'should return a capacityLedger with 0 capacity issued'); + }); - // Confirm that the tokens were locked in the stakeKeys account using the query API - const stakedAcctInfo = await ExtrinsicHelper.getAccountInfo(stakeKeys.address); - assert.equal(stakedAcctInfo.data.frozen, tokenMinStake, `expected ${tokenMinStake} frozen balance, got ${stakedAcctInfo.data.frozen}`) + it('successfully withdraws the unstaked amount', async function () { + // Withdrawing unstaked token will only be executed against a Frequency + // node built for development due to the long length of time it would + // take to wait for an epoch period to roll over. + if (!isDev()) this.skip(); - // Confirm that the capacity was added to the stakeProviderId using the query API - const capacityStaked = await getCapacity(stakeProviderId); - assert.equal(capacityStaked.remainingCapacity, capacityMin, `expected capacityLedger.remainingCapacity = 1CENT, got ${capacityStaked.remainingCapacity}`); - assert.equal(capacityStaked.totalTokensStaked, tokenMinStake, `expected capacityLedger.totalTokensStaked = 1CENT, got ${capacityStaked.totalTokensStaked}`); - assert.equal(capacityStaked.totalCapacityIssued, capacityMin, `expected capacityLedger.totalCapacityIssued = 1CENT, got ${capacityStaked.totalCapacityIssued}`); - trackedFrozenBalance += tokenMinStake; - }); + // Mine enough blocks to pass the unstake period = CapacityUnstakingThawPeriod = 2 epochs + const newEpochBlock = await getNextEpochBlock(); + await ExtrinsicHelper.runToBlock(newEpochBlock + TEST_EPOCH_LENGTH + 1); - it("successfully unstakes the minimum amount", async function () { - const stakeObj = ExtrinsicHelper.unstake(stakeKeys, stakeProviderId, tokenMinStake); - const { target: unStakeEvent } = await stakeObj.signAndSend(); - assert.notEqual(unStakeEvent, undefined, "should return an UnStaked event"); - assert.equal(unStakeEvent?.data.capacity, capacityMin, "should return an UnStaked event with 1 CENT reduced capacity"); + const withdrawObj = ExtrinsicHelper.withdrawUnstaked(stakeKeys); + const { target: withdrawEvent } = await withdrawObj.signAndSend(); + assert.notEqual(withdrawEvent, undefined, 'should return a StakeWithdrawn event'); - // Confirm that the tokens were unstaked in the stakeProviderId account using the query API - const capacityStaked = await getCapacity(stakeProviderId); - assert.equal(capacityStaked.remainingCapacity, 0, "should return a capacityLedger with 0 remainingCapacity"); - assert.equal(capacityStaked.totalTokensStaked, 0, "should return a capacityLedger with 0 total tokens staked"); - assert.equal(capacityStaked.totalCapacityIssued, 0, "should return a capacityLedger with 0 capacity issued"); - }); - - it("successfully withdraws the unstaked amount", async function () { - // Withdrawing unstaked token will only be executed against a Frequency - // node built for development due to the long length of time it would - // take to wait for an epoch period to roll over. 
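// Illustrative sketch of the block math used above: unstaked tokens thaw after
// CapacityUnstakingThawPeriod (two epochs, per the comment in this test), so starting from
// the next epoch boundary the test advances one more full epoch plus one block of margin.
// `firstWithdrawableBlock` is a hypothetical name; TEST_EPOCH_LENGTH comes from the
// scaffolding helpers.
function firstWithdrawableBlock(nextEpochBlock: number, epochLength: number): number {
  return nextEpochBlock + epochLength + 1;
}
// e.g. if the next epoch began at block 100 and the test epoch length were 10 blocks,
// withdrawal would be attempted at block 111.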
- if (!isDev()) this.skip(); + const amount = withdrawEvent!.data.amount; + assert.equal(amount, tokenMinStake, 'should return a StakeWithdrawn event with 1M amount'); - // Mine enough blocks to pass the unstake period = CapacityUnstakingThawPeriod = 2 epochs - let newEpochBlock = await getNextEpochBlock(); - await ExtrinsicHelper.runToBlock(newEpochBlock + TEST_EPOCH_LENGTH + 1); + // Confirm that the tokens were unstaked in the stakeKeys account using the query API + const unStakedAcctInfo = await ExtrinsicHelper.getAccountInfo(stakeKeys.address); + assert.equal(unStakedAcctInfo.data.frozen, 0, 'should return an account with 0 frozen balance'); - const withdrawObj = ExtrinsicHelper.withdrawUnstaked(stakeKeys); - const { target: withdrawEvent } = await withdrawObj.signAndSend(); - assert.notEqual(withdrawEvent, undefined, "should return a StakeWithdrawn event"); + // Confirm that the staked capacity was removed from the stakeProviderId account using the query API + const capacityStaked = await getCapacity(stakeProviderId); + assert.equal(capacityStaked.remainingCapacity, 0, 'should return a capacityLedger with 0 remainingCapacity'); + assert.equal(capacityStaked.totalTokensStaked, 0, 'should return a capacityLedger with 0 total tokens staked'); + assert.equal(capacityStaked.totalCapacityIssued, 0, 'should return a capacityLedger with 0 capacity issued'); - const amount = withdrawEvent!.data.amount; - assert.equal(amount, tokenMinStake, "should return a StakeWithdrawn event with 1M amount"); + trackedFrozenBalance -= tokenMinStake; + }); + describe('when staking to multiple times', function () { + describe('and targeting same provider', function () { + it('successfully increases the amount that was targeted to provider', async function () { + await assert.doesNotReject(stakeToProvider(fundingSource, stakeKeys, stakeProviderId, tokenMinStake)); + + const capacityStaked = await getCapacity(stakeProviderId); + assert.equal( + capacityStaked.remainingCapacity, + capacityMin, + `expected capacityStaked.remainingCapacity = ${capacityMin}, got ${capacityStaked.remainingCapacity}` + ); + assert.equal( + capacityStaked.totalTokensStaked, + tokenMinStake, + `expected capacityStaked.totalTokensStaked = ${tokenMinStake}, got ${capacityStaked.totalTokensStaked}` + ); + assert.equal( + capacityStaked.totalCapacityIssued, + capacityMin, + `expected capacityStaked.totalCapacityIssued = ${capacityMin}, got ${capacityStaked.totalCapacityIssued}` + ); + trackedFrozenBalance += tokenMinStake; + + // Confirm that the tokens were staked in the stakeKeys account using the query API + const stakedAcctInfo = await ExtrinsicHelper.getAccountInfo(stakeKeys.address); + + const increasedFrozen: bigint = stakedAcctInfo.data.frozen.toBigInt(); + + assert.equal( + increasedFrozen, + trackedFrozenBalance, + `expected frozen=${tokenMinStake}, got ${increasedFrozen}` + ); + }); - // Confirm that the tokens were unstaked in the stakeKeys account using the query API - const unStakedAcctInfo = await ExtrinsicHelper.getAccountInfo(stakeKeys.address); - assert.equal(unStakedAcctInfo.data.frozen, 0, "should return an account with 0 frozen balance") + describe('and targeting different provider', function () { + let otherProviderKeys: KeyringPair; + let otherProviderId: u64; + + beforeEach(async function () { + otherProviderKeys = createKeys('OtherProviderKeys'); + otherProviderId = await createMsaAndProvider(fundingSource, otherProviderKeys, 'OtherProvider'); + }); + + it('does not change other targets amounts', async function () 
{ + // Increase stake by 1 cent to a different target. + await assert.doesNotReject(stakeToProvider(fundingSource, stakeKeys, otherProviderId, 1n * CENTS)); + const expectedCapacity = CENTS / 50n; + + // Confirm that the staked capacity of the original stakeProviderId account is unchanged + // stakeProviderId should still have 1M from first test case in this describe. + // otherProvider should now have 1M + const origStaked = await getCapacity(stakeProviderId); + assert.equal( + origStaked.remainingCapacity, + expectedCapacity, + `expected 1/50 CENT remaining capacity, got ${origStaked.remainingCapacity}` + ); + assert.equal( + origStaked.totalTokensStaked, + 1n * CENTS, + `expected 1 CENT staked, got ${origStaked.totalTokensStaked}` + ); + assert.equal( + origStaked.totalCapacityIssued, + expectedCapacity, + `expected 1/50 CENT capacity issued, got ${origStaked.totalCapacityIssued}` + ); + + // Confirm that the staked capacity was added to the otherProviderId account using the query API + const capacityStaked = await getCapacity(otherProviderId); + assert.equal( + capacityStaked.remainingCapacity, + expectedCapacity, + 'should return a capacityLedger with 1/50M remainingCapacity' + ); + assert.equal( + capacityStaked.totalTokensStaked, + 1n * CENTS, + 'should return a capacityLedger with 1M total tokens staked' + ); + assert.equal( + capacityStaked.totalCapacityIssued, + expectedCapacity, + 'should return a capacityLedger with 1/50M capacity issued' + ); + }); + + it('successfully increases the amount that was targeted to provider from different accounts', async function () { + // Create a new account + const additionalKeys = await createAndFundKeypair(fundingSource, accountBalance); + const currentStaked = await getCapacity(stakeProviderId); + + await assert.doesNotReject(stakeToProvider(fundingSource, additionalKeys, stakeProviderId, tokenMinStake)); - // Confirm that the staked capacity was removed from the stakeProviderId account using the query API const capacityStaked = await getCapacity(stakeProviderId); - assert.equal(capacityStaked.remainingCapacity, 0, "should return a capacityLedger with 0 remainingCapacity"); - assert.equal(capacityStaked.totalTokensStaked, 0, "should return a capacityLedger with 0 total tokens staked"); - assert.equal(capacityStaked.totalCapacityIssued, 0, "should return a capacityLedger with 0 capacity issued"); - - trackedFrozenBalance -= tokenMinStake; - }); - describe("when staking to multiple times", async function () { - describe("and targeting same provider", async function () { - it("successfully increases the amount that was targeted to provider", async function () { - await assert.doesNotReject(stakeToProvider(fundingSource, stakeKeys, stakeProviderId, tokenMinStake)); - - const capacityStaked = await getCapacity(stakeProviderId); - assert.equal(capacityStaked.remainingCapacity, capacityMin, - `expected capacityStaked.remainingCapacity = ${capacityMin}, got ${capacityStaked.remainingCapacity}`); - assert.equal(capacityStaked.totalTokensStaked, tokenMinStake, - `expected capacityStaked.totalTokensStaked = ${tokenMinStake}, got ${capacityStaked.totalTokensStaked}`); - assert.equal(capacityStaked.totalCapacityIssued, capacityMin, - `expected capacityStaked.totalCapacityIssued = ${capacityMin}, got ${capacityStaked.totalCapacityIssued}`); - trackedFrozenBalance += tokenMinStake; - - // Confirm that the tokens were staked in the stakeKeys account using the query API - const stakedAcctInfo = await ExtrinsicHelper.getAccountInfo(stakeKeys.address); - - let 
increasedFrozen: bigint = stakedAcctInfo.data.frozen.toBigInt(); - - assert.equal(increasedFrozen, trackedFrozenBalance, `expected frozen=${tokenMinStake}, got ${increasedFrozen}`) - - }); - - describe("and targeting different provider", async function () { - let otherProviderKeys: KeyringPair; - let otherProviderId: u64; - - beforeEach(async function () { - otherProviderKeys = createKeys("OtherProviderKeys"); - otherProviderId = await createMsaAndProvider(fundingSource, otherProviderKeys, "OtherProvider"); - }); - - it("does not change other targets amounts", async function () { - // Increase stake by 1 cent to a different target. - await assert.doesNotReject(stakeToProvider(fundingSource, stakeKeys, otherProviderId, 1n * CENTS)); - const expectedCapacity = CENTS/50n; - - - // Confirm that the staked capacity of the original stakeProviderId account is unchanged - // stakeProviderId should still have 1M from first test case in this describe. - // otherProvider should now have 1M - const origStaked = await getCapacity(stakeProviderId); - assert.equal(origStaked.remainingCapacity, expectedCapacity, `expected 1/50 CENT remaining capacity, got ${origStaked.remainingCapacity}`); - assert.equal(origStaked.totalTokensStaked, 1n * CENTS, `expected 1 CENT staked, got ${origStaked.totalTokensStaked}`); - assert.equal(origStaked.totalCapacityIssued, expectedCapacity, `expected 1/50 CENT capacity issued, got ${origStaked.totalCapacityIssued}`); - - // Confirm that the staked capacity was added to the otherProviderId account using the query API - const capacityStaked = await getCapacity(otherProviderId); - assert.equal(capacityStaked.remainingCapacity, expectedCapacity, "should return a capacityLedger with 1/50M remainingCapacity"); - assert.equal(capacityStaked.totalTokensStaked, 1n * CENTS, "should return a capacityLedger with 1M total tokens staked"); - assert.equal(capacityStaked.totalCapacityIssued, expectedCapacity, "should return a capacityLedger with 1/50M capacity issued"); - }); - - it("successfully increases the amount that was targeted to provider from different accounts", async function () { - // Create a new account - const additionalKeys = await createAndFundKeypair(fundingSource, accountBalance); - - // get the current account info - let currentAcctInfo = await ExtrinsicHelper.getAccountInfo(additionalKeys.address); - const currentStaked = await getCapacity(stakeProviderId); - - await assert.doesNotReject(stakeToProvider(fundingSource, additionalKeys, stakeProviderId, tokenMinStake)); - - const capacityStaked = await getCapacity(stakeProviderId); - assert.equal( - capacityStaked.remainingCapacity, - currentStaked.remainingCapacity.toBigInt() + capacityMin, - `should return a capacityLedger with ${capacityMin} remaining, got ${capacityStaked.remainingCapacity}` - ); - assert.equal( - capacityStaked.totalTokensStaked, - currentStaked.totalTokensStaked.toBigInt() + tokenMinStake, - `should return a capacityLedger with ${tokenMinStake} total staked, got: ${capacityStaked.totalTokensStaked}` - ); - assert.equal( - capacityStaked.totalCapacityIssued, - currentStaked.totalCapacityIssued.toBigInt() + capacityMin, - `should return a capacityLedger with ${capacityMin} total issued, got ${capacityStaked.totalCapacityIssued}` - ); - - // Confirm that the tokens were not staked in the stakeKeys account using the query API - const stakedAcctInfo = await ExtrinsicHelper.getAccountInfo(additionalKeys.address); - - let increasedFrozen: bigint = stakedAcctInfo.data.frozen.toBigInt(); - - 
assert.equal(increasedFrozen, tokenMinStake, `expected frozen=${tokenMinStake}, got ${increasedFrozen}`) - }); - - }); - }); + assert.equal( + capacityStaked.remainingCapacity, + currentStaked.remainingCapacity.toBigInt() + capacityMin, + `should return a capacityLedger with ${capacityMin} remaining, got ${capacityStaked.remainingCapacity}` + ); + assert.equal( + capacityStaked.totalTokensStaked, + currentStaked.totalTokensStaked.toBigInt() + tokenMinStake, + `should return a capacityLedger with ${tokenMinStake} total staked, got: ${capacityStaked.totalTokensStaked}` + ); + assert.equal( + capacityStaked.totalCapacityIssued, + currentStaked.totalCapacityIssued.toBigInt() + capacityMin, + `should return a capacityLedger with ${capacityMin} total issued, got ${capacityStaked.totalCapacityIssued}` + ); + + // Confirm that the tokens were not staked in the stakeKeys account using the query API + const stakedAcctInfo = await ExtrinsicHelper.getAccountInfo(additionalKeys.address); + + const increasedFrozen: bigint = stakedAcctInfo.data.frozen.toBigInt(); + + assert.equal(increasedFrozen, tokenMinStake, `expected frozen=${tokenMinStake}, got ${increasedFrozen}`); + }); }); + }); }); + }); - describe("when staking and targeting an InvalidTarget", async function () { - it("fails to stake", async function () { - const stakeAmount = 10n * CENTS; - const stakeKeys = await createAndFundKeypair(fundingSource, accountBalance, "StakeKeys"); + describe('when staking and targeting an InvalidTarget', function () { + it('fails to stake', async function () { + const stakeAmount = 10n * CENTS; + const stakeKeys = await createAndFundKeypair(fundingSource, accountBalance, 'StakeKeys'); - const [notProviderMsaId] = await createMsa(fundingSource); + const [notProviderMsaId] = await createMsa(fundingSource); - const failStakeObj = ExtrinsicHelper.stake(stakeKeys, notProviderMsaId, stakeAmount); - await assert.rejects(failStakeObj.signAndSend(), { name: "InvalidTarget" }); - }); + const failStakeObj = ExtrinsicHelper.stake(stakeKeys, notProviderMsaId, stakeAmount); + await assert.rejects(failStakeObj.signAndSend(), { name: 'InvalidTarget' }); }); + }); - describe("when attempting to stake below the minimum staking requirements", async function () { - it("should fail to stake for InsufficientStakingAmount", async function () { - let stakingKeys = createKeys("stakingKeys"); - let providerId = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", 150n * CENTS); - let stakeAmount = 1500n; + describe('when attempting to stake below the minimum staking requirements', function () { + it('should fail to stake for InsufficientStakingAmount', async function () { + const stakingKeys = createKeys('stakingKeys'); + const providerId = await createMsaAndProvider(fundingSource, stakingKeys, 'stakingKeys', 150n * CENTS); + const stakeAmount = 1500n; - const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, stakeAmount); - await assert.rejects(failStakeObj.signAndSend(), { name: "InsufficientStakingAmount" }); - }); + const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, stakeAmount); + await assert.rejects(failStakeObj.signAndSend(), { name: 'InsufficientStakingAmount' }); }); + }); - describe("when attempting to stake a zero amount", async function () { - it("fails to stake and errors ZeroAmountNotAllowed", async function () { - let stakingKeys = createKeys("stakingKeys"); - let providerId = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", 10n * CENTS); + describe('when 
attempting to stake a zero amount', function () { + it('fails to stake and errors ZeroAmountNotAllowed', async function () { + const stakingKeys = createKeys('stakingKeys'); + const providerId = await createMsaAndProvider(fundingSource, stakingKeys, 'stakingKeys', 10n * CENTS); - const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, 0); - await assert.rejects(failStakeObj.signAndSend(), { name: "ZeroAmountNotAllowed" }); - }); + const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, 0); + await assert.rejects(failStakeObj.signAndSend(), { name: 'ZeroAmountNotAllowed' }); }); + }); - describe("when staking an amount and account balance is too low", async function () { - it("fails to stake and errors BalanceTooLowtoStake", async function () { - let stakingKeys = createKeys("stakingKeys"); - let providerId = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", 10n * CENTS); - let stakingAmount = 1n * DOLLARS; + describe('when staking an amount and account balance is too low', function () { + it('fails to stake and errors BalanceTooLowtoStake', async function () { + const stakingKeys = createKeys('stakingKeys'); + const providerId = await createMsaAndProvider(fundingSource, stakingKeys, 'stakingKeys', 10n * CENTS); + const stakingAmount = 1n * DOLLARS; - const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, stakingAmount); - await assert.rejects(failStakeObj.signAndSend(), { name: "BalanceTooLowtoStake" }); - }); + const failStakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, stakingAmount); + await assert.rejects(failStakeObj.signAndSend(), { name: 'BalanceTooLowtoStake' }); }); + }); + describe('unstake()', function () { + let unstakeKeys: KeyringPair; + let providerId: u64; - describe("unstake()", function () { - let unstakeKeys: KeyringPair; - let providerId: u64; + before(async function () { + const accountBalance: bigint = 100n * CENTS; + unstakeKeys = createKeys('stakingKeys'); + providerId = await createMsaAndProvider(fundingSource, unstakeKeys, 'stakingKeys', accountBalance); + }); - before(async function () { - let accountBalance: bigint = 100n * CENTS; - unstakeKeys = createKeys("stakingKeys"); - providerId = await createMsaAndProvider(fundingSource, unstakeKeys, "stakingKeys", accountBalance); - }); + describe('when attempting to unstake a Zero amount', function () { + it('errors with UnstakedAmountIsZero', async function () { + const failUnstakeObj = ExtrinsicHelper.unstake(unstakeKeys, providerId, 0); + await assert.rejects(failUnstakeObj.signAndSend(), { name: 'UnstakedAmountIsZero' }); + }); + }); - describe("when attempting to unstake a Zero amount", async function () { - it("errors with UnstakedAmountIsZero", async function () { - const failUnstakeObj = ExtrinsicHelper.unstake(unstakeKeys, providerId, 0); - await assert.rejects(failUnstakeObj.signAndSend(), { name: "UnstakedAmountIsZero" }); - }); - }) - - describe("when account has not staked", async function () { - it("errors with StakingAccountNotFound", async function () { - const failUnstakeObj = ExtrinsicHelper.unstake(unstakeKeys, providerId, tokenMinStake); - await assert.rejects(failUnstakeObj.signAndSend(), { name: "NotAStakingAccount" }); - }); - }); + describe('when account has not staked', function () { + it('errors with StakingAccountNotFound', async function () { + const failUnstakeObj = ExtrinsicHelper.unstake(unstakeKeys, providerId, tokenMinStake); + await assert.rejects(failUnstakeObj.signAndSend(), { name: 'NotAStakingAccount' }); + }); }); + 
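// Illustrative sketch of the rejection pattern used by the negative tests above: the error
// surfaced by signAndSend() carries the pallet error name, so Node's assert.rejects can
// match on it directly. `expectDispatchError` is a hypothetical wrapper, not a scaffolding helper.
async function expectDispatchError(sendResult: Promise<unknown>, errorName: string): Promise<void> {
  await assert.rejects(sendResult, { name: errorName });
}
// Usage, mirroring the cases above:
//   await expectDispatchError(
//     ExtrinsicHelper.unstake(unstakeKeys, providerId, 0).signAndSend(),
//     'UnstakedAmountIsZero'
//   );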
}); - describe("withdraw_unstaked()", async function () { - describe("when attempting to call withdrawUnstake before first calling unstake", async function () { - it("errors with NoUnstakedTokensAvailable", async function () { - let stakingKeys: KeyringPair = createKeys("stakingKeys"); - let providerId: u64 = await createMsaAndProvider(fundingSource, stakingKeys, "stakingKeys", accountBalance); + describe('withdraw_unstaked()', function () { + describe('when attempting to call withdrawUnstake before first calling unstake', function () { + it('errors with NoUnstakedTokensAvailable', async function () { + const stakingKeys: KeyringPair = createKeys('stakingKeys'); + const providerId: u64 = await createMsaAndProvider(fundingSource, stakingKeys, 'stakingKeys', accountBalance); - const stakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, tokenMinStake); - const { target: stakeEvent } = await stakeObj.signAndSend(); - assert.notEqual(stakeEvent, undefined, "should return a Stake event"); + const stakeObj = ExtrinsicHelper.stake(stakingKeys, providerId, tokenMinStake); + const { target: stakeEvent } = await stakeObj.signAndSend(); + assert.notEqual(stakeEvent, undefined, 'should return a Stake event'); - const withdrawObj = ExtrinsicHelper.withdrawUnstaked(stakingKeys); - await assert.rejects(withdrawObj.signAndSend(), { name: "NoUnstakedTokensAvailable" }); - }); - }) + const withdrawObj = ExtrinsicHelper.withdrawUnstaked(stakingKeys); + await assert.rejects(withdrawObj.signAndSend(), { name: 'NoUnstakedTokensAvailable' }); + }); }); -}) + }); +}); diff --git a/e2e/capacity/transactions.test.ts b/e2e/capacity/transactions.test.ts index a5314b71aa..1d25e85780 100644 --- a/e2e/capacity/transactions.test.ts +++ b/e2e/capacity/transactions.test.ts @@ -1,13 +1,12 @@ -import "@frequency-chain/api-augment"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { Bytes, u64, u16 } from "@polkadot/types"; -import { Codec } from "@polkadot/types/types"; -import { u8aToHex } from "@polkadot/util/u8a/toHex"; -import { u8aWrapBytes } from "@polkadot/util"; -import assert from "assert"; -import { AddKeyData, AddProviderPayload, EventMap, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; +import '@frequency-chain/api-augment'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { Bytes, u64, u16 } from '@polkadot/types'; +import { u8aToHex } from '@polkadot/util/u8a/toHex'; +import { u8aWrapBytes } from '@polkadot/util'; +import assert from 'assert'; +import { AddKeyData, AddProviderPayload, EventMap, ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; import { base64 } from 'multiformats/bases/base64'; -import { SchemaId, MessageResponse } from "@frequency-chain/api-augment/interfaces"; +import { SchemaId } from '@frequency-chain/api-augment/interfaces'; import { createKeys, createAndFundKeypair, @@ -22,7 +21,6 @@ import { DOLLARS, getOrCreateGraphChangeSchema, TokenPerCapacity, - Sr25519Signature, assertEvent, getCurrentItemizedHash, getCurrentPaginatedHash, @@ -40,48 +38,50 @@ import { getCapacity, getTestHandle, assertHasMessage, - assertAddNewKey -} from "../scaffolding/helpers"; -import { FeeDetails } from "@polkadot/types/interfaces"; -import { ipfsCid } from "../messages/ipfs"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("Capacity Transactions", function () { - const fundingSource = getFundingSource("capacity-transactions"); - - const FUNDS_AMOUNT: bigint = 50n * DOLLARS; - - describe("pay_with_capacity", function () { - 
describe("when caller has a Capacity account", async function () { + assertAddNewKey, +} from '../scaffolding/helpers'; +import { FeeDetails } from '@polkadot/types/interfaces'; +import { ipfsCid } from '../messages/ipfs'; +import { getFundingSource } from '../scaffolding/funding'; + +const FUNDS_AMOUNT: bigint = 50n * DOLLARS; +const fundingSource = getFundingSource('capacity-transactions'); + +describe('Capacity Transactions', function () { + describe('pay_with_capacity', function () { + describe('when caller has a Capacity account', function () { let schemaId: u16; const amountStaked = 2n * DOLLARS; before(async function () { // Create schemas for testing with Grant Delegation to test pay_with_capacity schemaId = await getOrCreateGraphChangeSchema(fundingSource); - assert.notEqual(schemaId, undefined, "setup should populate schemaId"); + assert.notEqual(schemaId, undefined, 'setup should populate schemaId'); }); function getCapacityFee(chainEvents: EventMap): bigint { - if (chainEvents["capacity.CapacityWithdrawn"] && - ExtrinsicHelper.api.events.capacity.CapacityWithdrawn.is(chainEvents["capacity.CapacityWithdrawn"])) { - return chainEvents["capacity.CapacityWithdrawn"].data.amount.toBigInt(); + if ( + chainEvents['capacity.CapacityWithdrawn'] && + ExtrinsicHelper.api.events.capacity.CapacityWithdrawn.is(chainEvents['capacity.CapacityWithdrawn']) + ) { + return chainEvents['capacity.CapacityWithdrawn'].data.amount.toBigInt(); } return 0n; } - it("fails when a provider makes an eligible extrinsic call using non-funded control key", async function () { - const capacityKeys = createKeys("CapacityKeysNSF"); - const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapProvNSF", FUNDS_AMOUNT); + it('fails when a provider makes an eligible extrinsic call using non-funded control key', async function () { + const capacityKeys = createKeys('CapacityKeysNSF'); + const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, 'CapProvNSF', FUNDS_AMOUNT); // this will first fund 'capacityKeys' before staking await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); // As current owner, add a new set of control keys that do not have a balance. - let newControlKeypair = createKeys("NewKeyNoBalance"); + const newControlKeypair = createKeys('NewKeyNoBalance'); const newPublicKey = newControlKeypair.publicKey; - const addKeyPayload: AddKeyData = await generateAddKeyPayload( - { msaId: capacityProvider, newPublicKey: newPublicKey } - ); + const addKeyPayload: AddKeyData = await generateAddKeyPayload({ + msaId: capacityProvider, + newPublicKey: newPublicKey, + }); await assertAddNewKey(capacityKeys, addKeyPayload, newControlKeypair); // attempt a capacity transaction using the new unfunded key: claimHandle @@ -92,96 +92,99 @@ describe("Capacity Transactions", function () { baseHandle: handle_vec, expiration: expiration, }; - const claimHandlePayload = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", handlePayload); + const claimHandlePayload = ExtrinsicHelper.api.registry.createType( + 'CommonPrimitivesHandlesClaimHandlePayload', + handlePayload + ); const claimHandle = ExtrinsicHelper.claimHandle(newControlKeypair, claimHandlePayload); await assert.rejects(claimHandle.payWithCapacity('current'), { - name: "RpcError", message: - "1010: Invalid Transaction: Inability to pay some fees , e.g. 
account balance too low" + name: 'RpcError', + message: '1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low', }); }); - describe("when capacity eligible transaction is from the msa pallet", async function () { + describe('when capacity eligible transaction is from the msa pallet', function () { let capacityKeys: KeyringPair; let capacityProvider: u64; let delegatorKeys: KeyringPair; let payload: any = {}; - const stakedForMsa = 15n * DOLLARS + const stakedForMsa = 15n * DOLLARS; before(async function () { - capacityKeys = createKeys("CapacityKeys"); - capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); + capacityKeys = createKeys('CapacityKeys'); + capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, 'CapacityProvider', FUNDS_AMOUNT); // Stake enough for all transactions await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, stakedForMsa)); - }) + }); beforeEach(async function () { - delegatorKeys = createKeys("delegatorKeys") + delegatorKeys = createKeys('delegatorKeys'); payload = await generateDelegationPayload({ authorizedMsaId: capacityProvider, schemaIds: [schemaId], }); - }) + }); - it("successfully pays with Capacity for eligible transaction - addPublicKeytoMSA", async function () { - let authorizedKeys: KeyringPair[] = []; - let defaultPayload: AddKeyData = {}; - let payload: AddKeyData; - let ownerSig: Sr25519Signature; - let newSig: Sr25519Signature; - let addKeyData: Codec; + it('successfully pays with Capacity for eligible transaction - addPublicKeytoMSA', async function () { + const authorizedKeys: KeyringPair[] = []; + const defaultPayload: AddKeyData = {}; authorizedKeys.push(await createAndFundKeypair(fundingSource, 50_000_000n)); - defaultPayload.msaId = capacityProvider + defaultPayload.msaId = capacityProvider; defaultPayload.newPublicKey = authorizedKeys[0].publicKey; - payload = await generateAddKeyPayload(defaultPayload); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - ownerSig = signPayloadSr25519(capacityKeys, addKeyData); - newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); + const payload = await generateAddKeyPayload(defaultPayload); + const addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); + const ownerSig = signPayloadSr25519(capacityKeys, addKeyData); + const newSig = signPayloadSr25519(authorizedKeys[0], addKeyData); const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, payload); const { eventMap } = await addPublicKeyOp.payWithCapacity(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "msa.PublicKeyAdded"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'msa.PublicKeyAdded'); }); - it("successfully pays with Capacity for eligible transaction - createSponsoredAccountWithDelegation", async function () { - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); + it('successfully pays with Capacity for eligible transaction - createSponsoredAccountWithDelegation', async function () { + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); const call = ExtrinsicHelper.createSponsoredAccountWithDelegation( - delegatorKeys, capacityKeys, + 
delegatorKeys, + capacityKeys, signPayloadSr25519(delegatorKeys, addProviderData), payload ); const { eventMap } = await call.payWithCapacity(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "msa.DelegationGranted"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'msa.DelegationGranted'); }); - it("successfully pays with Capacity for eligible transaction - grantDelegation", async function () { - + it('successfully pays with Capacity for eligible transaction - grantDelegation', async function () { await fundKeypair(fundingSource, delegatorKeys, 10n * CENTS); let { eventMap } = await ExtrinsicHelper.createMsa(delegatorKeys).signAndSend(); - assertEvent(eventMap, "msa.MsaCreated"); + assertEvent(eventMap, 'msa.MsaCreated'); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, capacityKeys, - signPayloadSr25519(delegatorKeys, addProviderData), payload); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + delegatorKeys, + capacityKeys, + signPayloadSr25519(delegatorKeys, addProviderData), + payload + ); ({ eventMap } = await grantDelegationOp.payWithCapacity()); // Check for remaining capacity to be reduced const capacityStaked = await getCapacity(capacityProvider); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "msa.DelegationGranted"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'msa.DelegationGranted'); const fee = getCapacityFee(eventMap); // assuming no other txns charged against capacity (b/c of async tests), this should be the maximum amount left. 
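// Illustrative sketch of the capacity arithmetic relied on just below: the capacity issued
// for a stake is the staked token amount divided by TokenPerCapacity (the staking tests
// elsewhere compute expected capacity as tokens / 50n), and after a capacity fee is
// withdrawn the remaining capacity can only be at or below that ceiling. The helper name
// and the example figures are hypothetical.
function maxExpectedRemainingCapacity(stakedTokens: bigint, tokenPerCapacity: bigint, fee: bigint): bigint {
  return stakedTokens / tokenPerCapacity - fee;
}
// e.g. for a stake of 15 DOLLARS at a 50:1 ratio with a fee already withdrawn:
//   const bound = maxExpectedRemainingCapacity(15n * DOLLARS, 50n, fee);
//   assert(capacityStaked.remainingCapacity.toBigInt() <= bound);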
- const maximumExpectedRemaining = stakedForMsa / TokenPerCapacity - fee + const maximumExpectedRemaining = stakedForMsa / TokenPerCapacity - fee; const remaining = capacityStaked.remainingCapacity.toBigInt(); assert(remaining <= maximumExpectedRemaining, `expected ${remaining} to be <= ${maximumExpectedRemaining}`); @@ -190,15 +193,15 @@ describe("Capacity Transactions", function () { }); }); - describe("when capacity eligible transaction is from the messages pallet", async function () { + describe('when capacity eligible transaction is from the messages pallet', function () { let starting_block: number; let capacityKeys: KeyringPair; let capacityProvider: u64; before(async function () { - capacityKeys = createKeys("CapacityKeys"); - capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); - }) + capacityKeys = createKeys('CapacityKeys'); + capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, 'CapacityProvider', FUNDS_AMOUNT); + }); beforeEach(async function () { starting_block = (await ExtrinsicHelper.apiPromise.rpc.chain.getHeader()).number.toNumber(); @@ -206,47 +209,44 @@ describe("Capacity Transactions", function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); }); - it("successfully pays with Capacity for eligible transaction - addIPFSMessage", async function () { - let schemaId = await getOrCreateParquetBroadcastSchema(fundingSource); - const ipfs_payload_data = "This is a test of Frequency."; + it('successfully pays with Capacity for eligible transaction - addIPFSMessage', async function () { + const schemaId = await getOrCreateParquetBroadcastSchema(fundingSource); + const ipfs_payload_data = 'This is a test of Frequency.'; const ipfs_payload_len = ipfs_payload_data.length + 1; const ipfs_cid_64 = (await ipfsCid(ipfs_payload_data, './e2e_test.txt')).toString(base64); const call = ExtrinsicHelper.addIPFSMessage(capacityKeys, schemaId, ipfs_cid_64, ipfs_payload_len); const { eventMap } = await call.payWithCapacity(); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "messages.MessagesStored"); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'messages.MessagesStored'); }); - it("successfully pays with Capacity for eligible transaction - addOnchainMessage", async function () { + it('successfully pays with Capacity for eligible transaction - addOnchainMessage', async function () { // Create a dummy on-chain schema - let dummySchemaId: u16 = await getOrCreateDummySchema(fundingSource); - const call = ExtrinsicHelper.addOnChainMessage(capacityKeys, dummySchemaId, "0xdeadbeef"); + const dummySchemaId: u16 = await getOrCreateDummySchema(fundingSource); + const call = ExtrinsicHelper.addOnChainMessage(capacityKeys, dummySchemaId, '0xdeadbeef'); const { eventMap } = await call.payWithCapacity(); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "messages.MessagesStored"); - const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId( - dummySchemaId, - { - from_block: starting_block, - from_index: 0, - to_block: starting_block + 999, - page_size: 999 - } - ); - assertHasMessage(get, x => x.payload.isSome && x.payload.toString() === "0xdeadbeef"); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'messages.MessagesStored'); + const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(dummySchemaId, { + from_block: 
starting_block, + from_index: 0, + to_block: starting_block + 999, + page_size: 999, + }); + assertHasMessage(get, (x) => x.payload.isSome && x.payload.toString() === '0xdeadbeef'); }); }); - describe("when capacity eligible transaction is from the StatefulStorage pallet", async function () { + describe('when capacity eligible transaction is from the StatefulStorage pallet', function () { let delegatorKeys: KeyringPair; let delegatorProviderId: u64; let capacityKeys: KeyringPair; let capacityProvider: u64; before(async function () { - capacityKeys = createKeys("CapacityKeys"); - capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); + capacityKeys = createKeys('CapacityKeys'); + capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, 'CapacityProvider', FUNDS_AMOUNT); }); beforeEach(async function () { @@ -254,136 +254,173 @@ describe("Capacity Transactions", function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); }); - it("successfully pays with Capacity for eligible transaction - applyItemActions", async function () { + it('successfully pays with Capacity for eligible transaction - applyItemActions', async function () { // Create a schema to allow delete actions - let schemaId_deletable: SchemaId = await getOrCreateAvroChatMessageItemizedSchema(fundingSource) + const schemaId_deletable: SchemaId = await getOrCreateAvroChatMessageItemizedSchema(fundingSource); // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); const add_action = { - "Add": payload_1 - } + Add: payload_1, + }; - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); const update_action = { - "Add": payload_2 - } + Add: payload_2, + }; const target_hash = await getCurrentItemizedHash(capacityProvider, schemaId_deletable); - let add_actions = [add_action, update_action]; - const call = ExtrinsicHelper.applyItemActions(capacityKeys, schemaId_deletable, capacityProvider, add_actions, target_hash); + const add_actions = [add_action, update_action]; + const call = ExtrinsicHelper.applyItemActions( + capacityKeys, + schemaId_deletable, + capacityProvider, + add_actions, + target_hash + ); const { eventMap } = await call.payWithCapacity(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "statefulStorage.ItemizedPageUpdated"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'statefulStorage.ItemizedPageUpdated'); }); - it("successfully pays with Capacity for eligible transaction - upsertPage; deletePage", async function () { + it('successfully pays with Capacity for eligible transaction - upsertPage; deletePage', async function () { // Get a schema for Paginated PayloadLocation schemaId = await getOrCreateAvroChatMessagePaginatedSchema(fundingSource); - let page_id = 0; - let target_hash = await getCurrentPaginatedHash(capacityProvider, schemaId, page_id) + const page_id = 0; + let target_hash = await getCurrentPaginatedHash(capacityProvider, schemaId, page_id); // Add and update actions - const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 
"Hello World From Frequency"); - const call = ExtrinsicHelper.upsertPage(capacityKeys, schemaId, capacityProvider, page_id, payload_1, target_hash); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + const call = ExtrinsicHelper.upsertPage( + capacityKeys, + schemaId, + capacityProvider, + page_id, + payload_1, + target_hash + ); const { eventMap } = await call.payWithCapacity(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "statefulStorage.PaginatedPageUpdated") + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'statefulStorage.PaginatedPageUpdated'); // Remove the page - target_hash = await getCurrentPaginatedHash(capacityProvider, schemaId, page_id) + target_hash = await getCurrentPaginatedHash(capacityProvider, schemaId, page_id); const call2 = ExtrinsicHelper.removePage(capacityKeys, schemaId, capacityProvider, page_id, target_hash); const { eventMap: eventMap2 } = await call2.payWithCapacity(); - assertEvent(eventMap2, "system.ExtrinsicSuccess"); - assertEvent(eventMap2, "capacity.CapacityWithdrawn"); - assertEvent(eventMap2, "statefulStorage.PaginatedPageDeleted"); + assertEvent(eventMap2, 'system.ExtrinsicSuccess'); + assertEvent(eventMap2, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap2, 'statefulStorage.PaginatedPageDeleted'); }); - it("successfully pays with Capacity for eligible transaction - applyItemActionsWithSignature", async function () { + it('successfully pays with Capacity for eligible transaction - applyItemActionsWithSignature', async function () { // Create a schema for Itemized PayloadLocation - let itemizedSchemaId: SchemaId = await getOrCreateAvroChatMessageItemizedSchema(fundingSource); + const itemizedSchemaId: SchemaId = await getOrCreateAvroChatMessageItemizedSchema(fundingSource); // Create a MSA for the delegator [delegatorKeys, delegatorProviderId] = await createDelegator(fundingSource); - assert.notEqual(delegatorKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(delegatorProviderId, undefined, "setup should populate msa_id"); + assert.notEqual(delegatorKeys, undefined, 'setup should populate delegator_key'); + assert.notEqual(delegatorProviderId, undefined, 'setup should populate msa_id'); // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); const add_action = { - "Add": payload_1 - } + Add: payload_1, + }; - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); const update_action = { - "Add": payload_2 - } + Add: payload_2, + }; const target_hash = await getCurrentItemizedHash(delegatorProviderId, itemizedSchemaId); - let add_actions = [add_action, update_action]; + const add_actions = [add_action, update_action]; const payload = await generateItemizedSignaturePayload({ msaId: delegatorProviderId, targetHash: target_hash, schemaId: itemizedSchemaId, actions: add_actions, }); - const itemizedPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStorageItemizedSignaturePayload", payload); - let itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignature(delegatorKeys, capacityKeys, 
signPayloadSr25519(delegatorKeys, itemizedPayloadData), payload); + const itemizedPayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStorageItemizedSignaturePayload', + payload + ); + const itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignature( + delegatorKeys, + capacityKeys, + signPayloadSr25519(delegatorKeys, itemizedPayloadData), + payload + ); const { target: pageUpdateEvent1, eventMap } = await itemized_add_result_1.payWithCapacity(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assert.notEqual(pageUpdateEvent1, undefined, "should have returned a PalletStatefulStorageItemizedActionApplied event"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assert.notEqual( + pageUpdateEvent1, + undefined, + 'should have returned a PalletStatefulStorageItemizedActionApplied event' + ); }); - it("successfully pays with Capacity for eligible transaction - applyItemActionsWithSignatureV2", async function () { + it('successfully pays with Capacity for eligible transaction - applyItemActionsWithSignatureV2', async function () { // Create a schema for Itemized PayloadLocation - let itemizedSchemaId: SchemaId = await getOrCreateAvroChatMessageItemizedSchema(fundingSource); + const itemizedSchemaId: SchemaId = await getOrCreateAvroChatMessageItemizedSchema(fundingSource); // Create a MSA for the delegator [delegatorKeys, delegatorProviderId] = await createDelegator(fundingSource); - assert.notEqual(delegatorKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(delegatorProviderId, undefined, "setup should populate msa_id"); + assert.notEqual(delegatorKeys, undefined, 'setup should populate delegator_key'); + assert.notEqual(delegatorProviderId, undefined, 'setup should populate msa_id'); // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); const add_action = { - "Add": payload_1 - } + Add: payload_1, + }; - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); const update_action = { - "Add": payload_2 - } + Add: payload_2, + }; const target_hash = await getCurrentItemizedHash(delegatorProviderId, itemizedSchemaId); - let add_actions = [add_action, update_action]; + const add_actions = [add_action, update_action]; const payload = await generateItemizedSignaturePayloadV2({ targetHash: target_hash, schemaId: itemizedSchemaId, actions: add_actions, }); - const itemizedPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStorageItemizedSignaturePayloadV2", payload); - let itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignatureV2(delegatorKeys, capacityKeys, signPayloadSr25519(delegatorKeys, itemizedPayloadData), payload); + const itemizedPayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStorageItemizedSignaturePayloadV2', + payload + ); + const itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignatureV2( + delegatorKeys, + capacityKeys, + signPayloadSr25519(delegatorKeys, itemizedPayloadData), + payload + ); const { target: pageUpdateEvent1, eventMap } = await itemized_add_result_1.payWithCapacity(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, 
"capacity.CapacityWithdrawn"); - assert.notEqual(pageUpdateEvent1, undefined, "should have returned a PalletStatefulStorageItemizedActionApplied event"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assert.notEqual( + pageUpdateEvent1, + undefined, + 'should have returned a PalletStatefulStorageItemizedActionApplied event' + ); }); - it("successfully pays with Capacity for eligible transaction - upsertPageWithSignature; deletePageWithSignature", async function () { - let paginatedSchemaId: SchemaId = await getOrCreateAvroChatMessagePaginatedSchema(fundingSource); + it('successfully pays with Capacity for eligible transaction - upsertPageWithSignature; deletePageWithSignature', async function () { + const paginatedSchemaId: SchemaId = await getOrCreateAvroChatMessagePaginatedSchema(fundingSource); // Create a MSA for the delegator [delegatorKeys, delegatorProviderId] = await createDelegator(fundingSource); - assert.notEqual(delegatorKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(delegatorProviderId, undefined, "setup should populate msa_id"); + assert.notEqual(delegatorKeys, undefined, 'setup should populate delegator_key'); + assert.notEqual(delegatorProviderId, undefined, 'setup should populate msa_id'); - let page_id = new u16(ExtrinsicHelper.api.registry, 1); + const page_id = new u16(ExtrinsicHelper.api.registry, 1); // Add and update actions let target_hash = await getCurrentPaginatedHash(delegatorProviderId, paginatedSchemaId, page_id.toNumber()); @@ -392,14 +429,26 @@ describe("Capacity Transactions", function () { targetHash: target_hash, schemaId: paginatedSchemaId, pageId: page_id, - payload: new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"), + payload: new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'), }); - const upsertPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedUpsertSignaturePayload", upsertPayload); - let upsert_result = ExtrinsicHelper.upsertPageWithSignature(delegatorKeys, capacityKeys, signPayloadSr25519(delegatorKeys, upsertPayloadData), upsertPayload); + const upsertPayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStoragePaginatedUpsertSignaturePayload', + upsertPayload + ); + const upsert_result = ExtrinsicHelper.upsertPageWithSignature( + delegatorKeys, + capacityKeys, + signPayloadSr25519(delegatorKeys, upsertPayloadData), + upsertPayload + ); const { target: pageUpdateEvent, eventMap: eventMap1 } = await upsert_result.payWithCapacity(); - assertEvent(eventMap1, "system.ExtrinsicSuccess"); - assertEvent(eventMap1, "capacity.CapacityWithdrawn"); - assert.notEqual(pageUpdateEvent, undefined, "should have returned a PalletStatefulStoragePaginatedPageUpdate event"); + assertEvent(eventMap1, 'system.ExtrinsicSuccess'); + assertEvent(eventMap1, 'capacity.CapacityWithdrawn'); + assert.notEqual( + pageUpdateEvent, + undefined, + 'should have returned a PalletStatefulStoragePaginatedPageUpdate event' + ); // Remove the page target_hash = await getCurrentPaginatedHash(delegatorProviderId, paginatedSchemaId, page_id.toNumber()); @@ -409,28 +458,36 @@ describe("Capacity Transactions", function () { schemaId: paginatedSchemaId, pageId: page_id, }); - const deletePayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedDeleteSignaturePayload", deletePayload); - let remove_result = ExtrinsicHelper.deletePageWithSignature(delegatorKeys, capacityKeys, 
signPayloadSr25519(delegatorKeys, deletePayloadData), deletePayload); + const deletePayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStoragePaginatedDeleteSignaturePayload', + deletePayload + ); + const remove_result = ExtrinsicHelper.deletePageWithSignature( + delegatorKeys, + capacityKeys, + signPayloadSr25519(delegatorKeys, deletePayloadData), + deletePayload + ); const { target: pageRemove, eventMap: eventMap2 } = await remove_result.payWithCapacity(); - assertEvent(eventMap2, "system.ExtrinsicSuccess"); - assertEvent(eventMap2, "capacity.CapacityWithdrawn"); - assert.notEqual(pageRemove, undefined, "should have returned a event"); + assertEvent(eventMap2, 'system.ExtrinsicSuccess'); + assertEvent(eventMap2, 'capacity.CapacityWithdrawn'); + assert.notEqual(pageRemove, undefined, 'should have returned a event'); // no pages should exist const result = await ExtrinsicHelper.getPaginatedStorage(delegatorProviderId, paginatedSchemaId); - assert.notEqual(result, undefined, "should have returned a valid response"); - assert.equal(result.length, 0, "should returned no paginated pages"); + assert.notEqual(result, undefined, 'should have returned a valid response'); + assert.equal(result.length, 0, 'should returned no paginated pages'); }); - it("successfully pays with Capacity for eligible transaction - upsertPageWithSignatureV2; deletePageWithSignatureV2", async function () { - let paginatedSchemaId: SchemaId = await getOrCreateAvroChatMessagePaginatedSchema(fundingSource); + it('successfully pays with Capacity for eligible transaction - upsertPageWithSignatureV2; deletePageWithSignatureV2', async function () { + const paginatedSchemaId: SchemaId = await getOrCreateAvroChatMessagePaginatedSchema(fundingSource); // Create a MSA for the delegator [delegatorKeys, delegatorProviderId] = await createDelegator(fundingSource); - assert.notEqual(delegatorKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(delegatorProviderId, undefined, "setup should populate msa_id"); + assert.notEqual(delegatorKeys, undefined, 'setup should populate delegator_key'); + assert.notEqual(delegatorProviderId, undefined, 'setup should populate msa_id'); - let page_id = new u16(ExtrinsicHelper.api.registry, 1); + const page_id = new u16(ExtrinsicHelper.api.registry, 1); // Add and update actions let target_hash = await getCurrentPaginatedHash(delegatorProviderId, paginatedSchemaId, page_id.toNumber()); @@ -438,14 +495,26 @@ describe("Capacity Transactions", function () { targetHash: target_hash, schemaId: paginatedSchemaId, pageId: page_id, - payload: new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"), + payload: new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'), }); - const upsertPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedUpsertSignaturePayloadV2", upsertPayload); - let upsert_result = ExtrinsicHelper.upsertPageWithSignatureV2(delegatorKeys, capacityKeys, signPayloadSr25519(delegatorKeys, upsertPayloadData), upsertPayload); + const upsertPayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStoragePaginatedUpsertSignaturePayloadV2', + upsertPayload + ); + const upsert_result = ExtrinsicHelper.upsertPageWithSignatureV2( + delegatorKeys, + capacityKeys, + signPayloadSr25519(delegatorKeys, upsertPayloadData), + upsertPayload + ); const { target: pageUpdateEvent, eventMap: eventMap1 } = await upsert_result.payWithCapacity(); - assertEvent(eventMap1, "system.ExtrinsicSuccess"); - 
assertEvent(eventMap1, "capacity.CapacityWithdrawn"); - assert.notEqual(pageUpdateEvent, undefined, "should have returned a PalletStatefulStoragePaginatedPageUpdate event"); + assertEvent(eventMap1, 'system.ExtrinsicSuccess'); + assertEvent(eventMap1, 'capacity.CapacityWithdrawn'); + assert.notEqual( + pageUpdateEvent, + undefined, + 'should have returned a PalletStatefulStoragePaginatedPageUpdate event' + ); // Remove the page target_hash = await getCurrentPaginatedHash(delegatorProviderId, paginatedSchemaId, page_id.toNumber()); @@ -454,30 +523,38 @@ describe("Capacity Transactions", function () { schemaId: paginatedSchemaId, pageId: page_id, }); - const deletePayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedDeleteSignaturePayloadV2", deletePayload); - let remove_result = ExtrinsicHelper.deletePageWithSignatureV2(delegatorKeys, capacityKeys, signPayloadSr25519(delegatorKeys, deletePayloadData), deletePayload); + const deletePayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStoragePaginatedDeleteSignaturePayloadV2', + deletePayload + ); + const remove_result = ExtrinsicHelper.deletePageWithSignatureV2( + delegatorKeys, + capacityKeys, + signPayloadSr25519(delegatorKeys, deletePayloadData), + deletePayload + ); const { target: pageRemove, eventMap: eventMap2 } = await remove_result.payWithCapacity(); - assertEvent(eventMap2, "system.ExtrinsicSuccess"); - assertEvent(eventMap2, "capacity.CapacityWithdrawn"); - assert.notEqual(pageRemove, undefined, "should have returned a event"); + assertEvent(eventMap2, 'system.ExtrinsicSuccess'); + assertEvent(eventMap2, 'capacity.CapacityWithdrawn'); + assert.notEqual(pageRemove, undefined, 'should have returned a event'); // no pages should exist const result = await ExtrinsicHelper.getPaginatedStorage(delegatorProviderId, paginatedSchemaId); - assert.notEqual(result, undefined, "should have returned a valid response"); - assert.equal(result.length, 0, "should returned no paginated pages"); + assert.notEqual(result, undefined, 'should have returned a valid response'); + assert.equal(result.length, 0, 'should returned no paginated pages'); }); }); - describe("when capacity eligible transaction is from the handles pallet", async function () { + describe('when capacity eligible transaction is from the handles pallet', function () { let capacityKeys: KeyringPair; let capacityProvider: u64; before(async function () { - capacityKeys = createKeys("CapacityKeys"); - capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); + capacityKeys = createKeys('CapacityKeys'); + capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, 'CapacityProvider', FUNDS_AMOUNT); }); - it("successfully pays with Capacity for eligible transaction - claimHandle", async function () { + it('successfully pays with Capacity for eligible transaction - claimHandle', async function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, amountStaked)); const handle = getTestHandle(); @@ -487,69 +564,82 @@ describe("Capacity Transactions", function () { baseHandle: handle_vec, expiration: expiration, }; - const claimHandlePayload = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", handlePayload); + const claimHandlePayload = ExtrinsicHelper.api.registry.createType( + 'CommonPrimitivesHandlesClaimHandlePayload', + handlePayload + ); const claimHandle = ExtrinsicHelper.claimHandle(capacityKeys, 
claimHandlePayload); const { eventMap } = await claimHandle.payWithCapacity(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "capacity.CapacityWithdrawn"); - assertEvent(eventMap, "handles.HandleClaimed"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'capacity.CapacityWithdrawn'); + assertEvent(eventMap, 'handles.HandleClaimed'); }); }); // When a user attempts to pay for a non-capacity transaction with Capacity, // it should error and drop the transaction from the transaction-pool. - it("fails to pay with Capacity for a non-capacity transaction", async function () { - const capacityKeys = createKeys("CapacityKeys"); - const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapacityProvider", FUNDS_AMOUNT); + it('fails to pay with Capacity for a non-capacity transaction', async function () { + const capacityKeys = createKeys('CapacityKeys'); + const capacityProvider = await createMsaAndProvider( + fundingSource, + capacityKeys, + 'CapacityProvider', + FUNDS_AMOUNT + ); const nonCapacityTxn = ExtrinsicHelper.stake(capacityKeys, capacityProvider, 1n * CENTS); await assert.rejects(nonCapacityTxn.payWithCapacity('current'), { - name: "RpcError", message: - "1010: Invalid Transaction: Custom error: 0" + name: 'RpcError', + message: '1010: Invalid Transaction: Custom error: 0', }); }); // When a user does not have enough capacity to pay for the transaction fee // and is NOT eligible to replenish Capacity, it should error and be dropped // from the transaction pool. - it("fails to pay for a transaction with empty capacity", async function () { - const capacityKeys = createKeys("CapKeysEmpty"); - const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapProvEmpty", FUNDS_AMOUNT); - let noCapacityKeys = createKeys("noCapacityKeys"); - let _providerId = await createMsaAndProvider(fundingSource, noCapacityKeys, "NoCapProvider"); + it('fails to pay for a transaction with empty capacity', async function () { + const capacityKeys = createKeys('CapKeysEmpty'); + const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, 'CapProvEmpty', FUNDS_AMOUNT); + const noCapacityKeys = createKeys('noCapacityKeys'); + const _providerId = await createMsaAndProvider(fundingSource, noCapacityKeys, 'NoCapProvider'); - let delegatorKeys = createKeys("delegatorKeys"); + const delegatorKeys = createKeys('delegatorKeys'); const payload = await generateDelegationPayload({ authorizedMsaId: capacityProvider, schemaIds: [schemaId], }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, noCapacityKeys, - signPayloadSr25519(noCapacityKeys, addProviderData), payload); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + delegatorKeys, + noCapacityKeys, + signPayloadSr25519(noCapacityKeys, addProviderData), + payload + ); await assert.rejects(grantDelegationOp.payWithCapacity('current'), { - name: "RpcError", message: - "1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low" + name: 'RpcError', + message: '1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low', }); }); // *All keys should have at least an EXISTENTIAL_DEPOSIT = 1M. 
-    it("fails to pay for transaction when key does has not met the min deposit", async function () {
-      const capacityKeys = createKeys("CapKeysUnder");
-      const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, "CapProvUnder", FUNDS_AMOUNT);
-      const noTokensKeys = createKeys("noTokensKeys");
-      const delegatorKeys = await createAndFundKeypair(fundingSource, 2n * DOLLARS, "delegatorKeys");
+    it('fails to pay for transaction when key has not met the min deposit', async function () {
+      const capacityKeys = createKeys('CapKeysUnder');
+      const capacityProvider = await createMsaAndProvider(fundingSource, capacityKeys, 'CapProvUnder', FUNDS_AMOUNT);
+      const noTokensKeys = createKeys('noTokensKeys');
+      const delegatorKeys = await createAndFundKeypair(fundingSource, 2n * DOLLARS, 'delegatorKeys');

       await assert.doesNotReject(stakeToProvider(fundingSource, capacityKeys, capacityProvider, 1n * DOLLARS));

       // Add new key
-      let newKeyPayload: AddKeyData = await generateAddKeyPayload({
-        msaId: new u64(ExtrinsicHelper.api.registry, capacityProvider), newPublicKey: noTokensKeys.publicKey
+      const newKeyPayload: AddKeyData = await generateAddKeyPayload({
+        msaId: new u64(ExtrinsicHelper.api.registry, capacityProvider),
+        newPublicKey: noTokensKeys.publicKey,
       });
-      let addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newKeyPayload);
+      const addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', newKeyPayload);

-      let ownerSig = signPayloadSr25519(capacityKeys, addKeyData);
-      let newSig = signPayloadSr25519(noTokensKeys, addKeyData);
+      const ownerSig = signPayloadSr25519(capacityKeys, addKeyData);
+      const newSig = signPayloadSr25519(noTokensKeys, addKeyData);
       const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, newKeyPayload);

       const { target: publicKeyEvent } = await addPublicKeyOp.fundAndSend(fundingSource);
@@ -557,24 +647,28 @@ describe("Capacity Transactions", function () {

       const createMsaOp = ExtrinsicHelper.createMsa(delegatorKeys);
       const { target: MsaCreatedEvent } = await createMsaOp.fundAndSend(fundingSource);
-      assert.notEqual(MsaCreatedEvent, undefined, "should have returned MsaCreated event");
+      assert.notEqual(MsaCreatedEvent, undefined, 'should have returned MsaCreated event');

       const payload = await generateDelegationPayload({
         authorizedMsaId: capacityProvider,
         schemaIds: [schemaId],
       });
-      const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
-      const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, noTokensKeys,
-        signPayloadSr25519(delegatorKeys, addProviderData), payload);
+      const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload);
+      const grantDelegationOp = ExtrinsicHelper.grantDelegation(
+        delegatorKeys,
+        noTokensKeys,
+        signPayloadSr25519(delegatorKeys, addProviderData),
+        payload
+      );

       await assert.rejects(grantDelegationOp.payWithCapacity('current'), {
         name: 'RpcError',
-        message: "1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low",
-      })
+        message: '1010: Invalid Transaction: Inability to pay some fees , e.g. 
account balance too low', + }); }); }); - describe("when caller does not have a Capacity account", async function () { + describe('when caller does not have a Capacity account', function () { let delegatorKeys: KeyringPair; let delegatorProviderId: u64; let schemaId: u16; @@ -582,20 +676,20 @@ describe("Capacity Transactions", function () { beforeEach(async function () { // Create and fund a keypair with EXISTENTIAL_DEPOSIT // Use this keypair for delegator operations - delegatorKeys = createKeys("OtherProviderKeys"); - delegatorProviderId = await createMsaAndProvider(fundingSource, delegatorKeys, "Delegator", FUNDS_AMOUNT); + delegatorKeys = createKeys('OtherProviderKeys'); + delegatorProviderId = await createMsaAndProvider(fundingSource, delegatorKeys, 'Delegator', FUNDS_AMOUNT); schemaId = new u16(ExtrinsicHelper.api.registry, 0); }); - describe("but has an MSA account that has not been registered as a Provider", async function () { - it("fails to pay for a transaction", async function () { + describe('but has an MSA account that has not been registered as a Provider', function () { + it('fails to pay for a transaction', async function () { // Create a keypair with msaId, but no provider - const noProviderKeys = await createAndFundKeypair(fundingSource, FUNDS_AMOUNT, "NoProviderKeys"); + const noProviderKeys = await createAndFundKeypair(fundingSource, FUNDS_AMOUNT, 'NoProviderKeys'); const createMsaOp = ExtrinsicHelper.createMsa(noProviderKeys); const { target: msaCreatedEvent } = await createMsaOp.fundAndSend(fundingSource); - assert.notEqual(msaCreatedEvent, undefined, "should have returned MsaCreated event"); + assert.notEqual(msaCreatedEvent, undefined, 'should have returned MsaCreated event'); // When a user is not a registered provider and attempts to pay with Capacity, // it should error with InvalidTransaction::Payment, which is a 1010 error, Inability to pay some fees. @@ -603,39 +697,47 @@ describe("Capacity Transactions", function () { authorizedMsaId: delegatorProviderId, schemaIds: [schemaId], }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, noProviderKeys, - signPayloadSr25519(delegatorKeys, addProviderData), payload); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + delegatorKeys, + noProviderKeys, + signPayloadSr25519(delegatorKeys, addProviderData), + payload + ); await assert.rejects(grantDelegationOp.payWithCapacity('current'), { - name: "RpcError", message: - "1010: Invalid Transaction: Inability to pay some fees , e.g. account balance too low" + name: 'RpcError', + message: '1010: Invalid Transaction: Inability to pay some fees , e.g. 
account balance too low', }); }); }); - describe("and does not have an MSA account associated to signing keys", async function () { - it("fails to pay for a transaction", async function () { - let emptyKeys = await createAndFundKeypair(fundingSource, 50_000_000n); + describe('and does not have an MSA account associated to signing keys', function () { + it('fails to pay for a transaction', async function () { + const emptyKeys = await createAndFundKeypair(fundingSource, 50_000_000n); const payload = await generateDelegationPayload({ authorizedMsaId: delegatorProviderId, schemaIds: [schemaId], }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(delegatorKeys, emptyKeys, - signPayloadSr25519(delegatorKeys, addProviderData), payload); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + delegatorKeys, + emptyKeys, + signPayloadSr25519(delegatorKeys, addProviderData), + payload + ); await assert.rejects(grantDelegationOp.payWithCapacity('current'), { - name: "RpcError", message: - "1010: Invalid Transaction: Custom error: 1" + name: 'RpcError', + message: '1010: Invalid Transaction: Custom error: 1', }); }); }); }); }); - describe("pay_with_capacity_batch_all", function () { + describe('pay_with_capacity_batch_all', function () { let capacityProviderKeys: KeyringPair; let capacityProvider: u64; let schemaId: u16; @@ -643,20 +745,25 @@ describe("Capacity Transactions", function () { const amountStaked = 9n * DOLLARS; beforeEach(async function () { - capacityProviderKeys = createKeys("CapacityProviderKeys"); - capacityProvider = await createMsaAndProvider(fundingSource, capacityProviderKeys, "CapacityProvider", FUNDS_AMOUNT); + capacityProviderKeys = createKeys('CapacityProviderKeys'); + capacityProvider = await createMsaAndProvider( + fundingSource, + capacityProviderKeys, + 'CapacityProvider', + FUNDS_AMOUNT + ); defaultPayload = { authorizedMsaId: capacityProvider, schemaIds: [schemaId], - } + }; }); - it("successfully pays with Capacity for a batch of eligible transactions - [createSponsoredAccountWithDelegation, claimHandle]", async function () { + it('successfully pays with Capacity for a batch of eligible transactions - [createSponsoredAccountWithDelegation, claimHandle]', async function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityProviderKeys, capacityProvider, amountStaked)); const addProviderPayload = await generateDelegationPayload({ ...defaultPayload }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", addProviderPayload); - let delegatorKeys = createKeys("delegatorKeys"); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', addProviderPayload); + const delegatorKeys = createKeys('delegatorKeys'); const createSponsoredAccountWithDelegation = ExtrinsicHelper.api.tx.msa.createSponsoredAccountWithDelegation( delegatorKeys.publicKey, signPayloadSr25519(delegatorKeys, addProviderData), @@ -670,7 +777,10 @@ describe("Capacity Transactions", function () { baseHandle: handle_vec, expiration: expiration, }; - const claimHandlePayload: any = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", handlePayload); + const claimHandlePayload: any = ExtrinsicHelper.api.registry.createType( + 'CommonPrimitivesHandlesClaimHandlePayload', + 
handlePayload + ); const claimHandleProof = { Sr25519: u8aToHex(delegatorKeys.sign(u8aWrapBytes(claimHandlePayload.toU8a()))), }; @@ -680,44 +790,43 @@ describe("Capacity Transactions", function () { claimHandleProof, claimHandlePayload ); - const calls = [ - createSponsoredAccountWithDelegation, - claimHandle, - ]; + const calls = [createSponsoredAccountWithDelegation, claimHandle]; - let payWithCapacityBatchAllOp = ExtrinsicHelper.payWithCapacityBatchAll(capacityProviderKeys, calls); + const payWithCapacityBatchAllOp = ExtrinsicHelper.payWithCapacityBatchAll(capacityProviderKeys, calls); const { target: batchCompletedEvent, eventMap } = await payWithCapacityBatchAllOp.signAndSend(); - if (batchCompletedEvent && - !(ExtrinsicHelper.api.events.utility.BatchCompleted.is(batchCompletedEvent))) { - assert.fail("should return a BatchCompletedEvent"); + if (batchCompletedEvent && !ExtrinsicHelper.api.events.utility.BatchCompleted.is(batchCompletedEvent)) { + assert.fail('should return a BatchCompletedEvent'); } - assert.notEqual(eventMap["msa.DelegationGranted"], undefined, 'should have returned DelegationGranted event'); - assert.notEqual(eventMap["handles.HandleClaimed"], undefined, 'should have returned HandleClaimed event'); + assert.notEqual(eventMap['msa.DelegationGranted'], undefined, 'should have returned DelegationGranted event'); + assert.notEqual(eventMap['handles.HandleClaimed'], undefined, 'should have returned HandleClaimed event'); }); - it("batch fails if one transaction fails - [createSponsoredAccountWithDelegation, claimHandle]", async function () { + it('batch fails if one transaction fails - [createSponsoredAccountWithDelegation, claimHandle]', async function () { await assert.doesNotReject(stakeToProvider(fundingSource, capacityProviderKeys, capacityProvider, amountStaked)); const addProviderPayload = await generateDelegationPayload({ ...defaultPayload }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", addProviderPayload); - let delegatorKeys = createKeys("delegatorKeys"); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', addProviderPayload); + const delegatorKeys = createKeys('delegatorKeys'); const createSponsoredAccountWithDelegation = ExtrinsicHelper.api.tx.msa.createSponsoredAccountWithDelegation( delegatorKeys.publicKey, signPayloadSr25519(delegatorKeys, addProviderData), addProviderPayload ); - const handle = "test_handle_that_exceeds_the_byte_limit"; + const handle = 'test_handle_that_exceeds_the_byte_limit'; const handle_vec = new Bytes(ExtrinsicHelper.api.registry, handle); const expiration = (await getBlockNumber()) + 5; const handlePayload = { baseHandle: handle_vec, expiration: expiration, }; - const claimHandlePayload: any = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", handlePayload); + const claimHandlePayload: any = ExtrinsicHelper.api.registry.createType( + 'CommonPrimitivesHandlesClaimHandlePayload', + handlePayload + ); const calimHandleProof = { Sr25519: u8aToHex(delegatorKeys.sign(u8aWrapBytes(claimHandlePayload.toU8a()))), }; @@ -727,39 +836,40 @@ describe("Capacity Transactions", function () { calimHandleProof, claimHandlePayload ); - const calls = [ - createSponsoredAccountWithDelegation, - claimHandle, - ]; + const calls = [createSponsoredAccountWithDelegation, claimHandle]; - let payWithCapacityBatchAllOp = ExtrinsicHelper.payWithCapacityBatchAll(capacityProviderKeys, calls); + const payWithCapacityBatchAllOp = 
ExtrinsicHelper.payWithCapacityBatchAll(capacityProviderKeys, calls); await assert.rejects(payWithCapacityBatchAllOp.signAndSend(), { - name: "InvalidHandleByteLength" + name: 'InvalidHandleByteLength', }); }); }); - describe("Capacity RPC", function () { + describe('Capacity RPC', function () { let capacityProviderKeys: KeyringPair; let capacityProvider: u64; let schemaId: u16; let defaultPayload: AddProviderPayload; - const amountStaked = 9n * DOLLARS; beforeEach(async function () { - capacityProviderKeys = createKeys("CapacityProviderKeys"); - capacityProvider = await createMsaAndProvider(fundingSource, capacityProviderKeys, "CapacityProvider", FUNDS_AMOUNT); + capacityProviderKeys = createKeys('CapacityProviderKeys'); + capacityProvider = await createMsaAndProvider( + fundingSource, + capacityProviderKeys, + 'CapacityProvider', + FUNDS_AMOUNT + ); defaultPayload = { authorizedMsaId: capacityProvider, schemaIds: [schemaId], - } + }; }); - it("Returns `FeeDetails` when requesting capacity cost of a transaction", async function () { + it('Returns `FeeDetails` when requesting capacity cost of a transaction', async function () { const addProviderPayload = await generateDelegationPayload({ ...defaultPayload }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", addProviderPayload); - let delegatorKeys = createKeys("delegatorKeys"); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', addProviderPayload); + const delegatorKeys = createKeys('delegatorKeys'); const call = ExtrinsicHelper.api.tx.msa.createSponsoredAccountWithDelegation( delegatorKeys.publicKey, signPayloadSr25519(delegatorKeys, addProviderData), @@ -767,55 +877,78 @@ describe("Capacity Transactions", function () { ); // Actual weights and fee - const { weight: { refTime, proofSize }, } = await ExtrinsicHelper.apiPromise.call.transactionPaymentApi.queryInfo(call.toHex(), 0); - const weightFee = await ExtrinsicHelper.apiPromise.call.transactionPaymentApi.queryWeightToFee({refTime, proofSize}); + const { + weight: { refTime, proofSize }, + } = await ExtrinsicHelper.apiPromise.call.transactionPaymentApi.queryInfo(call.toHex(), 0); + const weightFee = await ExtrinsicHelper.apiPromise.call.transactionPaymentApi.queryWeightToFee({ + refTime, + proofSize, + }); - const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(call.toHex(), null); - assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); - assert.notEqual(feeDetails.inclusionFee, undefined, "should have returned a partialFee"); - assert(feeDetails.inclusionFee.isSome, "should have returned a partialFee"); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails( + call.toHex(), + null + ); + assert.notEqual(feeDetails, undefined, 'should have returned a feeDetails'); + assert.notEqual(feeDetails.inclusionFee, undefined, 'should have returned a partialFee'); + assert(feeDetails.inclusionFee.isSome, 'should have returned a partialFee'); const { baseFee, lenFee, adjustedWeightFee } = feeDetails.inclusionFee.toJSON() as any; - assert(Math.abs(baseFee - 106382) < 10_000, "The base fee appears to be wrong or have changed more than expected"); - assert(Math.abs(lenFee - 1170000) < 100, "The len fee appears to be wrong or have changed more than expected"); + assert( + Math.abs(baseFee - 106382) < 10_000, + 'The base fee appears to be wrong or have changed more than expected' + 
); + assert(Math.abs(lenFee - 1170000) < 100, 'The len fee appears to be wrong or have changed more than expected'); // This is comparing stable weight, which has no impact from targeted_fee_adjustment, with actual weights. - assert(Math.abs(adjustedWeightFee - weightFee.toNumber()) < 10_000, "The adjusted weight fee appears to be wrong or have changed more than expected"); + assert( + Math.abs(adjustedWeightFee - weightFee.toNumber()) < 10_000, + 'The adjusted weight fee appears to be wrong or have changed more than expected' + ); }); - it("Returns `FeeDetails` when requesting capacity cost of a transaction when wrapped in payWithCapacity", async function () { + it('Returns `FeeDetails` when requesting capacity cost of a transaction when wrapped in payWithCapacity', async function () { const addProviderPayload = await generateDelegationPayload({ ...defaultPayload }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", addProviderPayload); - let delegatorKeys = createKeys("delegatorKeys"); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', addProviderPayload); + const delegatorKeys = createKeys('delegatorKeys'); const insideTx = ExtrinsicHelper.api.tx.msa.createSponsoredAccountWithDelegation( delegatorKeys.publicKey, signPayloadSr25519(delegatorKeys, addProviderData), addProviderPayload ); const tx = ExtrinsicHelper.api.tx.frequencyTxPayment.payWithCapacity(insideTx); - const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null); - assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); - assert.notEqual(feeDetails.inclusionFee, undefined, "should have returned a partialFee"); - assert(feeDetails.inclusionFee.isSome, "should have returned a partialFee"); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails( + tx.toHex(), + null + ); + assert.notEqual(feeDetails, undefined, 'should have returned a feeDetails'); + assert.notEqual(feeDetails.inclusionFee, undefined, 'should have returned a partialFee'); + assert(feeDetails.inclusionFee.isSome, 'should have returned a partialFee'); const { baseFee, lenFee, adjustedWeightFee } = feeDetails.inclusionFee.toJSON() as any; // payWithCapacity costs will fluctuate, so just checking that they are valid positive numbers - assert(baseFee > 1, "The base fee appears to be wrong."); - assert(lenFee > 1, "The len fee appears to be wrong."); - assert(adjustedWeightFee > 1, "The adjusted weight fee appears to be wrong"); + assert(baseFee > 1, 'The base fee appears to be wrong.'); + assert(lenFee > 1, 'The len fee appears to be wrong.'); + assert(adjustedWeightFee > 1, 'The adjusted weight fee appears to be wrong'); }); - it("Returns nothing when requesting capacity cost of a non-capacity transaction", async function () { + it('Returns nothing when requesting capacity cost of a non-capacity transaction', async function () { const tx = ExtrinsicHelper.api.tx.msa.retireMsa(); - const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null); - assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); - assert(feeDetails.inclusionFee.isNone, "should have returned something for the inclusionFee"); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails( + tx.toHex(), + null + ); + 
assert.notEqual(feeDetails, undefined, 'should have returned a feeDetails'); + assert(feeDetails.inclusionFee.isNone, 'should have returned something for the inclusionFee'); }); - it("Returns nothing when requesting pay with capacity call with a non-capacity transaction", async function () { + it('Returns nothing when requesting pay with capacity call with a non-capacity transaction', async function () { const insideTx = ExtrinsicHelper.api.tx.msa.retireMsa(); const tx = ExtrinsicHelper.api.tx.frequencyTxPayment.payWithCapacity(insideTx); - const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails(tx.toHex(), null); - assert.notEqual(feeDetails, undefined, "should have returned a feeDetails"); - assert(feeDetails.inclusionFee.isNone, "should have returned something for the inclusionFee"); + const feeDetails: FeeDetails = await ExtrinsicHelper.apiPromise.rpc.frequencyTxPayment.computeCapacityFeeDetails( + tx.toHex(), + null + ); + assert.notEqual(feeDetails, undefined, 'should have returned a feeDetails'); + assert(feeDetails.inclusionFee.isNone, 'should have returned something for the inclusionFee'); }); }); }); diff --git a/e2e/handles/handles.test.ts b/e2e/handles/handles.test.ts index 792003e9dd..805d16dc93 100644 --- a/e2e/handles/handles.test.ts +++ b/e2e/handles/handles.test.ts @@ -1,165 +1,169 @@ // Handles test suite -import "@frequency-chain/api-augment"; -import assert from "assert"; -import { createDelegator, getTestHandle } from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { MessageSourceId } from "@frequency-chain/api-augment/interfaces"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { Bytes } from "@polkadot/types"; -import { getBlockNumber } from "../scaffolding/helpers"; -import { hasRelayChain } from "../scaffolding/env"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("🤝 Handles", function () { - const fundingSource = getFundingSource("handles"); - const expirationOffset = hasRelayChain() ? 
10 : 100; - - describe("Claim and Retire", function () { - let msa_id: MessageSourceId; - let msaOwnerKeys: KeyringPair; - - before(async function () { - // Create a MSA for the delegator - [msaOwnerKeys, msa_id] = await createDelegator(fundingSource); - assert.notEqual(msaOwnerKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(msa_id, undefined, "setup should populate msa_id"); - }); - - it("should be able to claim a handle", async function () { - const handle = getTestHandle(); - let currentBlock = await getBlockNumber(); - const handle_vec = new Bytes(ExtrinsicHelper.api.registry, handle); - const payload = { - baseHandle: handle_vec, - expiration: currentBlock + expirationOffset, - } - const claimHandlePayload = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", payload); - const claimHandle = ExtrinsicHelper.claimHandle(msaOwnerKeys, claimHandlePayload); - const { target: event } = await claimHandle.fundAndSend(fundingSource); - assert.notEqual(event, undefined, "claimHandle should return an event"); - assert.notEqual(event!.data.handle.toString(), "", "claimHandle should emit a handle"); - }); - - it("should be able to retire a handle", async function () { - let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); - if (!handle_response.isSome) { - throw new Error("handle_response should be Some"); - } - let full_handle_state = handle_response.unwrap(); - let suffix_from_state = full_handle_state.suffix; - let suffix = suffix_from_state.toNumber(); - assert.notEqual(suffix, 0, "suffix should not be 0"); - assert.notEqual(full_handle_state.canonical_base, undefined, "canonical_base should not be undefined"); - assert.notEqual(full_handle_state.base_handle, undefined, "base_handle should not be undefined"); - let currentBlock = await getBlockNumber(); - await ExtrinsicHelper.runToBlock(currentBlock + expirationOffset + 1); // Must be at least 1 > original expiration - - const retireHandle = ExtrinsicHelper.retireHandle(msaOwnerKeys); - const { target: event } = await retireHandle.signAndSend('current'); - assert.notEqual(event, undefined, "retireHandle should return an event"); - const handle = event!.data.handle.toString(); - assert.notEqual(handle, "", "retireHandle should return the correct handle"); - }); +import '@frequency-chain/api-augment'; +import assert from 'assert'; +import { createDelegator, getTestHandle } from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { MessageSourceId } from '@frequency-chain/api-augment/interfaces'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { Bytes } from '@polkadot/types'; +import { getBlockNumber } from '../scaffolding/helpers'; +import { hasRelayChain } from '../scaffolding/env'; +import { getFundingSource } from '../scaffolding/funding'; + +const fundingSource = getFundingSource('handles'); +const expirationOffset = hasRelayChain() ? 
10 : 100; + +describe('🤝 Handles', function () { + describe('Claim and Retire', function () { + let msa_id: MessageSourceId; + let msaOwnerKeys: KeyringPair; + + before(async function () { + // Create a MSA for the delegator + [msaOwnerKeys, msa_id] = await createDelegator(fundingSource); + assert.notEqual(msaOwnerKeys, undefined, 'setup should populate delegator_key'); + assert.notEqual(msa_id, undefined, 'setup should populate msa_id'); }); - describe("Claim and Retire Alt", function () { - let msa_id: MessageSourceId; - let msaOwnerKeys: KeyringPair; - - before(async function () { - // Create a MSA for the delegator - [msaOwnerKeys, msa_id] = await createDelegator(fundingSource); - assert.notEqual(msaOwnerKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(msa_id, undefined, "setup should populate msa_id"); - }); - - describe("Claim Handle with possible presumptive suffix/RPC test", function () { - - it("should be able to claim a handle and check suffix (=suffix_assumed if available on chain)", async function () { - const handle = getTestHandle(); - let handle_bytes = new Bytes(ExtrinsicHelper.api.registry, handle); - /// Get presumptive suffix from chain (rpc) - let suffixes_response = await ExtrinsicHelper.getNextSuffixesForHandle(handle, 10); - let resp_base_handle = suffixes_response.base_handle.toString(); - assert.equal(resp_base_handle, handle, "resp_base_handle should be equal to handle"); - let suffix_assumed = suffixes_response.suffixes[0]; - assert.notEqual(suffix_assumed, 0, "suffix_assumed should not be 0"); - - let currentBlock = await getBlockNumber(); - /// Claim handle (extrinsic) - const payload_ext = { - baseHandle: handle_bytes, - expiration: currentBlock + expirationOffset, - }; - const claimHandlePayload = ExtrinsicHelper.api.registry.createType("CommonPrimitivesHandlesClaimHandlePayload", payload_ext); - const claimHandle = ExtrinsicHelper.claimHandle(msaOwnerKeys, claimHandlePayload); - const { target: event } = await claimHandle.fundAndSend(fundingSource); - assert.notEqual(event, undefined, "claimHandle should return an event"); - const displayHandle = event!.data.handle.toUtf8(); - assert.notEqual(displayHandle, "", "claimHandle should emit a handle"); - - // get handle using msa (rpc) - let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); - if (!handle_response.isSome) { - throw new Error("handle_response should be Some"); - } - let full_handle_state = handle_response.unwrap(); - let suffix_from_state = full_handle_state.suffix; - let suffix = suffix_from_state.toNumber(); - assert.notEqual(suffix, 0, "suffix should not be 0"); - assert.equal(suffix, suffix_assumed, "suffix should be equal to suffix_assumed"); - - /// Get MSA from full display handle (rpc) - const msaOption = await ExtrinsicHelper.getMsaForHandle(displayHandle); - assert(msaOption.isSome, "msaOption should be Some"); - const msaFromHandle = msaOption.unwrap(); - assert.equal(msaFromHandle.toString(), msa_id.toString(), "msaFromHandle should be equal to msa_id"); - }); - }); - - describe("👇 Negative Test: Early retire handle", function () { - it("should not be able to retire a handle before expiration", async function () { - let handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); - if (!handle_response.isSome) { - throw new Error("handle_response should be Some"); - } - - let full_handle_state = handle_response.unwrap(); - let suffix_from_state = full_handle_state.suffix; - let suffix = suffix_from_state.toNumber(); - assert.notEqual(suffix, 
0, "suffix should not be 0"); - - let currentBlock = await getBlockNumber(); - await ExtrinsicHelper.runToBlock(currentBlock + expirationOffset + 1); - try { - const retireHandle = ExtrinsicHelper.retireHandle(msaOwnerKeys); - const { target: event } = await retireHandle.fundAndSend(fundingSource); - assert.equal(event, undefined, "retireHandle should not return an event"); - } - catch (e) { - assert.notEqual(e, undefined, "retireHandle should throw an error"); - } - }); - }); + it('should be able to claim a handle', async function () { + const handle = getTestHandle(); + const currentBlock = await getBlockNumber(); + const handle_vec = new Bytes(ExtrinsicHelper.api.registry, handle); + const payload = { + baseHandle: handle_vec, + expiration: currentBlock + expirationOffset, + }; + const claimHandlePayload = ExtrinsicHelper.api.registry.createType( + 'CommonPrimitivesHandlesClaimHandlePayload', + payload + ); + const claimHandle = ExtrinsicHelper.claimHandle(msaOwnerKeys, claimHandlePayload); + const { target: event } = await claimHandle.fundAndSend(fundingSource); + assert.notEqual(event, undefined, 'claimHandle should return an event'); + assert.notEqual(event!.data.handle.toString(), '', 'claimHandle should emit a handle'); }); - describe("Suffixes Integrity Check", function () { - it("should return same suffixes for `abcdefg` from chain as hardcoded", async function () { - let suffixes = await ExtrinsicHelper.getNextSuffixesForHandle("abcdefg", 10); - let suffixes_expected = [23, 65, 16, 53, 25, 75, 29, 26, 10, 87]; - let resp_suffixes_number = suffixes.suffixes.map((x) => x.toNumber()); - assert.deepEqual(resp_suffixes_number, suffixes_expected, "suffixes should be equal to suffixes_expected"); - }); + it('should be able to retire a handle', async function () { + const handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); + if (!handle_response.isSome) { + throw new Error('handle_response should be Some'); + } + const full_handle_state = handle_response.unwrap(); + const suffix_from_state = full_handle_state.suffix; + const suffix = suffix_from_state.toNumber(); + assert.notEqual(suffix, 0, 'suffix should not be 0'); + assert.notEqual(full_handle_state.canonical_base, undefined, 'canonical_base should not be undefined'); + assert.notEqual(full_handle_state.base_handle, undefined, 'base_handle should not be undefined'); + const currentBlock = await getBlockNumber(); + await ExtrinsicHelper.runToBlock(currentBlock + expirationOffset + 1); // Must be at least 1 > original expiration + + const retireHandle = ExtrinsicHelper.retireHandle(msaOwnerKeys); + const { target: event } = await retireHandle.signAndSend('current'); + assert.notEqual(event, undefined, 'retireHandle should return an event'); + const handle = event!.data.handle.toString(); + assert.notEqual(handle, '', 'retireHandle should return the correct handle'); }); + }); - describe("validateHandle basic test", function () { - it('returns true for good handle, and false for bad handle', async function () { - let res = await ExtrinsicHelper.validateHandle("Robert`DROP TABLE STUDENTS;--"); - assert.equal(res.toHuman(), false); - res = await ExtrinsicHelper.validateHandle("Little Bobby Tables") - assert.equal(res.toHuman(), true); - res = await ExtrinsicHelper.validateHandle("Bobbay😀😀") - assert.equal(res.toHuman(), false); - }); - }) + describe('Claim and Retire Alt', function () { + let msa_id: MessageSourceId; + let msaOwnerKeys: KeyringPair; + + before(async function () { + // Create a MSA for the delegator + 
[msaOwnerKeys, msa_id] = await createDelegator(fundingSource); + assert.notEqual(msaOwnerKeys, undefined, 'setup should populate delegator_key'); + assert.notEqual(msa_id, undefined, 'setup should populate msa_id'); + }); + + describe('Claim Handle with possible presumptive suffix/RPC test', function () { + it('should be able to claim a handle and check suffix (=suffix_assumed if available on chain)', async function () { + const handle = getTestHandle(); + const handle_bytes = new Bytes(ExtrinsicHelper.api.registry, handle); + /// Get presumptive suffix from chain (rpc) + const suffixes_response = await ExtrinsicHelper.getNextSuffixesForHandle(handle, 10); + const resp_base_handle = suffixes_response.base_handle.toString(); + assert.equal(resp_base_handle, handle, 'resp_base_handle should be equal to handle'); + const suffix_assumed = suffixes_response.suffixes[0]; + assert.notEqual(suffix_assumed, 0, 'suffix_assumed should not be 0'); + + const currentBlock = await getBlockNumber(); + /// Claim handle (extrinsic) + const payload_ext = { + baseHandle: handle_bytes, + expiration: currentBlock + expirationOffset, + }; + const claimHandlePayload = ExtrinsicHelper.api.registry.createType( + 'CommonPrimitivesHandlesClaimHandlePayload', + payload_ext + ); + const claimHandle = ExtrinsicHelper.claimHandle(msaOwnerKeys, claimHandlePayload); + const { target: event } = await claimHandle.fundAndSend(fundingSource); + assert.notEqual(event, undefined, 'claimHandle should return an event'); + const displayHandle = event!.data.handle.toUtf8(); + assert.notEqual(displayHandle, '', 'claimHandle should emit a handle'); + + // get handle using msa (rpc) + const handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); + if (!handle_response.isSome) { + throw new Error('handle_response should be Some'); + } + const full_handle_state = handle_response.unwrap(); + const suffix_from_state = full_handle_state.suffix; + const suffix = suffix_from_state.toNumber(); + assert.notEqual(suffix, 0, 'suffix should not be 0'); + assert.equal(suffix, suffix_assumed, 'suffix should be equal to suffix_assumed'); + + /// Get MSA from full display handle (rpc) + const msaOption = await ExtrinsicHelper.getMsaForHandle(displayHandle); + assert(msaOption.isSome, 'msaOption should be Some'); + const msaFromHandle = msaOption.unwrap(); + assert.equal(msaFromHandle.toString(), msa_id.toString(), 'msaFromHandle should be equal to msa_id'); + }); + }); + + describe('👇 Negative Test: Early retire handle', function () { + it('should not be able to retire a handle before expiration', async function () { + const handle_response = await ExtrinsicHelper.getHandleForMSA(msa_id); + if (!handle_response.isSome) { + throw new Error('handle_response should be Some'); + } + + const full_handle_state = handle_response.unwrap(); + const suffix_from_state = full_handle_state.suffix; + const suffix = suffix_from_state.toNumber(); + assert.notEqual(suffix, 0, 'suffix should not be 0'); + + const currentBlock = await getBlockNumber(); + await ExtrinsicHelper.runToBlock(currentBlock + expirationOffset + 1); + try { + const retireHandle = ExtrinsicHelper.retireHandle(msaOwnerKeys); + const { target: event } = await retireHandle.fundAndSend(fundingSource); + assert.equal(event, undefined, 'retireHandle should not return an event'); + } catch (e) { + assert.notEqual(e, undefined, 'retireHandle should throw an error'); + } + }); + }); + }); + + describe('Suffixes Integrity Check', function () { + it('should return same suffixes for `abcdefg` from 
chain as hardcoded', async function () { + const suffixes = await ExtrinsicHelper.getNextSuffixesForHandle('abcdefg', 10); + const suffixes_expected = [23, 65, 16, 53, 25, 75, 29, 26, 10, 87]; + const resp_suffixes_number = suffixes.suffixes.map((x) => x.toNumber()); + assert.deepEqual(resp_suffixes_number, suffixes_expected, 'suffixes should be equal to suffixes_expected'); + }); + }); + + describe('validateHandle basic test', function () { + it('returns true for good handle, and false for bad handle', async function () { + let res = await ExtrinsicHelper.validateHandle('Robert`DROP TABLE STUDENTS;--'); + assert.equal(res.toHuman(), false); + res = await ExtrinsicHelper.validateHandle('Little Bobby Tables'); + assert.equal(res.toHuman(), true); + res = await ExtrinsicHelper.validateHandle('Bobbay😀😀'); + assert.equal(res.toHuman(), false); + }); + }); }); diff --git a/e2e/load-tests/signatureRegistry.test.ts b/e2e/load-tests/signatureRegistry.test.ts index 496a82a16e..a7faddab72 100644 --- a/e2e/load-tests/signatureRegistry.test.ts +++ b/e2e/load-tests/signatureRegistry.test.ts @@ -1,158 +1,163 @@ -import "@frequency-chain/api-augment"; -import assert from "assert"; -import { createKeys, signPayloadSr25519, getBlockNumber, generateAddKeyPayload, createAndFundKeypair, getNonce, getExistentialDeposit } from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { AddKeyData, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { u64, Option } from "@polkadot/types"; -import { getFundingSource } from "../scaffolding/funding"; +import '@frequency-chain/api-augment'; +import assert from 'assert'; +import { + createKeys, + signPayloadSr25519, + getBlockNumber, + generateAddKeyPayload, + createAndFundKeypair, + getNonce, + getExistentialDeposit, +} from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { AddKeyData, ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { u64, Option } from '@polkadot/types'; +import { getFundingSource } from '../scaffolding/funding'; interface GeneratedMsa { - id: u64; - nonce: number; - controlKey: KeyringPair; + id: u64; + nonce: number; + controlKey: KeyringPair; } -describe("MSA Initializer Load Tests", function () { - const fundingSource = getFundingSource("load-signature-registry"); - - beforeEach(async function () { - await createBlock(); - }); - - it("should successfully create 50k signatures within 100 blocks", async function () { - - const msaKeys: GeneratedMsa[] = await generateMsas(2300); +describe('MSA Initializer Load Tests', function () { + beforeEach(async function () { + await createBlock(); + }); - let blockNumber = await getBlockNumber(); - console.log("Starting Block Number:", blockNumber); + it('should successfully create 50k signatures within 100 blocks', async function () { + const msaKeys: GeneratedMsa[] = await generateMsas(2300); - let signatureCallTests: Array = []; + let blockNumber = await getBlockNumber(); + console.log('Starting Block Number:', blockNumber); - // Make 50k signatures via 50k/2 loops - const loopCount = 25_000; - for (let i = 0; i < loopCount; i++) { - const ithMsaKey = i % msaKeys.length; - let { controlKey, nonce, id } = msaKeys[ithMsaKey]; + let signatureCallTests: Array = []; - // This call adds 2 signatures - signatureCallTests.push(await addSigs(id, controlKey, blockNumber, nonce)); - console.count("addSigs called (2 signatures)"); - msaKeys[ithMsaKey].nonce++; + // Make 50k signatures via 50k/2 loops + const 
loopCount = 25_000; + for (let i = 0; i < loopCount; i++) { + const ithMsaKey = i % msaKeys.length; + const { controlKey, nonce, id } = msaKeys[ithMsaKey]; - // Create block and check... - if (i > 0 && i % 330 === 0) { - await createBlock(200); - blockNumber++; - console.log(`Forming block...`); - await checkKeys(i - 330, [...signatureCallTests]); - signatureCallTests = []; - } + // This call adds 2 signatures + signatureCallTests.push(await addSigs(id, controlKey, blockNumber, nonce)); + console.count('addSigs called (2 signatures)'); + msaKeys[ithMsaKey].nonce++; - } + // Create block and check... + if (i > 0 && i % 330 === 0) { + await createBlock(200); + blockNumber++; console.log(`Forming block...`); - await createBlock(); - - // Final Check - await checkKeys(loopCount - (loopCount % 330), signatureCallTests); + await checkKeys(i - 330, [...signatureCallTests]); + signatureCallTests = []; + } + } + console.log(`Forming block...`); + await createBlock(); - const blockNumberEnd = await getBlockNumber(); - console.log("Ending Block Number:", blockNumberEnd); + // Final Check + await checkKeys(loopCount - (loopCount % 330), signatureCallTests); - }); -}) + const blockNumberEnd = await getBlockNumber(); + console.log('Ending Block Number:', blockNumberEnd); + }); +}); // Checks the keys to make sure they were associated with an MSA // Also will form a block in case something was still inside the transaction queue async function checkKeys(startingNumber: number, keysToTest: KeyringPair[]) { - console.log(`Testing MSA Key connections for ${keysToTest.length} keys...`); - for (let i = 0; i < keysToTest.length; i++) { - const key = keysToTest[i]; - let msaOption = await getMsaFromKey(key); - if (!msaOption.isSome) { - console.log(`The ${startingNumber + i} key (${key.address}) failed to be associated with an MSA. Trying another block...`); - await createBlock(); - msaOption = await getMsaFromKey(key); - - } - assert(msaOption.isSome, `The ${startingNumber + i} key (${key.address}) failed to be associated with an MSA.`); + console.log(`Testing MSA Key connections for ${keysToTest.length} keys...`); + for (let i = 0; i < keysToTest.length; i++) { + const key = keysToTest[i]; + let msaOption = await getMsaFromKey(key); + if (!msaOption.isSome) { + console.log( + `The ${startingNumber + i} key (${key.address}) failed to be associated with an MSA. Trying another block...` + ); + await createBlock(); + msaOption = await getMsaFromKey(key); } + assert(msaOption.isSome, `The ${startingNumber + i} key (${key.address}) failed to be associated with an MSA.`); + } } // Generate MSAs and give it 100 UNIT async function generateMsas(count: number = 1): Promise { - // Make sure we are not on an edge - const createBlockEvery = count === 300 ? 290 : 300; - const fundingSource = getFundingSource("load-signature-registry"); - - // Create and fund the control keys - let controlKeyPromises: Array> = []; - let devAccountNonce = await getNonce(fundingSource); - const ed = await getExistentialDeposit(); - for (let i = 0; i < count; i++) { - controlKeyPromises.push(createAndFundKeypair(fundingSource, 100n * 10n * ed, undefined, devAccountNonce++)); - if (i > 0 && i % createBlockEvery === 0) await createBlock(100); + // Make sure we are not on an edge + const createBlockEvery = count === 300 ? 
290 : 300; + const fundingSource = getFundingSource('load-signature-registry'); + + // Create and fund the control keys + const controlKeyPromises: Array> = []; + let devAccountNonce = await getNonce(fundingSource); + const ed = await getExistentialDeposit(); + for (let i = 0; i < count; i++) { + controlKeyPromises.push(createAndFundKeypair(fundingSource, 100n * 10n * ed, undefined, devAccountNonce++)); + if (i > 0 && i % createBlockEvery === 0) await createBlock(100); + } + await createBlock(); + const controlKeys = await Promise.all(controlKeyPromises); + + // Create the msas + const msaPromises: Array> = []; + for (let i = 0; i < count; i++) { + msaPromises.push( + createMsa(controlKeys[i]).then((id) => ({ + controlKey: controlKeys[i], + id, + nonce: 1, + })) + ); + if (i > 0 && i % createBlockEvery === 0) { + await createBlock(200); + console.log('Generated Msas: ', i); } - await createBlock(); - const controlKeys = await Promise.all(controlKeyPromises); - - // Create the msas - let msaPromises: Array> = []; - for (let i = 0; i < count; i++) { - msaPromises.push(createMsa(controlKeys[i]).then((id) => ({ - controlKey: controlKeys[i], - id, - nonce: 1, - })) - ); - if (i > 0 && i % createBlockEvery === 0) { - await createBlock(200); - console.log("Generated Msas: ", i); - } - } - await createBlock(750); - // Create a second block in case there were more than could fit in ^ blocks - await createBlock(); - const msaIds = await Promise.all(msaPromises) - console.log("Generated Msas: ", count); - return msaIds; + } + await createBlock(750); + // Create a second block in case there were more than could fit in ^ blocks + await createBlock(); + const msaIds = await Promise.all(msaPromises); + console.log('Generated Msas: ', count); + return msaIds; } async function createBlock(wait: number = 300) { - // Wait ms before creating the block to give the chain time to process the transaction pool - await new Promise(r => setTimeout(r, wait)); - return ExtrinsicHelper.apiPromise.rpc.engine.createBlock(true, true); + // Wait ms before creating the block to give the chain time to process the transaction pool + await new Promise((r) => setTimeout(r, wait)); + return ExtrinsicHelper.apiPromise.rpc.engine.createBlock(true, true); } function getMsaFromKey(keys: KeyringPair): Promise> { - return ExtrinsicHelper.apiPromise.query.msa.publicKeyToMsaId(keys.address); + return ExtrinsicHelper.apiPromise.query.msa.publicKeyToMsaId(keys.address); } async function createMsa(keys: KeyringPair): Promise { - const result = await ExtrinsicHelper.createMsa(keys).signAndSend(); - const msaRecord = result[1]["msa.MsaCreated"]; - if (msaRecord) return msaRecord.data[0] as u64; + const result = await ExtrinsicHelper.createMsa(keys).signAndSend(); + const msaRecord = result[1]['msa.MsaCreated']; + if (msaRecord) return msaRecord.data[0] as u64; - // This doesn't always work likely due to the load causing the ws subscription to be dropped. - // So doing a backup call to help - const tryDirect = await getMsaFromKey(keys); - if (tryDirect.isSome) return tryDirect.value; + // This doesn't always work likely due to the load causing the ws subscription to be dropped. 
+ // So doing a backup call to help + const tryDirect = await getMsaFromKey(keys); + if (tryDirect.isSome) return tryDirect.value; - throw("Failed to get MSA Id..."); + throw 'Failed to get MSA Id...'; } async function addSigs(msaId: u64, keys: KeyringPair, blockNumber: number, nonce: number): Promise { - let newKeys = createKeys(`${nonce} nonce control key`); + const newKeys = createKeys(`${nonce} nonce control key`); - let defaultPayload: AddKeyData = {}; - defaultPayload.msaId = msaId; - defaultPayload.newPublicKey = newKeys.publicKey; - const payload = await generateAddKeyPayload(defaultPayload, 100, blockNumber); + const defaultPayload: AddKeyData = {}; + defaultPayload.msaId = msaId; + defaultPayload.newPublicKey = newKeys.publicKey; + const payload = await generateAddKeyPayload(defaultPayload, 100, blockNumber); - let addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); + const addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); - let ownerSig = signPayloadSr25519(keys, addKeyData); - let newSig = signPayloadSr25519(newKeys, addKeyData); - ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload).signAndSend(nonce); + const ownerSig = signPayloadSr25519(keys, addKeyData); + const newSig = signPayloadSr25519(newKeys, addKeyData); + ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload).signAndSend(nonce); - return newKeys; + return newKeys; } diff --git a/e2e/messages/addIPFSMessage.test.ts b/e2e/messages/addIPFSMessage.test.ts index 08b66c4f9a..02ebf64482 100644 --- a/e2e/messages/addIPFSMessage.test.ts +++ b/e2e/messages/addIPFSMessage.test.ts @@ -1,131 +1,146 @@ -import "@frequency-chain/api-augment"; -import { KeyringPair } from "@polkadot/keyring/types"; +import '@frequency-chain/api-augment'; +import { KeyringPair } from '@polkadot/keyring/types'; import { base64 } from 'multiformats/bases/base64'; import { base32 } from 'multiformats/bases/base32'; -import { CID } from 'multiformats/cid' -import { PARQUET_BROADCAST } from "../schemas/fixtures/parquetBroadcastSchemaType"; -import assert from "assert"; -import { assertHasMessage, createAndFundKeypair } from "../scaffolding/helpers"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { u16 } from "@polkadot/types"; -import { MessageResponse } from "@frequency-chain/api-augment/interfaces"; -import { ipfsCid } from "./ipfs"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("Add Offchain Message", function () { - const fundingSource = getFundingSource("messages-add-ipfs"); - - let keys: KeyringPair; - let schemaId: u16; - let dummySchemaId: u16; - - let ipfs_cid_64: string; - let ipfs_cid_32: string; - const ipfs_payload_data = "This is a test of Frequency."; - const ipfs_payload_len = ipfs_payload_data.length + 1; - let starting_block: number; - - before(async function () { - starting_block = (await ExtrinsicHelper.apiPromise.rpc.chain.getHeader()).number.toNumber(); - - const cid = await ipfsCid(ipfs_payload_data, './e2e_test.txt'); - ipfs_cid_64 = cid.toString(base64); - ipfs_cid_32 = cid.toString(base32); - - keys = await createAndFundKeypair(fundingSource); - - // Create a new MSA - const createMsa = ExtrinsicHelper.createMsa(keys); - await createMsa.fundAndSend(fundingSource); - - // Create a schema for IPFS - const createSchema = ExtrinsicHelper.createSchema(keys, PARQUET_BROADCAST, "Parquet", "IPFS"); - const { target: event } = await createSchema.fundAndSend(fundingSource); - schemaId = 
event!.data.schemaId; - - // Create a dummy on-chain schema - const createDummySchema = ExtrinsicHelper.createSchema(keys, { type: "record", name: "Dummy on-chain schema", fields: [] }, "AvroBinary", "OnChain"); - const { target: dummySchemaEvent } = await createDummySchema.fundAndSend(fundingSource); - dummySchemaId = dummySchemaEvent!.data.schemaId; +import { CID } from 'multiformats/cid'; +import { PARQUET_BROADCAST } from '../schemas/fixtures/parquetBroadcastSchemaType'; +import assert from 'assert'; +import { assertHasMessage, createAndFundKeypair } from '../scaffolding/helpers'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { u16 } from '@polkadot/types'; +import { ipfsCid } from './ipfs'; +import { getFundingSource } from '../scaffolding/funding'; + +const fundingSource = getFundingSource('messages-add-ipfs'); +const ipfs_payload_data = 'This is a test of Frequency.'; +const ipfs_payload_len = ipfs_payload_data.length + 1; + +describe('Add Offchain Message', function () { + let keys: KeyringPair; + let schemaId: u16; + let dummySchemaId: u16; + + let ipfs_cid_64: string; + let ipfs_cid_32: string; + let starting_block: number; + + before(async function () { + starting_block = (await ExtrinsicHelper.apiPromise.rpc.chain.getHeader()).number.toNumber(); + + const cid = await ipfsCid(ipfs_payload_data, './e2e_test.txt'); + ipfs_cid_64 = cid.toString(base64); + ipfs_cid_32 = cid.toString(base32); + + keys = await createAndFundKeypair(fundingSource); + + // Create a new MSA + const createMsa = ExtrinsicHelper.createMsa(keys); + await createMsa.fundAndSend(fundingSource); + + // Create a schema for IPFS + const createSchema = ExtrinsicHelper.createSchema(keys, PARQUET_BROADCAST, 'Parquet', 'IPFS'); + const { target: event } = await createSchema.fundAndSend(fundingSource); + schemaId = event!.data.schemaId; + + // Create a dummy on-chain schema + const createDummySchema = ExtrinsicHelper.createSchema( + keys, + { type: 'record', name: 'Dummy on-chain schema', fields: [] }, + 'AvroBinary', + 'OnChain' + ); + const { target: dummySchemaEvent } = await createDummySchema.fundAndSend(fundingSource); + dummySchemaId = dummySchemaEvent!.data.schemaId; + }); + + it('should fail if insufficient funds', async function () { + await assert.rejects( + ExtrinsicHelper.addIPFSMessage(keys, schemaId, ipfs_cid_64, ipfs_payload_len).signAndSend('current'), + { + name: 'RpcError', + message: /Inability to pay some fees/, + } + ); + }); + + it('should fail if MSA is not valid (InvalidMessageSourceAccount)', async function () { + const accountWithNoMsa = await createAndFundKeypair(fundingSource); + await assert.rejects( + ExtrinsicHelper.addIPFSMessage(accountWithNoMsa, schemaId, ipfs_cid_64, ipfs_payload_len).fundAndSend( + fundingSource + ), + { + name: 'InvalidMessageSourceAccount', + section: 'messages', + } + ); + }); + + it('should fail if schema does not exist (InvalidSchemaId)', async function () { + // Pick an arbitrarily high schemaId, such that it won't exist on the test chain. + // If we ever create more than 999 schemas in a test suite/single Frequency instance, this test will fail. 
+ const f = ExtrinsicHelper.addIPFSMessage(keys, 999, ipfs_cid_64, ipfs_payload_len); + await assert.rejects(f.fundAndSend(fundingSource), { + name: 'InvalidSchemaId', + section: 'messages', }); - - it('should fail if insufficient funds', async function () { - await assert.rejects(ExtrinsicHelper.addIPFSMessage(keys, schemaId, ipfs_cid_64, ipfs_payload_len).signAndSend('current'), { - name: 'RpcError', - message: /Inability to pay some fees/, - }); - }); - - it('should fail if MSA is not valid (InvalidMessageSourceAccount)', async function () { - const accountWithNoMsa = await createAndFundKeypair(fundingSource); - await assert.rejects(ExtrinsicHelper.addIPFSMessage(accountWithNoMsa, schemaId, ipfs_cid_64, ipfs_payload_len).fundAndSend(fundingSource), { - name: 'InvalidMessageSourceAccount', - section: 'messages', - }); - }); - - it('should fail if schema does not exist (InvalidSchemaId)', async function () { - // Pick an arbitrarily high schemaId, such that it won't exist on the test chain. - // If we ever create more than 999 schemas in a test suite/single Frequency instance, this test will fail. - const f = ExtrinsicHelper.addIPFSMessage(keys, 999, ipfs_cid_64, ipfs_payload_len); - await assert.rejects(f.fundAndSend(fundingSource), { - name: 'InvalidSchemaId', - section: 'messages', - }); - }); - - it("should fail if schema payload location is not IPFS (InvalidPayloadLocation)", async function () { - const op = ExtrinsicHelper.addIPFSMessage(keys, dummySchemaId, ipfs_cid_64, ipfs_payload_len); - await assert.rejects(op.fundAndSend(fundingSource), { name: "InvalidPayloadLocation" }); + }); + + it('should fail if schema payload location is not IPFS (InvalidPayloadLocation)', async function () { + const op = ExtrinsicHelper.addIPFSMessage(keys, dummySchemaId, ipfs_cid_64, ipfs_payload_len); + await assert.rejects(op.fundAndSend(fundingSource), { name: 'InvalidPayloadLocation' }); + }); + + it('should fail if CID cannot be decoded (InvalidCid)', async function () { + const f = ExtrinsicHelper.addIPFSMessage(keys, schemaId, 'foo', ipfs_payload_len); + await assert.rejects(f.fundAndSend(fundingSource), { name: 'InvalidCid' }); + }); + + it('should fail if CID is CIDv0 (UnsupportedCidVersion)', async function () { + const cid = await ipfsCid(ipfs_payload_data, './e2e_test.txt'); + const cidV0 = CID.createV0(cid.multihash as any).toString(); + const f = ExtrinsicHelper.addIPFSMessage(keys, schemaId, cidV0, ipfs_payload_len); + await assert.rejects(f.fundAndSend(fundingSource), { name: 'UnsupportedCidVersion' }); + }); + + it('should successfully add an IPFS message', async function () { + const f = ExtrinsicHelper.addIPFSMessage(keys, schemaId, ipfs_cid_64, ipfs_payload_len); + const { target: event } = await f.fundAndSend(fundingSource); + + assert.notEqual(event, undefined, 'should have returned a MessagesStored event'); + assert.deepEqual(event?.data.schemaId, schemaId, 'schema ids should be equal'); + assert.notEqual(event?.data.blockNumber, undefined, 'should have a block number'); + }); + + it('should successfully retrieve added message and returned CID should have Base32 encoding', async function () { + const f = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(schemaId, { + from_block: starting_block, + from_index: 0, + to_block: starting_block + 999, + page_size: 999, }); - - it("should fail if CID cannot be decoded (InvalidCid)", async function () { - const f = ExtrinsicHelper.addIPFSMessage(keys, schemaId, "foo", ipfs_payload_len); - await 
assert.rejects(f.fundAndSend(fundingSource), { name: "InvalidCid" }); - }); - - it("should fail if CID is CIDv0 (UnsupportedCidVersion)", async function () { - const cid = await ipfsCid(ipfs_payload_data, './e2e_test.txt'); - const cidV0 = CID.createV0(cid.multihash as any).toString(); - const f = ExtrinsicHelper.addIPFSMessage(keys, schemaId, cidV0, ipfs_payload_len); - await assert.rejects(f.fundAndSend(fundingSource), { name: "UnsupportedCidVersion" }); + assertHasMessage(f, (x) => { + const cid = x.cid.isSome && Buffer.from(x.cid.unwrap()).toString(); + return cid === ipfs_cid_32; }); - - it("should successfully add an IPFS message", async function () { - const f = ExtrinsicHelper.addIPFSMessage(keys, schemaId, ipfs_cid_64, ipfs_payload_len); - const { target: event } = await f.fundAndSend(fundingSource); - - assert.notEqual(event, undefined, "should have returned a MessagesStored event"); - assert.deepEqual(event?.data.schemaId, schemaId, 'schema ids should be equal'); - assert.notEqual(event?.data.blockNumber, undefined, 'should have a block number'); - }); - - it("should successfully retrieve added message and returned CID should have Base32 encoding", async function () { - const f = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(schemaId, { from_block: starting_block, from_index: 0, to_block: starting_block + 999, page_size: 999 }); - assertHasMessage(f, x => { - const cid = x.cid.isSome && Buffer.from(x.cid.unwrap()).toString(); - return cid === ipfs_cid_32; - }); - }) - - describe("Add OnChain Message and successfully retrieve it", function () { - it("should successfully add and retrieve an onchain message", async function () { - const f = ExtrinsicHelper.addOnChainMessage(keys, dummySchemaId, "0xdeadbeef"); - const { target: event } = await f.fundAndSend(fundingSource); - - assert.notEqual(event, undefined, "should have returned a MessagesStored event"); - assert.deepEqual(event?.data.schemaId, dummySchemaId, 'schema ids should be equal'); - assert.notEqual(event?.data.blockNumber, undefined, 'should have a block number'); - - const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId( - dummySchemaId, - { from_block: starting_block, - from_index: 0, - to_block: starting_block + 999, - page_size: 999 - } - ); - assertHasMessage(get, x => x.payload.isSome && x.payload.toString() === "0xdeadbeef"); - }); + }); + + describe('Add OnChain Message and successfully retrieve it', function () { + it('should successfully add and retrieve an onchain message', async function () { + const f = ExtrinsicHelper.addOnChainMessage(keys, dummySchemaId, '0xdeadbeef'); + const { target: event } = await f.fundAndSend(fundingSource); + + assert.notEqual(event, undefined, 'should have returned a MessagesStored event'); + assert.deepEqual(event?.data.schemaId, dummySchemaId, 'schema ids should be equal'); + assert.notEqual(event?.data.blockNumber, undefined, 'should have a block number'); + + const get = await ExtrinsicHelper.apiPromise.rpc.messages.getBySchemaId(dummySchemaId, { + from_block: starting_block, + from_index: 0, + to_block: starting_block + 999, + page_size: 999, + }); + assertHasMessage(get, (x) => x.payload.isSome && x.payload.toString() === '0xdeadbeef'); }); + }); }); diff --git a/e2e/messages/ipfs.ts b/e2e/messages/ipfs.ts index 3bd3b9687b..37a3f46fc4 100644 --- a/e2e/messages/ipfs.ts +++ b/e2e/messages/ipfs.ts @@ -14,7 +14,7 @@ export async function ipfsCid(content: string, path: string) { // add the bytes to your node and receive a unique content identifier 
const cid = await fs.addFile({ path, - content: encoder.encode(content) + content: encoder.encode(content), }); return cid; diff --git a/e2e/miscellaneous/utilityBatch.test.ts b/e2e/miscellaneous/utilityBatch.test.ts index d61a6c9e98..b4f06e8f14 100644 --- a/e2e/miscellaneous/utilityBatch.test.ts +++ b/e2e/miscellaneous/utilityBatch.test.ts @@ -1,138 +1,141 @@ -import assert from "assert"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { DOLLARS, createAndFundKeypair } from "../scaffolding/helpers"; -import { ApiTypes, SubmittableExtrinsic } from "@polkadot/api/types"; -import { getFundingSource } from "../scaffolding/funding"; +import assert from 'assert'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { DOLLARS, createAndFundKeypair } from '../scaffolding/helpers'; +import { ApiTypes, SubmittableExtrinsic } from '@polkadot/api/types'; +import { getFundingSource } from '../scaffolding/funding'; -describe("Utility Batch Filtering", function () { - let sender: KeyringPair; - let recipient: KeyringPair; +const fundingSource = getFundingSource('misc-util-batch'); - const fundingSource = getFundingSource("misc-util-batch"); +describe('Utility Batch Filtering', function () { + let sender: KeyringPair; + let recipient: KeyringPair; - beforeEach(async function () { - sender = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-sender'); - recipient = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-recipient'); - }); + beforeEach(async function () { + sender = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-sender'); + recipient = await createAndFundKeypair(fundingSource, 5n * DOLLARS, 'utility-recipient'); + }); - it("should successfully execute ✅ batch with allowed calls", async function () { - // good batch: with only allowed calls - const goodBatch: SubmittableExtrinsic[] = []; - goodBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)) - goodBatch.push(ExtrinsicHelper.api.tx.system.remark("Hello From Batch")) - goodBatch.push(ExtrinsicHelper.api.tx.msa.create()) - const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, goodBatch); - const { target: event, eventMap } = await batch.fundAndSend(fundingSource); - assert.notEqual(event, undefined, "should return an event"); - assert.notEqual(eventMap, undefined, "should return an eventMap"); - }); + it('should successfully execute ✅ batch with allowed calls', async function () { + // good batch: with only allowed calls + const goodBatch: SubmittableExtrinsic[] = []; + goodBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)); + goodBatch.push(ExtrinsicHelper.api.tx.system.remark('Hello From Batch')); + goodBatch.push(ExtrinsicHelper.api.tx.msa.create()); + const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, goodBatch); + const { target: event, eventMap } = await batch.fundAndSend(fundingSource); + assert.notEqual(event, undefined, 'should return an event'); + assert.notEqual(eventMap, undefined, 'should return an eventMap'); + }); - it("should fail to execute ❌ batchAll with disallowed calls", async function () { - // bad batch: with a mix of allowed and disallowed calls - const badBatch: SubmittableExtrinsic[] = []; - //allowed - badBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)) - 
badBatch.push(ExtrinsicHelper.api.tx.system.remark("Hello From Batch")) - // not allowed - badBatch.push(ExtrinsicHelper.api.tx.handles.retireHandle()) - badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()) - let error: any; + it('should fail to execute ❌ batchAll with disallowed calls', async function () { + // bad batch: with a mix of allowed and disallowed calls + const badBatch: SubmittableExtrinsic[] = []; + //allowed + badBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)); + badBatch.push(ExtrinsicHelper.api.tx.system.remark('Hello From Batch')); + // not allowed + badBatch.push(ExtrinsicHelper.api.tx.handles.retireHandle()); + badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()); - // batchAll - const batchAll = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - try { - await batchAll.fundAndSend(fundingSource); - assert.fail("batchAll should have caused an error"); - } catch (err) { - assert.notEqual(err, undefined, " batchAll should return an error"); - } - }); + // batchAll + const batchAll = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); + try { + await batchAll.fundAndSend(fundingSource); + assert.fail('batchAll should have caused an error'); + } catch (err) { + assert.notEqual(err, undefined, ' batchAll should return an error'); + } + }); - it("should fail to execute ❌ batch with disallowed calls", async function () { - // bad batch: with a mix of allowed and disallowed calls - const badBatch: SubmittableExtrinsic[] = []; - badBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)) - badBatch.push(ExtrinsicHelper.api.tx.system.remark("Hello From Batch")) - badBatch.push(ExtrinsicHelper.api.tx.handles.retireHandle()) - badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()) + it('should fail to execute ❌ batch with disallowed calls', async function () { + // bad batch: with a mix of allowed and disallowed calls + const badBatch: SubmittableExtrinsic[] = []; + badBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)); + badBatch.push(ExtrinsicHelper.api.tx.system.remark('Hello From Batch')); + badBatch.push(ExtrinsicHelper.api.tx.handles.retireHandle()); + badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()); - // batch - const batch = ExtrinsicHelper.executeUtilityBatch(sender, badBatch); - const { target: ok, eventMap } = await batch.fundAndSend(fundingSource); - assert.equal(ok, undefined, "should not return an ok event"); - assert.equal(eventMap["utility.BatchCompleted"], undefined, "should not return a batch completed event"); - assert.notEqual(eventMap["utility.BatchInterrupted"], undefined, "should return a batch interrupted event"); - }); + // batch + const batch = ExtrinsicHelper.executeUtilityBatch(sender, badBatch); + const { target: ok, eventMap } = await batch.fundAndSend(fundingSource); + assert.equal(ok, undefined, 'should not return an ok event'); + assert.equal(eventMap['utility.BatchCompleted'], undefined, 'should not return a batch completed event'); + assert.notEqual(eventMap['utility.BatchInterrupted'], undefined, 'should return a batch interrupted event'); + }); - it("should fail to execute ❌ forceBatch with disallowed calls", async function () { - // bad batch: with a mix of allowed and disallowed calls - const badBatch: SubmittableExtrinsic[] = []; - badBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)) - badBatch.push(ExtrinsicHelper.api.tx.system.remark("Hello From Batch")) - 
badBatch.push(ExtrinsicHelper.api.tx.handles.retireHandle()) - badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()) + it('should fail to execute ❌ forceBatch with disallowed calls', async function () { + // bad batch: with a mix of allowed and disallowed calls + const badBatch: SubmittableExtrinsic[] = []; + badBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)); + badBatch.push(ExtrinsicHelper.api.tx.system.remark('Hello From Batch')); + badBatch.push(ExtrinsicHelper.api.tx.handles.retireHandle()); + badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()); - // forceBatch - const forceBatch = ExtrinsicHelper.executeUtilityForceBatch(sender, badBatch); - const { target: ok, eventMap } = await forceBatch.fundAndSend(fundingSource); - assert.equal(ok, undefined, "should not return an ok event"); - assert.equal(eventMap["utility.BatchCompleted"], undefined, "should not return a batch completed event"); - assert.notEqual(eventMap["utility.BatchCompletedWithErrors"], undefined, "should return a batch completed with error event"); - }); + // forceBatch + const forceBatch = ExtrinsicHelper.executeUtilityForceBatch(sender, badBatch); + const { target: ok, eventMap } = await forceBatch.fundAndSend(fundingSource); + assert.equal(ok, undefined, 'should not return an ok event'); + assert.equal(eventMap['utility.BatchCompleted'], undefined, 'should not return a batch completed event'); + assert.notEqual( + eventMap['utility.BatchCompletedWithErrors'], + undefined, + 'should return a batch completed with error event' + ); + }); - it("should fail to execute ❌ batch with `Pays::No` calls", async function () { - // bad batch: with frequency related Pays::No call - const badBatch: SubmittableExtrinsic[] = []; - badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()) - const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - try { - await batch.fundAndSend(fundingSource); - assert.fail("batch should have caused an error"); - } catch (err) { - assert.notEqual(err, undefined, "should return an error"); - } - }); + it('should fail to execute ❌ batch with `Pays::No` calls', async function () { + // bad batch: with frequency related Pays::No call + const badBatch: SubmittableExtrinsic[] = []; + badBatch.push(ExtrinsicHelper.api.tx.msa.retireMsa()); + const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); + try { + await batch.fundAndSend(fundingSource); + assert.fail('batch should have caused an error'); + } catch (err) { + assert.notEqual(err, undefined, 'should return an error'); + } + }); - it("should fail to execute ❌ batch with `Pays::Yes` `create_provider`call blocked by Frequency", async function () { - // bad batch: with frequency related Pays::Yes call - const badBatch: SubmittableExtrinsic[] = []; - badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider("I am a ba(tch)d provider")) - const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - try { - await batch.fundAndSend(fundingSource); - assert.fail("batch should have caused an error"); - } catch (err) { - assert.notEqual(err, undefined, "should return an error"); - } - }); + it('should fail to execute ❌ batch with `Pays::Yes` `create_provider`call blocked by Frequency', async function () { + // bad batch: with frequency related Pays::Yes call + const badBatch: SubmittableExtrinsic[] = []; + badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider('I am a ba(tch)d provider')); + const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); + try { + await 
batch.fundAndSend(fundingSource); + assert.fail('batch should have caused an error'); + } catch (err) { + assert.notEqual(err, undefined, 'should return an error'); + } + }); - it("should fail to execute ❌ batch with `Pays::Yes` `create_schema` call blocked by Frequency", async function () { - // bad batch: with frequency related Pays::Yes call - const badBatch: SubmittableExtrinsic[] = []; - badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider("I am a ba(tch)d provider")) - const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); - try { - await batch.fundAndSend(fundingSource); - assert.fail("batch should have caused an error"); - } catch (err) { - assert.notEqual(err, undefined, "should return an error"); - } - }); + it('should fail to execute ❌ batch with `Pays::Yes` `create_schema` call blocked by Frequency', async function () { + // bad batch: with frequency related Pays::Yes call + const badBatch: SubmittableExtrinsic[] = []; + badBatch.push(ExtrinsicHelper.api.tx.msa.createProvider('I am a ba(tch)d provider')); + const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, badBatch); + try { + await batch.fundAndSend(fundingSource); + assert.fail('batch should have caused an error'); + } catch (err) { + assert.notEqual(err, undefined, 'should return an error'); + } + }); - it("should fail to execute ❌ batch with nested batch", async function () { - // batch with nested batch - const nestedBatch: SubmittableExtrinsic[] = []; - const innerBatch: SubmittableExtrinsic[] = []; - innerBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)) - innerBatch.push(ExtrinsicHelper.api.tx.system.remark("Hello From Batch")) - nestedBatch.push(ExtrinsicHelper.api.tx.utility.batch(innerBatch)) - const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, nestedBatch); - try { - await batch.fundAndSend(fundingSource); - assert.fail("batch should have caused an error"); - } catch (err) { - assert.notEqual(err, undefined, "should return an error"); - } - }); + it('should fail to execute ❌ batch with nested batch', async function () { + // batch with nested batch + const nestedBatch: SubmittableExtrinsic[] = []; + const innerBatch: SubmittableExtrinsic[] = []; + innerBatch.push(ExtrinsicHelper.api.tx.balances.transferAllowDeath(recipient.address, 1000)); + innerBatch.push(ExtrinsicHelper.api.tx.system.remark('Hello From Batch')); + nestedBatch.push(ExtrinsicHelper.api.tx.utility.batch(innerBatch)); + const batch = ExtrinsicHelper.executeUtilityBatchAll(sender, nestedBatch); + try { + await batch.fundAndSend(fundingSource); + assert.fail('batch should have caused an error'); + } catch (err) { + assert.notEqual(err, undefined, 'should return an error'); + } + }); }); diff --git a/e2e/msa/createMsa.test.ts b/e2e/msa/createMsa.test.ts index 900093eef2..fbf8cc6029 100644 --- a/e2e/msa/createMsa.test.ts +++ b/e2e/msa/createMsa.test.ts @@ -1,39 +1,44 @@ -import "@frequency-chain/api-augment"; -import assert from "assert"; -import { createKeys, createAndFundKeypair, signPayloadSr25519, Sr25519Signature, generateAddKeyPayload } from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { AddKeyData, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { u64 } from "@polkadot/types"; -import { Codec } from "@polkadot/types/types"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("Create Accounts", function () { - const fundingSource = getFundingSource("msa-create-msa"); - - let keys: 
KeyringPair; - - before(async function () { - keys = await createAndFundKeypair(fundingSource, 50_000_000n); +import '@frequency-chain/api-augment'; +import assert from 'assert'; +import { createAndFundKeypair } from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { getFundingSource } from '../scaffolding/funding'; + +const fundingSource = getFundingSource('msa-create-msa'); + +describe('Create Accounts', function () { + let keys: KeyringPair; + + before(async function () { + keys = await createAndFundKeypair(fundingSource, 50_000_000n); + }); + + describe('createMsa', function () { + it('should successfully create an MSA account', async function () { + const f = ExtrinsicHelper.createMsa(keys); + const { target: msaCreatedEvent, eventMap: chainEvents } = await f.fundAndSend(fundingSource); + + assert.notEqual( + chainEvents['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual(msaCreatedEvent, undefined, 'should have returned an MsaCreated event'); + assert.notEqual( + chainEvents['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + + assert.notEqual(msaCreatedEvent?.data.msaId, undefined, 'Failed to get the msaId from the event'); }); - describe("createMsa", function () { - - it("should successfully create an MSA account", async function () { - const f = ExtrinsicHelper.createMsa(keys); - const { target: msaCreatedEvent, eventMap: chainEvents } = await f.fundAndSend(fundingSource); - - assert.notEqual(chainEvents["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(msaCreatedEvent, undefined, "should have returned an MsaCreated event"); - assert.notEqual(chainEvents["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - - assert.notEqual(msaCreatedEvent?.data.msaId, undefined, "Failed to get the msaId from the event"); - }); - - it("should fail to create an MSA for a keypair already associated with an MSA", async function () { - const op = ExtrinsicHelper.createMsa(keys); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'KeyAlreadyRegistered', - }); - }); + it('should fail to create an MSA for a keypair already associated with an MSA', async function () { + const op = ExtrinsicHelper.createMsa(keys); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'KeyAlreadyRegistered', + }); }); -}) + }); +}); diff --git a/e2e/msa/msaKeyManagement.test.ts b/e2e/msa/msaKeyManagement.test.ts index ec22792fa7..80a877be34 100644 --- a/e2e/msa/msaKeyManagement.test.ts +++ b/e2e/msa/msaKeyManagement.test.ts @@ -1,252 +1,260 @@ -import "@frequency-chain/api-augment"; -import assert from "assert"; -import { createKeys, createAndFundKeypair, signPayloadSr25519, Sr25519Signature, generateAddKeyPayload, createProviderKeysAndId, CENTS } from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { AddKeyData, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { u64 } from "@polkadot/types"; -import { Codec } from "@polkadot/types/types"; -import { getFundingSource } from "../scaffolding/funding"; +import '@frequency-chain/api-augment'; +import assert from 'assert'; +import { + createKeys, + createAndFundKeypair, + signPayloadSr25519, + Sr25519Signature, + generateAddKeyPayload, + 
createProviderKeysAndId, + CENTS, +} from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { AddKeyData, ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { u64 } from '@polkadot/types'; +import { Codec } from '@polkadot/types/types'; +import { getFundingSource } from '../scaffolding/funding'; const maxU64 = 18_446_744_073_709_551_615n; +const fundingSource = getFundingSource('msa-key-management'); + +describe('MSA Key management', function () { + describe('addPublicKeyToMsa', function () { + let keys: KeyringPair; + let msaId: u64; + let secondaryKey: KeyringPair; + const defaultPayload: AddKeyData = {}; + let payload: AddKeyData; + let ownerSig: Sr25519Signature; + let newSig: Sr25519Signature; + let badSig: Sr25519Signature; + let addKeyData: Codec; + + before(async function () { + // Setup an MSA with one key and a secondary funded key + keys = await createAndFundKeypair(fundingSource, 5n * CENTS); + const { target } = await ExtrinsicHelper.createMsa(keys).signAndSend(); + assert.notEqual(target?.data.msaId, undefined, 'MSA Id not in expected event'); + msaId = target!.data.msaId; + + secondaryKey = await createAndFundKeypair(fundingSource, 5n * CENTS); + + // Default payload making it easier to test `addPublicKeyToMsa` + defaultPayload.msaId = msaId; + defaultPayload.newPublicKey = secondaryKey.publicKey; + }); + + beforeEach(async function () { + payload = await generateAddKeyPayload(defaultPayload); + }); + + it('should fail to add public key if origin is not one of the signers of the payload (MsaOwnershipInvalidSignature)', async function () { + const badKeys: KeyringPair = createKeys(); + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + badSig = signPayloadSr25519(badKeys, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, badSig, newSig, payload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'MsaOwnershipInvalidSignature', + }); + }); + + it('should fail to add public key if new keypair is not one of the signers of the payload (NewKeyOwnershipInvalidSignature)', async function () { + const badKeys: KeyringPair = createKeys(); + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); + ownerSig = signPayloadSr25519(keys, addKeyData); + badSig = signPayloadSr25519(badKeys, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, badSig, payload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'NewKeyOwnershipInvalidSignature', + }); + }); + + it('should fail to add public key if origin does not have an MSA (NoKeyExists)', async function () { + const newOriginKeys = await createAndFundKeypair(fundingSource, 50_000_000n); + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); + ownerSig = signPayloadSr25519(newOriginKeys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(newOriginKeys, ownerSig, newSig, payload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'NoKeyExists', + }); + }); + + it('should fail to add public key if origin does not own MSA (NotMsaOwner)', async function () { + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + msaId: new u64(ExtrinsicHelper.api.registry, maxU64), + }); + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', newPayload); + ownerSig = 
signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'NotMsaOwner', + }); + }); + + it('should fail if expiration has passed (ProofHasExpired)', async function () { + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber(), + }); + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', newPayload); + ownerSig = signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'ProofHasExpired', + }); + }); + + it('should fail if expiration is not yet valid (ProofNotYetValid)', async function () { + const maxMortality = ExtrinsicHelper.api.consts.msa.mortalityWindowSize.toNumber(); + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber() + maxMortality + 999, + }); + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', newPayload); + ownerSig = signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); + await assert.rejects(op.fundAndSend(fundingSource), { + name: 'ProofNotYetValid', + }); + }); + + it('should successfully add a new public key to an existing MSA & disallow duplicate signed payload submission (SignatureAlreadySubmitted)', async function () { + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); + + ownerSig = signPayloadSr25519(keys, addKeyData); + newSig = signPayloadSr25519(secondaryKey, addKeyData); + const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); + + const { target: publicKeyEvents } = await addPublicKeyOp.fundAndSend(fundingSource); + + assert.notEqual(publicKeyEvents, undefined, 'should have added public key'); + + await assert.rejects( + addPublicKeyOp.fundAndSend(fundingSource), + 'should reject sending the same signed payload twice' + ); + }); -describe("MSA Key management", function () { - const fundingSource = getFundingSource("msa-key-management"); - - describe("addPublicKeyToMsa", function () { - let keys: KeyringPair; - let msaId: u64; - let secondaryKey: KeyringPair; - let defaultPayload: AddKeyData = {}; - let payload: AddKeyData; - let ownerSig: Sr25519Signature; - let newSig: Sr25519Signature; - let badSig: Sr25519Signature; - let addKeyData: Codec; - - before(async function () { - // Setup an MSA with one key and a secondary funded key - keys = await createAndFundKeypair(fundingSource, 5n * CENTS); - const { target } = await ExtrinsicHelper.createMsa(keys).signAndSend(); - assert.notEqual(target?.data.msaId, undefined, "MSA Id not in expected event"); - msaId = target!.data.msaId; - - secondaryKey = await createAndFundKeypair(fundingSource, 5n * CENTS); - - // Default payload making it easier to test `addPublicKeyToMsa` - defaultPayload.msaId = msaId; - defaultPayload.newPublicKey = secondaryKey.publicKey; - }); - - beforeEach(async function () { - payload = await generateAddKeyPayload(defaultPayload); - }); - - it("should fail to add public key if origin is not 
one of the signers of the payload (MsaOwnershipInvalidSignature)", async function () { - const badKeys: KeyringPair = createKeys(); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - newSig = signPayloadSr25519(secondaryKey, addKeyData); - badSig = signPayloadSr25519(badKeys, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, badSig, newSig, payload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'MsaOwnershipInvalidSignature', - }); - }) - - it("should fail to add public key if new keypair is not one of the signers of the payload (NewKeyOwnershipInvalidSignature)", async function () { - const badKeys: KeyringPair = createKeys() - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - ownerSig = signPayloadSr25519(keys, addKeyData); - badSig = signPayloadSr25519(badKeys, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, badSig, payload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'NewKeyOwnershipInvalidSignature', - }); - }); - - it("should fail to add public key if origin does not have an MSA (NoKeyExists)", async function () { - const newOriginKeys = await createAndFundKeypair(fundingSource, 50_000_000n); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - ownerSig = signPayloadSr25519(newOriginKeys, addKeyData); - newSig = signPayloadSr25519(secondaryKey, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(newOriginKeys, ownerSig, newSig, payload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'NoKeyExists', - }); - }) - - it("should fail to add public key if origin does not own MSA (NotMsaOwner)", async function () { - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - msaId: new u64(ExtrinsicHelper.api.registry, maxU64), - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(secondaryKey, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'NotMsaOwner', - }); - }); - - it("should fail if expiration has passed (ProofHasExpired)", async function () { - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber(), - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(secondaryKey, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); - await assert.rejects(op.fundAndSend(fundingSource), { - name: 'ProofHasExpired', - }); - }) - - it("should fail if expiration is not yet valid (ProofNotYetValid)", async function () { - const maxMortality = ExtrinsicHelper.api.consts.msa.mortalityWindowSize.toNumber(); - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - expiration: (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber() + maxMortality + 999, - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(secondaryKey, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, newPayload); - await 
assert.rejects(op.fundAndSend(fundingSource), { - name: 'ProofNotYetValid', - }); - }) - - it("should successfully add a new public key to an existing MSA & disallow duplicate signed payload submission (SignatureAlreadySubmitted)", async function () { - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - - ownerSig = signPayloadSr25519(keys, addKeyData); - newSig = signPayloadSr25519(secondaryKey, addKeyData); - const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); - - const { target: publicKeyEvents } = await addPublicKeyOp.fundAndSend(fundingSource); - - assert.notEqual(publicKeyEvents, undefined, 'should have added public key'); - - await assert.rejects(addPublicKeyOp.fundAndSend(fundingSource), "should reject sending the same signed payload twice"); - }); - - it("should fail if attempting to add the same key more than once (KeyAlreadyRegistered)", async function () { - const addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - - const ownerSig = signPayloadSr25519(keys, addKeyData); - const newSig = signPayloadSr25519(secondaryKey, addKeyData); - const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); - - await assert.rejects(addPublicKeyOp.fundAndSend(fundingSource), { - name: 'KeyAlreadyRegistered', - }) - }); - - it("should allow new keypair to act for/on MSA", async function () { - const thirdKey = createKeys(); - const newPayload = await generateAddKeyPayload({ - ...defaultPayload, - newPublicKey: thirdKey.publicKey, - }); - addKeyData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", newPayload); - ownerSig = signPayloadSr25519(secondaryKey, addKeyData); - newSig = signPayloadSr25519(thirdKey, addKeyData); - const op = ExtrinsicHelper.addPublicKeyToMsa(secondaryKey, ownerSig, newSig, newPayload); - const { target: event } = await op.fundAndSend(fundingSource); - assert.notEqual(event, undefined, 'should have added public key'); - - // Cleanup - await assert.doesNotReject(ExtrinsicHelper.deletePublicKey(keys, thirdKey.publicKey).signAndSend('current')); - }); - }); - - describe("provider msa", function () { - - it("should disallow retiring MSA belonging to a provider", async function () { - const [providerKeys] = await createProviderKeysAndId(fundingSource); - const retireOp = ExtrinsicHelper.retireMsa(providerKeys); - await assert.rejects(retireOp.signAndSend('current'), { - name: 'RpcError', - message: /Custom error: 2/, - }); - }); - }); - - describe("delete keys and retire", function () { - let keys: KeyringPair; - let secondaryKey: KeyringPair; - let msaId: u64; - - before(async function () { - // Generates a msa with two control keys - keys = await createAndFundKeypair(fundingSource, 50_000_000n); - secondaryKey = await createAndFundKeypair(fundingSource, 50_000_000n); - - const { target } = await ExtrinsicHelper.createMsa(keys).signAndSend(); - assert.notEqual(target?.data.msaId, undefined, "MSA Id not in expected event"); - msaId = target!.data.msaId; - - const payload = await generateAddKeyPayload({ - msaId, - newPublicKey: secondaryKey.publicKey, - }); - const payloadData = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", payload); - const ownerSig = signPayloadSr25519(keys, payloadData); - const newSig = signPayloadSr25519(secondaryKey, payloadData); - const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); - const { target: event } = await op.signAndSend(); - 
assert.notEqual(event, undefined, 'should have added public key'); - }); - - it("should disallow retiring an MSA with more than one key authorized", async function () { - const retireOp = ExtrinsicHelper.retireMsa(keys); - await assert.rejects(retireOp.signAndSend('current'), { - name: 'RpcError', - message: /Custom error: 3/, - }); - }); - - it("should fail to delete public key for self", async function () { - const op = ExtrinsicHelper.deletePublicKey(keys, keys.publicKey); - await assert.rejects(op.signAndSend('current'), { - name: 'RpcError', - message: /Custom error: 4/, - }); - }); - - it("should fail to delete key if not authorized for key's MSA", async function () { - const [providerKeys] = await createProviderKeysAndId(fundingSource); - - const op = ExtrinsicHelper.deletePublicKey(providerKeys, keys.publicKey); - await assert.rejects(op.signAndSend('current'), { - name: 'RpcError', - message: /Custom error: 5/, - }); - }); - - it("should test for 'NoKeyExists' error", async function () { - const key = createKeys("nothing key"); - const op = ExtrinsicHelper.deletePublicKey(keys, key.publicKey); - await assert.rejects(op.signAndSend('current'), { - name: 'RpcError', - message: /Custom error: 1/, - }); - }); - - it("should delete secondary key", async function () { - const op = ExtrinsicHelper.deletePublicKey(keys, secondaryKey.publicKey); - const { target: event } = await op.signAndSend('current'); - assert.notEqual(event, undefined, "should have returned PublicKeyDeleted event"); - }); - - it("should allow retiring MSA after additional keys have been deleted", async function () { - const retireMsaOp = ExtrinsicHelper.retireMsa(keys); - const { target: event, eventMap } = await retireMsaOp.signAndSend('current'); - - assert.notEqual(eventMap["msa.PublicKeyDeleted"], undefined, 'should have deleted public key (retired)'); - assert.notEqual(event, undefined, 'should have retired msa'); - }); - - }); -}) + it('should fail if attempting to add the same key more than once (KeyAlreadyRegistered)', async function () { + const addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); + + const ownerSig = signPayloadSr25519(keys, addKeyData); + const newSig = signPayloadSr25519(secondaryKey, addKeyData); + const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); + + await assert.rejects(addPublicKeyOp.fundAndSend(fundingSource), { + name: 'KeyAlreadyRegistered', + }); + }); + + it('should allow new keypair to act for/on MSA', async function () { + const thirdKey = createKeys(); + const newPayload = await generateAddKeyPayload({ + ...defaultPayload, + newPublicKey: thirdKey.publicKey, + }); + addKeyData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', newPayload); + ownerSig = signPayloadSr25519(secondaryKey, addKeyData); + newSig = signPayloadSr25519(thirdKey, addKeyData); + const op = ExtrinsicHelper.addPublicKeyToMsa(secondaryKey, ownerSig, newSig, newPayload); + const { target: event } = await op.fundAndSend(fundingSource); + assert.notEqual(event, undefined, 'should have added public key'); + + // Cleanup + await assert.doesNotReject(ExtrinsicHelper.deletePublicKey(keys, thirdKey.publicKey).signAndSend('current')); + }); + }); + + describe('provider msa', function () { + it('should disallow retiring MSA belonging to a provider', async function () { + const [providerKeys] = await createProviderKeysAndId(fundingSource); + const retireOp = ExtrinsicHelper.retireMsa(providerKeys); + await 
assert.rejects(retireOp.signAndSend('current'), { + name: 'RpcError', + message: /Custom error: 2/, + }); + }); + }); + + describe('delete keys and retire', function () { + let keys: KeyringPair; + let secondaryKey: KeyringPair; + let msaId: u64; + + before(async function () { + // Generates a msa with two control keys + keys = await createAndFundKeypair(fundingSource, 50_000_000n); + secondaryKey = await createAndFundKeypair(fundingSource, 50_000_000n); + + const { target } = await ExtrinsicHelper.createMsa(keys).signAndSend(); + assert.notEqual(target?.data.msaId, undefined, 'MSA Id not in expected event'); + msaId = target!.data.msaId; + + const payload = await generateAddKeyPayload({ + msaId, + newPublicKey: secondaryKey.publicKey, + }); + const payloadData = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', payload); + const ownerSig = signPayloadSr25519(keys, payloadData); + const newSig = signPayloadSr25519(secondaryKey, payloadData); + const op = ExtrinsicHelper.addPublicKeyToMsa(keys, ownerSig, newSig, payload); + const { target: event } = await op.signAndSend(); + assert.notEqual(event, undefined, 'should have added public key'); + }); + + it('should disallow retiring an MSA with more than one key authorized', async function () { + const retireOp = ExtrinsicHelper.retireMsa(keys); + await assert.rejects(retireOp.signAndSend('current'), { + name: 'RpcError', + message: /Custom error: 3/, + }); + }); + + it('should fail to delete public key for self', async function () { + const op = ExtrinsicHelper.deletePublicKey(keys, keys.publicKey); + await assert.rejects(op.signAndSend('current'), { + name: 'RpcError', + message: /Custom error: 4/, + }); + }); + + it("should fail to delete key if not authorized for key's MSA", async function () { + const [providerKeys] = await createProviderKeysAndId(fundingSource); + + const op = ExtrinsicHelper.deletePublicKey(providerKeys, keys.publicKey); + await assert.rejects(op.signAndSend('current'), { + name: 'RpcError', + message: /Custom error: 5/, + }); + }); + + it("should test for 'NoKeyExists' error", async function () { + const key = createKeys('nothing key'); + const op = ExtrinsicHelper.deletePublicKey(keys, key.publicKey); + await assert.rejects(op.signAndSend('current'), { + name: 'RpcError', + message: /Custom error: 1/, + }); + }); + + it('should delete secondary key', async function () { + const op = ExtrinsicHelper.deletePublicKey(keys, secondaryKey.publicKey); + const { target: event } = await op.signAndSend('current'); + assert.notEqual(event, undefined, 'should have returned PublicKeyDeleted event'); + }); + + it('should allow retiring MSA after additional keys have been deleted', async function () { + const retireMsaOp = ExtrinsicHelper.retireMsa(keys); + const { target: event, eventMap } = await retireMsaOp.signAndSend('current'); + + assert.notEqual(eventMap['msa.PublicKeyDeleted'], undefined, 'should have deleted public key (retired)'); + assert.notEqual(event, undefined, 'should have retired msa'); + }); + }); +}); diff --git a/e2e/package-lock.json b/e2e/package-lock.json index 76cf7114c9..6c3f0f1131 100644 --- a/e2e/package-lock.json +++ b/e2e/package-lock.json @@ -24,8 +24,10 @@ "@polkadot/typegen": "10.9.1", "@types/mocha": "^10.0.2", "@types/workerpool": "^6.4.5", - "@typescript-eslint/eslint-plugin": "^6.7.3", - "@typescript-eslint/parser": "^6.7.3", + "@typescript-eslint/eslint-plugin": "^6.10.0", + "@typescript-eslint/parser": "^6.10.0", + "eslint": "^8.53.0", + "eslint-plugin-mocha": "^10.2.0", "mocha": 
"^10.2.0", "prettier": "^3.0.3", "sinon": "^16.0.0", @@ -38,7 +40,6 @@ "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", "dev": true, - "peer": true, "engines": { "node": ">=0.10.0" } @@ -225,11 +226,10 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz", - "integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.3.tgz", + "integrity": "sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==", "dev": true, - "peer": true, "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", @@ -249,11 +249,10 @@ } }, "node_modules/@eslint/js": { - "version": "8.50.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.50.0.tgz", - "integrity": "sha512-NCC3zz2+nvYd+Ckfh87rA47zfu2QsQpvc6k1yzTk+b9KzRj0wkGa8LSoGOXN6Zv4lRf/EIoZ80biDh9HOI+RNQ==", + "version": "8.53.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.53.0.tgz", + "integrity": "sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==", "dev": true, - "peer": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } @@ -308,13 +307,12 @@ } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.11", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz", - "integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==", + "version": "0.11.13", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz", + "integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==", "dev": true, - "peer": true, "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", + "@humanwhocodes/object-schema": "^2.0.1", "debug": "^4.1.1", "minimatch": "^3.0.5" }, @@ -327,7 +325,6 @@ "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "dev": true, - "peer": true, "engines": { "node": ">=12.22" }, @@ -337,11 +334,10 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true, - "peer": true + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz", + "integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==", + "dev": true }, "node_modules/@ipld/dag-cbor": { "version": "9.0.0", @@ -1618,9 +1614,9 @@ } }, "node_modules/@types/json-schema": { - "version": "7.0.13", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz", - "integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==", + "version": "7.0.14", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.14.tgz", + 
"integrity": "sha512-U3PUjAudAdJBeC2pgN8uTIKgxrb4nlDF3SF0++EldXQvQBGkpFZMSnwQiIoDU77tv45VgNkl/L4ouD+rEomujw==", "dev": true }, "node_modules/@types/mocha": { @@ -1649,9 +1645,9 @@ "integrity": "sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==" }, "node_modules/@types/semver": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.3.tgz", - "integrity": "sha512-OxepLK9EuNEIPxWNME+C6WwbRAOOI2o2BaQEGzz5Lu2e4Z5eDnEo+/aVEDMIXywoJitJ7xWd641wrGLZdtwRyw==", + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-MMzuxN3GdFwskAnb6fz0orFvhfqi752yjaXylr0Rp4oDg5H0Zn1IuyRhDVvYOwAXoJirx2xuS16I3WjxnAIHiQ==", "dev": true }, "node_modules/@types/sinon": { @@ -1685,16 +1681,16 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.3.tgz", - "integrity": "sha512-vntq452UHNltxsaaN+L9WyuMch8bMd9CqJ3zhzTPXXidwbf5mqqKCVXEuvRZUqLJSTLeWE65lQwyXsRGnXkCTA==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.10.0.tgz", + "integrity": "sha512-uoLj4g2OTL8rfUQVx2AFO1hp/zja1wABJq77P6IclQs6I/m9GLrm7jCdgzZkvWdDCQf1uEvoa8s8CupsgWQgVg==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.7.3", - "@typescript-eslint/type-utils": "6.7.3", - "@typescript-eslint/utils": "6.7.3", - "@typescript-eslint/visitor-keys": "6.7.3", + "@typescript-eslint/scope-manager": "6.10.0", + "@typescript-eslint/type-utils": "6.10.0", + "@typescript-eslint/utils": "6.10.0", + "@typescript-eslint/visitor-keys": "6.10.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -1720,15 +1716,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.3.tgz", - "integrity": "sha512-TlutE+iep2o7R8Lf+yoer3zU6/0EAUc8QIBB3GYBc1KGz4c4TRm83xwXUZVPlZ6YCLss4r77jbu6j3sendJoiQ==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.10.0.tgz", + "integrity": "sha512-+sZwIj+s+io9ozSxIWbNB5873OSdfeBEH/FR0re14WLI6BaKuSOnnwCJ2foUiu8uXf4dRp1UqHP0vrZ1zXGrog==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.7.3", - "@typescript-eslint/types": "6.7.3", - "@typescript-eslint/typescript-estree": "6.7.3", - "@typescript-eslint/visitor-keys": "6.7.3", + "@typescript-eslint/scope-manager": "6.10.0", + "@typescript-eslint/types": "6.10.0", + "@typescript-eslint/typescript-estree": "6.10.0", + "@typescript-eslint/visitor-keys": "6.10.0", "debug": "^4.3.4" }, "engines": { @@ -1748,13 +1744,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.3.tgz", - "integrity": "sha512-wOlo0QnEou9cHO2TdkJmzF7DFGvAKEnB82PuPNHpT8ZKKaZu6Bm63ugOTn9fXNJtvuDPanBc78lGUGGytJoVzQ==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.10.0.tgz", + "integrity": "sha512-TN/plV7dzqqC2iPNf1KrxozDgZs53Gfgg5ZHyw8erd6jd5Ta/JIEcdCheXFt9b1NYb93a1wmIIVW/2gLkombDg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.7.3", - "@typescript-eslint/visitor-keys": "6.7.3" + "@typescript-eslint/types": "6.10.0", + 
"@typescript-eslint/visitor-keys": "6.10.0" }, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -1765,13 +1761,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.3.tgz", - "integrity": "sha512-Fc68K0aTDrKIBvLnKTZ5Pf3MXK495YErrbHb1R6aTpfK5OdSFj0rVN7ib6Tx6ePrZ2gsjLqr0s98NG7l96KSQw==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.10.0.tgz", + "integrity": "sha512-wYpPs3hgTFblMYwbYWPT3eZtaDOjbLyIYuqpwuLBBqhLiuvJ+9sEp2gNRJEtR5N/c9G1uTtQQL5AhV0fEPJYcg==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.7.3", - "@typescript-eslint/utils": "6.7.3", + "@typescript-eslint/typescript-estree": "6.10.0", + "@typescript-eslint/utils": "6.10.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -1792,9 +1788,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.3.tgz", - "integrity": "sha512-4g+de6roB2NFcfkZb439tigpAMnvEIg3rIjWQ+EM7IBaYt/CdJt6em9BJ4h4UpdgaBWdmx2iWsafHTrqmgIPNw==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.10.0.tgz", + "integrity": "sha512-36Fq1PWh9dusgo3vH7qmQAj5/AZqARky1Wi6WpINxB6SkQdY5vQoT2/7rW7uBIsPDcvvGCLi4r10p0OJ7ITAeg==", "dev": true, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -1805,13 +1801,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.3.tgz", - "integrity": "sha512-YLQ3tJoS4VxLFYHTw21oe1/vIZPRqAO91z6Uv0Ss2BKm/Ag7/RVQBcXTGcXhgJMdA4U+HrKuY5gWlJlvoaKZ5g==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.10.0.tgz", + "integrity": "sha512-ek0Eyuy6P15LJVeghbWhSrBCj/vJpPXXR+EpaRZqou7achUWL8IdYnMSC5WHAeTWswYQuP2hAZgij/bC9fanBg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.7.3", - "@typescript-eslint/visitor-keys": "6.7.3", + "@typescript-eslint/types": "6.10.0", + "@typescript-eslint/visitor-keys": "6.10.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -1832,17 +1828,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.3.tgz", - "integrity": "sha512-vzLkVder21GpWRrmSR9JxGZ5+ibIUSudXlW52qeKpzUEQhRSmyZiVDDj3crAth7+5tmN1ulvgKaCU2f/bPRCzg==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.10.0.tgz", + "integrity": "sha512-v+pJ1/RcVyRc0o4wAGux9x42RHmAjIGzPRo538Z8M1tVx6HOnoQBCX/NoadHQlZeC+QO2yr4nNSFWOoraZCAyg==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.7.3", - "@typescript-eslint/types": "6.7.3", - "@typescript-eslint/typescript-estree": "6.7.3", + "@typescript-eslint/scope-manager": "6.10.0", + "@typescript-eslint/types": "6.10.0", + "@typescript-eslint/typescript-estree": "6.10.0", "semver": "^7.5.4" }, "engines": { @@ -1857,12 +1853,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.7.3", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.3.tgz", - "integrity": 
"sha512-HEVXkU9IB+nk9o63CeICMHxFWbHWr3E1mpilIQBe9+7L/lH97rleFLVtYsfnWB+JVMaiFnEaxvknvmIzX+CqVg==", + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.10.0.tgz", + "integrity": "sha512-xMGluxQIEtOM7bqFCo+rCMh5fqI+ZxV5RUUOa29iVPz1OgCZrtc7rFnz5cLUazlkPKYqX+75iuDq7m0HQ48nCg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.7.3", + "@typescript-eslint/types": "6.10.0", "eslint-visitor-keys": "^3.4.1" }, "engines": { @@ -1873,6 +1869,12 @@ "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, "node_modules/@vascosantos/moving-average": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@vascosantos/moving-average/-/moving-average-1.1.0.tgz", @@ -1908,7 +1910,6 @@ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, - "peer": true, "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } @@ -1927,7 +1928,6 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, - "peer": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -2175,7 +2175,6 @@ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, - "peer": true, "engines": { "node": ">=6" } @@ -2360,8 +2359,7 @@ "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "peer": true + "dev": true }, "node_modules/default-gateway": { "version": "7.2.2", @@ -2469,7 +2467,6 @@ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "dev": true, - "peer": true, "dependencies": { "esutils": "^2.0.2" }, @@ -2510,19 +2507,19 @@ } }, "node_modules/eslint": { - "version": "8.50.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.50.0.tgz", - "integrity": "sha512-FOnOGSuFuFLv/Sa+FDVRZl4GGVAAFFi8LecRsI5a1tMO5HIE8nCm4ivAlzt4dT3ol/PaaGC0rJEEXQmHJBGoOg==", + "version": "8.53.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.53.0.tgz", + "integrity": "sha512-N4VuiPjXDUa4xVeV/GC/RV3hQW9Nw+Y463lkWaKKXKYMvmRiRDAtfpuPFLN+E1/6ZhyR8J2ig+eVREnYgUsiag==", "dev": true, - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.50.0", - "@humanwhocodes/config-array": "^0.11.11", + "@eslint/eslintrc": "^2.1.3", + "@eslint/js": "8.53.0", + "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -2564,12 +2561,27 @@ "url": 
"https://opencollective.com/eslint" } }, + "node_modules/eslint-plugin-mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-10.2.0.tgz", + "integrity": "sha512-ZhdxzSZnd1P9LqDPF0DBcFLpRIGdh1zkF2JHnQklKQOvrQtT73kdP5K9V2mzvbLR+cCAO9OI48NXK/Ax9/ciCQ==", + "dev": true, + "dependencies": { + "eslint-utils": "^3.0.0", + "rambda": "^7.4.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, "node_modules/eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, - "peer": true, "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" @@ -2581,6 +2593,33 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" + } + }, + "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, "node_modules/eslint-visitor-keys": { "version": "3.4.3", "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", @@ -2598,7 +2637,6 @@ "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, - "peer": true, "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", @@ -2616,7 +2654,6 @@ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, - "peer": true, "dependencies": { "estraverse": "^5.1.0" }, @@ -2629,7 +2666,6 @@ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, - "peer": true, "dependencies": { "estraverse": "^5.2.0" }, @@ -2642,7 +2678,6 @@ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, - "peer": true, "engines": { "node": ">=4.0" } @@ -2652,7 +2687,6 @@ "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true, - "peer": true, "engines": { "node": ">=0.10.0" } @@ -2679,13 +2713,12 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": 
"sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "peer": true + "dev": true }, "node_modules/fast-glob": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.1.tgz", - "integrity": "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", "dev": true, "dependencies": { "@nodelib/fs.stat": "^2.0.2", @@ -2714,15 +2747,13 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "peer": true + "dev": true }, "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true, - "peer": true + "dev": true }, "node_modules/fastq": { "version": "1.15.0", @@ -2760,7 +2791,6 @@ "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, - "peer": true, "dependencies": { "flat-cache": "^3.0.4" }, @@ -2810,7 +2840,6 @@ "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.1.0.tgz", "integrity": "sha512-OHx4Qwrrt0E4jEIcI5/Xb+f+QmJYNj2rrK8wiIdQOIrB9WrrJL8cjZvXdXuBTkkEwEqLycb5BeZDV1o2i9bTew==", "dev": true, - "peer": true, "dependencies": { "flatted": "^3.2.7", "keyv": "^4.5.3", @@ -2824,8 +2853,7 @@ "version": "3.2.9", "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz", "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", - "dev": true, - "peer": true + "dev": true }, "node_modules/formdata-polyfill": { "version": "4.0.10", @@ -2917,7 +2945,6 @@ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, - "peer": true, "dependencies": { "is-glob": "^4.0.3" }, @@ -2926,11 +2953,10 @@ } }, "node_modules/globals": { - "version": "13.22.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.22.0.tgz", - "integrity": "sha512-H1Ddc/PbZHTDVJSnj8kWptIRSD6AM3pK+mKytuIVF4uoBV7rshFlhhvA58ceJ5wp3Er58w6zj7bykMpYXt3ETw==", + "version": "13.23.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz", + "integrity": "sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==", "dev": true, - "peer": true, "dependencies": { "type-fest": "^0.20.2" }, @@ -3109,7 +3135,6 @@ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dev": true, - "peer": true, "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -3126,7 +3151,6 @@ "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": 
"sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, - "peer": true, "engines": { "node": ">=0.8.19" } @@ -3389,7 +3413,6 @@ "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true, - "peer": true, "engines": { "node": ">=8" } @@ -3765,22 +3788,19 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true, - "peer": true + "dev": true }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "peer": true + "dev": true }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true, - "peer": true + "dev": true }, "node_modules/json-stringify-safe": { "version": "5.0.1", @@ -3803,7 +3823,6 @@ "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.3.tgz", "integrity": "sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug==", "dev": true, - "peer": true, "dependencies": { "json-buffer": "3.0.1" } @@ -3813,7 +3832,6 @@ "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, - "peer": true, "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" @@ -3929,8 +3947,7 @@ "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, - "peer": true + "dev": true }, "node_modules/log-symbols": { "version": "4.1.0", @@ -5040,7 +5057,6 @@ "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", "dev": true, - "peer": true, "dependencies": { "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", @@ -5187,7 +5203,6 @@ "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, - "peer": true, "dependencies": { "callsites": "^3.0.0" }, @@ -5261,7 +5276,6 @@ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, - "peer": true, "engines": { "node": ">= 0.8.0" } @@ -5348,11 +5362,10 @@ } }, "node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "version": "2.3.1", + "resolved": 
"https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, - "peer": true, "engines": { "node": ">=6" } @@ -5412,6 +5425,12 @@ } } }, + "node_modules/rambda": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/rambda/-/rambda-7.5.0.tgz", + "integrity": "sha512-y/M9weqWAH4iopRd7EHDEQQvpFPHj1AA3oHozE9tfITHUtTR7Z9PSlIRRG2l1GuW7sefC1cXFfIcF+cgnShdBA==", + "dev": true + }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -5473,7 +5492,6 @@ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, - "peer": true, "engines": { "node": ">=4" } @@ -5506,7 +5524,6 @@ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "dev": true, - "peer": true, "dependencies": { "glob": "^7.1.3" }, @@ -5797,8 +5814,7 @@ "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", - "dev": true, - "peer": true + "dev": true }, "node_modules/thunky": { "version": "1.1.0", @@ -5920,7 +5936,6 @@ "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, - "peer": true, "dependencies": { "prelude-ls": "^1.2.1" }, @@ -5942,7 +5957,6 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, - "peer": true, "engines": { "node": ">=10" }, @@ -6021,7 +6035,6 @@ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, - "peer": true, "dependencies": { "punycode": "^2.1.0" } diff --git a/e2e/package.json b/e2e/package.json index 2e4004c00e..eb251197ef 100644 --- a/e2e/package.json +++ b/e2e/package.json @@ -10,8 +10,8 @@ "test:serial": "mocha --parallel=false", "test:relay": "mocha --config .relay-chain.mocharc.json", "test:load": "mocha --config .load-test.mocharc.json", - "format": "tsc --noEmit --pretty", - "lint": "tsc --noEmit --pretty", + "format": "tsc --noEmit --pretty && prettier --write . && eslint --fix .", + "lint": "tsc --noEmit --pretty && eslint . 
&& prettier --check .", "preinstall": "echo \"NOTICE: E2E tests REQUIRE ../js/api-augment to have been built and packaged\"" }, "keywords": [], @@ -32,8 +32,10 @@ "@polkadot/typegen": "10.9.1", "@types/mocha": "^10.0.2", "@types/workerpool": "^6.4.5", - "@typescript-eslint/eslint-plugin": "^6.7.3", - "@typescript-eslint/parser": "^6.7.3", + "@typescript-eslint/eslint-plugin": "^6.10.0", + "@typescript-eslint/parser": "^6.10.0", + "eslint": "^8.53.0", + "eslint-plugin-mocha": "^10.2.0", "mocha": "^10.2.0", "prettier": "^3.0.3", "sinon": "^16.0.0", diff --git a/e2e/scaffolding/apiConnection.ts b/e2e/scaffolding/apiConnection.ts index 44b806e8bc..b5ff9e2e86 100644 --- a/e2e/scaffolding/apiConnection.ts +++ b/e2e/scaffolding/apiConnection.ts @@ -1,17 +1,17 @@ -import { options } from "@frequency-chain/api-augment"; -import { ApiRx, WsProvider, ApiPromise } from "@polkadot/api"; -import { firstValueFrom } from "rxjs"; -import { env } from "./env"; +import { options } from '@frequency-chain/api-augment'; +import { ApiRx, WsProvider, ApiPromise } from '@polkadot/api'; +import { firstValueFrom } from 'rxjs'; +import * as env from './env'; export async function connect(providerUrl?: string | string[] | undefined): Promise { - const provider = new WsProvider(providerUrl || env.providerUrl); - const apiObservable = ApiRx.create({ provider, ...options }); - return firstValueFrom(apiObservable); + const provider = new WsProvider(providerUrl || env.providerUrl); + const apiObservable = ApiRx.create({ provider, ...options }); + return firstValueFrom(apiObservable); } export async function connectPromise(providerUrl?: string | string[] | undefined): Promise { - const provider = new WsProvider(providerUrl || env.providerUrl); - const api = await ApiPromise.create({ provider, ...options }); - await api.isReady; - return api; + const provider = new WsProvider(providerUrl || env.providerUrl); + const api = await ApiPromise.create({ provider, ...options }); + await api.isReady; + return api; } diff --git a/e2e/scaffolding/autoNonce.ts b/e2e/scaffolding/autoNonce.ts index 0600bd1928..562a7988f5 100644 --- a/e2e/scaffolding/autoNonce.ts +++ b/e2e/scaffolding/autoNonce.ts @@ -1,4 +1,3 @@ - /** * The AutoNonce Module keeps track of nonces used by tests * Because all keys in the tests are managed by the tests, the nonces are determined by: @@ -6,8 +5,8 @@ * 2. 
Not counting transactions that had RPC failures */ -import type { KeyringPair } from "@polkadot/keyring/types"; -import { ExtrinsicHelper } from "./extrinsicHelpers"; +import type { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper } from './extrinsicHelpers'; export type AutoNonce = number | 'auto' | 'current'; @@ -15,21 +14,21 @@ const nonceCache = new Map(); const getNonce = async (keys: KeyringPair) => { return (await ExtrinsicHelper.getAccountInfo(keys.address)).nonce.toNumber(); -} +}; const reset = (keys: KeyringPair) => { nonceCache.delete(keys.address); -} +}; const current = async (keys: KeyringPair): Promise => { - return nonceCache.get(keys.address) || await getNonce(keys); -} + return nonceCache.get(keys.address) || (await getNonce(keys)); +}; const increment = async (keys: KeyringPair) => { const nonce = await current(keys); nonceCache.set(keys.address, nonce + 1); return nonce; -} +}; /** * Use the auto nonce system @@ -41,16 +40,18 @@ const increment = async (keys: KeyringPair) => { * @returns */ const auto = (keys: KeyringPair, inputNonce: AutoNonce = 'auto'): Promise => { - switch(inputNonce) { - case 'auto': return increment(keys); - case 'current': return current(keys); + switch (inputNonce) { + case 'auto': + return increment(keys); + case 'current': + return current(keys); default: nonceCache.set(keys.address, inputNonce + 1); return Promise.resolve(inputNonce); } -} +}; export default { auto, reset, -} +}; diff --git a/e2e/scaffolding/env.ts b/e2e/scaffolding/env.ts index bb28031ab9..f15abb21d2 100644 --- a/e2e/scaffolding/env.ts +++ b/e2e/scaffolding/env.ts @@ -1,15 +1,11 @@ - -export namespace env { - export const providerUrl = process.env.WS_PROVIDER_URL; - export const verbose = (process.env.VERBOSE_TESTS === 'true' || process.env.VERBOSE_TESTS === '1'); -} - +export const providerUrl = process.env.WS_PROVIDER_URL; +export const verbose = process.env.VERBOSE_TESTS === 'true' || process.env.VERBOSE_TESTS === '1'; const CHAIN_ENVIRONMENT = { - DEVELOPMENT: "dev", - ROCOCO_TESTNET: "rococo-testnet", - ROCOCO_LOCAL: "rococo-local", -} + DEVELOPMENT: 'dev', + ROCOCO_TESTNET: 'rococo-testnet', + ROCOCO_LOCAL: 'rococo-local', +}; export function isTestnet() { return process.env.CHAIN_ENVIRONMENT === CHAIN_ENVIRONMENT.ROCOCO_TESTNET; @@ -20,6 +16,8 @@ export function isDev() { } export function hasRelayChain() { - return process.env.CHAIN_ENVIRONMENT === CHAIN_ENVIRONMENT.ROCOCO_LOCAL - || process.env.CHAIN_ENVIRONMENT === CHAIN_ENVIRONMENT.ROCOCO_TESTNET + return ( + process.env.CHAIN_ENVIRONMENT === CHAIN_ENVIRONMENT.ROCOCO_LOCAL || + process.env.CHAIN_ENVIRONMENT === CHAIN_ENVIRONMENT.ROCOCO_TESTNET + ); } diff --git a/e2e/scaffolding/extrinsicHelpers.ts b/e2e/scaffolding/extrinsicHelpers.ts index 039d8aac78..c794d3eb6b 100644 --- a/e2e/scaffolding/extrinsicHelpers.ts +++ b/e2e/scaffolding/extrinsicHelpers.ts @@ -1,80 +1,112 @@ -import { ApiPromise, ApiRx } from "@polkadot/api"; -import { ApiTypes, AugmentedEvent, SubmittableExtrinsic } from "@polkadot/api/types"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { Compact, u128, u16, u32, u64, Vec, Option, Bool } from "@polkadot/types"; -import { FrameSystemAccountInfo, SpRuntimeDispatchError } from "@polkadot/types/lookup"; -import { AnyJson, AnyNumber, AnyTuple, Codec, IEvent, ISubmittableResult } from "@polkadot/types/types"; -import { firstValueFrom, filter, map, pipe, tap } from "rxjs"; -import { getBlockNumber, getExistentialDeposit, log, Sr25519Signature } from "./helpers"; 
-import autoNonce, { AutoNonce } from "./autoNonce"; -import { connect, connectPromise } from "./apiConnection"; -import { DispatchError, Event, SignedBlock } from "@polkadot/types/interfaces"; -import { IsEvent } from "@polkadot/types/metadata/decorate/types"; -import { HandleResponse, ItemizedStoragePageResponse, MessageSourceId, PaginatedStorageResponse, PresumptiveSuffixesResponse, SchemaResponse } from "@frequency-chain/api-augment/interfaces"; -import { u8aToHex } from "@polkadot/util/u8a/toHex"; -import { u8aWrapBytes } from "@polkadot/util"; +import { ApiPromise, ApiRx } from '@polkadot/api'; +import { ApiTypes, AugmentedEvent, SubmittableExtrinsic } from '@polkadot/api/types'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { Compact, u128, u16, u32, u64, Vec, Option, Bool } from '@polkadot/types'; +import { FrameSystemAccountInfo, SpRuntimeDispatchError } from '@polkadot/types/lookup'; +import { AnyJson, AnyNumber, AnyTuple, Codec, IEvent, ISubmittableResult } from '@polkadot/types/types'; +import { firstValueFrom, filter, map, pipe, tap } from 'rxjs'; +import { getBlockNumber, getExistentialDeposit, log, Sr25519Signature } from './helpers'; +import autoNonce, { AutoNonce } from './autoNonce'; +import { connect, connectPromise } from './apiConnection'; +import { DispatchError, Event, SignedBlock } from '@polkadot/types/interfaces'; +import { IsEvent } from '@polkadot/types/metadata/decorate/types'; +import { + HandleResponse, + ItemizedStoragePageResponse, + MessageSourceId, + PaginatedStorageResponse, + PresumptiveSuffixesResponse, + SchemaResponse, +} from '@frequency-chain/api-augment/interfaces'; +import { u8aToHex } from '@polkadot/util/u8a/toHex'; +import { u8aWrapBytes } from '@polkadot/util'; import type { Call } from '@polkadot/types/interfaces/runtime'; -import { hasRelayChain } from "./env"; +import { hasRelayChain } from './env'; export type ReleaseSchedule = { - start: number; - period: number; - periodCount: number; - perPeriod: bigint; + start: number; + period: number; + periodCount: number; + perPeriod: bigint; }; -export type AddKeyData = { msaId?: u64; expiration?: any; newPublicKey?: any; } -export type AddProviderPayload = { authorizedMsaId?: u64; schemaIds?: u16[], expiration?: any; } -export type ItemizedSignaturePayload = { msaId?: u64; schemaId?: u16, targetHash?: u32, expiration?: any; actions?: any; } -export type ItemizedSignaturePayloadV2 = { schemaId?: u16, targetHash?: u32, expiration?: any; actions?: any; } -export type PaginatedUpsertSignaturePayload = { msaId?: u64; schemaId?: u16, pageId?: u16, targetHash?: u32, expiration?: any; payload?: any; } -export type PaginatedUpsertSignaturePayloadV2 = { schemaId?: u16, pageId?: u16, targetHash?: u32, expiration?: any; payload?: any; } -export type PaginatedDeleteSignaturePayload = { msaId?: u64; schemaId?: u16, pageId?: u16, targetHash?: u32, expiration?: any; } -export type PaginatedDeleteSignaturePayloadV2 = { schemaId?: u16, pageId?: u16, targetHash?: u32, expiration?: any; } +export type AddKeyData = { msaId?: u64; expiration?: any; newPublicKey?: any }; +export type AddProviderPayload = { authorizedMsaId?: u64; schemaIds?: u16[]; expiration?: any }; +export type ItemizedSignaturePayload = { + msaId?: u64; + schemaId?: u16; + targetHash?: u32; + expiration?: any; + actions?: any; +}; +export type ItemizedSignaturePayloadV2 = { schemaId?: u16; targetHash?: u32; expiration?: any; actions?: any }; +export type PaginatedUpsertSignaturePayload = { + msaId?: u64; + schemaId?: u16; + 
pageId?: u16; + targetHash?: u32; + expiration?: any; + payload?: any; +}; +export type PaginatedUpsertSignaturePayloadV2 = { + schemaId?: u16; + pageId?: u16; + targetHash?: u32; + expiration?: any; + payload?: any; +}; +export type PaginatedDeleteSignaturePayload = { + msaId?: u64; + schemaId?: u16; + pageId?: u16; + targetHash?: u32; + expiration?: any; +}; +export type PaginatedDeleteSignaturePayloadV2 = { schemaId?: u16; pageId?: u16; targetHash?: u32; expiration?: any }; export class EventError extends Error { - name: string = ''; - message: string = ''; - stack?: string = ''; - section?: string = ''; - rawError: DispatchError | SpRuntimeDispatchError; - - constructor(source: DispatchError | SpRuntimeDispatchError) { - super(); - - if (source.isModule) { - const decoded = source.registry.findMetaError(source.asModule); - this.name = decoded.name; - this.message = decoded.docs.join(' '); - this.section = decoded.section; - } else { - this.name = source.type; - this.message = source.type; - this.section = ''; - } - this.rawError = source; - } - - public toString() { - return `${this.section}.${this.name}: ${this.message}`; - } + name: string = ''; + message: string = ''; + stack?: string = ''; + section?: string = ''; + rawError: DispatchError | SpRuntimeDispatchError; + + constructor(source: DispatchError | SpRuntimeDispatchError) { + super(); + + if (source.isModule) { + const decoded = source.registry.findMetaError(source.asModule); + this.name = decoded.name; + this.message = decoded.docs.join(' '); + this.section = decoded.section; + } else { + this.name = source.type; + this.message = source.type; + this.section = ''; + } + this.rawError = source; + } + + public toString() { + return `${this.section}.${this.name}: ${this.message}`; + } } class CallError extends Error { - message: string; - result: AnyJson; - - constructor(submittable: ISubmittableResult, msg?: string) { - super(); - this.result = submittable.toHuman(); - this.message = msg ?? "Call Error"; - } + message: string; + result: AnyJson; + + constructor(submittable: ISubmittableResult, msg?: string) { + super(); + this.result = submittable.toHuman(); + this.message = msg ?? 'Call Error'; + } } export type EventMap = Record; function eventKey(event: Event): string { - return `${event.section}.${event.method}`; + return `${event.section}.${event.method}`; } /** @@ -105,357 +137,615 @@ function eventKey(event: Event): string { type ParsedEvent = IEvent; export type ParsedEventResult = { - target?: ParsedEvent; - eventMap: EventMap; + target?: ParsedEvent; + eventMap: EventMap; }; - export class Extrinsic { - - private event?: IsEvent; - private extrinsic: () => SubmittableExtrinsic<"rxjs", T>; - private keys: KeyringPair; - public api: ApiRx; - - constructor(extrinsic: () => SubmittableExtrinsic<"rxjs", T>, keys: KeyringPair, targetEvent?: IsEvent) { - this.extrinsic = extrinsic; - this.keys = keys; - this.event = targetEvent; - this.api = ExtrinsicHelper.api; - } - - // This uses automatic nonce management by default. 
- public async signAndSend(inputNonce?: AutoNonce) { - const nonce = await autoNonce.auto(this.keys, inputNonce); - - try { - const op = this.extrinsic(); - // Era is 0 for tests due to issues with BirthBlock - return await firstValueFrom(op.signAndSend(this.keys, { nonce, era: 0 }).pipe( - tap((result) => { - // If we learn a transaction has an error status (this does NOT include RPC errors) - // Then throw an error - if (result.isError) { - throw new CallError(result, `Failed Transaction for ${this.event?.meta.name || "unknown"}`); - } - }), - filter(({ status }) => status.isInBlock || status.isFinalized), - this.parseResult(this.event), - )); - } catch (e) { - if ((e as any).name === "RpcError" && inputNonce === 'auto') { - console.error("WARNING: Unexpected RPC Error! If it is expected, use 'current' for the nonce."); + private event?: IsEvent; + private extrinsic: () => SubmittableExtrinsic<'rxjs', T>; + private keys: KeyringPair; + public api: ApiRx; + + constructor(extrinsic: () => SubmittableExtrinsic<'rxjs', T>, keys: KeyringPair, targetEvent?: IsEvent) { + this.extrinsic = extrinsic; + this.keys = keys; + this.event = targetEvent; + this.api = ExtrinsicHelper.api; + } + + // This uses automatic nonce management by default. + public async signAndSend(inputNonce?: AutoNonce) { + const nonce = await autoNonce.auto(this.keys, inputNonce); + + try { + const op = this.extrinsic(); + // Era is 0 for tests due to issues with BirthBlock + return await firstValueFrom( + op.signAndSend(this.keys, { nonce, era: 0 }).pipe( + tap((result) => { + // If we learn a transaction has an error status (this does NOT include RPC errors) + // Then throw an error + if (result.isError) { + throw new CallError(result, `Failed Transaction for ${this.event?.meta.name || 'unknown'}`); } - throw e; + }), + filter(({ status }) => status.isInBlock || status.isFinalized), + this.parseResult(this.event) + ) + ); + } catch (e) { + if ((e as any).name === 'RpcError' && inputNonce === 'auto') { + console.error("WARNING: Unexpected RPC Error! 
If it is expected, use 'current' for the nonce."); + } + throw e; + } + } + + public async sudoSignAndSend() { + const nonce = await autoNonce.auto(this.keys); + // Era is 0 for tests due to issues with BirthBlock + return await firstValueFrom( + this.api.tx.sudo + .sudo(this.extrinsic()) + .signAndSend(this.keys, { nonce, era: 0 }) + .pipe( + filter(({ status }) => status.isInBlock || status.isFinalized), + this.parseResult(this.event) + ) + ); + } + + public async payWithCapacity(inputNonce?: AutoNonce) { + const nonce = await autoNonce.auto(this.keys, inputNonce); + // Era is 0 for tests due to issues with BirthBlock + return await firstValueFrom( + this.api.tx.frequencyTxPayment + .payWithCapacity(this.extrinsic()) + .signAndSend(this.keys, { nonce, era: 0 }) + .pipe( + filter(({ status }) => status.isInBlock || status.isFinalized), + this.parseResult(this.event) + ) + ); + } + + public getEstimatedTxFee(): Promise { + return firstValueFrom( + this.extrinsic() + .paymentInfo(this.keys) + .pipe(map((info) => info.partialFee.toBigInt())) + ); + } + + public getCall(): Call { + const call = ExtrinsicHelper.api.createType('Call', this.extrinsic.call); + return call; + } + + async fundOperation(source: KeyringPair) { + const [amount, accountInfo] = await Promise.all([ + this.getEstimatedTxFee(), + ExtrinsicHelper.getAccountInfo(this.keys.address), + ]); + const freeBalance = BigInt(accountInfo.data.free.toString()) - (await getExistentialDeposit()); + if (amount > freeBalance) { + await ExtrinsicHelper.transferFunds(source, this.keys, amount).signAndSend(); + } + } + + public async fundAndSend(source: KeyringPair) { + await this.fundOperation(source); + log('Fund and Send', `Fund Source: ${source.address}`); + return this.signAndSend(); + } + + private parseResult( + targetEvent?: AugmentedEvent + ) { + return pipe( + tap((result: ISubmittableResult) => { + if (result.dispatchError) { + const err = new EventError(result.dispatchError); + log(err.toString()); + throw err; } - } - - public async sudoSignAndSend() { - const nonce = await autoNonce.auto(this.keys); - // Era is 0 for tests due to issues with BirthBlock - return await firstValueFrom(this.api.tx.sudo.sudo(this.extrinsic()).signAndSend(this.keys, { nonce, era: 0 }).pipe( - filter(({ status }) => status.isInBlock || status.isFinalized), - this.parseResult(this.event), - )) - } - - public async payWithCapacity(inputNonce?: AutoNonce) { - const nonce = await autoNonce.auto(this.keys, inputNonce); - // Era is 0 for tests due to issues with BirthBlock - return await firstValueFrom(this.api.tx.frequencyTxPayment.payWithCapacity(this.extrinsic()).signAndSend(this.keys, { nonce, era: 0 }).pipe( - filter(({ status }) => status.isInBlock || status.isFinalized), - this.parseResult(this.event), - )) - } - - public getEstimatedTxFee(): Promise { - return firstValueFrom(this.extrinsic().paymentInfo(this.keys).pipe( - map((info) => info.partialFee.toBigInt()) - )); - } - - public getCall(): Call { - const call = ExtrinsicHelper.api.createType('Call', this.extrinsic.call); - return call; - } - - async fundOperation(source: KeyringPair) { - const [amount, accountInfo] = await Promise.all([this.getEstimatedTxFee(), ExtrinsicHelper.getAccountInfo(this.keys.address)]); - const freeBalance = BigInt(accountInfo.data.free.toString()) - (await getExistentialDeposit()); - if (amount > freeBalance) { - await ExtrinsicHelper.transferFunds(source, this.keys, amount).signAndSend(); + }), + map((result: ISubmittableResult) => { + const eventMap = 
result.events.reduce((acc, { event }) => { + acc[eventKey(event)] = event; + if (this.api.events.sudo.Sudid.is(event)) { + const { + data: [result], + } = event; + if (result.isErr) { + const err = new EventError(result.asErr); + log(err.toString()); + throw err; + } + } + return acc; + }, {} as EventMap); + + const target = targetEvent && result.events.find(({ event }) => targetEvent.is(event))?.event; + // Required for Typescript to be happy + if (target && targetEvent.is(target)) { + return { target, eventMap }; } - } - - public async fundAndSend(source: KeyringPair) { - await this.fundOperation(source); - log("Fund and Send", `Fund Source: ${source.address}`); - return this.signAndSend(); - } - private parseResult(targetEvent?: AugmentedEvent) { - return pipe( - tap((result: ISubmittableResult) => { - if (result.dispatchError) { - let err = new EventError(result.dispatchError); - log(err.toString()); - throw err; - } - }), - map((result: ISubmittableResult) => { - const eventMap = result.events.reduce((acc, { event }) => { - acc[eventKey(event)] = event; - if (this.api.events.sudo.Sudid.is(event)) { - let { data: [result] } = event; - if (result.isErr) { - let err = new EventError(result.asErr); - log(err.toString()); - throw err; - } - } - return acc; - }, {} as EventMap); - - const target = targetEvent && result.events.find(({ event }) => targetEvent.is(event))?.event; - // Required for Typescript to be happy - if (target && targetEvent.is(target)) { - return { target, eventMap }; - } - - return { eventMap }; - }), - tap(({ eventMap }) => { - Object.entries(eventMap).map(([k, v]) => log(k, v.toHuman())); - }), - ); - } + return { eventMap }; + }), + tap(({ eventMap }) => { + Object.entries(eventMap).map(([k, v]) => log(k, v.toHuman())); + }) + ); + } } export class ExtrinsicHelper { - public static api: ApiRx; - public static apiPromise: ApiPromise; - - constructor() { } - - public static async initialize(providerUrl?: string | string[] | undefined) { - ExtrinsicHelper.api = await connect(providerUrl); - // For single state queries (api.query), ApiPromise is better - ExtrinsicHelper.apiPromise = await connectPromise(providerUrl); - } - - public static getLastBlock(): Promise { - return ExtrinsicHelper.apiPromise.rpc.chain.getBlock(); - } - - /** Query Extrinsics */ - public static getAccountInfo(address: string): Promise { - return ExtrinsicHelper.apiPromise.query.system.account(address); - } - - public static getSchemaMaxBytes() { - return ExtrinsicHelper.apiPromise.query.schemas.governanceSchemaModelMaxBytes(); - } - - /** Balance Extrinsics */ - public static transferFunds(source: KeyringPair, dest: KeyringPair, amount: Compact | AnyNumber) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.balances.transferKeepAlive(dest.address, amount), source, ExtrinsicHelper.api.events.balances.Transfer); - } - - public static emptyAccount(source: KeyringPair, dest: KeyringPair["address"]) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.balances.transferAll(dest, false), source, ExtrinsicHelper.api.events.balances.Transfer); - } - - /** Schema Extrinsics */ - public static createSchema(keys: KeyringPair, model: any, modelType: "AvroBinary" | "Parquet", payloadLocation: "OnChain" | "IPFS" | "Itemized" | "Paginated") { - return new Extrinsic(() => ExtrinsicHelper.api.tx.schemas.createSchema(JSON.stringify(model), modelType, payloadLocation), keys, ExtrinsicHelper.api.events.schemas.SchemaCreated); - } - - /** Schema v2 Extrinsics */ - public static createSchemaV2(keys: KeyringPair, 
model: any, modelType: "AvroBinary" | "Parquet", payloadLocation: "OnChain" | "IPFS" | "Itemized" | "Paginated", grant: ("AppendOnly" | "SignatureRequired")[]) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.schemas.createSchemaV2(JSON.stringify(model), modelType, payloadLocation, grant), keys, ExtrinsicHelper.api.events.schemas.SchemaCreated); - } - - /** Generic Schema Extrinsics */ - public static createSchemaWithSettingsGov(keys: KeyringPair, model: any, modelType: "AvroBinary" | "Parquet", payloadLocation: "OnChain" | "IPFS" | "Itemized" | "Paginated", grant: "AppendOnly" | "SignatureRequired") { - return new Extrinsic(() => ExtrinsicHelper.api.tx.schemas.createSchemaViaGovernance(keys.publicKey, JSON.stringify(model), modelType, payloadLocation, [grant]), keys, ExtrinsicHelper.api.events.schemas.SchemaCreated); - } - - /** Get Schema RPC */ - public static getSchema(schemaId: u16): Promise> { - return ExtrinsicHelper.apiPromise.rpc.schemas.getBySchemaId(schemaId); - } - - /** MSA Extrinsics */ - public static createMsa(keys: KeyringPair) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.create(), keys, ExtrinsicHelper.api.events.msa.MsaCreated); - } - - public static addPublicKeyToMsa(keys: KeyringPair, ownerSignature: Sr25519Signature, newSignature: Sr25519Signature, payload: AddKeyData) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.addPublicKeyToMsa(keys.publicKey, ownerSignature, newSignature, payload), keys, ExtrinsicHelper.api.events.msa.PublicKeyAdded); - } - - public static deletePublicKey(keys: KeyringPair, publicKey: Uint8Array) { - ExtrinsicHelper.api.query.msa - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.deleteMsaPublicKey(publicKey), keys, ExtrinsicHelper.api.events.msa.PublicKeyDeleted); - } - - public static retireMsa(keys: KeyringPair) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.retireMsa(), keys, ExtrinsicHelper.api.events.msa.MsaRetired); - } - - public static createProvider(keys: KeyringPair, providerName: string) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.createProvider(providerName), keys, ExtrinsicHelper.api.events.msa.ProviderCreated); - } - - public static createSponsoredAccountWithDelegation(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: AddProviderPayload) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.createSponsoredAccountWithDelegation(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.msa.MsaCreated); - } - - public static grantDelegation(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: AddProviderPayload) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.grantDelegation(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.msa.DelegationGranted); - } - - public static grantSchemaPermissions(delegatorKeys: KeyringPair, providerMsaId: any, schemaIds: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.grantSchemaPermissions(providerMsaId, schemaIds), delegatorKeys, ExtrinsicHelper.api.events.msa.DelegationUpdated); - } - - public static revokeSchemaPermissions(delegatorKeys: KeyringPair, providerMsaId: any, schemaIds: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.revokeSchemaPermissions(providerMsaId, schemaIds), delegatorKeys, ExtrinsicHelper.api.events.msa.DelegationUpdated); - } - - public static revokeDelegationByDelegator(keys: KeyringPair, providerMsaId: any) { - return new 
Extrinsic(() => ExtrinsicHelper.api.tx.msa.revokeDelegationByDelegator(providerMsaId), keys, ExtrinsicHelper.api.events.msa.DelegationRevoked); - } - - public static revokeDelegationByProvider(delegatorMsaId: u64, providerKeys: KeyringPair) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.revokeDelegationByProvider(delegatorMsaId), providerKeys, ExtrinsicHelper.api.events.msa.DelegationRevoked); - } - - /** Messages Extrinsics */ - public static addIPFSMessage(keys: KeyringPair, schemaId: any, cid: string, payload_length: number) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.messages.addIpfsMessage(schemaId, cid, payload_length), keys, ExtrinsicHelper.api.events.messages.MessagesStored); - } - - /** Stateful Storage Extrinsics */ - public static applyItemActions(keys: KeyringPair, schemaId: any, msa_id: MessageSourceId, actions: any, target_hash: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.applyItemActions(msa_id, schemaId, target_hash, actions), keys, ExtrinsicHelper.api.events.statefulStorage.ItemizedPageUpdated); - } - - public static removePage(keys: KeyringPair, schemaId: any, msa_id: MessageSourceId, page_id: any, target_hash: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.deletePage(msa_id, schemaId, page_id, target_hash), keys, ExtrinsicHelper.api.events.statefulStorage.PaginatedPageDeleted); - } - - public static upsertPage(keys: KeyringPair, schemaId: any, msa_id: MessageSourceId, page_id: any, payload: any, target_hash: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.upsertPage(msa_id, schemaId, page_id, target_hash, payload), keys, ExtrinsicHelper.api.events.statefulStorage.PaginatedPageUpdated); - } - - public static applyItemActionsWithSignature(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: ItemizedSignaturePayload) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.applyItemActionsWithSignature(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.statefulStorage.ItemizedPageUpdated); - } - - public static applyItemActionsWithSignatureV2(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: ItemizedSignaturePayloadV2) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.applyItemActionsWithSignatureV2(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.statefulStorage.ItemizedPageUpdated); - } - - public static deletePageWithSignature(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: PaginatedDeleteSignaturePayload) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.deletePageWithSignature(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.statefulStorage.PaginatedPageDeleted); - } - - public static deletePageWithSignatureV2(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: PaginatedDeleteSignaturePayloadV2) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.deletePageWithSignatureV2(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.statefulStorage.PaginatedPageDeleted); - } - - public static upsertPageWithSignature(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: PaginatedUpsertSignaturePayload) { - return new Extrinsic(() => 
ExtrinsicHelper.api.tx.statefulStorage.upsertPageWithSignature(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.statefulStorage.PaginatedPageUpdated); - } - - public static upsertPageWithSignatureV2(delegatorKeys: KeyringPair, providerKeys: KeyringPair, signature: Sr25519Signature, payload: PaginatedUpsertSignaturePayloadV2) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.statefulStorage.upsertPageWithSignatureV2(delegatorKeys.publicKey, signature, payload), providerKeys, ExtrinsicHelper.api.events.statefulStorage.PaginatedPageUpdated); - } - - public static getItemizedStorage(msa_id: MessageSourceId, schemaId: any): Promise { - return ExtrinsicHelper.apiPromise.rpc.statefulStorage.getItemizedStorage(msa_id, schemaId); - } - - public static getPaginatedStorage(msa_id: MessageSourceId, schemaId: any): Promise> { - return ExtrinsicHelper.apiPromise.rpc.statefulStorage.getPaginatedStorage(msa_id, schemaId); - } - - public static timeReleaseTransfer(keys: KeyringPair, who: KeyringPair, schedule: ReleaseSchedule) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.timeRelease.transfer(who.address, schedule), keys, ExtrinsicHelper.api.events.timeRelease.ReleaseScheduleAdded); - } - - public static claimHandle(delegatorKeys: KeyringPair, payload: any) { - const proof = { Sr25519: u8aToHex(delegatorKeys.sign(u8aWrapBytes(payload.toU8a()))) } - return new Extrinsic(() => ExtrinsicHelper.api.tx.handles.claimHandle(delegatorKeys.publicKey, proof, payload), delegatorKeys, ExtrinsicHelper.api.events.handles.HandleClaimed); - } - - public static retireHandle(delegatorKeys: KeyringPair) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.handles.retireHandle(), delegatorKeys, ExtrinsicHelper.api.events.handles.HandleRetired); - } - - public static getHandleForMSA(msa_id: MessageSourceId): Promise> { - return ExtrinsicHelper.apiPromise.rpc.handles.getHandleForMsa(msa_id); - } - - public static getMsaForHandle(handle: string): Promise> { - return ExtrinsicHelper.apiPromise.rpc.handles.getMsaForHandle(handle); - } - - public static getNextSuffixesForHandle(base_handle: string, count: number): Promise { - return ExtrinsicHelper.apiPromise.rpc.handles.getNextSuffixes(base_handle, count); - } - - public static validateHandle(base_handle: string): Promise { - return ExtrinsicHelper.apiPromise.rpc.handles.validateHandle(base_handle); - } - - public static addOnChainMessage(keys: KeyringPair, schemaId: any, payload: string) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.messages.addOnchainMessage(null, schemaId, payload), keys, ExtrinsicHelper.api.events.messages.MessagesStored); - } - - /** Capacity Extrinsics **/ - public static setEpochLength(keys: KeyringPair, epoch_length: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.capacity.setEpochLength(epoch_length), keys, ExtrinsicHelper.api.events.capacity.EpochLengthUpdated); - } - public static stake(keys: KeyringPair, target: any, amount: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.capacity.stake(target, amount), keys, ExtrinsicHelper.api.events.capacity.Staked); - } - - public static unstake(keys: KeyringPair, target: any, amount: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.capacity.unstake(target, amount), keys, ExtrinsicHelper.api.events.capacity.UnStaked); - } - - public static withdrawUnstaked(keys: KeyringPair) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.capacity.withdrawUnstaked(), keys, ExtrinsicHelper.api.events.capacity.StakeWithdrawn); - } - - 
public static payWithCapacityBatchAll(keys: KeyringPair, calls: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.frequencyTxPayment.payWithCapacityBatchAll(calls), keys, ExtrinsicHelper.api.events.utility.BatchCompleted); - } - - public static executeUtilityBatchAll(keys: KeyringPair, calls: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.utility.batchAll(calls), keys, ExtrinsicHelper.api.events.utility.BatchCompleted); - } - - public static executeUtilityBatch(keys: KeyringPair, calls: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.utility.batch(calls), keys, ExtrinsicHelper.api.events.utility.BatchCompleted); - } - - public static executeUtilityForceBatch(keys: KeyringPair, calls: any) { - return new Extrinsic(() => ExtrinsicHelper.api.tx.utility.forceBatch(calls), keys, ExtrinsicHelper.api.events.utility.BatchCompleted); - } - - public static async runToBlock(blockNumber: number) { - let currentBlock = await getBlockNumber(); - while (currentBlock < blockNumber) { - // In Rococo, just wait - if (hasRelayChain()) { - await new Promise((r) => setTimeout(r, 4_000)); - } else { - await ExtrinsicHelper.apiPromise.rpc.engine.createBlock(true, true); - } - currentBlock = await getBlockNumber(); + public static api: ApiRx; + public static apiPromise: ApiPromise; + + constructor() {} + + public static async initialize(providerUrl?: string | string[] | undefined) { + ExtrinsicHelper.api = await connect(providerUrl); + // For single state queries (api.query), ApiPromise is better + ExtrinsicHelper.apiPromise = await connectPromise(providerUrl); + } + + public static getLastBlock(): Promise { + return ExtrinsicHelper.apiPromise.rpc.chain.getBlock(); + } + + /** Query Extrinsics */ + public static getAccountInfo(address: string): Promise { + return ExtrinsicHelper.apiPromise.query.system.account(address); + } + + public static getSchemaMaxBytes() { + return ExtrinsicHelper.apiPromise.query.schemas.governanceSchemaModelMaxBytes(); + } + + /** Balance Extrinsics */ + public static transferFunds(source: KeyringPair, dest: KeyringPair, amount: Compact | AnyNumber) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.balances.transferKeepAlive(dest.address, amount), + source, + ExtrinsicHelper.api.events.balances.Transfer + ); + } + + public static emptyAccount(source: KeyringPair, dest: KeyringPair['address']) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.balances.transferAll(dest, false), + source, + ExtrinsicHelper.api.events.balances.Transfer + ); + } + + /** Schema Extrinsics */ + public static createSchema( + keys: KeyringPair, + model: any, + modelType: 'AvroBinary' | 'Parquet', + payloadLocation: 'OnChain' | 'IPFS' | 'Itemized' | 'Paginated' + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.schemas.createSchema(JSON.stringify(model), modelType, payloadLocation), + keys, + ExtrinsicHelper.api.events.schemas.SchemaCreated + ); + } + + /** Schema v2 Extrinsics */ + public static createSchemaV2( + keys: KeyringPair, + model: any, + modelType: 'AvroBinary' | 'Parquet', + payloadLocation: 'OnChain' | 'IPFS' | 'Itemized' | 'Paginated', + grant: ('AppendOnly' | 'SignatureRequired')[] + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.schemas.createSchemaV2(JSON.stringify(model), modelType, payloadLocation, grant), + keys, + ExtrinsicHelper.api.events.schemas.SchemaCreated + ); + } + + /** Generic Schema Extrinsics */ + public static createSchemaWithSettingsGov( + keys: KeyringPair, + model: any, + modelType: 'AvroBinary' | 'Parquet', + 
payloadLocation: 'OnChain' | 'IPFS' | 'Itemized' | 'Paginated', + grant: 'AppendOnly' | 'SignatureRequired' + ) { + return new Extrinsic( + () => + ExtrinsicHelper.api.tx.schemas.createSchemaViaGovernance( + keys.publicKey, + JSON.stringify(model), + modelType, + payloadLocation, + [grant] + ), + keys, + ExtrinsicHelper.api.events.schemas.SchemaCreated + ); + } + + /** Get Schema RPC */ + public static getSchema(schemaId: u16): Promise> { + return ExtrinsicHelper.apiPromise.rpc.schemas.getBySchemaId(schemaId); + } + + /** MSA Extrinsics */ + public static createMsa(keys: KeyringPair) { + return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.create(), keys, ExtrinsicHelper.api.events.msa.MsaCreated); + } + + public static addPublicKeyToMsa( + keys: KeyringPair, + ownerSignature: Sr25519Signature, + newSignature: Sr25519Signature, + payload: AddKeyData + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.addPublicKeyToMsa(keys.publicKey, ownerSignature, newSignature, payload), + keys, + ExtrinsicHelper.api.events.msa.PublicKeyAdded + ); + } + + public static deletePublicKey(keys: KeyringPair, publicKey: Uint8Array) { + ExtrinsicHelper.api.query.msa; + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.deleteMsaPublicKey(publicKey), + keys, + ExtrinsicHelper.api.events.msa.PublicKeyDeleted + ); + } + + public static retireMsa(keys: KeyringPair) { + return new Extrinsic(() => ExtrinsicHelper.api.tx.msa.retireMsa(), keys, ExtrinsicHelper.api.events.msa.MsaRetired); + } + + public static createProvider(keys: KeyringPair, providerName: string) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.createProvider(providerName), + keys, + ExtrinsicHelper.api.events.msa.ProviderCreated + ); + } + + public static createSponsoredAccountWithDelegation( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: AddProviderPayload + ) { + return new Extrinsic( + () => + ExtrinsicHelper.api.tx.msa.createSponsoredAccountWithDelegation(delegatorKeys.publicKey, signature, payload), + providerKeys, + ExtrinsicHelper.api.events.msa.MsaCreated + ); + } + + public static grantDelegation( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: AddProviderPayload + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.grantDelegation(delegatorKeys.publicKey, signature, payload), + providerKeys, + ExtrinsicHelper.api.events.msa.DelegationGranted + ); + } + + public static grantSchemaPermissions(delegatorKeys: KeyringPair, providerMsaId: any, schemaIds: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.grantSchemaPermissions(providerMsaId, schemaIds), + delegatorKeys, + ExtrinsicHelper.api.events.msa.DelegationUpdated + ); + } + + public static revokeSchemaPermissions(delegatorKeys: KeyringPair, providerMsaId: any, schemaIds: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.revokeSchemaPermissions(providerMsaId, schemaIds), + delegatorKeys, + ExtrinsicHelper.api.events.msa.DelegationUpdated + ); + } + + public static revokeDelegationByDelegator(keys: KeyringPair, providerMsaId: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.revokeDelegationByDelegator(providerMsaId), + keys, + ExtrinsicHelper.api.events.msa.DelegationRevoked + ); + } + + public static revokeDelegationByProvider(delegatorMsaId: u64, providerKeys: KeyringPair) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.msa.revokeDelegationByProvider(delegatorMsaId), + providerKeys, + 
ExtrinsicHelper.api.events.msa.DelegationRevoked + ); + } + + /** Messages Extrinsics */ + public static addIPFSMessage(keys: KeyringPair, schemaId: any, cid: string, payload_length: number) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.messages.addIpfsMessage(schemaId, cid, payload_length), + keys, + ExtrinsicHelper.api.events.messages.MessagesStored + ); + } + + /** Stateful Storage Extrinsics */ + public static applyItemActions( + keys: KeyringPair, + schemaId: any, + msa_id: MessageSourceId, + actions: any, + target_hash: any + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.statefulStorage.applyItemActions(msa_id, schemaId, target_hash, actions), + keys, + ExtrinsicHelper.api.events.statefulStorage.ItemizedPageUpdated + ); + } + + public static removePage(keys: KeyringPair, schemaId: any, msa_id: MessageSourceId, page_id: any, target_hash: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.statefulStorage.deletePage(msa_id, schemaId, page_id, target_hash), + keys, + ExtrinsicHelper.api.events.statefulStorage.PaginatedPageDeleted + ); + } + + public static upsertPage( + keys: KeyringPair, + schemaId: any, + msa_id: MessageSourceId, + page_id: any, + payload: any, + target_hash: any + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.statefulStorage.upsertPage(msa_id, schemaId, page_id, target_hash, payload), + keys, + ExtrinsicHelper.api.events.statefulStorage.PaginatedPageUpdated + ); + } + + public static applyItemActionsWithSignature( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: ItemizedSignaturePayload + ) { + return new Extrinsic( + () => + ExtrinsicHelper.api.tx.statefulStorage.applyItemActionsWithSignature( + delegatorKeys.publicKey, + signature, + payload + ), + providerKeys, + ExtrinsicHelper.api.events.statefulStorage.ItemizedPageUpdated + ); + } + + public static applyItemActionsWithSignatureV2( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: ItemizedSignaturePayloadV2 + ) { + return new Extrinsic( + () => + ExtrinsicHelper.api.tx.statefulStorage.applyItemActionsWithSignatureV2( + delegatorKeys.publicKey, + signature, + payload + ), + providerKeys, + ExtrinsicHelper.api.events.statefulStorage.ItemizedPageUpdated + ); + } + + public static deletePageWithSignature( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: PaginatedDeleteSignaturePayload + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.statefulStorage.deletePageWithSignature(delegatorKeys.publicKey, signature, payload), + providerKeys, + ExtrinsicHelper.api.events.statefulStorage.PaginatedPageDeleted + ); + } + + public static deletePageWithSignatureV2( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: PaginatedDeleteSignaturePayloadV2 + ) { + return new Extrinsic( + () => + ExtrinsicHelper.api.tx.statefulStorage.deletePageWithSignatureV2(delegatorKeys.publicKey, signature, payload), + providerKeys, + ExtrinsicHelper.api.events.statefulStorage.PaginatedPageDeleted + ); + } + + public static upsertPageWithSignature( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: PaginatedUpsertSignaturePayload + ) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.statefulStorage.upsertPageWithSignature(delegatorKeys.publicKey, signature, payload), + providerKeys, + 
ExtrinsicHelper.api.events.statefulStorage.PaginatedPageUpdated + ); + } + + public static upsertPageWithSignatureV2( + delegatorKeys: KeyringPair, + providerKeys: KeyringPair, + signature: Sr25519Signature, + payload: PaginatedUpsertSignaturePayloadV2 + ) { + return new Extrinsic( + () => + ExtrinsicHelper.api.tx.statefulStorage.upsertPageWithSignatureV2(delegatorKeys.publicKey, signature, payload), + providerKeys, + ExtrinsicHelper.api.events.statefulStorage.PaginatedPageUpdated + ); + } + + public static getItemizedStorage(msa_id: MessageSourceId, schemaId: any): Promise { + return ExtrinsicHelper.apiPromise.rpc.statefulStorage.getItemizedStorage(msa_id, schemaId); + } + + public static getPaginatedStorage(msa_id: MessageSourceId, schemaId: any): Promise> { + return ExtrinsicHelper.apiPromise.rpc.statefulStorage.getPaginatedStorage(msa_id, schemaId); + } + + public static timeReleaseTransfer(keys: KeyringPair, who: KeyringPair, schedule: ReleaseSchedule) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.timeRelease.transfer(who.address, schedule), + keys, + ExtrinsicHelper.api.events.timeRelease.ReleaseScheduleAdded + ); + } + + public static claimHandle(delegatorKeys: KeyringPair, payload: any) { + const proof = { Sr25519: u8aToHex(delegatorKeys.sign(u8aWrapBytes(payload.toU8a()))) }; + return new Extrinsic( + () => ExtrinsicHelper.api.tx.handles.claimHandle(delegatorKeys.publicKey, proof, payload), + delegatorKeys, + ExtrinsicHelper.api.events.handles.HandleClaimed + ); + } + + public static retireHandle(delegatorKeys: KeyringPair) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.handles.retireHandle(), + delegatorKeys, + ExtrinsicHelper.api.events.handles.HandleRetired + ); + } + + public static getHandleForMSA(msa_id: MessageSourceId): Promise> { + return ExtrinsicHelper.apiPromise.rpc.handles.getHandleForMsa(msa_id); + } + + public static getMsaForHandle(handle: string): Promise> { + return ExtrinsicHelper.apiPromise.rpc.handles.getMsaForHandle(handle); + } + + public static getNextSuffixesForHandle(base_handle: string, count: number): Promise { + return ExtrinsicHelper.apiPromise.rpc.handles.getNextSuffixes(base_handle, count); + } + + public static validateHandle(base_handle: string): Promise { + return ExtrinsicHelper.apiPromise.rpc.handles.validateHandle(base_handle); + } + + public static addOnChainMessage(keys: KeyringPair, schemaId: any, payload: string) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.messages.addOnchainMessage(null, schemaId, payload), + keys, + ExtrinsicHelper.api.events.messages.MessagesStored + ); + } + + /** Capacity Extrinsics **/ + public static setEpochLength(keys: KeyringPair, epoch_length: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.capacity.setEpochLength(epoch_length), + keys, + ExtrinsicHelper.api.events.capacity.EpochLengthUpdated + ); + } + public static stake(keys: KeyringPair, target: any, amount: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.capacity.stake(target, amount), + keys, + ExtrinsicHelper.api.events.capacity.Staked + ); + } + + public static unstake(keys: KeyringPair, target: any, amount: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.capacity.unstake(target, amount), + keys, + ExtrinsicHelper.api.events.capacity.UnStaked + ); + } + + public static withdrawUnstaked(keys: KeyringPair) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.capacity.withdrawUnstaked(), + keys, + ExtrinsicHelper.api.events.capacity.StakeWithdrawn + ); + } + + public static 
payWithCapacityBatchAll(keys: KeyringPair, calls: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.frequencyTxPayment.payWithCapacityBatchAll(calls), + keys, + ExtrinsicHelper.api.events.utility.BatchCompleted + ); + } + + public static executeUtilityBatchAll(keys: KeyringPair, calls: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.utility.batchAll(calls), + keys, + ExtrinsicHelper.api.events.utility.BatchCompleted + ); + } + + public static executeUtilityBatch(keys: KeyringPair, calls: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.utility.batch(calls), + keys, + ExtrinsicHelper.api.events.utility.BatchCompleted + ); + } + + public static executeUtilityForceBatch(keys: KeyringPair, calls: any) { + return new Extrinsic( + () => ExtrinsicHelper.api.tx.utility.forceBatch(calls), + keys, + ExtrinsicHelper.api.events.utility.BatchCompleted + ); + } + + public static async runToBlock(blockNumber: number) { + let currentBlock = await getBlockNumber(); + while (currentBlock < blockNumber) { + // In Rococo, just wait + if (hasRelayChain()) { + await new Promise((r) => setTimeout(r, 4_000)); + } else { + await ExtrinsicHelper.apiPromise.rpc.engine.createBlock(true, true); } + currentBlock = await getBlockNumber(); } + } } diff --git a/e2e/scaffolding/funding.ts b/e2e/scaffolding/funding.ts index 1238100299..fa97fb4608 100644 --- a/e2e/scaffolding/funding.ts +++ b/e2e/scaffolding/funding.ts @@ -1,34 +1,34 @@ -import "@frequency-chain/api-augment"; -import { Keyring } from "@polkadot/api"; -import { isTestnet } from "./env"; +import '@frequency-chain/api-augment'; +import { Keyring } from '@polkadot/api'; +import { isTestnet } from './env'; -const coreFundingSourcesSeed = "salt glare message absent guess transfer oblige refuse keen current lunar pilot"; +const coreFundingSourcesSeed = 'salt glare message absent guess transfer oblige refuse keen current lunar pilot'; const keyring = new Keyring({ type: 'sr25519' }); // This is a list of the funding sources. 
// New ones should be added to support additional parallel testing // tldr: Each test file should have a separate funding source listed below export const fundingSources = [ - "capacity-replenishment", - "load-signature-registry", - "capacity-transactions", - "sudo-transactions", - "time-release", - "capacity-staking", - "schemas-create", - "handles", - "messages-add-ipfs", - "misc-util-batch", - "scenarios-grant-delegation", - "stateful-storage-handle-sig-req", - "msa-create-msa", - "msa-key-management", - "stateful-storage-handle-paginated", - "stateful-storage-handle-itemized", + 'capacity-replenishment', + 'load-signature-registry', + 'capacity-transactions', + 'sudo-transactions', + 'time-release', + 'capacity-staking', + 'schemas-create', + 'handles', + 'messages-add-ipfs', + 'misc-util-batch', + 'scenarios-grant-delegation', + 'stateful-storage-handle-sig-req', + 'msa-create-msa', + 'msa-key-management', + 'stateful-storage-handle-paginated', + 'stateful-storage-handle-itemized', ] as const; // Get the correct key for this Funding Source -export function getFundingSource(name: typeof fundingSources[number]) { +export function getFundingSource(name: (typeof fundingSources)[number]) { if (fundingSources.includes(name)) { return keyring.addFromUri(`${coreFundingSourcesSeed}//${name}`, { name }, 'sr25519'); } @@ -37,12 +37,12 @@ export function getFundingSource(name: typeof fundingSources[number]) { export function getSudo() { if (isTestnet()) { - throw new Error("Sudo not available on testnet!") + throw new Error('Sudo not available on testnet!'); } return { - uri: "//Alice", - keys: keyring.addFromUri("//Alice"), + uri: '//Alice', + keys: keyring.addFromUri('//Alice'), }; } @@ -50,18 +50,18 @@ export function getRootFundingSource() { if (isTestnet()) { const seed_phrase = process.env.FUNDING_ACCOUNT_SEED_PHRASE; if (seed_phrase === undefined) { - console.error("FUNDING_ACCOUNT_SEED_PHRASE must not be undefined when CHAIN_ENVIRONMENT is \"rococo\""); + console.error('FUNDING_ACCOUNT_SEED_PHRASE must not be undefined when CHAIN_ENVIRONMENT is "rococo"'); process.exit(1); } return { - uri: "RococoTestRunnerAccount", + uri: 'RococoTestRunnerAccount', keys: keyring.addFromMnemonic(seed_phrase), }; } return { - uri: "//Alice", - keys: keyring.addFromUri("//Alice"), + uri: '//Alice', + keys: keyring.addFromUri('//Alice'), }; } diff --git a/e2e/scaffolding/globalHooks.ts b/e2e/scaffolding/globalHooks.ts index 966ad2c747..bf6f273df1 100644 --- a/e2e/scaffolding/globalHooks.ts +++ b/e2e/scaffolding/globalHooks.ts @@ -1,21 +1,23 @@ // These run ONCE per entire test run -import { cryptoWaitReady } from "@polkadot/util-crypto"; +import { cryptoWaitReady } from '@polkadot/util-crypto'; import workerpool from 'workerpool'; -import { ExtrinsicHelper } from "./extrinsicHelpers"; -import { fundingSources, getFundingSource, getRootFundingSource, getSudo } from "./funding"; -import { TEST_EPOCH_LENGTH, drainKeys, getNonce, setEpochLength } from "./helpers"; -import { isDev } from "./env"; +import { ExtrinsicHelper } from './extrinsicHelpers'; +import { fundingSources, getFundingSource, getRootFundingSource, getSudo } from './funding'; +import { TEST_EPOCH_LENGTH, drainKeys, getNonce, setEpochLength } from './helpers'; +import { isDev } from './env'; const SOURCE_AMOUNT = 100_000_000_000_000n; async function fundAllSources() { const root = getRootFundingSource().keys; - console.log("Root funding source: ", root.address); + console.log('Root funding source: ', root.address); const nonce = await 
getNonce(root); - await Promise.all(fundingSources.map((dest, i) => { - return ExtrinsicHelper.transferFunds(root, getFundingSource(dest), SOURCE_AMOUNT).signAndSend(nonce + i); - })); + await Promise.all( + fundingSources.map((dest, i) => { + return ExtrinsicHelper.transferFunds(root, getFundingSource(dest), SOURCE_AMOUNT).signAndSend(nonce + i); + }) + ); } async function devSudoActions() { @@ -31,7 +33,7 @@ function drainAllSources() { } export async function mochaGlobalSetup() { - console.log('Global Setup Start', "Reported CPU Count: ", workerpool.cpus); + console.log('Global Setup Start', 'Reported CPU Count: ', workerpool.cpus); await cryptoWaitReady(); await ExtrinsicHelper.initialize(); await fundAllSources(); diff --git a/e2e/scaffolding/helpers.ts b/e2e/scaffolding/helpers.ts index 89b38f2bd8..550403b62c 100644 --- a/e2e/scaffolding/helpers.ts +++ b/e2e/scaffolding/helpers.ts @@ -1,150 +1,167 @@ -import { Keyring } from "@polkadot/api"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { u16, u32, u64, Option } from "@polkadot/types"; -import type { FrameSystemAccountInfo, PalletCapacityCapacityDetails } from "@polkadot/types/lookup"; -import { Codec } from "@polkadot/types/types"; -import { u8aToHex, u8aWrapBytes } from "@polkadot/util"; +import { Keyring } from '@polkadot/api'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { u16, u32, u64, Option } from '@polkadot/types'; +import type { FrameSystemAccountInfo, PalletCapacityCapacityDetails } from '@polkadot/types/lookup'; +import { Codec } from '@polkadot/types/types'; +import { u8aToHex, u8aWrapBytes } from '@polkadot/util'; import { mnemonicGenerate } from '@polkadot/util-crypto'; -import { env, isTestnet } from "./env"; +import { verbose, isTestnet } from './env'; import { AddKeyData, AddProviderPayload, EventMap, ExtrinsicHelper, - ItemizedSignaturePayload, ItemizedSignaturePayloadV2, PaginatedDeleteSignaturePayload, - PaginatedDeleteSignaturePayloadV2, PaginatedUpsertSignaturePayload, PaginatedUpsertSignaturePayloadV2 -} from "./extrinsicHelpers"; -import { BlockPaginationResponseMessage, HandleResponse, MessageResponse, MessageSourceId, PageHash } from "@frequency-chain/api-augment/interfaces"; -import assert from "assert"; -import { AVRO_GRAPH_CHANGE } from "../schemas/fixtures/avroGraphChangeSchemaType"; -import { PARQUET_BROADCAST } from "../schemas/fixtures/parquetBroadcastSchemaType"; -import { AVRO_CHAT_MESSAGE } from "../stateful-pallet-storage/fixtures/itemizedSchemaType"; + ItemizedSignaturePayload, + ItemizedSignaturePayloadV2, + PaginatedDeleteSignaturePayload, + PaginatedDeleteSignaturePayloadV2, + PaginatedUpsertSignaturePayload, + PaginatedUpsertSignaturePayloadV2, +} from './extrinsicHelpers'; +import { + BlockPaginationResponseMessage, + HandleResponse, + MessageResponse, + MessageSourceId, + PageHash, +} from '@frequency-chain/api-augment/interfaces'; +import assert from 'assert'; +import { AVRO_GRAPH_CHANGE } from '../schemas/fixtures/avroGraphChangeSchemaType'; +import { PARQUET_BROADCAST } from '../schemas/fixtures/parquetBroadcastSchemaType'; +import { AVRO_CHAT_MESSAGE } from '../stateful-pallet-storage/fixtures/itemizedSchemaType'; export interface Account { - uri: string, - keys: KeyringPair, + uri: string; + keys: KeyringPair; } -export type Sr25519Signature = { Sr25519: `0x${string}` } +export type Sr25519Signature = { Sr25519: `0x${string}` }; export const TEST_EPOCH_LENGTH = 50; export const CENTS = 1000000n; export const DOLLARS = 100n * CENTS; export const 
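// Arithmetic note: with CENTS = 1000000n and DOLLARS = 100n * CENTS above, the starting balance below
// works out to 6n * CENTS + DOLLARS = 6_000_000n + 100_000_000n = 106_000_000n in the chain's smallest unit.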
STARTING_BALANCE = 6n * CENTS + DOLLARS; -export function getTestHandle(prefix = "test-") { - return prefix + Math.random().toFixed(10).toString().replaceAll("0.", ""); +export function getTestHandle(prefix = 'test-') { + return prefix + Math.random().toFixed(10).toString().replaceAll('0.', ''); } export function signPayloadSr25519(keys: KeyringPair, data: Codec): Sr25519Signature { - return { Sr25519: u8aToHex(keys.sign(u8aWrapBytes(data.toU8a()))) } + return { Sr25519: u8aToHex(keys.sign(u8aWrapBytes(data.toU8a()))) }; } -export async function generateDelegationPayload(payloadInputs: AddProviderPayload, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generateDelegationPayload( + payloadInputs: AddProviderPayload, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } export async function getBlockNumber(): Promise { - return (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber() + return (await ExtrinsicHelper.getLastBlock()).block.header.number.toNumber(); } - let cacheED: null | bigint = null; export async function getExistentialDeposit(): Promise { if (cacheED !== null) return cacheED; - return cacheED = ExtrinsicHelper.api.consts.balances.existentialDeposit.toBigInt(); + return (cacheED = ExtrinsicHelper.api.consts.balances.existentialDeposit.toBigInt()); } -export async function generateAddKeyPayload(payloadInputs: AddKeyData, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generateAddKeyPayload( + payloadInputs: AddKeyData, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } -export async function generateItemizedSignaturePayload(payloadInputs: ItemizedSignaturePayload, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generateItemizedSignaturePayload( + payloadInputs: ItemizedSignaturePayload, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } -export async function generateItemizedSignaturePayloadV2(payloadInputs: ItemizedSignaturePayloadV2, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generateItemizedSignaturePayloadV2( + payloadInputs: ItemizedSignaturePayloadV2, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - 
expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } -export async function generatePaginatedUpsertSignaturePayload(payloadInputs: PaginatedUpsertSignaturePayload, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generatePaginatedUpsertSignaturePayload( + payloadInputs: PaginatedUpsertSignaturePayload, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } -export async function generatePaginatedUpsertSignaturePayloadV2(payloadInputs: PaginatedUpsertSignaturePayloadV2, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generatePaginatedUpsertSignaturePayloadV2( + payloadInputs: PaginatedUpsertSignaturePayloadV2, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } -export async function generatePaginatedDeleteSignaturePayload(payloadInputs: PaginatedDeleteSignaturePayload, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generatePaginatedDeleteSignaturePayload( + payloadInputs: PaginatedDeleteSignaturePayload, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } -export async function generatePaginatedDeleteSignaturePayloadV2(payloadInputs: PaginatedDeleteSignaturePayloadV2, expirationOffset: number = 100, blockNumber?: number): Promise { - let { expiration, ...payload } = payloadInputs; - if (!expiration) { - expiration = (blockNumber || (await getBlockNumber())) + expirationOffset; - } +export async function generatePaginatedDeleteSignaturePayloadV2( + payloadInputs: PaginatedDeleteSignaturePayloadV2, + expirationOffset: number = 100, + blockNumber?: number +): Promise { + const { expiration, ...payload } = payloadInputs; return { - expiration, + expiration: expiration || (blockNumber || (await getBlockNumber())) + expirationOffset, ...payload, - } + }; } // Keep track of all the funded keys so that we can drain them at the end of the test @@ -165,11 +182,13 @@ export function createKeys(name: string = 'first pair'): KeyringPair { return keypair; } -function canDrainAccount(info: FrameSystemAccountInfo): Boolean { - return !info.isEmpty - && info.data.free.toNumber() > 1_500_000 // ~Cost to do the transfer - && info.data.reserved.toNumber() < 1 - && info.data.frozen.toNumber() < 1; +function canDrainAccount(info: FrameSystemAccountInfo): boolean { + return ( + !info.isEmpty && + info.data.free.toNumber() > 1_500_000 && // ~Cost to do the transfer + 
info.data.reserved.toNumber() < 1 && + info.data.frozen.toNumber() < 1 + ); } export async function drainKeys(keyPairs: KeyringPair[], dest: string) { @@ -186,34 +205,47 @@ export async function drainKeys(keyPairs: KeyringPair[], dest: string) { } } -export async function fundKeypair(source: KeyringPair, dest: KeyringPair, amount: bigint, nonce?: number): Promise { +export async function fundKeypair( + source: KeyringPair, + dest: KeyringPair, + amount: bigint, + nonce?: number +): Promise { await ExtrinsicHelper.transferFunds(source, dest, amount).signAndSend(nonce); } -export async function createAndFundKeypair(source: KeyringPair, amount?: bigint, keyName?: string, nonce?: number): Promise { +export async function createAndFundKeypair( + source: KeyringPair, + amount?: bigint, + keyName?: string, + nonce?: number +): Promise { const keypair = createKeys(keyName); - await fundKeypair(source, keypair, amount || await getExistentialDeposit(), nonce); - log("Funded", `Name: ${keyName || "None provided"}`, `Address: ${keypair.address}`); + await fundKeypair(source, keypair, amount || (await getExistentialDeposit()), nonce); + log('Funded', `Name: ${keyName || 'None provided'}`, `Address: ${keypair.address}`); return keypair; } -export async function createAndFundKeypairs(source: KeyringPair, keyNames: string[], amountOverExDep: bigint = 100_000_000n): Promise { +export async function createAndFundKeypairs( + source: KeyringPair, + keyNames: string[], + amountOverExDep: bigint = 100_000_000n +): Promise { const nonce = await getNonce(source); const existentialDeposit = await getExistentialDeposit(); const wait: Array> = keyNames.map((keyName, i) => { const keypair = createKeys(keyName + ` ${i}th`); - return fundKeypair(source, keypair, existentialDeposit + amountOverExDep, nonce + i) - .then(() => keypair); + return fundKeypair(source, keypair, existentialDeposit + amountOverExDep, nonce + i).then(() => keypair); }); return Promise.all(wait); } export function log(...args: any[]) { - if (env.verbose) { + if (verbose) { console.log(...args); } } @@ -221,14 +253,14 @@ export function log(...args: any[]) { export async function createProviderKeysAndId(source: KeyringPair, amount?: bigint): Promise<[KeyringPair, u64]> { const providerKeys = await createAndFundKeypair(source, amount); await ExtrinsicHelper.createMsa(providerKeys).fundAndSend(source); - const createProviderOp = ExtrinsicHelper.createProvider(providerKeys, "PrivateProvider"); + const createProviderOp = ExtrinsicHelper.createProvider(providerKeys, 'PrivateProvider'); const { target: providerEvent } = await createProviderOp.fundAndSend(source); const providerId = providerEvent?.data.providerId || new u64(ExtrinsicHelper.api.registry, 0); return [providerKeys, providerId]; } export async function createDelegator(source: KeyringPair, amount?: bigint): Promise<[KeyringPair, u64]> { - let keys = await createAndFundKeypair(source, amount); + const keys = await createAndFundKeypair(source, amount); const createMsa = ExtrinsicHelper.createMsa(keys); const { target: msaCreatedEvent } = await createMsa.fundAndSend(source); const delegatorMsaId = msaCreatedEvent?.data.msaId || new u64(ExtrinsicHelper.api.registry, 0); @@ -236,7 +268,12 @@ export async function createDelegator(source: KeyringPair, amount?: bigint): Pro return [keys, delegatorMsaId]; } -export async function createDelegatorAndDelegation(source: KeyringPair, schemaId: u16, providerId: u64, providerKeys: KeyringPair): Promise<[KeyringPair, u64]> { +export async function 
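// Illustrative sketch for the helpers above (`fundingSource` is whatever source the calling test file bound):
//   const [providerKeys, providerId] = await createProviderKeysAndId(fundingSource);
//   const [delegatorKeys, delegatorMsaId] = await createDelegator(fundingSource);
// The function below builds on createDelegator and then signs and submits a PalletMsaAddProvider payload
// to grant the delegation.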
createDelegatorAndDelegation( + source: KeyringPair, + schemaId: u16, + providerId: u64, + providerKeys: KeyringPair +): Promise<[KeyringPair, u64]> { // Create a delegator msa const [keys, delegatorMsaId] = await createDelegator(source); @@ -245,9 +282,14 @@ export async function createDelegatorAndDelegation(source: KeyringPair, schemaId authorizedMsaId: providerId, schemaIds: [schemaId], }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - - const grantDelegationOp = ExtrinsicHelper.grantDelegation(keys, providerKeys, signPayloadSr25519(keys, addProviderData), payload); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + keys, + providerKeys, + signPayloadSr25519(keys, addProviderData), + payload + ); await grantDelegationOp.fundAndSend(source); return [keys, delegatorMsaId]; @@ -275,9 +317,7 @@ export async function getHandleForMsa(msa_id: MessageSourceId): Promise { - +export async function createMsa(source: KeyringPair, amount?: bigint): Promise<[u64, KeyringPair]> { const keys = await createAndFundKeypair(source, amount); const createMsaOp = ExtrinsicHelper.createMsa(keys); const { target } = await createMsaOp.fundAndSend(source); @@ -288,8 +328,12 @@ export async function createMsa(source: KeyringPair, amount?: bigint): // Creates an MSA and a provider for the given keys // Returns the MSA Id of the provider -export async function createMsaAndProvider(source: KeyringPair, keys: KeyringPair, providerName: string, amount: bigint | undefined = undefined): - Promise { +export async function createMsaAndProvider( + source: KeyringPair, + keys: KeyringPair, + providerName: string, + amount: bigint | undefined = undefined +): Promise { // Create and fund a keypair with stakeAmount // Use this keypair for stake operations await fundKeypair(source, keys, amount || (await getExistentialDeposit())); @@ -308,20 +352,28 @@ export async function createMsaAndProvider(source: KeyringPair, keys: KeyringPai } // Stakes the given amount of tokens from the given keys to the given provider -export async function stakeToProvider(source: KeyringPair, keys: KeyringPair, providerId: u64, tokensToStake: bigint): Promise { +export async function stakeToProvider( + source: KeyringPair, + keys: KeyringPair, + providerId: u64, + tokensToStake: bigint +): Promise { const stakeOp = ExtrinsicHelper.stake(keys, providerId, tokensToStake); const { target: stakeEvent } = await stakeOp.fundAndSend(source); assert.notEqual(stakeEvent, undefined, 'stakeToProvider: should have returned Stake event'); if (stakeEvent) { - let stakedCapacity = stakeEvent.data.capacity; + const stakedCapacity = stakeEvent.data.capacity; // let capacityCost: bigint = ExtrinsicHelper.api.consts.capacity.capacityPerToken.toBigInt(); - let expectedCapacity = tokensToStake / TokenPerCapacity; + const expectedCapacity = tokensToStake / TokenPerCapacity; - assert.equal(stakedCapacity, expectedCapacity, `stakeToProvider: expected ${expectedCapacity}, got ${stakedCapacity}`); - } - else { + assert.equal( + stakedCapacity, + expectedCapacity, + `stakeToProvider: expected ${expectedCapacity}, got ${stakedCapacity}` + ); + } else { return Promise.reject('stakeToProvider: stakeEvent should be capacity.Staked event'); } } @@ -337,12 +389,15 @@ export async function setEpochLength(keys: KeyringPair, epochLength: number): Pr const { target: setEpochLengthEvent } = await 
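// The epoch-length update is submitted via sudoSignAndSend() rather than signAndSend(), so it is expected
// to need the dev sudo key; getSudo() in funding.ts deliberately refuses to provide one on testnet.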
setEpochLengthOp.sudoSignAndSend(); if (setEpochLengthEvent) { const epochLength = setEpochLengthEvent.data.blocks; - assert.equal(epochLength.toNumber(), TEST_EPOCH_LENGTH, "should set epoch length to TEST_EPOCH_LENGTH blocks"); + assert.equal(epochLength.toNumber(), TEST_EPOCH_LENGTH, 'should set epoch length to TEST_EPOCH_LENGTH blocks'); const actualEpochLength = await ExtrinsicHelper.apiPromise.query.capacity.epochLength(); - assert.equal(actualEpochLength, TEST_EPOCH_LENGTH, `should have set epoch length to TEST_EPOCH_LENGTH blocks, but it's ${actualEpochLength}`); - } - else { - assert.fail("should return an EpochLengthUpdated event"); + assert.equal( + actualEpochLength, + TEST_EPOCH_LENGTH, + `should have set epoch length to TEST_EPOCH_LENGTH blocks, but it's ${actualEpochLength}` + ); + } else { + assert.fail('should return an EpochLengthUpdated event'); } } @@ -351,14 +406,17 @@ export async function getOrCreateGraphChangeSchema(source: KeyringPair): Promise const ROCOCO_GRAPH_CHANGE_SCHEMA_ID: u16 = new u16(ExtrinsicHelper.api.registry, 53); return ROCOCO_GRAPH_CHANGE_SCHEMA_ID; } else { - const { target: createSchemaEvent, eventMap } = await ExtrinsicHelper - .createSchema(source, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain") - .fundAndSend(source); + const { target: createSchemaEvent, eventMap } = await ExtrinsicHelper.createSchema( + source, + AVRO_GRAPH_CHANGE, + 'AvroBinary', + 'OnChain' + ).fundAndSend(source); assertExtrinsicSuccess(eventMap); if (createSchemaEvent) { return createSchemaEvent.data.schemaId; } else { - assert.fail("failed to create a schema") + assert.fail('failed to create a schema'); } } } @@ -368,12 +426,12 @@ export async function getOrCreateParquetBroadcastSchema(source: KeyringPair): Pr const ROCOCO_PARQUET_BROADCAST_SCHEMA_ID: u16 = new u16(ExtrinsicHelper.api.registry, 51); return ROCOCO_PARQUET_BROADCAST_SCHEMA_ID; } else { - const createSchema = ExtrinsicHelper.createSchema(source, PARQUET_BROADCAST, "Parquet", "IPFS"); - let { target: event } = await createSchema.fundAndSend(source); + const createSchema = ExtrinsicHelper.createSchema(source, PARQUET_BROADCAST, 'Parquet', 'IPFS'); + const { target: event } = await createSchema.fundAndSend(source); if (event) { return event.data.schemaId; } else { - assert.fail("failed to create a schema") + assert.fail('failed to create a schema'); } } } @@ -385,15 +443,15 @@ export async function getOrCreateDummySchema(source: KeyringPair): Promise } else { const createDummySchema = ExtrinsicHelper.createSchema( source, - { type: "record", name: "Dummy on-chain schema", fields: [] }, - "AvroBinary", - "OnChain" + { type: 'record', name: 'Dummy on-chain schema', fields: [] }, + 'AvroBinary', + 'OnChain' ); const { target: dummySchemaEvent } = await createDummySchema.fundAndSend(source); if (dummySchemaEvent) { return dummySchemaEvent.data.schemaId; } else { - assert.fail("failed to create a schema") + assert.fail('failed to create a schema'); } } } @@ -404,12 +462,12 @@ export async function getOrCreateAvroChatMessagePaginatedSchema(source: KeyringP return ROCOCO_AVRO_CHAT_MESSAGE_PAGINATED; } else { // Create a schema for Paginated PayloadLocation - const createSchema = ExtrinsicHelper.createSchema(source, AVRO_CHAT_MESSAGE, "AvroBinary", "Paginated"); + const createSchema = ExtrinsicHelper.createSchema(source, AVRO_CHAT_MESSAGE, 'AvroBinary', 'Paginated'); const { target: event } = await createSchema.fundAndSend(source); if (event) { return event.data.schemaId; } else { - assert.fail("failed to create a 
schema") + assert.fail('failed to create a schema'); } } } @@ -420,12 +478,12 @@ export async function getOrCreateAvroChatMessageItemizedSchema(source: KeyringPa return ROCOCO_AVRO_CHAT_MESSAGE_ITEMIZED; } else { // Create a schema for Paginated PayloadLocation - const createSchema = ExtrinsicHelper.createSchema(source, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); + const createSchema = ExtrinsicHelper.createSchema(source, AVRO_CHAT_MESSAGE, 'AvroBinary', 'Itemized'); const { target: event } = await createSchema.fundAndSend(source); if (event) { return event.data.schemaId; } else { - assert.fail("failed to create a schema") + assert.fail('failed to create a schema'); } } } @@ -442,33 +500,37 @@ export async function getNonce(keys: KeyringPair): Promise { } export function assertEvent(events: EventMap, eventName: string) { - assert(events.hasOwnProperty(eventName)); + assert(Object.hasOwn(events, eventName), `Could not find expected event: ${eventName}`); } export function assertExtrinsicSuccess(eventMap: EventMap) { - assert.notEqual(eventMap["system.ExtrinsicSuccess"], undefined); + assert.notEqual(eventMap['system.ExtrinsicSuccess'], undefined); } -export function assertHasMessage(response: BlockPaginationResponseMessage, testFn: (x: MessageResponse) => Boolean) { +export function assertHasMessage(response: BlockPaginationResponseMessage, testFn: (x: MessageResponse) => boolean) { const messages = response.content; - assert(messages.length > 0, "Expected some messages, but found none."); + assert(messages.length > 0, 'Expected some messages, but found none.'); const found = messages.find(testFn); if (found) { assert.notEqual(found, undefined); } else { - const allPayloads = messages.map(x => x.payload.toString()); - assert.fail(`Unable to find message in response (length: ${messages.length}, Payloads: ${allPayloads.join(", ")})`); + const allPayloads = messages.map((x) => x.payload.toString()); + assert.fail(`Unable to find message in response (length: ${messages.length}, Payloads: ${allPayloads.join(', ')})`); } } -export async function assertAddNewKey(capacityKeys: KeyringPair, addKeyPayload: AddKeyData, newControlKeypair: KeyringPair) { - const addKeyPayloadCodec: Codec = ExtrinsicHelper.api.registry.createType("PalletMsaAddKeyData", addKeyPayload); +export async function assertAddNewKey( + capacityKeys: KeyringPair, + addKeyPayload: AddKeyData, + newControlKeypair: KeyringPair +) { + const addKeyPayloadCodec: Codec = ExtrinsicHelper.api.registry.createType('PalletMsaAddKeyData', addKeyPayload); const ownerSig: Sr25519Signature = signPayloadSr25519(capacityKeys, addKeyPayloadCodec); const newSig: Sr25519Signature = signPayloadSr25519(newControlKeypair, addKeyPayloadCodec); const addPublicKeyOp = ExtrinsicHelper.addPublicKeyToMsa(capacityKeys, ownerSig, newSig, addKeyPayload); const { eventMap } = await addPublicKeyOp.signAndSend(); - assertEvent(eventMap, "system.ExtrinsicSuccess"); - assertEvent(eventMap, "msa.PublicKeyAdded"); + assertEvent(eventMap, 'system.ExtrinsicSuccess'); + assertEvent(eventMap, 'msa.PublicKeyAdded'); } diff --git a/e2e/scaffolding/rootHooks.ts b/e2e/scaffolding/rootHooks.ts index 3bfbc83753..a8b2443b5a 100644 --- a/e2e/scaffolding/rootHooks.ts +++ b/e2e/scaffolding/rootHooks.ts @@ -1,9 +1,9 @@ // These run once per TEST FILE -import { cryptoWaitReady } from "@polkadot/util-crypto"; -import { ExtrinsicHelper } from "./extrinsicHelpers"; -import { drainFundedKeys } from "./helpers"; -import { getRootFundingSource } from "./funding"; +import { cryptoWaitReady } 
from '@polkadot/util-crypto'; +import { ExtrinsicHelper } from './extrinsicHelpers'; +import { drainFundedKeys } from './helpers'; +import { getRootFundingSource } from './funding'; // Make sure that we can serialize BigInts for Mocha (BigInt.prototype as any).toJSON = function () { @@ -21,15 +21,15 @@ export const mochaHooks = { }, async afterAll() { const testSuite = this.test.parent.suites[0].title; - console.log("Starting ROOT hook shutdown", testSuite) + console.log('Starting ROOT hook shutdown', testSuite); try { // Any key created using helpers `createKeys` is kept in the module // then any value remaining is drained here at the end const rootAddress = getRootFundingSource().keys.address; await drainFundedKeys(rootAddress); - console.log("ENDING ROOT hook shutdown", testSuite) + console.log('ENDING ROOT hook shutdown', testSuite); } catch (e) { console.error('Failed to run afterAll root hook: ', testSuite, e); } - } -} + }, +}; diff --git a/e2e/scenarios/grantDelegation.test.ts b/e2e/scenarios/grantDelegation.test.ts index 7d46006ad6..9ca345474c 100644 --- a/e2e/scenarios/grantDelegation.test.ts +++ b/e2e/scenarios/grantDelegation.test.ts @@ -1,398 +1,519 @@ -import "@frequency-chain/api-augment"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { u16, u64 } from "@polkadot/types"; -import assert from "assert"; -import { AddProviderPayload, Extrinsic, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { DOLLARS, createAndFundKeypair, createAndFundKeypairs, createKeys, generateDelegationPayload, getBlockNumber, signPayloadSr25519 } from "../scaffolding/helpers"; -import { SchemaGrantResponse, SchemaId } from "@frequency-chain/api-augment/interfaces"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("Delegation Scenario Tests", function () { - const fundingSource = getFundingSource("scenarios-grant-delegation"); - - let keys: KeyringPair; - let otherMsaKeys: KeyringPair; - let noMsaKeys: KeyringPair; - let providerKeys: KeyringPair; - let otherProviderKeys: KeyringPair; - let schemaId: u16; - let schemaId2: SchemaId; - let providerId: u64; - let otherProviderId: u64; - let msaId: u64; - let otherMsaId: u64; - - before(async function () { - // Fund all the different keys - [noMsaKeys, keys, otherMsaKeys, providerKeys, otherProviderKeys] = await createAndFundKeypairs(fundingSource, ["noMsaKeys", "keys", "otherMsaKeys", "providerKeys", "otherProviderKeys"], 1n * DOLLARS); - - const { target: msaCreatedEvent1 } = await ExtrinsicHelper.createMsa(keys).signAndSend(); - msaId = msaCreatedEvent1!.data.msaId; - - const { target: msaCreatedEvent2 } = await ExtrinsicHelper.createMsa(otherMsaKeys).signAndSend(); - otherMsaId = msaCreatedEvent2!.data.msaId; - - let createProviderMsaOp = ExtrinsicHelper.createMsa(providerKeys); - await createProviderMsaOp.signAndSend(); - let createProviderOp = ExtrinsicHelper.createProvider(providerKeys, "MyPoster"); - let { target: providerEvent } = await createProviderOp.signAndSend(); - assert.notEqual(providerEvent, undefined, "setup should return a ProviderCreated event"); - providerId = providerEvent!.data.providerId; - - createProviderMsaOp = ExtrinsicHelper.createMsa(otherProviderKeys); - await createProviderMsaOp.signAndSend(); - createProviderOp = ExtrinsicHelper.createProvider(otherProviderKeys, "MyPoster"); - ({ target: providerEvent } = await createProviderOp.signAndSend()); - assert.notEqual(providerEvent, undefined, "setup should return a ProviderCreated event"); - otherProviderId = 
providerEvent!.data.providerId; - - const createSchemaOp = ExtrinsicHelper.createSchema(keys, { - type: "record", - name: "Post", - fields: [ - { - name: "title", - type: { - name: "Title", - type: "string" - } - }, - { - name: "content", - type: { - name: "Content", - type: "string" - } - }, - { - name: "fromId", - type: { - name: "DSNPId", - type: "fixed", - size: 8, - }, - }, - { - name: "objectId", - type: "DSNPId", - }, - ] - }, "AvroBinary", "OnChain"); - let { target: createSchemaEvent } = await createSchemaOp.signAndSend(); - assert.notEqual(createSchemaEvent, undefined, "setup should return SchemaCreated event"); - schemaId = createSchemaEvent!.data.schemaId; - - // Create a second schema - ({ target: createSchemaEvent } = await createSchemaOp.signAndSend()); - assert.notEqual(createSchemaEvent, undefined, "setup should return SchemaCreated event"); - schemaId2 = createSchemaEvent!.data.schemaId; +import '@frequency-chain/api-augment'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { u16, u64 } from '@polkadot/types'; +import assert from 'assert'; +import { AddProviderPayload, Extrinsic, ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { + DOLLARS, + createAndFundKeypair, + createAndFundKeypairs, + createKeys, + generateDelegationPayload, + getBlockNumber, + signPayloadSr25519, +} from '../scaffolding/helpers'; +import { SchemaGrantResponse, SchemaId } from '@frequency-chain/api-augment/interfaces'; +import { getFundingSource } from '../scaffolding/funding'; + +const fundingSource = getFundingSource('scenarios-grant-delegation'); + +describe('Delegation Scenario Tests', function () { + let keys: KeyringPair; + let otherMsaKeys: KeyringPair; + let noMsaKeys: KeyringPair; + let providerKeys: KeyringPair; + let otherProviderKeys: KeyringPair; + let schemaId: u16; + let schemaId2: SchemaId; + let providerId: u64; + let otherProviderId: u64; + let msaId: u64; + let otherMsaId: u64; + + before(async function () { + // Fund all the different keys + [noMsaKeys, keys, otherMsaKeys, providerKeys, otherProviderKeys] = await createAndFundKeypairs( + fundingSource, + ['noMsaKeys', 'keys', 'otherMsaKeys', 'providerKeys', 'otherProviderKeys'], + 1n * DOLLARS + ); + + const { target: msaCreatedEvent1 } = await ExtrinsicHelper.createMsa(keys).signAndSend(); + msaId = msaCreatedEvent1!.data.msaId; + + const { target: msaCreatedEvent2 } = await ExtrinsicHelper.createMsa(otherMsaKeys).signAndSend(); + otherMsaId = msaCreatedEvent2!.data.msaId; + + let createProviderMsaOp = ExtrinsicHelper.createMsa(providerKeys); + await createProviderMsaOp.signAndSend(); + let createProviderOp = ExtrinsicHelper.createProvider(providerKeys, 'MyPoster'); + let { target: providerEvent } = await createProviderOp.signAndSend(); + assert.notEqual(providerEvent, undefined, 'setup should return a ProviderCreated event'); + providerId = providerEvent!.data.providerId; + + createProviderMsaOp = ExtrinsicHelper.createMsa(otherProviderKeys); + await createProviderMsaOp.signAndSend(); + createProviderOp = ExtrinsicHelper.createProvider(otherProviderKeys, 'MyPoster'); + ({ target: providerEvent } = await createProviderOp.signAndSend()); + assert.notEqual(providerEvent, undefined, 'setup should return a ProviderCreated event'); + otherProviderId = providerEvent!.data.providerId; + + const createSchemaOp = ExtrinsicHelper.createSchema( + keys, + { + type: 'record', + name: 'Post', + fields: [ + { + name: 'title', + type: { + name: 'Title', + type: 'string', + }, + }, + { + name: 'content', + type: 
{ + name: 'Content', + type: 'string', + }, + }, + { + name: 'fromId', + type: { + name: 'DSNPId', + type: 'fixed', + size: 8, + }, + }, + { + name: 'objectId', + type: 'DSNPId', + }, + ], + }, + 'AvroBinary', + 'OnChain' + ); + let { target: createSchemaEvent } = await createSchemaOp.signAndSend(); + assert.notEqual(createSchemaEvent, undefined, 'setup should return SchemaCreated event'); + schemaId = createSchemaEvent!.data.schemaId; + + // Create a second schema + ({ target: createSchemaEvent } = await createSchemaOp.signAndSend()); + assert.notEqual(createSchemaEvent, undefined, 'setup should return SchemaCreated event'); + schemaId2 = createSchemaEvent!.data.schemaId; + }); + + describe('delegation grants', function () { + it('should fail to grant delegation if payload not signed by delegator (AddProviderSignatureVerificationFailed)', async function () { + const payload = await generateDelegationPayload({ + authorizedMsaId: providerId, + schemaIds: [schemaId], + }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + keys, + providerKeys, + signPayloadSr25519(providerKeys, addProviderData), + payload + ); + await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { + name: 'AddProviderSignatureVerificationFailed', + }); }); - describe("delegation grants", function () { - - it("should fail to grant delegation if payload not signed by delegator (AddProviderSignatureVerificationFailed)", async function () { - const payload = await generateDelegationPayload({ - authorizedMsaId: providerId, - schemaIds: [schemaId], - }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); + it('should fail to delegate to self (InvalidSelfProvider)', async function () { + const payload = await generateDelegationPayload({ + authorizedMsaId: providerId, + schemaIds: [schemaId], + }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + providerKeys, + providerKeys, + signPayloadSr25519(providerKeys, addProviderData), + payload + ); + await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { name: 'InvalidSelfProvider' }); + }); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(keys, providerKeys, signPayloadSr25519(providerKeys, addProviderData), payload); - await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { name: 'AddProviderSignatureVerificationFailed' }); - }); + it('should fail to grant delegation to an MSA that is not a registered provider (ProviderNotRegistered)', async function () { + const payload = await generateDelegationPayload({ + authorizedMsaId: otherMsaId, + schemaIds: [schemaId], + }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + keys, + otherMsaKeys, + signPayloadSr25519(keys, addProviderData), + payload + ); + await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { name: 'ProviderNotRegistered' }); + }); - it("should fail to delegate to self (InvalidSelfProvider)", async function () { - const payload = await generateDelegationPayload({ - authorizedMsaId: providerId, - schemaIds: [schemaId], - }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); + it('should fail to grant delegation if ID in 
payload does not match origin (UnauthorizedDelegator)', async function () { + const payload = await generateDelegationPayload({ + authorizedMsaId: otherMsaId, + schemaIds: [schemaId], + }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + keys, + providerKeys, + signPayloadSr25519(keys, addProviderData), + payload + ); + await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { name: 'UnauthorizedDelegator' }); + }); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(providerKeys, providerKeys, signPayloadSr25519(providerKeys, addProviderData), payload); - await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { name: 'InvalidSelfProvider' }); - }); + it('should grant a delegation to a provider', async function () { + const payload = await generateDelegationPayload({ + authorizedMsaId: providerId, + schemaIds: [schemaId], + }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + keys, + providerKeys, + signPayloadSr25519(keys, addProviderData), + payload + ); + const { target: grantDelegationEvent } = await grantDelegationOp.fundAndSend(fundingSource); + assert.notEqual(grantDelegationEvent, undefined, 'should have returned DelegationGranted event'); + assert.deepEqual(grantDelegationEvent?.data.providerId, providerId, 'provider IDs should match'); + assert.deepEqual(grantDelegationEvent?.data.delegatorId, msaId, 'delegator IDs should match'); + }); - it("should fail to grant delegation to an MSA that is not a registered provider (ProviderNotRegistered)", async function () { - const payload = await generateDelegationPayload({ - authorizedMsaId: otherMsaId, - schemaIds: [schemaId], - }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); + it('initial granted schemas should be correct', async function () { + const schemaGrants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); + assert.equal(schemaGrants.isSome, true); + const schemaIds = schemaGrants + .unwrap() + .filter((grant) => grant.revoked_at.toBigInt() === 0n) + .map((grant) => grant.schema_id.toNumber()); + const expectedSchemaIds = [schemaId.toNumber()]; + assert.deepStrictEqual(schemaIds, expectedSchemaIds, 'granted schemas should equal initial set'); + }); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(keys, otherMsaKeys, signPayloadSr25519(keys, addProviderData), payload); - await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { name: 'ProviderNotRegistered' }); - }); + it('should grant additional schema permissions', async function () { + const payload = await generateDelegationPayload({ + authorizedMsaId: providerId, + schemaIds: [schemaId, schemaId2], + }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + const grantDelegationOp = ExtrinsicHelper.grantDelegation( + keys, + providerKeys, + signPayloadSr25519(keys, addProviderData), + payload + ); + const { target: grantDelegationEvent } = await grantDelegationOp.fundAndSend(fundingSource); + assert.notEqual(grantDelegationEvent, undefined, 'should have returned DelegationGranted event'); + assert.deepEqual(grantDelegationEvent?.data.providerId, providerId, 'provider IDs should match'); + assert.deepEqual(grantDelegationEvent?.data.delegatorId, msaId, 
'delegator IDs should match'); + const grants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); + const grantedSchemaIds = grants + .unwrap() + .filter((grant) => grant.revoked_at.toBigInt() === 0n) + .map((grant) => grant.schema_id.toNumber()); + const expectedSchemaIds = [schemaId.toNumber(), schemaId2.toNumber()]; + assert.deepStrictEqual(grantedSchemaIds, expectedSchemaIds); + }); + }); - it("should fail to grant delegation if ID in payload does not match origin (UnauthorizedDelegator)", async function () { - const payload = await generateDelegationPayload({ - authorizedMsaId: otherMsaId, - schemaIds: [schemaId], - }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); + describe('revoke schema permissions', function () { + it('should fail to revoke schema permissions from non-MSA (NoKeyExists)', async function () { + const nonMsaKeys = await createAndFundKeypair(fundingSource, 5_000_000n); + const op = ExtrinsicHelper.revokeSchemaPermissions(nonMsaKeys, providerId, [schemaId]); + await assert.rejects(op.fundAndSend(fundingSource), { name: 'NoKeyExists' }); + }); - const grantDelegationOp = ExtrinsicHelper.grantDelegation(keys, providerKeys, signPayloadSr25519(keys, addProviderData), payload); - await assert.rejects(grantDelegationOp.fundAndSend(fundingSource), { name: 'UnauthorizedDelegator' }); - }); + it('should fail to revoke schema permissions from a provider for which no delegation exists (DelegationNotFound)', async function () { + const op = ExtrinsicHelper.revokeSchemaPermissions(keys, otherProviderId, [schemaId]); + await assert.rejects(op.fundAndSend(fundingSource), { name: 'DelegationNotFound' }); + }); - it("should grant a delegation to a provider", async function () { - const payload = await generateDelegationPayload({ - authorizedMsaId: providerId, - schemaIds: [schemaId], - }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - - const grantDelegationOp = ExtrinsicHelper.grantDelegation(keys, providerKeys, signPayloadSr25519(keys, addProviderData), payload); - const { target: grantDelegationEvent } = await grantDelegationOp.fundAndSend(fundingSource); - assert.notEqual(grantDelegationEvent, undefined, "should have returned DelegationGranted event"); - assert.deepEqual(grantDelegationEvent?.data.providerId, providerId, 'provider IDs should match'); - assert.deepEqual(grantDelegationEvent?.data.delegatorId, msaId, 'delegator IDs should match'); - }); + it('should fail to revoke schema permission which was never granted (SchemaNotGranted)', async function () { + const schema = { + name: 'nonGrantedSchema', + type: 'record', + fields: [], + }; - it('initial granted schemas should be correct', async function () { - let schemaGrants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); - assert.equal(schemaGrants.isSome, true); - const schemaIds = schemaGrants.unwrap().filter((grant) => grant.revoked_at.toBigInt() === 0n).map((grant) => grant.schema_id.toNumber()); - const expectedSchemaIds = [schemaId.toNumber()]; - assert.deepStrictEqual(schemaIds, expectedSchemaIds, "granted schemas should equal initial set"); - }); + const sop = ExtrinsicHelper.createSchema(keys, schema, 'AvroBinary', 'OnChain'); + const { target: sevent } = await sop.fundAndSend(fundingSource); + const sid = sevent?.data.schemaId; + assert.notEqual(sid, undefined, 'should have created schema'); - it('should grant additional schema 
permissions', async function() { - const payload = await generateDelegationPayload({ - authorizedMsaId: providerId, - schemaIds: [schemaId, schemaId2], - }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - - const grantDelegationOp = ExtrinsicHelper.grantDelegation(keys, providerKeys, signPayloadSr25519(keys, addProviderData), payload); - const { target: grantDelegationEvent } = await grantDelegationOp.fundAndSend(fundingSource); - assert.notEqual(grantDelegationEvent, undefined, "should have returned DelegationGranted event"); - assert.deepEqual(grantDelegationEvent?.data.providerId, providerId, 'provider IDs should match'); - assert.deepEqual(grantDelegationEvent?.data.delegatorId, msaId, 'delegator IDs should match'); - let grants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); - const grantedSchemaIds = grants.unwrap().filter((grant) => grant.revoked_at.toBigInt() === 0n).map((grant) => grant.schema_id.toNumber()); - const expectedSchemaIds = [schemaId.toNumber(), schemaId2.toNumber()]; - assert.deepStrictEqual(grantedSchemaIds, expectedSchemaIds); - }) + const op = ExtrinsicHelper.revokeSchemaPermissions(keys, providerId, [sid]); + await assert.rejects(op.fundAndSend(fundingSource), { name: 'SchemaNotGranted' }); + }); + it('should successfully revoke granted schema', async function () { + const op = ExtrinsicHelper.revokeSchemaPermissions(keys, providerId, [schemaId2]); + await assert.doesNotReject(op.fundAndSend(fundingSource)); + const grants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); + const grantedSchemaIds = grants + .unwrap() + .filter((grant) => grant.revoked_at.toBigInt() === 0n) + .map((grant) => grant.schema_id.toNumber()); + assert.deepEqual( + grantedSchemaIds, + [schemaId.toNumber()], + 'granted schema permissions should include only non-revoked schema permission' + ); }); + }); - describe("revoke schema permissions", function () { - it("should fail to revoke schema permissions from non-MSA (NoKeyExists)", async function () { - const nonMsaKeys = await createAndFundKeypair(fundingSource, 5_000_000n); - const op = ExtrinsicHelper.revokeSchemaPermissions(nonMsaKeys, providerId, [schemaId]); - await assert.rejects(op.fundAndSend(fundingSource), { name: 'NoKeyExists' }); - }); + describe('revoke delegations', function () { + it('should fail to revoke a delegation if no MSA exists (InvalidMsaKey)', async function () { + const nonMsaKeys = await createAndFundKeypair(fundingSource); + const op = ExtrinsicHelper.revokeDelegationByDelegator(nonMsaKeys, providerId); + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 1$/ }); + }); - it("should fail to revoke schema permissions from a provider for which no delegation exists (DelegationNotFound)", async function () { - const op = ExtrinsicHelper.revokeSchemaPermissions(keys, otherProviderId, [schemaId]); - await assert.rejects(op.fundAndSend(fundingSource), { name: 'DelegationNotFound' }); - }) - - it("should fail to revoke schema permission which was never granted (SchemaNotGranted)", async function () { - const schema = { - name: "nonGrantedSchema", - type: "record", - fields: [], - } - - const sop = ExtrinsicHelper.createSchema(keys, schema, "AvroBinary", "OnChain"); - const { target: sevent } = await sop.fundAndSend(fundingSource); - const sid = sevent?.data.schemaId; - assert.notEqual(sid, undefined, "should have created schema"); - - const op = 
ExtrinsicHelper.revokeSchemaPermissions(keys, providerId, [sid]); - await assert.rejects(op.fundAndSend(fundingSource), { name: 'SchemaNotGranted' }); - }); + it('should revoke a delegation by delegator', async function () { + const revokeDelegationOp = ExtrinsicHelper.revokeDelegationByDelegator(keys, providerId); + const { target: revokeDelegationEvent } = await revokeDelegationOp.signAndSend('current'); + assert.notEqual(revokeDelegationEvent, undefined, 'should have returned DelegationRevoked event'); + assert.deepEqual(revokeDelegationEvent?.data.providerId, providerId, 'provider ids should be equal'); + assert.deepEqual(revokeDelegationEvent?.data.delegatorId, msaId, 'delegator ids should be equal'); + }); - it("should successfully revoke granted schema", async function () { - const op = ExtrinsicHelper.revokeSchemaPermissions(keys, providerId, [schemaId2]); - await assert.doesNotReject(op.fundAndSend(fundingSource)); - let grants = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); - const grantedSchemaIds = grants.unwrap().filter((grant) => grant.revoked_at.toBigInt() === 0n).map((grant) => grant.schema_id.toNumber()); - assert.deepEqual(grantedSchemaIds, [schemaId.toNumber()], "granted schema permissions should include only non-revoked schema permission"); - }); + it('should fail to revoke a delegation that has already been revoked (InvalidDelegation)', async function () { + const op = ExtrinsicHelper.revokeDelegationByDelegator(keys, providerId); + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 0$/ }); }); - describe("revoke delegations", function () { - it("should fail to revoke a delegation if no MSA exists (InvalidMsaKey)", async function () { - const nonMsaKeys = await createAndFundKeypair(fundingSource); - const op = ExtrinsicHelper.revokeDelegationByDelegator(nonMsaKeys, providerId); - await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 1$/ }); - }); + it('should fail to revoke delegation where no delegation exists (DelegationNotFound)', async function () { + const op = ExtrinsicHelper.revokeDelegationByDelegator(keys, otherProviderId); + await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 0$/ }); + }); - it("should revoke a delegation by delegator", async function () { - const revokeDelegationOp = ExtrinsicHelper.revokeDelegationByDelegator(keys, providerId); - const { target: revokeDelegationEvent } = await revokeDelegationOp.signAndSend('current'); - assert.notEqual(revokeDelegationEvent, undefined, "should have returned DelegationRevoked event"); - assert.deepEqual(revokeDelegationEvent?.data.providerId, providerId, 'provider ids should be equal'); - assert.deepEqual(revokeDelegationEvent?.data.delegatorId, msaId, 'delegator ids should be equal'); + describe('Successful revocation', function () { + let newKeys: KeyringPair; + let msaId: u64 | undefined; + let revokedAtBlock: number; + + before(async function () { + newKeys = createKeys(); + const payload = await generateDelegationPayload({ authorizedMsaId: providerId, schemaIds: [schemaId] }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + const op = ExtrinsicHelper.createSponsoredAccountWithDelegation( + newKeys, + providerKeys, + signPayloadSr25519(newKeys, addProviderData), + payload + ); + const { target: msaEvent } = await op.fundAndSend(fundingSource); + msaId = msaEvent?.data.msaId; + 
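      // msaEvent?.data.msaId leaves msaId undefined when the expected MSA-creation event is missing;
      // the assert below fails fast so the rest of this suite does not run against an undefined id.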
assert.notEqual(msaId, undefined, 'should have returned an MSA'); + }); + + it('schema permissions revoked block of delegation should be zero', async function () { + const delegationsResponse = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); + assert(delegationsResponse.isSome); + const delegations: SchemaGrantResponse[] = delegationsResponse.unwrap().toArray(); + delegations.forEach((delegation) => { + assert(delegation.revoked_at.toBigInt() == 0n); }); - - it("should fail to revoke a delegation that has already been revoked (InvalidDelegation)", async function () { - const op = ExtrinsicHelper.revokeDelegationByDelegator(keys, providerId); - await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 0$/ }); + }); + + it('should revoke a delegation by provider', async function () { + const op = ExtrinsicHelper.revokeDelegationByProvider(msaId as u64, providerKeys); + const { target: revokeEvent } = await op.signAndSend('current'); + assert.notEqual(revokeEvent, undefined, 'should have returned a DelegationRevoked event'); + assert.deepEqual(revokeEvent?.data.delegatorId, msaId, 'delegator ids should match'); + assert.deepEqual(revokeEvent?.data.providerId, providerId, 'provider ids should match'); + revokedAtBlock = await getBlockNumber(); + }); + + it('revoked delegation should be reflected in all previously-granted schema permissions', async function () { + // Make a block first to make sure the state has rolled to the next block + const currentBlock = await getBlockNumber(); + ExtrinsicHelper.runToBlock(currentBlock + 1); + const delegationsResponse = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); + assert(delegationsResponse.isSome); + const delegations: SchemaGrantResponse[] = delegationsResponse.unwrap().toArray(); + delegations.forEach((delegation) => { + const diff = delegation.revoked_at.toNumber() - revokedAtBlock; + // Due to parallelization, this could be off by a few blocks + assert(Math.abs(Number(diff.toString())) < 20); }); + }); + + it('should revoke a delegation by delegator and retire msa', async function () { + const delegatorKeys = createKeys(); + const payload = await generateDelegationPayload({ authorizedMsaId: providerId, schemaIds: [schemaId] }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + const op = ExtrinsicHelper.createSponsoredAccountWithDelegation( + delegatorKeys, + providerKeys, + signPayloadSr25519(delegatorKeys, addProviderData), + payload + ); + const { target: msaEvent } = await op.fundAndSend(fundingSource); + const newMsaId = msaEvent?.data.msaId; + assert.notEqual(newMsaId, undefined, 'should have returned an MSA'); + await assert.doesNotReject( + ExtrinsicHelper.revokeDelegationByProvider(newMsaId!, providerKeys).signAndSend('current') + ); + + const retireMsaOp = ExtrinsicHelper.retireMsa(delegatorKeys); + const { target: retireMsaEvent } = await retireMsaOp.signAndSend('current'); + assert.notEqual(retireMsaEvent, undefined, 'should have returned MsaRetired event'); + assert.deepEqual(retireMsaEvent?.data.msaId, newMsaId, 'msaId should be equal'); + }); + + it('should fail to retire msa with any active delegations', async function () { + const delegatorKeys = createKeys(); + const payload = await generateDelegationPayload({ authorizedMsaId: providerId, schemaIds: [schemaId] }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + const 
op = ExtrinsicHelper.createSponsoredAccountWithDelegation( + delegatorKeys, + providerKeys, + signPayloadSr25519(delegatorKeys, addProviderData), + payload + ); + const { target: msaEvent } = await op.fundAndSend(fundingSource); + const newMsaId = msaEvent?.data.msaId; + assert.notEqual(newMsaId, undefined, 'should have returned an MSA'); + const retireMsaOp = ExtrinsicHelper.retireMsa(delegatorKeys); + await assert.rejects(retireMsaOp.signAndSend('current'), { name: 'RpcError', message: /Custom error: 6$/ }); + }); + }); + }); - it("should fail to revoke delegation where no delegation exists (DelegationNotFound)", async function () { - const op = ExtrinsicHelper.revokeDelegationByDelegator(keys, otherProviderId); - await assert.rejects(op.signAndSend('current'), { name: 'RpcError', message: /Custom error: 0$/ }); - }); + describe('createSponsoredAccountWithDelegation', function () { + let sponsorKeys: KeyringPair; + let op: Extrinsic; + let defaultPayload: AddProviderPayload; - describe('Successful revocation', function () { - let newKeys: KeyringPair; - let msaId: u64 | undefined; - let revokedAtBlock: number; - - before(async function () { - newKeys = createKeys(); - const payload = await generateDelegationPayload({ authorizedMsaId: providerId, schemaIds: [schemaId] }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - const op = ExtrinsicHelper.createSponsoredAccountWithDelegation(newKeys, providerKeys, signPayloadSr25519(newKeys, addProviderData), payload); - const { target: msaEvent } = await op.fundAndSend(fundingSource); - msaId = msaEvent?.data.msaId; - assert.notEqual(msaId, undefined, 'should have returned an MSA'); - }); - - it("schema permissions revoked block of delegation should be zero", async function () { - const delegationsResponse = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); - assert(delegationsResponse.isSome); - const delegations: SchemaGrantResponse[] = delegationsResponse.unwrap().toArray(); - delegations.forEach((delegation) => { - assert(delegation.revoked_at.toBigInt() == 0n); - }) - }) - - it("should revoke a delegation by provider", async function () { - const op = ExtrinsicHelper.revokeDelegationByProvider(msaId as u64, providerKeys); - const { target: revokeEvent } = await op.signAndSend('current'); - assert.notEqual(revokeEvent, undefined, "should have returned a DelegationRevoked event"); - assert.deepEqual(revokeEvent?.data.delegatorId, msaId, 'delegator ids should match'); - assert.deepEqual(revokeEvent?.data.providerId, providerId, 'provider ids should match'); - revokedAtBlock = await getBlockNumber(); - }); - - it("revoked delegation should be reflected in all previously-granted schema permissions", async function () { - // Make a block first to make sure the state has rolled to the next block - const currentBlock = await getBlockNumber(); - ExtrinsicHelper.runToBlock(currentBlock + 1); - const delegationsResponse = await ExtrinsicHelper.apiPromise.rpc.msa.grantedSchemaIdsByMsaId(msaId, providerId); - assert(delegationsResponse.isSome); - const delegations: SchemaGrantResponse[] = delegationsResponse.unwrap().toArray(); - delegations.forEach((delegation) => { - const diff = delegation.revoked_at.toNumber() - revokedAtBlock; - // Due to parallelization, this could be off by a few blocks - assert(Math.abs(Number(diff.toString())) < 20); - }) - }) - - it("should revoke a delegation by delegator and retire msa", async function () { - const delegatorKeys = 
createKeys(); - const payload = await generateDelegationPayload({ authorizedMsaId: providerId, schemaIds: [schemaId] }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - const op = ExtrinsicHelper.createSponsoredAccountWithDelegation(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, addProviderData), payload); - const { target: msaEvent } = await op.fundAndSend(fundingSource); - const newMsaId = msaEvent?.data.msaId; - assert.notEqual(newMsaId, undefined, 'should have returned an MSA'); - await assert.doesNotReject(ExtrinsicHelper.revokeDelegationByProvider(newMsaId!, providerKeys).signAndSend('current')); - - const retireMsaOp = ExtrinsicHelper.retireMsa(delegatorKeys); - const { target: retireMsaEvent } = await retireMsaOp.signAndSend('current'); - assert.notEqual(retireMsaEvent, undefined, "should have returned MsaRetired event"); - assert.deepEqual(retireMsaEvent?.data.msaId, newMsaId, 'msaId should be equal'); - }); - - it("should fail to retire msa with any active delegations", async function () { - const delegatorKeys = createKeys(); - const payload = await generateDelegationPayload({ authorizedMsaId: providerId, schemaIds: [schemaId] }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); - const op = ExtrinsicHelper.createSponsoredAccountWithDelegation(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, addProviderData), payload); - const { target: msaEvent } = await op.fundAndSend(fundingSource); - const newMsaId = msaEvent?.data.msaId; - assert.notEqual(newMsaId, undefined, 'should have returned an MSA'); - const retireMsaOp = ExtrinsicHelper.retireMsa(delegatorKeys); - await assert.rejects(retireMsaOp.signAndSend('current'), { name: 'RpcError', message: /Custom error: 6$/ }); - }); - }); + before(async function () { + sponsorKeys = await createAndFundKeypair(fundingSource, 50_000_000n); + defaultPayload = { + authorizedMsaId: providerId, + schemaIds: [schemaId], + }; }); - describe("createSponsoredAccountWithDelegation", function () { - let sponsorKeys: KeyringPair; - let op: Extrinsic; - let defaultPayload: AddProviderPayload; - - before(async function () { - sponsorKeys = await createAndFundKeypair(fundingSource, 50_000_000n); - defaultPayload = { - authorizedMsaId: providerId, - schemaIds: [schemaId], - } - }); + it("should fail to create delegated account if provider ids don't match (UnauthorizedProvider)", async function () { + const payload = await generateDelegationPayload({ ...defaultPayload, authorizedMsaId: otherProviderId }); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + op = ExtrinsicHelper.createSponsoredAccountWithDelegation( + sponsorKeys, + providerKeys, + signPayloadSr25519(sponsorKeys, addProviderData), + payload + ); + await assert.rejects(op.fundAndSend(fundingSource), { name: 'UnauthorizedProvider' }); + }); - it("should fail to create delegated account if provider ids don't match (UnauthorizedProvider)", async function () { - const payload = await generateDelegationPayload({ ...defaultPayload, authorizedMsaId: otherProviderId }); - const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload); + it('should fail to create delegated account if payload signature cannot be verified (InvalidSignature)', async function () { + const payload = await generateDelegationPayload({ ...defaultPayload, schemaIds: [] }); + const addProviderData = 
ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload);
+
+      op = ExtrinsicHelper.createSponsoredAccountWithDelegation(
+        sponsorKeys,
+        providerKeys,
+        signPayloadSr25519(sponsorKeys, addProviderData),
+        { ...payload, ...defaultPayload }
+      );
+      await assert.rejects(op.fundAndSend(fundingSource), { name: 'InvalidSignature' });
+    });
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(sponsorKeys, providerKeys, signPayloadSr25519(sponsorKeys, addProviderData), payload);
-            await assert.rejects(op.fundAndSend(fundingSource), { name: 'UnauthorizedProvider' });
-        });
+    it('should fail to create delegated account if no MSA exists for origin (NoKeyExists)', async function () {
+      const payload = await generateDelegationPayload(defaultPayload);
+      const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload);
+
+      op = ExtrinsicHelper.createSponsoredAccountWithDelegation(
+        sponsorKeys,
+        noMsaKeys,
+        signPayloadSr25519(sponsorKeys, addProviderData),
+        payload
+      );
+      await assert.rejects(op.fundAndSend(fundingSource), { name: 'NoKeyExists' });
+    });
-        it("should fail to create delegated account if payload signature cannot be verified (InvalidSignature)", async function () {
-            const payload = await generateDelegationPayload({ ...defaultPayload, schemaIds: [] });
-            const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
+    it('should fail to create delegated account if MSA already exists for delegator (KeyAlreadyRegistered)', async function () {
+      const payload = await generateDelegationPayload(defaultPayload);
+      const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload);
+
+      op = ExtrinsicHelper.createSponsoredAccountWithDelegation(
+        keys,
+        providerKeys,
+        signPayloadSr25519(keys, addProviderData),
+        payload
+      );
+      await assert.rejects(op.fundAndSend(fundingSource), { name: 'KeyAlreadyRegistered' });
+    });
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(sponsorKeys, providerKeys, signPayloadSr25519(sponsorKeys, addProviderData), { ...payload, ...defaultPayload });
-            await assert.rejects(op.fundAndSend(fundingSource), { name: 'InvalidSignature' });
-        });
+    it('should fail to create delegated account if provider is not registered (ProviderNotRegistered)', async function () {
+      const payload = await generateDelegationPayload({ ...defaultPayload, authorizedMsaId: otherMsaId });
+      const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload);
+
+      op = ExtrinsicHelper.createSponsoredAccountWithDelegation(
+        keys,
+        otherMsaKeys,
+        signPayloadSr25519(keys, addProviderData),
+        payload
+      );
+      await assert.rejects(op.fundAndSend(fundingSource), { name: 'ProviderNotRegistered' });
+    });
-        it("should fail to create delegated account if no MSA exists for origin (NoKeyExists)", async function () {
-            const payload = await generateDelegationPayload(defaultPayload);
-            const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
+    it('should fail to create delegated account if payload proof is too far in the future (ProofNotYetValid)', async function () {
+      const payload = await generateDelegationPayload(defaultPayload, 999);
+      const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload);
+
+      op = ExtrinsicHelper.createSponsoredAccountWithDelegation(
+        sponsorKeys,
+        providerKeys,
+        signPayloadSr25519(sponsorKeys, addProviderData),
+        payload
+      );
+      await assert.rejects(op.fundAndSend(fundingSource), { name: 'ProofNotYetValid' });
+    });
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(sponsorKeys, noMsaKeys, signPayloadSr25519(sponsorKeys, addProviderData), payload);
-            await assert.rejects(op.fundAndSend(fundingSource), { name: 'NoKeyExists' });
-        });
+    it('should fail to create delegated account if payload proof has expired (ProofHasExpired)', async function () {
+      const payload = await generateDelegationPayload(defaultPayload, -1);
+      const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload);
+
+      op = ExtrinsicHelper.createSponsoredAccountWithDelegation(
+        sponsorKeys,
+        providerKeys,
+        signPayloadSr25519(sponsorKeys, addProviderData),
+        payload
+      );
+      await assert.rejects(op.fundAndSend(fundingSource), { name: 'ProofHasExpired' });
+    });
-        it("should fail to create delegated account if MSA already exists for delegator (KeyAlreadyRegistered)", async function () {
-            const payload = await generateDelegationPayload(defaultPayload);
-            const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
-
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(keys, providerKeys, signPayloadSr25519(keys, addProviderData), payload);
-            await assert.rejects(op.fundAndSend(fundingSource), { name: 'KeyAlreadyRegistered' });
-        })
-
-        it("should fail to create delegated account if provider is not registered (ProviderNotRegistered)", async function () {
-            const payload = await generateDelegationPayload({ ...defaultPayload, authorizedMsaId: otherMsaId });
-            const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
-
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(keys, otherMsaKeys, signPayloadSr25519(keys, addProviderData), payload);
-            await assert.rejects(op.fundAndSend(fundingSource), { name: 'ProviderNotRegistered' });
-        })
-
-        it("should fail to create delegated account if provider if payload proof is too far in the future (ProofNotYetValid)", async function () {
-            const payload = await generateDelegationPayload(defaultPayload, 999);
-            const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
-
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(sponsorKeys, providerKeys, signPayloadSr25519(sponsorKeys, addProviderData), payload);
-            await assert.rejects(op.fundAndSend(fundingSource), { name: 'ProofNotYetValid' });
-        })
-
-        it("should fail to create delegated account if provider if payload proof has expired (ProofHasExpired))", async function () {
-            const payload = await generateDelegationPayload(defaultPayload, -1);
-            const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
-
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(sponsorKeys, providerKeys, signPayloadSr25519(sponsorKeys, addProviderData), payload);
-            await assert.rejects(op.fundAndSend(fundingSource), { name: 'ProofHasExpired' });
-        })
-
-        it("should successfully create a delegated account", async function () {
-            const payload = await generateDelegationPayload(defaultPayload);
-            const addProviderData = ExtrinsicHelper.api.registry.createType("PalletMsaAddProvider", payload);
-
-            op = ExtrinsicHelper.createSponsoredAccountWithDelegation(sponsorKeys, providerKeys, signPayloadSr25519(sponsorKeys, addProviderData), payload);
-            const { target: event, eventMap } = await op.fundAndSend(fundingSource);
-            assert.notEqual(event, undefined, 'should
have returned MsaCreated event'); - assert.notEqual(eventMap["msa.DelegationGranted"], undefined, 'should have returned DelegationGranted event'); - await assert.rejects(op.fundAndSend(fundingSource), { name: 'SignatureAlreadySubmitted' }, "should reject double submission"); - }) - }) -}) + it('should successfully create a delegated account', async function () { + const payload = await generateDelegationPayload(defaultPayload); + const addProviderData = ExtrinsicHelper.api.registry.createType('PalletMsaAddProvider', payload); + + op = ExtrinsicHelper.createSponsoredAccountWithDelegation( + sponsorKeys, + providerKeys, + signPayloadSr25519(sponsorKeys, addProviderData), + payload + ); + const { target: event, eventMap } = await op.fundAndSend(fundingSource); + assert.notEqual(event, undefined, 'should have returned MsaCreated event'); + assert.notEqual(eventMap['msa.DelegationGranted'], undefined, 'should have returned DelegationGranted event'); + await assert.rejects( + op.fundAndSend(fundingSource), + { name: 'SignatureAlreadySubmitted' }, + 'should reject double submission' + ); + }); + }); +}); diff --git a/e2e/schemas/createSchema.test.ts b/e2e/schemas/createSchema.test.ts index d3592aaac3..bd3ee6b86b 100644 --- a/e2e/schemas/createSchema.test.ts +++ b/e2e/schemas/createSchema.test.ts @@ -1,119 +1,131 @@ -import "@frequency-chain/api-augment"; +import '@frequency-chain/api-augment'; -import assert from "assert"; +import assert from 'assert'; -import { AVRO_GRAPH_CHANGE } from "./fixtures/avroGraphChangeSchemaType"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { createKeys, createAndFundKeypair, assertExtrinsicSuccess } from "../scaffolding/helpers"; -import { getFundingSource } from "../scaffolding/funding"; +import { AVRO_GRAPH_CHANGE } from './fixtures/avroGraphChangeSchemaType'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { createKeys, createAndFundKeypair, assertExtrinsicSuccess } from '../scaffolding/helpers'; +import { getFundingSource } from '../scaffolding/funding'; -describe("#createSchema", function () { +const fundingSource = getFundingSource('schemas-create'); + +describe('#createSchema', function () { let keys: KeyringPair; let accountWithNoFunds: KeyringPair; - const fundingSource = getFundingSource("schemas-create"); before(async function () { keys = await createAndFundKeypair(fundingSource, 50_000_000n); accountWithNoFunds = createKeys(); }); - it("should fail if account does not have enough tokens", async function () { - - await assert.rejects(ExtrinsicHelper.createSchema(accountWithNoFunds, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain").signAndSend('current'), { - name: 'RpcError', - message: /Inability to pay some fees/, - }); + it('should fail if account does not have enough tokens', async function () { + await assert.rejects( + ExtrinsicHelper.createSchema(accountWithNoFunds, AVRO_GRAPH_CHANGE, 'AvroBinary', 'OnChain').signAndSend( + 'current' + ), + { + name: 'RpcError', + message: /Inability to pay some fees/, + } + ); }); - it("should fail if account does not have enough tokens v2", async function () { - - await assert.rejects(ExtrinsicHelper.createSchemaV2(accountWithNoFunds, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain",[]).signAndSend('current'), { - name: 'RpcError', - message: /Inability to pay some fees/, - }); + it('should fail if account does not have enough tokens v2', async function () 
{ + await assert.rejects( + ExtrinsicHelper.createSchemaV2(accountWithNoFunds, AVRO_GRAPH_CHANGE, 'AvroBinary', 'OnChain', []).signAndSend( + 'current' + ), + { + name: 'RpcError', + message: /Inability to pay some fees/, + } + ); }); - it("should fail to create invalid schema", async function () { - const f = ExtrinsicHelper.createSchema(keys, new Array(1000, 3), "AvroBinary", "OnChain"); + it('should fail to create invalid schema', async function () { + const f = ExtrinsicHelper.createSchema(keys, [1000, 3], 'AvroBinary', 'OnChain'); await assert.rejects(f.fundAndSend(fundingSource), { name: 'InvalidSchema', }); }); - it("should fail to create invalid schema v2", async function () { - const f = ExtrinsicHelper.createSchemaV2(keys, new Array(1000, 3), "AvroBinary", "OnChain", []); + it('should fail to create invalid schema v2', async function () { + const f = ExtrinsicHelper.createSchemaV2(keys, [1000, 3], 'AvroBinary', 'OnChain', []); await assert.rejects(f.fundAndSend(fundingSource), { name: 'InvalidSchema', }); }); - it("should fail to create schema less than minimum size", async function () { - const f = ExtrinsicHelper.createSchema(keys, {}, "AvroBinary", "OnChain"); + it('should fail to create schema less than minimum size', async function () { + const f = ExtrinsicHelper.createSchema(keys, {}, 'AvroBinary', 'OnChain'); await assert.rejects(f.fundAndSend(fundingSource), { name: 'LessThanMinSchemaModelBytes', }); }); - it("should fail to create schema less than minimum size v2", async function () { - const f = ExtrinsicHelper.createSchemaV2(keys, {}, "AvroBinary", "OnChain", []); + it('should fail to create schema less than minimum size v2', async function () { + const f = ExtrinsicHelper.createSchemaV2(keys, {}, 'AvroBinary', 'OnChain', []); await assert.rejects(f.fundAndSend(fundingSource), { name: 'LessThanMinSchemaModelBytes', }); }); - it("should fail to create schema greater than maximum size", async function () { + it('should fail to create schema greater than maximum size', async function () { const maxBytes = (await ExtrinsicHelper.getSchemaMaxBytes()).toNumber(); // Create a schema whose JSON representation is exactly 1 byte larger than the max allowed const hugeSchema = { - type: "record", + type: 'record', fields: [], - } + }; const hugeSize = JSON.stringify(hugeSchema).length; const sizeToFill = maxBytes - hugeSize - ',"name":""'.length + 1; - hugeSchema["name"] = Array.from(Array(sizeToFill).keys()).map(i => 'a').join(''); + hugeSchema['name'] = Array.from(Array(sizeToFill).keys()) + .map(() => 'a') + .join(''); - const f = ExtrinsicHelper.createSchema(keys, hugeSchema, "AvroBinary", "OnChain"); + const f = ExtrinsicHelper.createSchema(keys, hugeSchema, 'AvroBinary', 'OnChain'); await assert.rejects(f.fundAndSend(fundingSource), { name: 'ExceedsMaxSchemaModelBytes', }); }); - it("should fail to create schema greater than maximum size v2", async function () { + it('should fail to create schema greater than maximum size v2', async function () { const maxBytes = (await ExtrinsicHelper.getSchemaMaxBytes()).toNumber(); // Create a schema whose JSON representation is exactly 1 byte larger than the max allowed const hugeSchema = { - type: "record", + type: 'record', fields: [], - } + }; const hugeSize = JSON.stringify(hugeSchema).length; const sizeToFill = maxBytes - hugeSize - ',"name":""'.length + 1; - hugeSchema["name"] = Array.from(Array(sizeToFill).keys()).map(i => 'a').join(''); + hugeSchema['name'] = Array.from(Array(sizeToFill).keys()) + .map(() => 'a') + .join(''); - 
const f = ExtrinsicHelper.createSchemaV2(keys, hugeSchema, "AvroBinary", "OnChain", []); + const f = ExtrinsicHelper.createSchemaV2(keys, hugeSchema, 'AvroBinary', 'OnChain', []); await assert.rejects(f.fundAndSend(fundingSource), { name: 'ExceedsMaxSchemaModelBytes', }); }); - it("should successfully create an Avro GraphChange schema", async function () { - const f = ExtrinsicHelper.createSchema(keys, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain"); + it('should successfully create an Avro GraphChange schema', async function () { + const f = ExtrinsicHelper.createSchema(keys, AVRO_GRAPH_CHANGE, 'AvroBinary', 'OnChain'); const { target: createSchemaEvent, eventMap } = await f.fundAndSend(fundingSource); assertExtrinsicSuccess(eventMap); assert.notEqual(createSchemaEvent, undefined); }); - it("should successfully create an Avro GraphChange schema v2", async function () { - const f = ExtrinsicHelper.createSchemaV2(keys, AVRO_GRAPH_CHANGE, "AvroBinary", "OnChain", []); + it('should successfully create an Avro GraphChange schema v2', async function () { + const f = ExtrinsicHelper.createSchemaV2(keys, AVRO_GRAPH_CHANGE, 'AvroBinary', 'OnChain', []); const { target: createSchemaEvent, eventMap } = await f.fundAndSend(fundingSource); assertExtrinsicSuccess(eventMap); assert.notEqual(createSchemaEvent, undefined); }); - -}) +}); diff --git a/e2e/schemas/fixtures/avroGraphChangeSchemaType.ts b/e2e/schemas/fixtures/avroGraphChangeSchemaType.ts index 2535b6c58f..52154a7b37 100644 --- a/e2e/schemas/fixtures/avroGraphChangeSchemaType.ts +++ b/e2e/schemas/fixtures/avroGraphChangeSchemaType.ts @@ -1,27 +1,27 @@ export const AVRO_GRAPH_CHANGE = { - type: "record", - name: "GraphChange", - fields: [ - // When converting from Frequency Schema Message to DSNP Announcement, assume announcementType=1 - { - name: "changeType", - type: { - name: "ChangeTypeEnum", - type: "enum", - symbols: ["Unfollow", "Follow"], // Encoded as int - }, + type: 'record', + name: 'GraphChange', + fields: [ + // When converting from Frequency Schema Message to DSNP Announcement, assume announcementType=1 + { + name: 'changeType', + type: { + name: 'ChangeTypeEnum', + type: 'enum', + symbols: ['Unfollow', 'Follow'], // Encoded as int }, - { - name: "fromId", - type: { - name: "DSNPId", - type: "fixed", - size: 8, - }, + }, + { + name: 'fromId', + type: { + name: 'DSNPId', + type: 'fixed', + size: 8, }, - { - name: "objectId", - type: "DSNPId", - }, - ], - }; \ No newline at end of file + }, + { + name: 'objectId', + type: 'DSNPId', + }, + ], +}; diff --git a/e2e/schemas/fixtures/parquetBroadcastSchemaType.ts b/e2e/schemas/fixtures/parquetBroadcastSchemaType.ts index b595ab8d99..66bdb28558 100644 --- a/e2e/schemas/fixtures/parquetBroadcastSchemaType.ts +++ b/e2e/schemas/fixtures/parquetBroadcastSchemaType.ts @@ -1,36 +1,36 @@ export const PARQUET_BROADCAST = [ - { - name: "announcementType", - column_type: { - INTEGER: { - bit_width: 32, - sign: true, - }, - }, - compression: "GZIP", - bloom_filter: false, + { + name: 'announcementType', + column_type: { + INTEGER: { + bit_width: 32, + sign: true, + }, }, - { - name: "contentHash", - column_type: "BYTE_ARRAY", - compression: "GZIP", - bloom_filter: true, + compression: 'GZIP', + bloom_filter: false, + }, + { + name: 'contentHash', + column_type: 'BYTE_ARRAY', + compression: 'GZIP', + bloom_filter: true, + }, + { + name: 'fromId', + column_type: { + INTEGER: { + bit_width: 64, + sign: false, + }, }, - { - name: "fromId", - column_type: { - INTEGER: { - bit_width: 64, - sign: false, - }, 
- }, - compression: "GZIP", - bloom_filter: true, - }, - { - name: "url", - column_type: "STRING", - compression: "GZIP", - bloom_filter: false, - }, -]; \ No newline at end of file + compression: 'GZIP', + bloom_filter: true, + }, + { + name: 'url', + column_type: 'STRING', + compression: 'GZIP', + bloom_filter: false, + }, +]; diff --git a/e2e/stateful-pallet-storage/fixtures/itemizedSchemaType.ts b/e2e/stateful-pallet-storage/fixtures/itemizedSchemaType.ts index 85bb82aabd..84becf1568 100644 --- a/e2e/stateful-pallet-storage/fixtures/itemizedSchemaType.ts +++ b/e2e/stateful-pallet-storage/fixtures/itemizedSchemaType.ts @@ -1,28 +1,28 @@ export const AVRO_CHAT_MESSAGE = { - type: "record", - name: "ChatMessage", - fields: [ - { - name: "fromId", - type: { - name: "DSNPId", - type: "fixed", - size: 8, - }, - }, - { - name: "message", - type: "string", - }, - { - name: "inReplyTo", - type: ["null", "DSNPId"], - default: null, - }, - { - name: "url", - type: ["null", "string"], - default: null, - }, - ], + type: 'record', + name: 'ChatMessage', + fields: [ + { + name: 'fromId', + type: { + name: 'DSNPId', + type: 'fixed', + size: 8, + }, + }, + { + name: 'message', + type: 'string', + }, + { + name: 'inReplyTo', + type: ['null', 'DSNPId'], + default: null, + }, + { + name: 'url', + type: ['null', 'string'], + default: null, + }, + ], }; diff --git a/e2e/stateful-pallet-storage/handleItemized.test.ts b/e2e/stateful-pallet-storage/handleItemized.test.ts index d2873297a0..d0563ddde6 100644 --- a/e2e/stateful-pallet-storage/handleItemized.test.ts +++ b/e2e/stateful-pallet-storage/handleItemized.test.ts @@ -1,215 +1,296 @@ // E2E tests for pallets/stateful-pallet-storage/handleItemized.ts -import "@frequency-chain/api-augment"; -import assert from "assert"; -import {DOLLARS, createDelegatorAndDelegation, createMsa, createProviderKeysAndId, getCurrentItemizedHash} from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { AVRO_CHAT_MESSAGE } from "../stateful-pallet-storage/fixtures/itemizedSchemaType"; -import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfaces"; -import { Bytes, u16, u64 } from "@polkadot/types"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("📗 Stateful Pallet Storage", function () { - const fundingSource = getFundingSource("stateful-storage-handle-itemized"); - let schemaId_deletable: SchemaId; - let schemaId_unsupported: SchemaId; - let msa_id: MessageSourceId; - let providerId: MessageSourceId; - let providerKeys: KeyringPair; - let badMsaId: u64; - - before(async function () { - // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); - assert.notEqual(providerId, undefined, "setup should populate providerId"); - assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); - - // Create a schema to allow delete actions - const createSchemaDeletable = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); - const { target: eventDeletable } = await createSchemaDeletable.signAndSend(); - schemaId_deletable = eventDeletable!.data.schemaId; - // Create non supported schema - const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "OnChain"); - const { target: event2 } = await createSchema2.signAndSend(); - 
assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); - schemaId_unsupported = event2!.data.schemaId; - // Create a MSA for the delegator and delegate to the provider - [, msa_id] = await createDelegatorAndDelegation(fundingSource, schemaId_deletable, providerId, providerKeys); - assert.notEqual(msa_id, undefined, "setup should populate msa_id"); - // Create an MSA that is not a provider to be used for testing failure cases - [badMsaId] = await createMsa(fundingSource); +import '@frequency-chain/api-augment'; +import assert from 'assert'; +import { + DOLLARS, + createDelegatorAndDelegation, + createMsa, + createProviderKeysAndId, + getCurrentItemizedHash, +} from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { AVRO_CHAT_MESSAGE } from '../stateful-pallet-storage/fixtures/itemizedSchemaType'; +import { MessageSourceId, SchemaId } from '@frequency-chain/api-augment/interfaces'; +import { Bytes, u16, u64 } from '@polkadot/types'; +import { getFundingSource } from '../scaffolding/funding'; + +const fundingSource = getFundingSource('stateful-storage-handle-itemized'); + +describe('📗 Stateful Pallet Storage', function () { + let schemaId_deletable: SchemaId; + let schemaId_unsupported: SchemaId; + let msa_id: MessageSourceId; + let providerId: MessageSourceId; + let providerKeys: KeyringPair; + let badMsaId: u64; + + before(async function () { + // Create a provider for the MSA, the provider will be used to grant delegation + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); + assert.notEqual(providerId, undefined, 'setup should populate providerId'); + assert.notEqual(providerKeys, undefined, 'setup should populate providerKeys'); + + // Create a schema to allow delete actions + const createSchemaDeletable = ExtrinsicHelper.createSchema( + providerKeys, + AVRO_CHAT_MESSAGE, + 'AvroBinary', + 'Itemized' + ); + const { target: eventDeletable } = await createSchemaDeletable.signAndSend(); + schemaId_deletable = eventDeletable!.data.schemaId; + // Create non supported schema + const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, 'AvroBinary', 'OnChain'); + const { target: event2 } = await createSchema2.signAndSend(); + assert.notEqual(event2, undefined, 'setup should return a SchemaCreated event'); + schemaId_unsupported = event2!.data.schemaId; + // Create a MSA for the delegator and delegate to the provider + [, msa_id] = await createDelegatorAndDelegation(fundingSource, schemaId_deletable, providerId, providerKeys); + assert.notEqual(msa_id, undefined, 'setup should populate msa_id'); + // Create an MSA that is not a provider to be used for testing failure cases + [badMsaId] = await createMsa(fundingSource); + }); + + describe('Itemized Storage Tests 😊/😥', function () { + it('✅ should be able to call applyItemizedAction and apply actions', async function () { + // Add and update actions + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + + const add_action = { + Add: payload_1, + }; + + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); + + const update_action = { + Add: payload_2, + }; + + const target_hash = await getCurrentItemizedHash(msa_id, schemaId_deletable); + + const add_actions = [add_action, update_action]; + const itemized_add_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + 
schemaId_deletable, + msa_id, + add_actions, + target_hash + ); + const { target: pageUpdateEvent1, eventMap: chainEvents } = + await itemized_add_result_1.fundAndSend(fundingSource); + assert.notEqual( + chainEvents['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual( + pageUpdateEvent1, + undefined, + 'should have returned a PalletStatefulStorageItemizedActionApplied event' + ); }); - describe("Itemized Storage Tests 😊/😥", function () { - - it("✅ should be able to call applyItemizedAction and apply actions", async function () { - - // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - - const add_action = { - "Add": payload_1 - } - - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); - - const update_action = { - "Add": payload_2 - } - - const target_hash = await getCurrentItemizedHash(msa_id, schemaId_deletable); - - let add_actions = [add_action, update_action]; - let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, msa_id, add_actions, target_hash); - const { target: pageUpdateEvent1, eventMap: chainEvents } = await itemized_add_result_1.fundAndSend(fundingSource); - assert.notEqual(chainEvents["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageUpdateEvent1, undefined, "should have returned a PalletStatefulStorageItemizedActionApplied event"); - }); - - it("🛑 should fail call to applyItemizedAction with invalid schemaId", async function () { - - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - const add_action = { - "Add": payload_1 - } - let add_actions = [add_action]; - let fake_schema_id = new u16(ExtrinsicHelper.api.registry, 65_534); - let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, fake_schema_id, msa_id, add_actions, 0); - await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { - name: 'InvalidSchemaId', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to applyItemizedAction with invalid schema location", async function () { - - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - const add_action = { - "Add": payload_1 - } - let add_actions = [add_action]; - let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_unsupported, msa_id, add_actions, 0); - await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { - name: 'SchemaPayloadLocationMismatch', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to applyItemizedAction with for un-delegated attempts", async function () { - - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - const add_action = { - "Add": payload_1 - } - let add_actions = [add_action]; - - let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, badMsaId, add_actions, 0); - await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { - name: 'UnauthorizedDelegate', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call 
to applyItemizedAction for target hash mismatch", async function () { - - // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - - const add_action = { - "Add": payload_1 - } - - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); - - const update_action = { - "Add": payload_2 - } - - let add_actions = [add_action, update_action]; - let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, msa_id, add_actions, 0); - await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { name: 'StalePageState' }); - }); + it('🛑 should fail call to applyItemizedAction with invalid schemaId', async function () { + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + const add_action = { + Add: payload_1, + }; + const add_actions = [add_action]; + const fake_schema_id = new u16(ExtrinsicHelper.api.registry, 65_534); + const itemized_add_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + fake_schema_id, + msa_id, + add_actions, + 0 + ); + await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { + name: 'InvalidSchemaId', + section: 'statefulStorage', + }); }); - describe("Itemized Storage Remove Action Tests", function () { - - it("✅ should be able to call applyItemizedAction and apply remove actions", async function () { - let target_hash = await getCurrentItemizedHash(msa_id, schemaId_deletable); - - // Delete action - const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1) - const remove_action_1 = { - "Delete": idx_1, - } - - target_hash = await getCurrentItemizedHash(msa_id, schemaId_deletable); - - let remove_actions = [remove_action_1]; - let itemized_remove_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, msa_id, remove_actions, target_hash); - const { target: pageUpdateEvent2, eventMap: chainEvents2 } = await itemized_remove_result_1.fundAndSend(fundingSource); - assert.notEqual(chainEvents2["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents2["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageUpdateEvent2, undefined, "should have returned a event"); - }); - - it("🛑 should fail call to remove action with invalid schemaId", async function () { - const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1) - const remove_action_1 = { - "Delete": idx_1, - } - let remove_actions = [remove_action_1]; - let fake_schema_id = new u16(ExtrinsicHelper.api.registry, 65_534); - let itemized_remove_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, fake_schema_id, msa_id, remove_actions, 0); - await assert.rejects(itemized_remove_result_1.fundAndSend(fundingSource), { - name: 'InvalidSchemaId', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to remove action with invalid schema location", async function () { - const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1) - const remove_action_1 = { - "Delete": idx_1, - } - let remove_actions = [remove_action_1]; - let itemized_remove_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_unsupported, msa_id, remove_actions, 0); - await assert.rejects(itemized_remove_result_1.fundAndSend(fundingSource), { - name: 'SchemaPayloadLocationMismatch', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to remove action with 
invalid msa_id", async function () { - const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1) - const remove_action_1 = { - "Delete": idx_1, - } - let remove_actions = [remove_action_1]; - - let itemized_remove_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, badMsaId, remove_actions, 0); - await assert.rejects(itemized_remove_result_1.fundAndSend(fundingSource), { - name: 'UnauthorizedDelegate', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to remove action with stale state hash", async function () { - const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1); - const remove_action = { - "Delete": idx_1, - } - let remove_actions = [remove_action]; - let op = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, msa_id, remove_actions, 0); - await assert.rejects(op.fundAndSend(fundingSource), { name: 'StalePageState' }) - }); + it('🛑 should fail call to applyItemizedAction with invalid schema location', async function () { + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + const add_action = { + Add: payload_1, + }; + const add_actions = [add_action]; + const itemized_add_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + schemaId_unsupported, + msa_id, + add_actions, + 0 + ); + await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { + name: 'SchemaPayloadLocationMismatch', + section: 'statefulStorage', + }); }); - describe("Itemized Storage RPC Tests", function () { - it("✅ should be able to call getItemizedStorage and get data for itemized schema", async function () { - const result = await ExtrinsicHelper.getItemizedStorage(msa_id, schemaId_deletable); - assert.notEqual(result.hash, undefined, "should have returned a hash"); - assert.notEqual(result.size, undefined, "should have returned a itemized responses"); - }); + it('🛑 should fail call to applyItemizedAction with for un-delegated attempts', async function () { + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + const add_action = { + Add: payload_1, + }; + const add_actions = [add_action]; + + const itemized_add_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + schemaId_deletable, + badMsaId, + add_actions, + 0 + ); + await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { + name: 'UnauthorizedDelegate', + section: 'statefulStorage', + }); }); + + it('🛑 should fail call to applyItemizedAction for target hash mismatch', async function () { + // Add and update actions + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + + const add_action = { + Add: payload_1, + }; + + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); + + const update_action = { + Add: payload_2, + }; + + const add_actions = [add_action, update_action]; + const itemized_add_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + schemaId_deletable, + msa_id, + add_actions, + 0 + ); + await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { name: 'StalePageState' }); + }); + }); + + describe('Itemized Storage Remove Action Tests', function () { + it('✅ should be able to call applyItemizedAction and apply remove actions', async function () { + let target_hash = await getCurrentItemizedHash(msa_id, schemaId_deletable); + + // Delete action + const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1); + const remove_action_1 = { + Delete: idx_1, + }; + 
+ target_hash = await getCurrentItemizedHash(msa_id, schemaId_deletable); + + const remove_actions = [remove_action_1]; + const itemized_remove_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + schemaId_deletable, + msa_id, + remove_actions, + target_hash + ); + const { target: pageUpdateEvent2, eventMap: chainEvents2 } = + await itemized_remove_result_1.fundAndSend(fundingSource); + assert.notEqual( + chainEvents2['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents2['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual(pageUpdateEvent2, undefined, 'should have returned a event'); + }); + + it('🛑 should fail call to remove action with invalid schemaId', async function () { + const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1); + const remove_action_1 = { + Delete: idx_1, + }; + const remove_actions = [remove_action_1]; + const fake_schema_id = new u16(ExtrinsicHelper.api.registry, 65_534); + const itemized_remove_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + fake_schema_id, + msa_id, + remove_actions, + 0 + ); + await assert.rejects(itemized_remove_result_1.fundAndSend(fundingSource), { + name: 'InvalidSchemaId', + section: 'statefulStorage', + }); + }); + + it('🛑 should fail call to remove action with invalid schema location', async function () { + const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1); + const remove_action_1 = { + Delete: idx_1, + }; + const remove_actions = [remove_action_1]; + const itemized_remove_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + schemaId_unsupported, + msa_id, + remove_actions, + 0 + ); + await assert.rejects(itemized_remove_result_1.fundAndSend(fundingSource), { + name: 'SchemaPayloadLocationMismatch', + section: 'statefulStorage', + }); + }); + + it('🛑 should fail call to remove action with invalid msa_id', async function () { + const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1); + const remove_action_1 = { + Delete: idx_1, + }; + const remove_actions = [remove_action_1]; + + const itemized_remove_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + schemaId_deletable, + badMsaId, + remove_actions, + 0 + ); + await assert.rejects(itemized_remove_result_1.fundAndSend(fundingSource), { + name: 'UnauthorizedDelegate', + section: 'statefulStorage', + }); + }); + + it('🛑 should fail call to remove action with stale state hash', async function () { + const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1); + const remove_action = { + Delete: idx_1, + }; + const remove_actions = [remove_action]; + const op = ExtrinsicHelper.applyItemActions(providerKeys, schemaId_deletable, msa_id, remove_actions, 0); + await assert.rejects(op.fundAndSend(fundingSource), { name: 'StalePageState' }); + }); + }); + + describe('Itemized Storage RPC Tests', function () { + it('✅ should be able to call getItemizedStorage and get data for itemized schema', async function () { + const result = await ExtrinsicHelper.getItemizedStorage(msa_id, schemaId_deletable); + assert.notEqual(result.hash, undefined, 'should have returned a hash'); + assert.notEqual(result.size, undefined, 'should have returned a itemized responses'); + }); + }); }); diff --git a/e2e/stateful-pallet-storage/handlePaginated.test.ts b/e2e/stateful-pallet-storage/handlePaginated.test.ts index 69cf897a3d..1a369a9fc0 100644 --- a/e2e/stateful-pallet-storage/handlePaginated.test.ts +++ 
b/e2e/stateful-pallet-storage/handlePaginated.test.ts @@ -1,177 +1,251 @@ // E2E tests for pallets/stateful-pallet-storage/handlePaginated.ts -import "@frequency-chain/api-augment"; -import assert from "assert"; -import {createProviderKeysAndId, createDelegatorAndDelegation, getCurrentPaginatedHash, createMsa, DOLLARS} from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { AVRO_CHAT_MESSAGE } from "./fixtures/itemizedSchemaType"; -import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfaces"; -import { Bytes, u16, u64 } from "@polkadot/types"; -import { getFundingSource } from "../scaffolding/funding"; +import '@frequency-chain/api-augment'; +import assert from 'assert'; +import { + createProviderKeysAndId, + createDelegatorAndDelegation, + getCurrentPaginatedHash, + createMsa, + DOLLARS, +} from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { AVRO_CHAT_MESSAGE } from './fixtures/itemizedSchemaType'; +import { MessageSourceId, SchemaId } from '@frequency-chain/api-augment/interfaces'; +import { Bytes, u16, u64 } from '@polkadot/types'; +import { getFundingSource } from '../scaffolding/funding'; const badSchemaId = 65_534; +const fundingSource = getFundingSource('stateful-storage-handle-paginated'); + +describe('📗 Stateful Pallet Storage', function () { + let schemaId: SchemaId; + let schemaId_unsupported: SchemaId; + let msa_id: MessageSourceId; + let providerId: MessageSourceId; + let providerKeys: KeyringPair; + let badMsaId: u64; + + before(async function () { + // Create a provider for the MSA, the provider will be used to grant delegation + [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); + assert.notEqual(providerId, undefined, 'setup should populate providerId'); + assert.notEqual(providerKeys, undefined, 'setup should populate providerKeys'); + + // Create a schema for Paginated PayloadLocation + const createSchema = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, 'AvroBinary', 'Paginated'); + const { target: event } = await createSchema.signAndSend(); + schemaId = event!.data.schemaId; + + // Create non supported schema + const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, 'AvroBinary', 'OnChain'); + const { target: event2 } = await createSchema2.signAndSend(); + assert.notEqual(event2, undefined, 'setup should return a SchemaCreated event'); + schemaId_unsupported = event2!.data.schemaId; + + // Create a MSA for the delegator and delegate to the provider + [, msa_id] = await createDelegatorAndDelegation(fundingSource, schemaId, providerId, providerKeys); + assert.notEqual(msa_id, undefined, 'setup should populate msa_id'); + + // Create an MSA that is not a provider to be used for testing failure cases + [badMsaId] = await createMsa(fundingSource); + }); + + describe('Paginated Storage Upsert/Remove Tests 😊/😥', function () { + it('✅ should be able to call upsert page and add a page and remove a page via id', async function () { + let page_id = 0; + let target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id); + + // Add and update actions + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + const paginated_add_result_1 = ExtrinsicHelper.upsertPage( + providerKeys, + schemaId, + msa_id, + page_id, + 
payload_1, + target_hash + ); + const { target: pageUpdateEvent1, eventMap: chainEvents } = + await paginated_add_result_1.fundAndSend(fundingSource); + assert.notEqual( + chainEvents['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual( + pageUpdateEvent1, + undefined, + 'should have returned a PalletStatefulStoragepaginatedActionApplied event' + ); + + // Add another page + page_id = 1; + target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id); + const paginated_add_result_2 = ExtrinsicHelper.upsertPage( + providerKeys, + schemaId, + msa_id, + page_id, + payload_1, + target_hash + ); + const { target: pageUpdateEvent2, eventMap: chainEvents2 } = + await paginated_add_result_2.fundAndSend(fundingSource); + assert.notEqual( + chainEvents2['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents2['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual( + pageUpdateEvent2, + undefined, + 'should have returned a PalletStatefulStoragepaginatedActionApplied event' + ); + + // Remove the second page + target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id); + const paginated_remove_result_1 = ExtrinsicHelper.removePage( + providerKeys, + schemaId, + msa_id, + page_id, + target_hash + ); + const { target: pageRemove, eventMap: chainEvents3 } = await paginated_remove_result_1.fundAndSend(fundingSource); + assert.notEqual( + chainEvents3['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents3['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual(pageRemove, undefined, 'should have returned a event'); + }); + + it('🛑 should fail call to upsert page with invalid schemaId', async function () { + const page_id = 0; + const target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + const fake_schema_id = new u16(ExtrinsicHelper.api.registry, badSchemaId); + const paginated_add_result_1 = ExtrinsicHelper.upsertPage( + providerKeys, + fake_schema_id, + msa_id, + page_id, + payload_1, + target_hash + ); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'InvalidSchemaId', + section: 'statefulStorage', + }); + }); + + it('🛑 should fail call to upsert page with invalid schema location', async function () { + const page_id = 0; + const target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + const paginated_add_result_1 = ExtrinsicHelper.upsertPage( + providerKeys, + schemaId_unsupported, + msa_id, + page_id, + payload_1, + target_hash + ); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'SchemaPayloadLocationMismatch', + section: 'statefulStorage', + }); + }); -describe("📗 Stateful Pallet Storage", function () { - const fundingSource = getFundingSource("stateful-storage-handle-paginated"); - - let schemaId: SchemaId; - let schemaId_unsupported: SchemaId; - let msa_id: MessageSourceId; - let providerId: 
MessageSourceId; - let providerKeys: KeyringPair; - let badMsaId: u64; - - before(async function () { - // Create a provider for the MSA, the provider will be used to grant delegation - [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); - assert.notEqual(providerId, undefined, "setup should populate providerId"); - assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); - - // Create a schema for Paginated PayloadLocation - const createSchema = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Paginated"); - const { target: event } = await createSchema.signAndSend(); - schemaId = event!.data.schemaId; - - // Create non supported schema - const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "OnChain"); - const { target: event2 } = await createSchema2.signAndSend(); - assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); - schemaId_unsupported = event2!.data.schemaId; - - // Create a MSA for the delegator and delegate to the provider - [, msa_id] = await createDelegatorAndDelegation(fundingSource, schemaId, providerId, providerKeys); - assert.notEqual(msa_id, undefined, "setup should populate msa_id"); - - // Create an MSA that is not a provider to be used for testing failure cases - [badMsaId] = await createMsa(fundingSource); + it('🛑 should fail call to upsert page with for un-delegated attempts', async function () { + const page_id = 0; + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + + const target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id); + const paginated_add_result_1 = ExtrinsicHelper.upsertPage( + providerKeys, + schemaId, + badMsaId, + page_id, + payload_1, + target_hash + ); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'UnauthorizedDelegate', + section: 'statefulStorage', + }); }); - describe("Paginated Storage Upsert/Remove Tests 😊/😥", function () { - - it("✅ should be able to call upsert page and add a page and remove a page via id", async function () { - let page_id = 0; - let target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id) - - // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, schemaId, msa_id, page_id, payload_1, target_hash); - const { target: pageUpdateEvent1, eventMap: chainEvents } = await paginated_add_result_1.fundAndSend(fundingSource); - assert.notEqual(chainEvents["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageUpdateEvent1, undefined, "should have returned a PalletStatefulStoragepaginatedActionApplied event"); - - // Add another page - page_id = 1; - target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id) - let paginated_add_result_2 = ExtrinsicHelper.upsertPage(providerKeys, schemaId, msa_id, page_id, payload_1, target_hash); - const { target: pageUpdateEvent2, eventMap: chainEvents2 } = await paginated_add_result_2.fundAndSend(fundingSource); - assert.notEqual(chainEvents2["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents2["transactionPayment.TransactionFeePaid"], 
undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageUpdateEvent2, undefined, "should have returned a PalletStatefulStoragepaginatedActionApplied event"); - - // Remove the second page - target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id) - let paginated_remove_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId, msa_id, page_id, target_hash); - const { target: pageRemove, eventMap: chainEvents3 } = await paginated_remove_result_1.fundAndSend(fundingSource); - assert.notEqual(chainEvents3["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents3["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageRemove, undefined, "should have returned a event"); - }); - - it("🛑 should fail call to upsert page with invalid schemaId", async function () { - - let page_id = 0; - let target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id) - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - let fake_schema_id = new u16(ExtrinsicHelper.api.registry, badSchemaId); - let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, fake_schema_id, msa_id, page_id, payload_1, target_hash); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'InvalidSchemaId', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to upsert page with invalid schema location", async function () { - - let page_id = 0; - let target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id) - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, schemaId_unsupported, msa_id, page_id, payload_1, target_hash); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'SchemaPayloadLocationMismatch', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to upsert page with for un-delegated attempts", async function () { - - let page_id = 0; - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - - let target_hash = await getCurrentPaginatedHash(msa_id, schemaId, page_id) - let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, schemaId, badMsaId, page_id, payload_1, target_hash); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'UnauthorizedDelegate', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to upsert page with stale target hash", async function () { - - let page_id = 0; - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); - - let paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, schemaId, msa_id, page_id, payload_1, 0); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'StalePageState', - section: 'statefulStorage', - }); - }); + it('🛑 should fail call to upsert page with stale target hash', async function () { + const page_id = 0; + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); + + const paginated_add_result_1 = ExtrinsicHelper.upsertPage(providerKeys, schemaId, msa_id, page_id, payload_1, 0); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'StalePageState', + section: 'statefulStorage', + }); + }); + }); + + 
describe('Paginated Storage Removal Negative Tests 😊/😥', function () { + it('🛑 should fail call to remove page with invalid schemaId', async function () { + const page_id = 0; + const paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, badSchemaId, msa_id, page_id, 0); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'InvalidSchemaId', + section: 'statefulStorage', + }); }); - describe("Paginated Storage Removal Negative Tests 😊/😥", function () { - - it("🛑 should fail call to remove page with invalid schemaId", async function () { - let page_id = 0; - let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, badSchemaId, msa_id, page_id, 0); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'InvalidSchemaId', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to remove page with invalid schema location", async function () { - let page_id = 0; - let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId_unsupported, msa_id, page_id, 0); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'SchemaPayloadLocationMismatch', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to remove page for un-delegated attempts", async function () { - let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId, badMsaId, 0, 0); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'UnauthorizedDelegate', - section: 'statefulStorage', - }); - }); - - it("🛑 should fail call to remove page with stale target hash", async function () { - let paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId, msa_id, 0, 0); - await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { - name: 'StalePageState', - section: 'statefulStorage', - }); - }); + it('🛑 should fail call to remove page with invalid schema location', async function () { + const page_id = 0; + const paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId_unsupported, msa_id, page_id, 0); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'SchemaPayloadLocationMismatch', + section: 'statefulStorage', + }); }); - describe("Paginated Storage RPC Tests", function () { - it("✅ should be able to call get_paginated_storage and get paginated data", async function () { - const result = await ExtrinsicHelper.getPaginatedStorage(msa_id, schemaId); - assert.notEqual(result, undefined, "should have returned a valid response"); - assert.notEqual(result.length, 0, "should have returned paginated responses"); - assert.notEqual(result[0].hash, undefined, "should have returned a valid page"); - }); + it('🛑 should fail call to remove page for un-delegated attempts', async function () { + const paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId, badMsaId, 0, 0); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'UnauthorizedDelegate', + section: 'statefulStorage', + }); + }); + + it('🛑 should fail call to remove page with stale target hash', async function () { + const paginated_add_result_1 = ExtrinsicHelper.removePage(providerKeys, schemaId, msa_id, 0, 0); + await assert.rejects(paginated_add_result_1.fundAndSend(fundingSource), { + name: 'StalePageState', + section: 'statefulStorage', + }); + }); + }); + + describe('Paginated Storage RPC Tests', function () { + it('✅ should be able to call 
get_paginated_storage and get paginated data', async function () { + const result = await ExtrinsicHelper.getPaginatedStorage(msa_id, schemaId); + assert.notEqual(result, undefined, 'should have returned a valid response'); + assert.notEqual(result.length, 0, 'should have returned paginated responses'); + assert.notEqual(result[0].hash, undefined, 'should have returned a valid page'); }); + }); }); diff --git a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts index 1bb759bbb4..b4828688ef 100644 --- a/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts +++ b/e2e/stateful-pallet-storage/handleSignatureRequired.test.ts @@ -1,29 +1,30 @@ // E2E tests for pallets/stateful-pallet-storage/handleItemizedWithSignature.ts -import "@frequency-chain/api-augment"; -import assert from "assert"; +import '@frequency-chain/api-augment'; +import assert from 'assert'; import { DOLLARS, createDelegator, createProviderKeysAndId, generateItemizedSignaturePayload, generateItemizedSignaturePayloadV2, - generatePaginatedDeleteSignaturePayload, generatePaginatedDeleteSignaturePayloadV2, - generatePaginatedUpsertSignaturePayload, generatePaginatedUpsertSignaturePayloadV2, + generatePaginatedDeleteSignaturePayload, + generatePaginatedDeleteSignaturePayloadV2, + generatePaginatedUpsertSignaturePayload, + generatePaginatedUpsertSignaturePayloadV2, getCurrentItemizedHash, getCurrentPaginatedHash, - signPayloadSr25519 -} from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { ExtrinsicHelper } from "../scaffolding/extrinsicHelpers"; -import { AVRO_CHAT_MESSAGE } from "../stateful-pallet-storage/fixtures/itemizedSchemaType"; -import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfaces"; -import { Bytes, u16 } from "@polkadot/types"; -import { getFundingSource } from "../scaffolding/funding"; - -describe("📗 Stateful Pallet Storage Signature Required", function () { - const fundingSource = getFundingSource("stateful-storage-handle-sig-req"); - - + signPayloadSr25519, +} from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper } from '../scaffolding/extrinsicHelpers'; +import { AVRO_CHAT_MESSAGE } from '../stateful-pallet-storage/fixtures/itemizedSchemaType'; +import { MessageSourceId, SchemaId } from '@frequency-chain/api-augment/interfaces'; +import { Bytes, u16 } from '@polkadot/types'; +import { getFundingSource } from '../scaffolding/funding'; + +const fundingSource = getFundingSource('stateful-storage-handle-sig-req'); + +describe('📗 Stateful Pallet Storage Signature Required', function () { let itemizedSchemaId: SchemaId; let paginatedSchemaId: SchemaId; let msa_id: MessageSourceId; @@ -32,99 +33,136 @@ describe("📗 Stateful Pallet Storage Signature Required", function () { let delegatorKeys: KeyringPair; before(async function () { - // Create a provider for the MSA, the provider will be used to grant delegation [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); - assert.notEqual(providerId, undefined, "setup should populate providerId"); - assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); + assert.notEqual(providerId, undefined, 'setup should populate providerId'); + assert.notEqual(providerKeys, undefined, 'setup should populate providerKeys'); // Create a schema for Itemized PayloadLocation - const createSchema = 
ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized"); + const createSchema = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, 'AvroBinary', 'Itemized'); const { target: event } = await createSchema.signAndSend(); itemizedSchemaId = event!.data.schemaId; // Create a schema for Paginated PayloadLocation - const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, "AvroBinary", "Paginated"); + const createSchema2 = ExtrinsicHelper.createSchema(providerKeys, AVRO_CHAT_MESSAGE, 'AvroBinary', 'Paginated'); const { target: event2 } = await createSchema2.signAndSend(); - assert.notEqual(event2, undefined, "setup should return a SchemaCreated event"); - paginatedSchemaId = event2!.data.schemaId; + assert.notEqual(event2, undefined, 'setup should return a SchemaCreated event'); + paginatedSchemaId = event2!.data.schemaId; // Create a MSA for the delegator [delegatorKeys, msa_id] = await createDelegator(fundingSource); - assert.notEqual(delegatorKeys, undefined, "setup should populate delegator_key"); - assert.notEqual(msa_id, undefined, "setup should populate msa_id"); + assert.notEqual(delegatorKeys, undefined, 'setup should populate delegator_key'); + assert.notEqual(msa_id, undefined, 'setup should populate msa_id'); }); - describe("Itemized With Signature Storage Tests", function () { - - it("should be able to call applyItemizedActionWithSignature and apply actions", async function () { - + describe('Itemized With Signature Storage Tests', function () { + it('should be able to call applyItemizedActionWithSignature and apply actions', async function () { // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); const add_action = { - "Add": payload_1 - } + Add: payload_1, + }; - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); const update_action = { - "Add": payload_2 - } + Add: payload_2, + }; const target_hash = await getCurrentItemizedHash(msa_id, itemizedSchemaId); - let add_actions = [add_action, update_action]; + const add_actions = [add_action, update_action]; const payload = await generateItemizedSignaturePayload({ msaId: msa_id, targetHash: target_hash, schemaId: itemizedSchemaId, actions: add_actions, }); - const itemizedPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStorageItemizedSignaturePayload", payload); - let itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignature(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, itemizedPayloadData), payload); - const { target: pageUpdateEvent1, eventMap: chainEvents } = await itemized_add_result_1.fundAndSend(fundingSource); - assert.notEqual(chainEvents["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageUpdateEvent1, undefined, "should have returned a PalletStatefulStorageItemizedActionApplied event"); + const itemizedPayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStorageItemizedSignaturePayload', + payload + ); + const itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignature( + delegatorKeys, + providerKeys, 
+ signPayloadSr25519(delegatorKeys, itemizedPayloadData), + payload + ); + const { target: pageUpdateEvent1, eventMap: chainEvents } = + await itemized_add_result_1.fundAndSend(fundingSource); + assert.notEqual( + chainEvents['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual( + pageUpdateEvent1, + undefined, + 'should have returned a PalletStatefulStorageItemizedActionApplied event' + ); }); - it("should be able to call applyItemizedActionWithSignatureV2 and apply actions", async function () { - + it('should be able to call applyItemizedActionWithSignatureV2 and apply actions', async function () { // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); const add_action = { - "Add": payload_1 - } + Add: payload_1, + }; - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); const update_action = { - "Add": payload_2 - } + Add: payload_2, + }; const target_hash = await getCurrentItemizedHash(msa_id, itemizedSchemaId); - let add_actions = [add_action, update_action]; + const add_actions = [add_action, update_action]; const payload = await generateItemizedSignaturePayloadV2({ targetHash: target_hash, schemaId: itemizedSchemaId, actions: add_actions, }); - const itemizedPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStorageItemizedSignaturePayloadV2", payload); - let itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignatureV2(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, itemizedPayloadData), payload); - const { target: pageUpdateEvent1, eventMap: chainEvents } = await itemized_add_result_1.fundAndSend(fundingSource); - assert.notEqual(chainEvents["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageUpdateEvent1, undefined, "should have returned a PalletStatefulStorageItemizedActionApplied event"); + const itemizedPayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStorageItemizedSignaturePayloadV2', + payload + ); + const itemized_add_result_1 = ExtrinsicHelper.applyItemActionsWithSignatureV2( + delegatorKeys, + providerKeys, + signPayloadSr25519(delegatorKeys, itemizedPayloadData), + payload + ); + const { target: pageUpdateEvent1, eventMap: chainEvents } = + await itemized_add_result_1.fundAndSend(fundingSource); + assert.notEqual( + chainEvents['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual( + pageUpdateEvent1, + undefined, + 'should have returned a PalletStatefulStorageItemizedActionApplied event' + ); }); }); - describe("Paginated With Signature Storage Tests", function () { - - it("should be able to call upsert a page and delete it successfully", async function () { - let page_id = new u16(ExtrinsicHelper.api.registry, 1); + 
describe('Paginated With Signature Storage Tests', function () { + it('should be able to call upsert a page and delete it successfully', async function () { + const page_id = new u16(ExtrinsicHelper.api.registry, 1); // Add and update actions let target_hash = await getCurrentPaginatedHash(msa_id, paginatedSchemaId, page_id.toNumber()); @@ -133,14 +171,34 @@ describe("📗 Stateful Pallet Storage Signature Required", function () { targetHash: target_hash, schemaId: paginatedSchemaId, pageId: page_id, - payload: new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"), + payload: new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'), }); - const upsertPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedUpsertSignaturePayload", upsertPayload); - let upsert_result = ExtrinsicHelper.upsertPageWithSignature(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, upsertPayloadData), upsertPayload); + const upsertPayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStoragePaginatedUpsertSignaturePayload', + upsertPayload + ); + const upsert_result = ExtrinsicHelper.upsertPageWithSignature( + delegatorKeys, + providerKeys, + signPayloadSr25519(delegatorKeys, upsertPayloadData), + upsertPayload + ); const { target: pageUpdateEvent, eventMap: chainEvents1 } = await upsert_result.fundAndSend(fundingSource); - assert.notEqual(chainEvents1["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents1["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageUpdateEvent, undefined, "should have returned a PalletStatefulStoragePaginatedPageUpdate event"); + assert.notEqual( + chainEvents1['system.ExtrinsicSuccess'], + undefined, + 'should have returned an ExtrinsicSuccess event' + ); + assert.notEqual( + chainEvents1['transactionPayment.TransactionFeePaid'], + undefined, + 'should have returned a TransactionFeePaid event' + ); + assert.notEqual( + pageUpdateEvent, + undefined, + 'should have returned a PalletStatefulStoragePaginatedPageUpdate event' + ); // Remove the page target_hash = await getCurrentPaginatedHash(msa_id, paginatedSchemaId, page_id.toNumber()); @@ -150,21 +208,37 @@ describe("📗 Stateful Pallet Storage Signature Required", function () { schemaId: paginatedSchemaId, pageId: page_id, }); - const deletePayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedDeleteSignaturePayload", deletePayload); - let remove_result = ExtrinsicHelper.deletePageWithSignature(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, deletePayloadData), deletePayload); + const deletePayloadData = ExtrinsicHelper.api.registry.createType( + 'PalletStatefulStoragePaginatedDeleteSignaturePayload', + deletePayload + ); + const remove_result = ExtrinsicHelper.deletePageWithSignature( + delegatorKeys, + providerKeys, + signPayloadSr25519(delegatorKeys, deletePayloadData), + deletePayload + ); const { target: pageRemove, eventMap: chainEvents2 } = await remove_result.fundAndSend(fundingSource); - assert.notEqual(chainEvents2["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event"); - assert.notEqual(chainEvents2["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event"); - assert.notEqual(pageRemove, undefined, "should have returned a event"); + assert.notEqual( + 
chainEvents2['system.ExtrinsicSuccess'],
+        undefined,
+        'should have returned an ExtrinsicSuccess event'
+      );
+      assert.notEqual(
+        chainEvents2['transactionPayment.TransactionFeePaid'],
+        undefined,
+        'should have returned a TransactionFeePaid event'
+      );
+      assert.notEqual(pageRemove, undefined, 'should have returned an event');
 
       // no pages should exist
       const result = await ExtrinsicHelper.getPaginatedStorage(msa_id, paginatedSchemaId);
-      assert.notEqual(result, undefined, "should have returned a valid response");
-      assert.equal(result.length, 0, "should returned no paginated pages");
+      assert.notEqual(result, undefined, 'should have returned a valid response');
+      assert.equal(result.length, 0, 'should have returned no paginated pages');
     });
 
-    it("should be able to call upsertPageWithSignatureV2 a page and deletePageWithSignatureV2 it successfully", async function () {
-      let page_id = new u16(ExtrinsicHelper.api.registry, 1);
+    it('should be able to call upsertPageWithSignatureV2 a page and deletePageWithSignatureV2 it successfully', async function () {
+      const page_id = new u16(ExtrinsicHelper.api.registry, 1);
 
       // Add and update actions
       let target_hash = await getCurrentPaginatedHash(msa_id, paginatedSchemaId, page_id.toNumber());
@@ -172,14 +246,34 @@ describe("📗 Stateful Pallet Storage Signature Required", function () {
         targetHash: target_hash,
         schemaId: paginatedSchemaId,
         pageId: page_id,
-        payload: new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"),
+        payload: new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'),
       });
-      const upsertPayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedUpsertSignaturePayloadV2", upsertPayload);
-      let upsert_result = ExtrinsicHelper.upsertPageWithSignatureV2(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, upsertPayloadData), upsertPayload);
+      const upsertPayloadData = ExtrinsicHelper.api.registry.createType(
+        'PalletStatefulStoragePaginatedUpsertSignaturePayloadV2',
+        upsertPayload
+      );
+      const upsert_result = ExtrinsicHelper.upsertPageWithSignatureV2(
+        delegatorKeys,
+        providerKeys,
+        signPayloadSr25519(delegatorKeys, upsertPayloadData),
+        upsertPayload
+      );
       const { target: pageUpdateEvent, eventMap: chainEvents1 } = await upsert_result.fundAndSend(fundingSource);
-      assert.notEqual(chainEvents1["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event");
-      assert.notEqual(chainEvents1["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event");
-      assert.notEqual(pageUpdateEvent, undefined, "should have returned a PalletStatefulStoragePaginatedPageUpdate event");
+      assert.notEqual(
+        chainEvents1['system.ExtrinsicSuccess'],
+        undefined,
+        'should have returned an ExtrinsicSuccess event'
+      );
+      assert.notEqual(
+        chainEvents1['transactionPayment.TransactionFeePaid'],
+        undefined,
+        'should have returned a TransactionFeePaid event'
+      );
+      assert.notEqual(
+        pageUpdateEvent,
+        undefined,
+        'should have returned a PalletStatefulStoragePaginatedPageUpdate event'
+      );
 
       // Remove the page
       target_hash = await getCurrentPaginatedHash(msa_id, paginatedSchemaId, page_id.toNumber());
@@ -188,17 +282,33 @@ describe("📗 Stateful Pallet Storage Signature Required", function () {
         schemaId: paginatedSchemaId,
         pageId: page_id,
       });
-      const deletePayloadData = ExtrinsicHelper.api.registry.createType("PalletStatefulStoragePaginatedDeleteSignaturePayloadV2", deletePayload);
-      let remove_result = ExtrinsicHelper.deletePageWithSignatureV2(delegatorKeys, providerKeys, signPayloadSr25519(delegatorKeys, deletePayloadData), deletePayload);
+      const deletePayloadData = ExtrinsicHelper.api.registry.createType(
+        'PalletStatefulStoragePaginatedDeleteSignaturePayloadV2',
+        deletePayload
+      );
+      const remove_result = ExtrinsicHelper.deletePageWithSignatureV2(
+        delegatorKeys,
+        providerKeys,
+        signPayloadSr25519(delegatorKeys, deletePayloadData),
+        deletePayload
+      );
       const { target: pageRemove, eventMap: chainEvents2 } = await remove_result.fundAndSend(fundingSource);
-      assert.notEqual(chainEvents2["system.ExtrinsicSuccess"], undefined, "should have returned an ExtrinsicSuccess event");
-      assert.notEqual(chainEvents2["transactionPayment.TransactionFeePaid"], undefined, "should have returned a TransactionFeePaid event");
-      assert.notEqual(pageRemove, undefined, "should have returned a event");
+      assert.notEqual(
+        chainEvents2['system.ExtrinsicSuccess'],
+        undefined,
+        'should have returned an ExtrinsicSuccess event'
+      );
+      assert.notEqual(
+        chainEvents2['transactionPayment.TransactionFeePaid'],
+        undefined,
+        'should have returned a TransactionFeePaid event'
+      );
+      assert.notEqual(pageRemove, undefined, 'should have returned an event');
 
       // no pages should exist
       const result = await ExtrinsicHelper.getPaginatedStorage(msa_id, paginatedSchemaId);
-      assert.notEqual(result, undefined, "should have returned a valid response");
-      assert.equal(result.length, 0, "should returned no paginated pages");
+      assert.notEqual(result, undefined, 'should have returned a valid response');
+      assert.equal(result.length, 0, 'should have returned no paginated pages');
     });
   });
 });
diff --git a/e2e/sudo/sudo.test.ts b/e2e/sudo/sudo.test.ts
index 55cc2b0dde..4718814282 100644
--- a/e2e/sudo/sudo.test.ts
+++ b/e2e/sudo/sudo.test.ts
@@ -1,45 +1,45 @@
 // All the sudo required tests must be in one test for parallelization needs
-import "@frequency-chain/api-augment";
-import { MessageSourceId, SchemaId } from "@frequency-chain/api-augment/interfaces";
-import { KeyringPair } from "@polkadot/keyring/types";
-import assert from "assert";
-import { Extrinsic, ExtrinsicHelper } from "../scaffolding/extrinsicHelpers";
-import { isTestnet } from "../scaffolding/env";
-import { getSudo, getFundingSource } from "../scaffolding/funding";
-import { AVRO_GRAPH_CHANGE } from "../schemas/fixtures/avroGraphChangeSchemaType";
-import { Bytes, u16 } from "@polkadot/types";
+import '@frequency-chain/api-augment';
+import { MessageSourceId, SchemaId } from '@frequency-chain/api-augment/interfaces';
+import { KeyringPair } from '@polkadot/keyring/types';
+import assert from 'assert';
+import { Extrinsic, ExtrinsicHelper } from '../scaffolding/extrinsicHelpers';
+import { isTestnet } from '../scaffolding/env';
+import { getSudo, getFundingSource } from '../scaffolding/funding';
+import { AVRO_GRAPH_CHANGE } from '../schemas/fixtures/avroGraphChangeSchemaType';
+import { Bytes, u16 } from '@polkadot/types';
 import {
   DOLLARS,
   createDelegatorAndDelegation,
   createProviderKeysAndId,
   getCurrentItemizedHash,
-} from "../scaffolding/helpers";
-import { AVRO_CHAT_MESSAGE } from "../stateful-pallet-storage/fixtures/itemizedSchemaType";
+} from '../scaffolding/helpers';
+import { AVRO_CHAT_MESSAGE } from '../stateful-pallet-storage/fixtures/itemizedSchemaType';
 
-describe("Sudo required", function () {
+describe('Sudo required', function () {
   let sudoKey: KeyringPair;
   let fundingSource: KeyringPair;
 
   before(function () {
     if (isTestnet()) this.skip();
     sudoKey = 
getSudo().keys; - fundingSource = getFundingSource("sudo-transactions"); + fundingSource = getFundingSource('sudo-transactions'); }); - describe("schema#setMaxSchemaModelBytes", function () { - it("should fail to set the schema size because of lack of root authority (BadOrigin)", async function () { + describe('schema#setMaxSchemaModelBytes', function () { + it('should fail to set the schema size because of lack of root authority (BadOrigin)', async function () { const operation = new Extrinsic(() => ExtrinsicHelper.api.tx.schemas.setMaxSchemaModelBytes(1000000), sudoKey); await assert.rejects(operation.signAndSend(), { name: 'BadOrigin' }); }); - it("should fail to set max schema size > hard-coded limit (SchemaModelMaxBytesBoundedVecLimit)", async function () { + it('should fail to set max schema size > hard-coded limit (SchemaModelMaxBytesBoundedVecLimit)', async function () { const limit = ExtrinsicHelper.api.consts.schemas.schemaModelMaxBytesBoundedVecLimit.toBigInt(); const op = new Extrinsic(() => ExtrinsicHelper.api.tx.schemas.setMaxSchemaModelBytes(limit + 1n), sudoKey); await assert.rejects(op.sudoSignAndSend(), { name: 'ExceedsMaxSchemaModelBytes' }); - }) + }); - it("should successfully set the max schema size", async function () { + it('should successfully set the max schema size', async function () { const size = (await ExtrinsicHelper.apiPromise.query.schemas.governanceSchemaModelMaxBytes()).toBigInt(); const op = new Extrinsic(() => ExtrinsicHelper.api.tx.schemas.setMaxSchemaModelBytes(size + 1n), sudoKey); await op.sudoSignAndSend(); @@ -49,34 +49,44 @@ describe("Sudo required", function () { }); }); - describe("stateful-pallet-storage", function () { - - it("should fail to create non itemized schema with AppendOnly settings", async function () { + describe('stateful-pallet-storage', function () { + it('should fail to create non itemized schema with AppendOnly settings', async function () { if (isTestnet()) this.skip(); - const ex = ExtrinsicHelper.createSchemaWithSettingsGov(sudoKey, AVRO_GRAPH_CHANGE, "AvroBinary", "Paginated", "AppendOnly"); + const ex = ExtrinsicHelper.createSchemaWithSettingsGov( + sudoKey, + AVRO_GRAPH_CHANGE, + 'AvroBinary', + 'Paginated', + 'AppendOnly' + ); await assert.rejects(ex.sudoSignAndSend(), { - name: 'InvalidSetting' + name: 'InvalidSetting', }); }); - it("should not fail to create itemized schema with AppendOnly settings", async function () { + it('should not fail to create itemized schema with AppendOnly settings', async function () { if (isTestnet()) this.skip(); - const createSchema = ExtrinsicHelper.createSchemaWithSettingsGov(sudoKey, AVRO_GRAPH_CHANGE, "AvroBinary", "Itemized", "AppendOnly"); + const createSchema = ExtrinsicHelper.createSchemaWithSettingsGov( + sudoKey, + AVRO_GRAPH_CHANGE, + 'AvroBinary', + 'Itemized', + 'AppendOnly' + ); const { target: event } = await createSchema.sudoSignAndSend(); assert.notEqual(event, undefined); const itemizedSchemaId: u16 = event?.data.schemaId || new u16(ExtrinsicHelper.api.registry, 0); assert.notEqual(itemizedSchemaId.toNumber(), 0); - let schema_response = await ExtrinsicHelper.getSchema(itemizedSchemaId); + const schema_response = await ExtrinsicHelper.getSchema(itemizedSchemaId); assert(schema_response.isSome); - let schema_response_value = schema_response.unwrap(); - let schema_settings = schema_response_value.settings; + const schema_response_value = schema_response.unwrap(); + const schema_settings = schema_response_value.settings; assert.notEqual(schema_settings.length, 0); }); - 
- describe("📗 Stateful Pallet Storage AppendOnly Schemas", function () { + describe('📗 Stateful Pallet Storage AppendOnly Schemas', function () { // This requires schema creation abilities let itemizedSchemaId: SchemaId; @@ -87,46 +97,56 @@ describe("Sudo required", function () { before(async function () { // Create a provider for the MSA, the provider will be used to grant delegation [providerKeys, providerId] = await createProviderKeysAndId(fundingSource, 2n * DOLLARS); - assert.notEqual(providerId, undefined, "setup should populate providerId"); - assert.notEqual(providerKeys, undefined, "setup should populate providerKeys"); + assert.notEqual(providerId, undefined, 'setup should populate providerId'); + assert.notEqual(providerKeys, undefined, 'setup should populate providerKeys'); // Create a schema for Itemized PayloadLocation - const createSchema = ExtrinsicHelper.createSchemaWithSettingsGov(sudoKey, AVRO_CHAT_MESSAGE, "AvroBinary", "Itemized", "AppendOnly"); + const createSchema = ExtrinsicHelper.createSchemaWithSettingsGov( + sudoKey, + AVRO_CHAT_MESSAGE, + 'AvroBinary', + 'Itemized', + 'AppendOnly' + ); const { target: event } = await createSchema.sudoSignAndSend(); itemizedSchemaId = event!.data.schemaId; // Create a MSA for the delegator and delegate to the provider for the itemized schema [, msa_id] = await createDelegatorAndDelegation(fundingSource, itemizedSchemaId, providerId, providerKeys); - assert.notEqual(msa_id, undefined, "setup should populate msa_id"); + assert.notEqual(msa_id, undefined, 'setup should populate msa_id'); }); - describe("Itemized With AppendOnly Storage Tests", function () { - - it("should not be able to call delete action", async function () { - + describe('Itemized With AppendOnly Storage Tests', function () { + it('should not be able to call delete action', async function () { // Add and update actions - let payload_1 = new Bytes(ExtrinsicHelper.api.registry, "Hello World From Frequency"); + const payload_1 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World From Frequency'); const add_action = { - "Add": payload_1 - } + Add: payload_1, + }; - let payload_2 = new Bytes(ExtrinsicHelper.api.registry, "Hello World Again From Frequency"); + const payload_2 = new Bytes(ExtrinsicHelper.api.registry, 'Hello World Again From Frequency'); const update_action = { - "Add": payload_2 - } + Add: payload_2, + }; - const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1) + const idx_1: u16 = new u16(ExtrinsicHelper.api.registry, 1); const delete_action = { - "Delete": idx_1 - } + Delete: idx_1, + }; const target_hash = await getCurrentItemizedHash(msa_id, itemizedSchemaId); - let add_actions = [add_action, update_action, delete_action]; + const add_actions = [add_action, update_action, delete_action]; - let itemized_add_result_1 = ExtrinsicHelper.applyItemActions(providerKeys, itemizedSchemaId, msa_id, add_actions, target_hash); + const itemized_add_result_1 = ExtrinsicHelper.applyItemActions( + providerKeys, + itemizedSchemaId, + msa_id, + add_actions, + target_hash + ); await assert.rejects(itemized_add_result_1.fundAndSend(fundingSource), { name: 'UnsupportedOperationForSchema', section: 'statefulStorage', @@ -135,5 +155,4 @@ describe("Sudo required", function () { }); }); }); - }); diff --git a/e2e/time-release/timeRelease.test.ts b/e2e/time-release/timeRelease.test.ts index 9efb40d0a9..c310a3fc0a 100644 --- a/e2e/time-release/timeRelease.test.ts +++ b/e2e/time-release/timeRelease.test.ts @@ -1,52 +1,53 @@ -import "@frequency-chain/api-augment"; 
-import assert from "assert"; -import { createAndFundKeypair } from "../scaffolding/helpers"; -import { KeyringPair } from "@polkadot/keyring/types"; -import { ExtrinsicHelper, ReleaseSchedule } from "../scaffolding/extrinsicHelpers"; -import { getFundingSource } from "../scaffolding/funding"; +import '@frequency-chain/api-augment'; +import assert from 'assert'; +import { createAndFundKeypair } from '../scaffolding/helpers'; +import { KeyringPair } from '@polkadot/keyring/types'; +import { ExtrinsicHelper, ReleaseSchedule } from '../scaffolding/extrinsicHelpers'; +import { getFundingSource } from '../scaffolding/funding'; const DOLLARS: number = 100000000; // 100_000_000 -export function getBlocksInMonthPeriod(blockTime: number, periodInMonths: number) { - const secondsPerMonth = 2592000; // Assuming 30 days in a month +function getBlocksInMonthPeriod(blockTime: number, periodInMonths: number) { + const secondsPerMonth = 2592000; // Assuming 30 days in a month - // Calculate the number of blocks in the given period - const blocksInPeriod = Math.floor((periodInMonths * secondsPerMonth) / blockTime); + // Calculate the number of blocks in the given period + const blocksInPeriod = Math.floor((periodInMonths * secondsPerMonth) / blockTime); - return blocksInPeriod; - } + return blocksInPeriod; +} -export function calculateReleaseSchedule(amount: number | bigint): ReleaseSchedule { - const start = 0; - const period = getBlocksInMonthPeriod(6, 4); - const periodCount = 4; +function calculateReleaseSchedule(amount: number | bigint): ReleaseSchedule { + const start = 0; + const period = getBlocksInMonthPeriod(6, 4); + const periodCount = 4; - const perPeriod = BigInt(amount) / (BigInt(periodCount)); + const perPeriod = BigInt(amount) / BigInt(periodCount); - return { - start, - period, - periodCount, - perPeriod, - }; + return { + start, + period, + periodCount, + perPeriod, + }; } -describe("TimeRelease", function () { - let vesterKeys: KeyringPair; - const sourceKey: KeyringPair = getFundingSource("time-release"); +const fundingSource: KeyringPair = getFundingSource('time-release'); - before(async function () { - vesterKeys = await createAndFundKeypair(sourceKey, 50_000_000n); - }); +describe('TimeRelease', function () { + let vesterKeys: KeyringPair; + + before(async function () { + vesterKeys = await createAndFundKeypair(fundingSource, 50_000_000n); + }); - describe("vested transfer and claim flow", function () { - it("creates a vested transfer", async function () { - let amount = 100000n * BigInt(DOLLARS); - let schedule: ReleaseSchedule = calculateReleaseSchedule(amount); + describe('vested transfer and claim flow', function () { + it('creates a vested transfer', async function () { + const amount = 100000n * BigInt(DOLLARS); + const schedule: ReleaseSchedule = calculateReleaseSchedule(amount); - const vestedTransferTx = ExtrinsicHelper.timeReleaseTransfer(sourceKey, vesterKeys, schedule); - const { target: event, eventMap } = await vestedTransferTx.signAndSend(); - assert.notEqual(event, undefined, "should have returned ReleaseScheduleAdded event"); - }); + const vestedTransferTx = ExtrinsicHelper.timeReleaseTransfer(fundingSource, vesterKeys, schedule); + const { target } = await vestedTransferTx.signAndSend(); + assert.notEqual(target, undefined, 'should have returned ReleaseScheduleAdded event'); }); -}) + }); +}); diff --git a/e2e/tsconfig.json b/e2e/tsconfig.json index 21a7bc6e58..9c229ab5bd 100644 --- a/e2e/tsconfig.json +++ b/e2e/tsconfig.json @@ -1,27 +1,25 @@ { - "$schema": 
"https://json.schemastore.org/tsconfig", - "display": "Base", - "compilerOptions": { - "allowSyntheticDefaultImports": true, - "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "node", - "noImplicitAny": false, - "noImplicitThis": false, - "outDir": "dist", - "resolveJsonModule": true, - "sourceMap": false, - "strict": true, - "skipLibCheck": true, - "target": "es2022", - "typeRoots": [ - "node_modules/@types" - ] - }, - "ts-node": { - "esm": true, - "experimentalSpecifierResolution": "node" - }, - "include": ["./**/*.ts"], - "exclude": ["node_modules/**", "./dist/**"] + "$schema": "https://json.schemastore.org/tsconfig", + "display": "Base", + "compilerOptions": { + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "node", + "noImplicitAny": false, + "noImplicitThis": false, + "outDir": "dist", + "resolveJsonModule": true, + "sourceMap": false, + "strict": true, + "skipLibCheck": true, + "target": "es2022", + "typeRoots": ["node_modules/@types"] + }, + "ts-node": { + "esm": true, + "experimentalSpecifierResolution": "node" + }, + "include": ["./**/*.ts"], + "exclude": ["node_modules/**", "./dist/**"] }