From 9631ec836f729cc5d83f924e42c564899ea286c6 Mon Sep 17 00:00:00 2001 From: Alberto Benegiamo Date: Mon, 5 Feb 2024 18:55:49 +0100 Subject: [PATCH] introduced dynamic fees for x-chain --- scripts/mocks.mockgen.txt | 2 + vms/avm/block/builder/builder.go | 13 +- vms/avm/block/executor/block.go | 13 +- vms/avm/block/executor/manager.go | 13 +- vms/avm/config/dynamic_fees_config.go | 45 ++ vms/avm/txs/executor/semantic_verifier.go | 27 +- .../txs/executor/semantic_verifier_test.go | 162 +++- .../txs/executor/syntactic_verifier_test.go | 12 +- vms/avm/txs/fees/calculator.go | 184 ++++- vms/avm/txs/fees/calculator_test.go | 698 +++++++++++++++++ vms/components/fees/dimensions.go | 34 + vms/components/fees/helpers.go | 70 ++ vms/components/fees/manager.go | 92 +++ wallet/chain/x/builder.go | 40 +- wallet/chain/x/builder_dynamic_fees.go | 701 ++++++++++++++++++ wallet/chain/x/builder_dynamic_fees_test.go | 475 ++++++++++++ wallet/chain/x/mocks/mock_builder_backend.go | 127 ++++ wallet/chain/x/mocks/mock_signer_backend.go | 57 ++ 18 files changed, 2668 insertions(+), 97 deletions(-) create mode 100644 vms/avm/config/dynamic_fees_config.go create mode 100644 vms/avm/txs/fees/calculator_test.go create mode 100644 vms/components/fees/dimensions.go create mode 100644 vms/components/fees/helpers.go create mode 100644 vms/components/fees/manager.go create mode 100644 wallet/chain/x/builder_dynamic_fees.go create mode 100644 wallet/chain/x/builder_dynamic_fees_test.go create mode 100644 wallet/chain/x/mocks/mock_builder_backend.go create mode 100644 wallet/chain/x/mocks/mock_signer_backend.go diff --git a/scripts/mocks.mockgen.txt b/scripts/mocks.mockgen.txt index ba2be886b0b6..5ffee16b3bee 100644 --- a/scripts/mocks.mockgen.txt +++ b/scripts/mocks.mockgen.txt @@ -40,5 +40,7 @@ github.com/ava-labs/avalanchego/vms/proposervm=PostForkBlock=vms/proposervm/mock github.com/ava-labs/avalanchego/vms/registry=VMGetter=vms/registry/mock_vm_getter.go github.com/ava-labs/avalanchego/vms/registry=VMRegistry=vms/registry/mock_vm_registry.go github.com/ava-labs/avalanchego/vms=Factory,Manager=vms/mock_manager.go +github.com/ava-labs/avalanchego/wallet/chain/x=BuilderBackend=wallet/chain/x/mocks/mock_builder_backend.go +github.com/ava-labs/avalanchego/wallet/chain/x=SignerBackend=wallet/chain/x/mocks/mock_signer_backend.go github.com/ava-labs/avalanchego/x/sync=Client=x/sync/mock_client.go github.com/ava-labs/avalanchego/x/sync=NetworkClient=x/sync/mock_network_client.go diff --git a/vms/avm/block/builder/builder.go b/vms/avm/block/builder/builder.go index 80e734812a6c..09843461ad2d 100644 --- a/vms/avm/block/builder/builder.go +++ b/vms/avm/block/builder/builder.go @@ -13,9 +13,11 @@ import ( "github.com/ava-labs/avalanchego/utils/timer/mockable" "github.com/ava-labs/avalanchego/utils/units" "github.com/ava-labs/avalanchego/vms/avm/block" + "github.com/ava-labs/avalanchego/vms/avm/config" "github.com/ava-labs/avalanchego/vms/avm/state" "github.com/ava-labs/avalanchego/vms/avm/txs" "github.com/ava-labs/avalanchego/vms/avm/txs/mempool" + "github.com/ava-labs/avalanchego/vms/components/fees" blockexecutor "github.com/ava-labs/avalanchego/vms/avm/block/executor" txexecutor "github.com/ava-labs/avalanchego/vms/avm/txs/executor" @@ -91,6 +93,9 @@ func (b *builder) BuildBlock(context.Context) (snowman.Block, error) { blockTxs []*txs.Tx inputs set.Set[ids.ID] remainingSize = 
targetBlockSize + + feeCfg = config.EUpgradeDynamicFeesConfig + feeManager = fees.NewManager(feeCfg.UnitFees) ) for { tx, exists := b.mempool.Peek() @@ -111,9 +116,11 @@ func (b *builder) BuildBlock(context.Context) (snowman.Block, error) { } err = tx.Unsigned.Visit(&txexecutor.SemanticVerifier{ - Backend: b.backend, - State: txDiff, - Tx: tx, + Backend: b.backend, + BlkFeeManager: feeManager, + UnitCaps: feeCfg.BlockUnitsCap, + State: txDiff, + Tx: tx, }) if err != nil { txID := tx.ID() diff --git a/vms/avm/block/executor/block.go b/vms/avm/block/executor/block.go index 8663f27ba123..15b7b29172fd 100644 --- a/vms/avm/block/executor/block.go +++ b/vms/avm/block/executor/block.go @@ -17,8 +17,10 @@ import ( "github.com/ava-labs/avalanchego/snow/choices" "github.com/ava-labs/avalanchego/snow/consensus/snowman" "github.com/ava-labs/avalanchego/vms/avm/block" + "github.com/ava-labs/avalanchego/vms/avm/config" "github.com/ava-labs/avalanchego/vms/avm/state" "github.com/ava-labs/avalanchego/vms/avm/txs/executor" + "github.com/ava-labs/avalanchego/vms/components/fees" ) const SyncBound = 10 * time.Second @@ -130,13 +132,18 @@ func (b *Block) Verify(context.Context) error { atomicRequests: make(map[ids.ID]*atomic.Requests), } + feeCfg := config.EUpgradeDynamicFeesConfig + feeManager := fees.NewManager(feeCfg.UnitFees) + for _, tx := range txs { // Verify that the tx is valid according to the current state of the // chain. err := tx.Unsigned.Visit(&executor.SemanticVerifier{ - Backend: b.manager.backend, - State: stateDiff, - Tx: tx, + Backend: b.manager.backend, + BlkFeeManager: feeManager, + UnitCaps: feeCfg.BlockUnitsCap, + State: stateDiff, + Tx: tx, }) if err != nil { txID := tx.ID() diff --git a/vms/avm/block/executor/manager.go b/vms/avm/block/executor/manager.go index 9822743b7fd3..b85de77a0515 100644 --- a/vms/avm/block/executor/manager.go +++ b/vms/avm/block/executor/manager.go @@ -12,11 +12,13 @@ import ( "github.com/ava-labs/avalanchego/utils/set" "github.com/ava-labs/avalanchego/utils/timer/mockable" "github.com/ava-labs/avalanchego/vms/avm/block" + "github.com/ava-labs/avalanchego/vms/avm/config" "github.com/ava-labs/avalanchego/vms/avm/metrics" "github.com/ava-labs/avalanchego/vms/avm/state" "github.com/ava-labs/avalanchego/vms/avm/txs" "github.com/ava-labs/avalanchego/vms/avm/txs/executor" "github.com/ava-labs/avalanchego/vms/avm/txs/mempool" + "github.com/ava-labs/avalanchego/vms/components/fees" ) var ( @@ -160,10 +162,15 @@ func (m *manager) VerifyTx(tx *txs.Tx) error { return err } + feeCfg := config.EUpgradeDynamicFeesConfig + feeManager := fees.NewManager(feeCfg.UnitFees) + err = tx.Unsigned.Visit(&executor.SemanticVerifier{ - Backend: m.backend, - State: stateDiff, - Tx: tx, + Backend: m.backend, + BlkFeeManager: feeManager, + UnitCaps: feeCfg.BlockUnitsCap, + State: stateDiff, + Tx: tx, }) if err != nil { return err diff --git a/vms/avm/config/dynamic_fees_config.go b/vms/avm/config/dynamic_fees_config.go new file mode 100644 index 000000000000..d7994896c7da --- /dev/null +++ b/vms/avm/config/dynamic_fees_config.go @@ -0,0 +1,45 @@ +// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
+
+package config
+
+import (
+	"math"
+
+	commonfees "github.com/ava-labs/avalanchego/vms/components/fees"
+)
+
+// Dynamic fees configs become relevant with the introduction of dynamic fees in the E-fork.
+// We cannot easily include them in Config since they do not come from genesis.
+// They don't feel like an execution config either, since we need a fork upgrade
+// to update them (testing is a different story).
+// I am setting them in a separate config object, but will access it via Config
+// so as to have fork control over which dynamic fees config is picked.
+
+// EUpgradeDynamicFeesConfig to be tuned TODO ABENEGIA
+var EUpgradeDynamicFeesConfig = DynamicFeesConfig{
+	UnitFees: commonfees.Dimensions{
+		1,
+		2,
+		3,
+		4,
+	},
+
+	BlockUnitsCap: commonfees.Dimensions{
+		math.MaxUint64,
+		math.MaxUint64,
+		math.MaxUint64,
+		math.MaxUint64,
+	},
+}
+
+type DynamicFeesConfig struct {
+	// UnitFees contains, for each fee dimension, the
+	// unit fees valid as soon as the fork introducing dynamic fees
+	// activates. Unit fees will then be updated by the dynamic fees algo.
+	UnitFees commonfees.Dimensions
+
+	// BlockUnitsCap contains, for each fee dimension, the
+	// maximal complexity a valid X-chain block can host
+	BlockUnitsCap commonfees.Dimensions
+}
diff --git a/vms/avm/txs/executor/semantic_verifier.go b/vms/avm/txs/executor/semantic_verifier.go
index d8d95d6313af..f7c44aa83c02 100644
--- a/vms/avm/txs/executor/semantic_verifier.go
+++ b/vms/avm/txs/executor/semantic_verifier.go
@@ -9,11 +9,14 @@ import (
 	"reflect"
 
 	"github.com/ava-labs/avalanchego/ids"
+	"github.com/ava-labs/avalanchego/vms/avm/fxs"
 	"github.com/ava-labs/avalanchego/vms/avm/state"
 	"github.com/ava-labs/avalanchego/vms/avm/txs"
 	"github.com/ava-labs/avalanchego/vms/avm/txs/fees"
 	"github.com/ava-labs/avalanchego/vms/components/avax"
 	"github.com/ava-labs/avalanchego/vms/components/verify"
+
+	commonfees "github.com/ava-labs/avalanchego/vms/components/fees"
 )
 
 var (
@@ -27,20 +30,22 @@ var (
 
 type SemanticVerifier struct {
 	*Backend
 
-	State state.ReadOnlyChain
-	Tx    *txs.Tx
+	BlkFeeManager *commonfees.Manager
+	UnitCaps      commonfees.Dimensions
+	State         state.ReadOnlyChain
+	Tx            *txs.Tx
 }
 
 func (v *SemanticVerifier) BaseTx(tx *txs.BaseTx) error {
-	return v.verifyBaseTx(tx, nil, nil)
+	return v.verifyBaseTx(tx, nil, nil, v.Tx.Creds)
 }
 
 func (v *SemanticVerifier) CreateAssetTx(tx *txs.CreateAssetTx) error {
-	return v.verifyBaseTx(&tx.BaseTx, nil, nil)
+	return v.verifyBaseTx(&tx.BaseTx, nil, nil, v.Tx.Creds)
 }
 
 func (v *SemanticVerifier) OperationTx(tx *txs.OperationTx) error {
-	if err := v.verifyBaseTx(&tx.BaseTx, nil, nil); err != nil {
+	if err := v.verifyBaseTx(&tx.BaseTx, nil, nil, v.Tx.Creds); err != nil {
 		return err
 	}
@@ -61,7 +66,7 @@ func (v *SemanticVerifier) OperationTx(tx *txs.OperationTx) error {
 }
 
 func (v *SemanticVerifier) ImportTx(tx *txs.ImportTx) error {
-	if err := v.verifyBaseTx(&tx.BaseTx, tx.ImportedIns, nil); err != nil {
+	if err := v.verifyBaseTx(&tx.BaseTx, tx.ImportedIns, nil, v.Tx.Creds); err != nil {
 		return err
 	}
 
@@ -102,7 +107,7 @@ func (v *SemanticVerifier) ImportTx(tx *txs.ImportTx) error {
 }
 
 func (v *SemanticVerifier) ExportTx(tx *txs.ExportTx) error {
-	if err := v.verifyBaseTx(&tx.BaseTx, nil, tx.ExportedOuts); err != nil {
+	if err := v.verifyBaseTx(&tx.BaseTx, nil, tx.ExportedOuts, v.Tx.Creds); err != nil {
 		return err
 	}
 
@@ -130,9 +135,15 @@ func (v *SemanticVerifier) verifyBaseTx(
 	tx *txs.BaseTx,
 	importedIns []*avax.TransferableInput,
 	exportedOuts
[]*avax.TransferableOutput, + creds []*fxs.FxCredential, ) error { feeCalculator := fees.Calculator{ - Config: v.Config, + IsEUpgradeActive: v.Config.IsEUpgradeActivated(v.State.GetTimestamp()), + Config: v.Config, + FeeManager: v.BlkFeeManager, + ConsumedUnitsCap: v.UnitCaps, + Codec: v.Codec, + Credentials: creds, } if err := tx.Visit(&feeCalculator); err != nil { return err diff --git a/vms/avm/txs/executor/semantic_verifier_test.go b/vms/avm/txs/executor/semantic_verifier_test.go index 98b28ac5ea52..558466ddd97e 100644 --- a/vms/avm/txs/executor/semantic_verifier_test.go +++ b/vms/avm/txs/executor/semantic_verifier_test.go @@ -24,16 +24,25 @@ import ( "github.com/ava-labs/avalanchego/utils/crypto/secp256k1" "github.com/ava-labs/avalanchego/utils/logging" "github.com/ava-labs/avalanchego/utils/timer/mockable" + "github.com/ava-labs/avalanchego/vms/avm/config" "github.com/ava-labs/avalanchego/vms/avm/fxs" "github.com/ava-labs/avalanchego/vms/avm/state" "github.com/ava-labs/avalanchego/vms/avm/txs" "github.com/ava-labs/avalanchego/vms/components/avax" + "github.com/ava-labs/avalanchego/vms/components/fees" "github.com/ava-labs/avalanchego/vms/components/verify" "github.com/ava-labs/avalanchego/vms/secp256k1fx" safemath "github.com/ava-labs/avalanchego/utils/math" ) +var feeConfig = config.Config{ + TxFee: 2, + CreateAssetTxFee: 3, + DurangoTime: time.Time{}, + EUpgradeTime: mockable.MaxTime, +} + func TestSemanticVerifierBaseTx(t *testing.T) { ctx := snowtest.Context(t, snowtest.XChainID) @@ -147,6 +156,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).Times(2) @@ -171,7 +181,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "invalid output", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output := output @@ -199,7 +211,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "unsorted outputs", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output0 := output @@ -238,7 +252,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "invalid input", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -266,7 +282,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "duplicate inputs", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { baseTx := baseTx @@ -290,7 +308,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "input overflow", stateFunc: func(ctrl *gomock.Controller) state.Chain { - 
return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input0 := input @@ -327,7 +347,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "output overflow", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output0 := output @@ -378,6 +400,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { Out: &utxoOut, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).Times(2) @@ -409,7 +432,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "insufficient funds", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -437,7 +462,9 @@ func TestSemanticVerifierBaseTx(t *testing.T) { { name: "barely insufficient funds", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -470,6 +497,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { utxo := utxo utxo.Asset.ID = ids.GenerateTestID() + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) return state @@ -502,6 +530,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { Unsigned: &unsignedCreateAssetTx, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) @@ -528,6 +557,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) @@ -554,6 +584,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(nil, database.ErrNotFound) return state @@ -585,6 +616,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { utxo := utxo utxo.Out = &utxoOut + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) @@ -618,6 +650,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { Unsigned: &unsignedCreateAssetTx, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) @@ -644,6 +677,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + 
state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(nil, database.ErrNotFound) @@ -676,6 +710,7 @@ func TestSemanticVerifierBaseTx(t *testing.T) { }, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&tx, nil) @@ -706,10 +741,14 @@ func TestSemanticVerifierBaseTx(t *testing.T) { state := test.stateFunc(ctrl) tx := test.txFunc(require) + feeCfg := config.EUpgradeDynamicFeesConfig + err = tx.Unsigned.Visit(&SemanticVerifier{ - Backend: backend, - State: state, - Tx: tx, + Backend: backend, + BlkFeeManager: fees.NewManager(feeCfg.UnitFees), + UnitCaps: feeCfg.BlockUnitsCap, + State: state, + Tx: tx, }) require.ErrorIs(err, test.err) }) @@ -834,6 +873,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).Times(2) @@ -856,7 +896,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "invalid output", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output := output @@ -886,7 +928,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "unsorted outputs", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output0 := output @@ -927,7 +971,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "unsorted exported outputs", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output0 := output @@ -965,7 +1011,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "invalid input", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -995,7 +1043,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "duplicate inputs", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { baseTx := baseTx @@ -1021,7 +1071,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "input overflow", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input0 := input @@ -1060,7 +1112,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "output overflow", stateFunc: func(ctrl 
*gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output0 := output @@ -1113,6 +1167,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { Out: &utxoOut, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).Times(2) @@ -1146,7 +1201,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "insufficient funds", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -1176,7 +1233,9 @@ func TestSemanticVerifierExportTx(t *testing.T) { { name: "barely insufficient funds", stateFunc: func(ctrl *gomock.Controller) state.Chain { - return nil + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -1211,6 +1270,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { utxo := utxo utxo.Asset.ID = ids.GenerateTestID() + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) return state @@ -1241,6 +1301,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { Unsigned: &unsignedCreateAssetTx, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) @@ -1265,6 +1326,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) @@ -1289,6 +1351,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(nil, database.ErrNotFound) return state @@ -1318,6 +1381,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { utxo := utxo utxo.Out = &output + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) @@ -1349,6 +1413,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { Unsigned: &unsignedCreateAssetTx, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) @@ -1373,6 +1438,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(nil, database.ErrNotFound) @@ -1401,6 +1467,7 @@ func TestSemanticVerifierExportTx(t *testing.T) { Unsigned: 
&exportTx, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&tx, nil) @@ -1557,6 +1624,7 @@ func TestSemanticVerifierExportTxDifferentSubnet(t *testing.T) { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).Times(2) @@ -1734,6 +1802,7 @@ func TestSemanticVerifierImportTx(t *testing.T) { name: "valid", stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil).AnyTimes() state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).AnyTimes() return state @@ -1745,8 +1814,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { name: "invalid output", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output := output @@ -1773,8 +1844,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { name: "unsorted outputs", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output0 := output @@ -1811,8 +1884,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { name: "invalid input", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -1838,8 +1913,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { name: "duplicate inputs", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { utx := unsignedImportTx @@ -1861,8 +1938,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { name: "duplicate imported inputs", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { utx := unsignedImportTx @@ -1883,8 +1962,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { name: "input overflow", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input0 := input @@ -1919,8 +2000,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { 
name: "output overflow", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { output := output @@ -1949,8 +2032,10 @@ func TestSemanticVerifierImportTx(t *testing.T) { }, { name: "insufficient funds", - stateFunc: func(c *gomock.Controller) state.Chain { - return nil + stateFunc: func(ctrl *gomock.Controller) state.Chain { + state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) + return state }, txFunc: func(require *require.Assertions) *txs.Tx { input := input @@ -1983,6 +2068,7 @@ func TestSemanticVerifierImportTx(t *testing.T) { createAssetTx := txs.Tx{ Unsigned: &unsignedCreateAssetTx, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil).AnyTimes() state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).AnyTimes() return state @@ -1996,6 +2082,7 @@ func TestSemanticVerifierImportTx(t *testing.T) { name: "invalid signature", stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil).AnyTimes() state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).AnyTimes() return state @@ -2023,6 +2110,7 @@ func TestSemanticVerifierImportTx(t *testing.T) { createAssetTx := txs.Tx{ Unsigned: &unsignedCreateAssetTx, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetTx(asset.ID).Return(&createAssetTx, nil).AnyTimes() return state }, @@ -2047,6 +2135,7 @@ func TestSemanticVerifierImportTx(t *testing.T) { name: "unknown asset", stateFunc: func(ctrl *gomock.Controller) state.Chain { state := state.NewMockChain(ctrl) + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil).AnyTimes() state.EXPECT().GetTx(asset.ID).Return(nil, database.ErrNotFound) return state @@ -2065,6 +2154,7 @@ func TestSemanticVerifierImportTx(t *testing.T) { BaseTx: baseTx, }, } + state.EXPECT().GetTimestamp().Return(time.Now().Truncate(time.Second)) state.EXPECT().GetUTXO(utxoID.InputID()).Return(&utxo, nil).AnyTimes() state.EXPECT().GetTx(asset.ID).Return(&tx, nil) return state diff --git a/vms/avm/txs/executor/syntactic_verifier_test.go b/vms/avm/txs/executor/syntactic_verifier_test.go index 050491109f35..70b07ece76c4 100644 --- a/vms/avm/txs/executor/syntactic_verifier_test.go +++ b/vms/avm/txs/executor/syntactic_verifier_test.go @@ -14,8 +14,6 @@ import ( "github.com/ava-labs/avalanchego/snow/snowtest" "github.com/ava-labs/avalanchego/utils/constants" "github.com/ava-labs/avalanchego/utils/crypto/secp256k1" - "github.com/ava-labs/avalanchego/utils/timer/mockable" - "github.com/ava-labs/avalanchego/vms/avm/config" "github.com/ava-labs/avalanchego/vms/avm/fxs" "github.com/ava-labs/avalanchego/vms/avm/txs" "github.com/ava-labs/avalanchego/vms/components/avax" @@ -23,15 +21,7 @@ import ( "github.com/ava-labs/avalanchego/vms/secp256k1fx" ) -var ( - keys = secp256k1.TestKeys() - feeConfig = config.Config{ - TxFee: 2, - CreateAssetTxFee: 3, - DurangoTime: time.Time{}, - EUpgradeTime: mockable.MaxTime, - } 
-) +var keys = secp256k1.TestKeys() func TestSyntacticVerifierBaseTx(t *testing.T) { ctx := snowtest.Context(t, snowtest.XChainID) diff --git a/vms/avm/txs/fees/calculator.go b/vms/avm/txs/fees/calculator.go index 5bcf9d770991..a232384e9b35 100644 --- a/vms/avm/txs/fees/calculator.go +++ b/vms/avm/txs/fees/calculator.go @@ -4,41 +4,191 @@ package fees import ( + "errors" + "fmt" + + "github.com/ava-labs/avalanchego/codec" "github.com/ava-labs/avalanchego/vms/avm/config" + "github.com/ava-labs/avalanchego/vms/avm/fxs" "github.com/ava-labs/avalanchego/vms/avm/txs" + "github.com/ava-labs/avalanchego/vms/components/avax" + "github.com/ava-labs/avalanchego/vms/components/fees" ) -var _ txs.Visitor = (*Calculator)(nil) +var ( + _ txs.Visitor = (*Calculator)(nil) + + errFailedFeeCalculation = errors.New("failed fee calculation") + errFailedConsumedUnitsCumulation = errors.New("failed cumulating consumed units") +) type Calculator struct { - // Pre E-fork inputs + // setup, to be filled before visitor methods are called + IsEUpgradeActive bool + + // Pre E-Upgrade inputs Config *config.Config + // Post E-Upgrade inputs + FeeManager *fees.Manager + ConsumedUnitsCap fees.Dimensions + Codec codec.Manager + + // common inputs + Credentials []*fxs.FxCredential + // outputs of visitor execution Fee uint64 } -func (fc *Calculator) BaseTx(*txs.BaseTx) error { - fc.Fee = fc.Config.TxFee - return nil +func (fc *Calculator) BaseTx(tx *txs.BaseTx) error { + if !fc.IsEUpgradeActive { + fc.Fee = fc.Config.TxFee + return nil + } + + consumedUnits, err := fc.commonConsumedUnits(tx, tx.Outs, tx.Ins) + if err != nil { + return err + } + + _, err = fc.AddFeesFor(consumedUnits) + return err +} + +func (fc *Calculator) CreateAssetTx(tx *txs.CreateAssetTx) error { + if !fc.IsEUpgradeActive { + fc.Fee = fc.Config.CreateAssetTxFee + return nil + } + + consumedUnits, err := fc.commonConsumedUnits(tx, tx.Outs, tx.Ins) + if err != nil { + return err + } + + _, err = fc.AddFeesFor(consumedUnits) + return err +} + +func (fc *Calculator) OperationTx(tx *txs.OperationTx) error { + if !fc.IsEUpgradeActive { + fc.Fee = fc.Config.TxFee + return nil + } + + consumedUnits, err := fc.commonConsumedUnits(tx, tx.Outs, tx.Ins) + if err != nil { + return err + } + + _, err = fc.AddFeesFor(consumedUnits) + return err +} + +func (fc *Calculator) ImportTx(tx *txs.ImportTx) error { + if !fc.IsEUpgradeActive { + fc.Fee = fc.Config.TxFee + return nil + } + + ins := make([]*avax.TransferableInput, len(tx.Ins)+len(tx.ImportedIns)) + copy(ins, tx.Ins) + copy(ins[len(tx.Ins):], tx.ImportedIns) + + consumedUnits, err := fc.commonConsumedUnits(tx, tx.Outs, ins) + if err != nil { + return err + } + + _, err = fc.AddFeesFor(consumedUnits) + return err } -func (fc *Calculator) CreateAssetTx(*txs.CreateAssetTx) error { - fc.Fee = fc.Config.CreateAssetTxFee - return nil +func (fc *Calculator) ExportTx(tx *txs.ExportTx) error { + if !fc.IsEUpgradeActive { + fc.Fee = fc.Config.TxFee + return nil + } + + outs := make([]*avax.TransferableOutput, len(tx.Outs)+len(tx.ExportedOuts)) + copy(outs, tx.Outs) + copy(outs[len(tx.Outs):], tx.ExportedOuts) + + consumedUnits, err := fc.commonConsumedUnits(tx, outs, tx.Ins) + if err != nil { + return err + } + + _, err = fc.AddFeesFor(consumedUnits) + return err } -func (fc *Calculator) OperationTx(*txs.OperationTx) error { - fc.Fee = fc.Config.TxFee - return nil +func (fc *Calculator) commonConsumedUnits( + uTx txs.UnsignedTx, + allOuts []*avax.TransferableOutput, + allIns 
[]*avax.TransferableInput, +) (fees.Dimensions, error) { + var consumedUnits fees.Dimensions + + uTxSize, err := fc.Codec.Size(txs.CodecVersion, uTx) + if err != nil { + return consumedUnits, fmt.Errorf("couldn't calculate UnsignedTx marshal length: %w", err) + } + credsSize, err := fc.Codec.Size(txs.CodecVersion, fc.Credentials) + if err != nil { + return consumedUnits, fmt.Errorf("failed retrieving size of credentials: %w", err) + } + consumedUnits[fees.Bandwidth] = uint64(uTxSize + credsSize) + + inputDimensions, err := fees.GetInputsDimensions(fc.Codec, txs.CodecVersion, allIns) + if err != nil { + return consumedUnits, fmt.Errorf("failed retrieving size of inputs: %w", err) + } + inputDimensions[fees.Bandwidth] = 0 // inputs bandwidth is already accounted for above, so we zero it + consumedUnits, err = fees.Add(consumedUnits, inputDimensions) + if err != nil { + return consumedUnits, fmt.Errorf("failed adding inputs: %w", err) + } + + outputDimensions, err := fees.GetOutputsDimensions(fc.Codec, txs.CodecVersion, allOuts) + if err != nil { + return consumedUnits, fmt.Errorf("failed retrieving size of outputs: %w", err) + } + outputDimensions[fees.Bandwidth] = 0 // outputs bandwidth is already accounted for above, so we zero it + consumedUnits, err = fees.Add(consumedUnits, outputDimensions) + if err != nil { + return consumedUnits, fmt.Errorf("failed adding outputs: %w", err) + } + + return consumedUnits, nil } -func (fc *Calculator) ImportTx(*txs.ImportTx) error { - fc.Fee = fc.Config.TxFee - return nil +func (fc *Calculator) AddFeesFor(consumedUnits fees.Dimensions) (uint64, error) { + boundBreached, dimension := fc.FeeManager.CumulateUnits(consumedUnits, fc.ConsumedUnitsCap) + if boundBreached { + return 0, fmt.Errorf("%w: breached dimension %d", errFailedConsumedUnitsCumulation, dimension) + } + + fee, err := fc.FeeManager.CalculateFee(consumedUnits) + if err != nil { + return 0, fmt.Errorf("%w: %w", errFailedFeeCalculation, err) + } + + fc.Fee += fee + return fee, nil } -func (fc *Calculator) ExportTx(*txs.ExportTx) error { - fc.Fee = fc.Config.TxFee - return nil +func (fc *Calculator) RemoveFeesFor(unitsToRm fees.Dimensions) (uint64, error) { + if err := fc.FeeManager.RemoveUnits(unitsToRm); err != nil { + return 0, fmt.Errorf("failed removing units: %w", err) + } + + fee, err := fc.FeeManager.CalculateFee(unitsToRm) + if err != nil { + return 0, fmt.Errorf("%w: %w", errFailedFeeCalculation, err) + } + + fc.Fee -= fee + return fee, nil } diff --git a/vms/avm/txs/fees/calculator_test.go b/vms/avm/txs/fees/calculator_test.go new file mode 100644 index 000000000000..c9116ae09868 --- /dev/null +++ b/vms/avm/txs/fees/calculator_test.go @@ -0,0 +1,698 @@ +// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
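The expected fees asserted in the tests that follow are simply the dot product between the per-dimension unit fees and the units consumed by each transaction, which is what Manager.CalculateFee (used by AddFeesFor above) computes. A minimal sketch of that arithmetic for the signed BaseTx case; the 224/1090/172 unit counts come from the test expectations below, and the standalone main wrapper is illustrative only:

package main

import (
	"fmt"

	"github.com/ava-labs/avalanchego/utils/units"
	"github.com/ava-labs/avalanchego/vms/components/fees"
)

func main() {
	// Unit fees per dimension: Bandwidth, UTXORead, UTXOWrite, Compute.
	m := fees.NewManager(fees.Dimensions{
		1 * units.MicroAvax,
		2 * units.MicroAvax,
		3 * units.MicroAvax,
		4 * units.MicroAvax,
	})

	// Units consumed by the signed BaseTx in the first test case below.
	fee, err := m.CalculateFee(fees.Dimensions{224, 1090, 172, 0})
	if err != nil {
		panic(err)
	}

	// 224*1 + 1090*2 + 172*3 + 0*4 = 2920 MicroAvax
	fmt.Println(fee == 2920*units.MicroAvax) // prints: true
}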
+ +package fees + +import ( + "reflect" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/ava-labs/avalanchego/codec" + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/snow" + "github.com/ava-labs/avalanchego/snow/snowtest" + "github.com/ava-labs/avalanchego/utils/crypto/secp256k1" + "github.com/ava-labs/avalanchego/utils/timer/mockable" + "github.com/ava-labs/avalanchego/utils/units" + "github.com/ava-labs/avalanchego/vms/avm/block" + "github.com/ava-labs/avalanchego/vms/avm/config" + "github.com/ava-labs/avalanchego/vms/avm/fxs" + "github.com/ava-labs/avalanchego/vms/avm/txs" + "github.com/ava-labs/avalanchego/vms/components/avax" + "github.com/ava-labs/avalanchego/vms/components/fees" + "github.com/ava-labs/avalanchego/vms/components/verify" + "github.com/ava-labs/avalanchego/vms/nftfx" + "github.com/ava-labs/avalanchego/vms/secp256k1fx" +) + +var ( + testUnitFees = fees.Dimensions{ + 1 * units.MicroAvax, + 2 * units.MicroAvax, + 3 * units.MicroAvax, + 4 * units.MicroAvax, + } + testBlockMaxConsumedUnits = fees.Dimensions{ + 3000, + 3500, + 1000, + 1000, + } + + feeTestsDefaultCfg = config.Config{ + TxFee: 1 * units.Avax, + CreateAssetTxFee: 2 * units.Avax, + } + + feeTestKeys = secp256k1.TestKeys() + feeTestSigners = [][]*secp256k1.PrivateKey{} + + durangoTime = time.Time{} // assume durango is active in these tests +) + +type feeTests struct { + description string + cfgAndChainTimeF func() (*config.Config, time.Time) + consumedUnitCapsF func() fees.Dimensions + expectedError error + checksF func(*testing.T, *Calculator) +} + +func TestBaseTxFees(t *testing.T) { + r := require.New(t) + + defaultCtx := snowtest.Context(t, snowtest.PChainID) + codec, err := createTestFeesCodec(defaultCtx) + r.NoError(err) + + baseTx, _ := txsCreationHelpers(defaultCtx) + uTx := &baseTx + sTx := &txs.Tx{ + Unsigned: uTx, + } + r.NoError(sTx.SignSECP256K1Fx(codec, feeTestSigners)) + + tests := []feeTests{ + { + description: "pre E fork", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(-1 * time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, fc.Config.TxFee, fc.Fee) + }, + }, + { + description: "post E fork, success", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, 2920*units.MicroAvax, fc.Fee) + require.Equal(t, + fees.Dimensions{ + 224, + 1090, + 172, + 0, + }, + fc.FeeManager.GetCumulatedUnits(), + ) + }, + }, + { + description: "post E fork, utxos read cap breached", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + return &cfg, chainTime + }, + consumedUnitCapsF: func() fees.Dimensions { + caps := testBlockMaxConsumedUnits + caps[fees.UTXORead] = 1090 - 1 + return 
caps + }, + expectedError: errFailedConsumedUnitsCumulation, + checksF: func(t *testing.T, fc *Calculator) {}, + }, + } + + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + cfg, chainTime := tt.cfgAndChainTimeF() + + consumedUnitCaps := testBlockMaxConsumedUnits + if tt.consumedUnitCapsF != nil { + consumedUnitCaps = tt.consumedUnitCapsF() + } + + fc := &Calculator{ + IsEUpgradeActive: cfg.IsEUpgradeActivated(chainTime), + Config: cfg, + Codec: codec, + FeeManager: fees.NewManager(testUnitFees), + ConsumedUnitsCap: consumedUnitCaps, + Credentials: sTx.Creds, + } + err := uTx.Visit(fc) + r.ErrorIs(err, tt.expectedError) + tt.checksF(t, fc) + }) + } +} + +func TestCreateAssetTxFees(t *testing.T) { + r := require.New(t) + + defaultCtx := snowtest.Context(t, snowtest.PChainID) + codec, err := createTestFeesCodec(defaultCtx) + r.NoError(err) + + baseTx, _ := txsCreationHelpers(defaultCtx) + uTx := &txs.CreateAssetTx{ + BaseTx: baseTx, + Name: "name", + Symbol: "symb", + Denomination: 0, + States: []*txs.InitialState{ + { + FxIndex: 0, + Outs: []verify.State{ + &secp256k1fx.MintOutput{ + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{feeTestKeys[0].PublicKey().Address()}, + }, + }, + }, + }, + }, + } + sTx := &txs.Tx{ + Unsigned: uTx, + } + r.NoError(sTx.SignSECP256K1Fx(codec, feeTestSigners)) + + tests := []feeTests{ + { + description: "pre E fork", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(-1 * time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, fc.Config.CreateAssetTxFee, fc.Fee) + }, + }, + { + description: "post E fork, success", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, 2985*units.MicroAvax, fc.Fee) + require.Equal(t, + fees.Dimensions{ + 289, + 1090, + 172, + 0, + }, + fc.FeeManager.GetCumulatedUnits(), + ) + }, + }, + { + description: "post E fork, bandwidth cap breached", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + return &cfg, chainTime + }, + consumedUnitCapsF: func() fees.Dimensions { + caps := testBlockMaxConsumedUnits + caps[fees.Bandwidth] = 289 - 1 + return caps + }, + expectedError: errFailedConsumedUnitsCumulation, + checksF: func(t *testing.T, fc *Calculator) {}, + }, + } + + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + cfg, chainTime := tt.cfgAndChainTimeF() + + consumedUnitCaps := testBlockMaxConsumedUnits + if tt.consumedUnitCapsF != nil { + consumedUnitCaps = tt.consumedUnitCapsF() + } + + fc := &Calculator{ + IsEUpgradeActive: cfg.IsEUpgradeActivated(chainTime), + Config: cfg, + Codec: codec, + FeeManager: fees.NewManager(testUnitFees), + ConsumedUnitsCap: consumedUnitCaps, + Credentials: sTx.Creds, + } + err := uTx.Visit(fc) + r.ErrorIs(err, tt.expectedError) + tt.checksF(t, fc) + }) + } +} + +func 
TestOperationTxFees(t *testing.T) { + r := require.New(t) + + defaultCtx := snowtest.Context(t, snowtest.PChainID) + codec, err := createTestFeesCodec(defaultCtx) + r.NoError(err) + + baseTx, _ := txsCreationHelpers(defaultCtx) + assetID := ids.GenerateTestID() + uTx := &txs.OperationTx{ + BaseTx: baseTx, + Ops: []*txs.Operation{{ + Asset: avax.Asset{ID: assetID}, + UTXOIDs: []*avax.UTXOID{{ + TxID: assetID, + OutputIndex: 1, + }}, + Op: &nftfx.MintOperation{ + MintInput: secp256k1fx.Input{ + SigIndices: []uint32{0}, + }, + GroupID: 1, + Payload: []byte{'h', 'e', 'l', 'l', 'o'}, + Outputs: []*secp256k1fx.OutputOwners{{}}, + }, + }}, + } + sTx := &txs.Tx{ + Unsigned: uTx, + } + r.NoError(sTx.SignSECP256K1Fx(codec, feeTestSigners)) + + tests := []feeTests{ + { + description: "pre E fork", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(-1 * time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, fc.Config.TxFee, fc.Fee) + }, + }, + { + description: "post E fork, success", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, 3041*units.MicroAvax, fc.Fee) + require.Equal(t, + fees.Dimensions{ + 345, + 1090, + 172, + 0, + }, + fc.FeeManager.GetCumulatedUnits(), + ) + }, + }, + { + description: "post E fork, utxos read cap breached", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + return &cfg, chainTime + }, + consumedUnitCapsF: func() fees.Dimensions { + caps := testBlockMaxConsumedUnits + caps[fees.UTXORead] = 1090 - 1 + return caps + }, + expectedError: errFailedConsumedUnitsCumulation, + checksF: func(t *testing.T, fc *Calculator) {}, + }, + } + + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + cfg, chainTime := tt.cfgAndChainTimeF() + + consumedUnitCaps := testBlockMaxConsumedUnits + if tt.consumedUnitCapsF != nil { + consumedUnitCaps = tt.consumedUnitCapsF() + } + + fc := &Calculator{ + IsEUpgradeActive: cfg.IsEUpgradeActivated(chainTime), + Config: cfg, + Codec: codec, + FeeManager: fees.NewManager(testUnitFees), + ConsumedUnitsCap: consumedUnitCaps, + Credentials: sTx.Creds, + } + err := uTx.Visit(fc) + r.ErrorIs(err, tt.expectedError) + tt.checksF(t, fc) + }) + } +} + +func TestImportTxFees(t *testing.T) { + r := require.New(t) + + defaultCtx := snowtest.Context(t, snowtest.PChainID) + codec, err := createTestFeesCodec(defaultCtx) + r.NoError(err) + + baseTx, _ := txsCreationHelpers(defaultCtx) + uTx := &txs.ImportTx{ + BaseTx: baseTx, + SourceChain: ids.GenerateTestID(), + ImportedIns: []*avax.TransferableInput{{ + UTXOID: avax.UTXOID{ + TxID: ids.Empty.Prefix(1), + OutputIndex: 1, + }, + Asset: avax.Asset{ID: ids.ID{'a', 's', 's', 'e', 'r', 't'}}, + In: &secp256k1fx.TransferInput{ + Amt: 50000, + Input: secp256k1fx.Input{SigIndices: []uint32{0}}, + }, + }}, + } + sTx := &txs.Tx{ + Unsigned: uTx, + } + 
r.NoError(sTx.SignSECP256K1Fx(codec, feeTestSigners)) + + tests := []feeTests{ + { + description: "pre E fork", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(-1 * time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, fc.Config.TxFee, fc.Fee) + }, + }, + { + description: "post E fork, success", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, 5494*units.MicroAvax, fc.Fee) + require.Equal(t, + fees.Dimensions{ + 348, + 2180, + 262, + 0, + }, + fc.FeeManager.GetCumulatedUnits(), + ) + }, + }, + { + description: "post E fork, utxos read cap breached", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + return &cfg, chainTime + }, + consumedUnitCapsF: func() fees.Dimensions { + caps := testBlockMaxConsumedUnits + caps[fees.UTXORead] = 1090 - 1 + return caps + }, + expectedError: errFailedConsumedUnitsCumulation, + checksF: func(t *testing.T, fc *Calculator) {}, + }, + } + + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + cfg, chainTime := tt.cfgAndChainTimeF() + + consumedUnitCaps := testBlockMaxConsumedUnits + if tt.consumedUnitCapsF != nil { + consumedUnitCaps = tt.consumedUnitCapsF() + } + + fc := &Calculator{ + IsEUpgradeActive: cfg.IsEUpgradeActivated(chainTime), + Config: cfg, + Codec: codec, + FeeManager: fees.NewManager(testUnitFees), + ConsumedUnitsCap: consumedUnitCaps, + Credentials: sTx.Creds, + } + err := uTx.Visit(fc) + r.ErrorIs(err, tt.expectedError) + tt.checksF(t, fc) + }) + } +} + +func TestExportTxFees(t *testing.T) { + r := require.New(t) + + defaultCtx := snowtest.Context(t, snowtest.PChainID) + codec, err := createTestFeesCodec(defaultCtx) + r.NoError(err) + + baseTx, outputs := txsCreationHelpers(defaultCtx) + uTx := &txs.ExportTx{ + BaseTx: baseTx, + DestinationChain: ids.GenerateTestID(), + ExportedOuts: outputs, + } + sTx := &txs.Tx{ + Unsigned: uTx, + } + r.NoError(sTx.SignSECP256K1Fx(codec, feeTestSigners)) + + tests := []feeTests{ + { + description: "pre E fork", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(-1 * time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, fc.Config.TxFee, fc.Fee) + }, + }, + { + description: "post E fork, success", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + + return &cfg, chainTime + }, + expectedError: nil, + checksF: func(t *testing.T, fc *Calculator) { + require.Equal(t, 3282*units.MicroAvax, fc.Fee) + 
require.Equal(t, + fees.Dimensions{ + 340, + 1090, + 254, + 0, + }, + fc.FeeManager.GetCumulatedUnits(), + ) + }, + }, + { + description: "post E fork, utxos read cap breached", + cfgAndChainTimeF: func() (*config.Config, time.Time) { + eForkTime := time.Now().Truncate(time.Second) + chainTime := eForkTime.Add(time.Second) + + cfg := feeTestsDefaultCfg + cfg.DurangoTime = durangoTime + cfg.EUpgradeTime = eForkTime + return &cfg, chainTime + }, + consumedUnitCapsF: func() fees.Dimensions { + caps := testBlockMaxConsumedUnits + caps[fees.UTXORead] = 1090 - 1 + return caps + }, + expectedError: errFailedConsumedUnitsCumulation, + checksF: func(t *testing.T, fc *Calculator) {}, + }, + } + + for _, tt := range tests { + t.Run(tt.description, func(t *testing.T) { + cfg, chainTime := tt.cfgAndChainTimeF() + + consumedUnitCaps := testBlockMaxConsumedUnits + if tt.consumedUnitCapsF != nil { + consumedUnitCaps = tt.consumedUnitCapsF() + } + + fc := &Calculator{ + IsEUpgradeActive: cfg.IsEUpgradeActivated(chainTime), + Config: cfg, + Codec: codec, + FeeManager: fees.NewManager(testUnitFees), + ConsumedUnitsCap: consumedUnitCaps, + Credentials: sTx.Creds, + } + err := uTx.Visit(fc) + r.ErrorIs(err, tt.expectedError) + tt.checksF(t, fc) + }) + } +} + +func createTestFeesCodec(defaultCtx *snow.Context) (codec.Manager, error) { + fxs := []fxs.Fx{ + &secp256k1fx.Fx{}, + &nftfx.Fx{}, + } + + parser, err := block.NewCustomParser( + durangoTime, + map[reflect.Type]int{}, + &mockable.Clock{}, + defaultCtx.Log, + fxs, + ) + if err != nil { + return nil, err + } + + return parser.Codec(), nil +} + +func txsCreationHelpers(defaultCtx *snow.Context) ( + baseTx txs.BaseTx, + otherOutputs []*avax.TransferableOutput, +) { + inputs := []*avax.TransferableInput{{ + UTXOID: avax.UTXOID{ + TxID: ids.ID{'t', 'x', 'I', 'D'}, + OutputIndex: 2, + }, + Asset: avax.Asset{ID: defaultCtx.AVAXAssetID}, + In: &secp256k1fx.TransferInput{ + Amt: uint64(5678), + Input: secp256k1fx.Input{SigIndices: []uint32{0}}, + }, + }} + outputs := []*avax.TransferableOutput{{ + Asset: avax.Asset{ID: defaultCtx.AVAXAssetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: uint64(1234), + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{feeTestKeys[0].PublicKey().Address()}, + }, + }, + }} + otherOutputs = []*avax.TransferableOutput{{ + Asset: avax.Asset{ID: defaultCtx.AVAXAssetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: uint64(4567), + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{feeTestKeys[1].PublicKey().Address()}, + }, + }, + }} + baseTx = txs.BaseTx{ + BaseTx: avax.BaseTx{ + NetworkID: defaultCtx.NetworkID, + BlockchainID: defaultCtx.ChainID, + Ins: inputs, + Outs: outputs, + }, + } + + return baseTx, otherOutputs +} diff --git a/vms/components/fees/dimensions.go b/vms/components/fees/dimensions.go new file mode 100644 index 000000000000..332f11b750f8 --- /dev/null +++ b/vms/components/fees/dimensions.go @@ -0,0 +1,34 @@ +// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
+ +package fees + +import "github.com/ava-labs/avalanchego/utils/math" + +const ( + Bandwidth Dimension = 0 + UTXORead Dimension = 1 + UTXOWrite Dimension = 2 // includes delete + Compute Dimension = 3 // signatures checks, tx-specific + + FeeDimensions = 4 +) + +var Empty = Dimensions{} + +type ( + Dimension int + Dimensions [FeeDimensions]uint64 +) + +func Add(lhs, rhs Dimensions) (Dimensions, error) { + var res Dimensions + for i := 0; i < FeeDimensions; i++ { + v, err := math.Add64(lhs[i], rhs[i]) + if err != nil { + return res, err + } + res[i] = v + } + return res, nil +} diff --git a/vms/components/fees/helpers.go b/vms/components/fees/helpers.go new file mode 100644 index 000000000000..8db95ae9bff9 --- /dev/null +++ b/vms/components/fees/helpers.go @@ -0,0 +1,70 @@ +// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package fees + +import ( + "fmt" + + "github.com/ava-labs/avalanchego/codec" + "github.com/ava-labs/avalanchego/utils/crypto/secp256k1" + "github.com/ava-labs/avalanchego/utils/wrappers" + "github.com/ava-labs/avalanchego/vms/components/avax" + "github.com/ava-labs/avalanchego/vms/components/verify" + "github.com/ava-labs/avalanchego/vms/secp256k1fx" +) + +func GetInputsDimensions(c codec.Manager, v uint16, ins []*avax.TransferableInput) (Dimensions, error) { + var consumedUnits Dimensions + for _, in := range ins { + cost, err := in.In.Cost() + if err != nil { + return consumedUnits, fmt.Errorf("failed retrieving cost of input %s: %w", in.ID, err) + } + + inSize, err := c.Size(v, in) + if err != nil { + return consumedUnits, fmt.Errorf("failed retrieving size of input %s: %w", in.ID, err) + } + uInSize := uint64(inSize) + + consumedUnits[Bandwidth] += uInSize - codec.CodecVersionSize + consumedUnits[UTXORead] += cost + uInSize // inputs are read + consumedUnits[UTXOWrite] += uInSize // inputs are deleted + } + return consumedUnits, nil +} + +func GetOutputsDimensions(c codec.Manager, v uint16, outs []*avax.TransferableOutput) (Dimensions, error) { + var consumedUnits Dimensions + for _, out := range outs { + outSize, err := c.Size(v, out) + if err != nil { + return consumedUnits, fmt.Errorf("failed retrieving size of output %s: %w", out.ID, err) + } + uOutSize := uint64(outSize) + + consumedUnits[Bandwidth] += uOutSize - codec.CodecVersionSize + consumedUnits[UTXOWrite] += uOutSize + } + return consumedUnits, nil +} + +func GetCredentialsDimensions(c codec.Manager, v uint16, inputSigIndices []uint32) (Dimensions, error) { + var consumedUnits Dimensions + + // Ensure that codec picks interface instead of the pointer to evaluate size. + creds := make([]verify.Verifiable, 0, 1) + creds = append(creds, &secp256k1fx.Credential{ + Sigs: make([][secp256k1.SignatureLen]byte, len(inputSigIndices)), + }) + + credSize, err := c.Size(v, creds) + if err != nil { + return consumedUnits, fmt.Errorf("failed retrieving size of credentials: %w", err) + } + credSize -= wrappers.IntLen // length of the slice, we want the single credential + + consumedUnits[Bandwidth] += uint64(credSize) - codec.CodecVersionSize + return consumedUnits, nil +} diff --git a/vms/components/fees/manager.go b/vms/components/fees/manager.go new file mode 100644 index 000000000000..325c0a73f994 --- /dev/null +++ b/vms/components/fees/manager.go @@ -0,0 +1,92 @@ +// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
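The fees.Manager defined in this new file tracks the units a block has consumed so far and prices additional units. A minimal usage sketch, mirroring the CumulateUnits/CalculateFee/RemoveUnits pattern that Calculator.AddFeesFor above relies on; the chargeTx helper and the numbers in main are illustrative, not part of the patch:

package main

import (
	"fmt"

	"github.com/ava-labs/avalanchego/vms/components/fees"
)

// chargeTx speculatively cumulates a tx's consumed units against the block caps,
// prices them, and rolls them back on failure so the manager stays consistent.
func chargeTx(m *fees.Manager, caps, txUnits fees.Dimensions) (uint64, error) {
	if breached, dim := m.CumulateUnits(txUnits, caps); breached {
		return 0, fmt.Errorf("tx would breach block unit cap on dimension %d", dim)
	}

	fee, err := m.CalculateFee(txUnits)
	if err != nil {
		// Revert the speculative cumulation, as described for RemoveUnits further down.
		_ = m.RemoveUnits(txUnits)
		return 0, err
	}
	return fee, nil
}

func main() {
	m := fees.NewManager(fees.Dimensions{1, 2, 3, 4})   // unit fees per dimension
	caps := fees.Dimensions{3000, 3500, 1000, 1000}     // block unit caps
	fee, err := chargeTx(m, caps, fees.Dimensions{224, 1090, 172, 0})
	fmt.Println(fee, err) // 2920 <nil>
}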
+ +package fees + +import ( + "fmt" + + safemath "github.com/ava-labs/avalanchego/utils/math" +) + +type Manager struct { + // Avax denominated unit fees for all fee dimensions + unitFees Dimensions + + // cumulatedUnits helps aggregating the units consumed by a block + // so that we can verify it's not too big/build it properly. + cumulatedUnits Dimensions +} + +func NewManager(unitFees Dimensions) *Manager { + return &Manager{ + unitFees: unitFees, + } +} + +func (m *Manager) GetUnitFees() Dimensions { + return m.unitFees +} + +func (m *Manager) GetCumulatedUnits() Dimensions { + return m.cumulatedUnits +} + +// CalculateFee must be a stateless method +func (m *Manager) CalculateFee(units Dimensions) (uint64, error) { + fee := uint64(0) + + for i := Dimension(0); i < FeeDimensions; i++ { + contribution, err := safemath.Mul64(m.unitFees[i], units[i]) + if err != nil { + return 0, err + } + fee, err = safemath.Add64(contribution, fee) + if err != nil { + return 0, err + } + } + return fee, nil +} + +// CumulateUnits tries to cumulate the consumed units [units]. Before +// actually cumulating them, it checks whether the result would breach [bounds]. +// If so, it returns the first dimension to breach bounds. +func (m *Manager) CumulateUnits(units, bounds Dimensions) (bool, Dimension) { + // Ensure we can consume (don't want partial update of values) + for i := Dimension(0); i < FeeDimensions; i++ { + consumed, err := safemath.Add64(m.cumulatedUnits[i], units[i]) + if err != nil { + return true, i + } + if consumed > bounds[i] { + return true, i + } + } + + // Commit to consumption + for i := Dimension(0); i < FeeDimensions; i++ { + consumed, err := safemath.Add64(m.cumulatedUnits[i], units[i]) + if err != nil { + return true, i + } + m.cumulatedUnits[i] = consumed + } + return false, 0 +} + +// Sometimes, e.g. while building a tx, we'd like freedom to speculatively add units +// and to remove them later on. 
[RemoveUnits] grants this freedom +func (m *Manager) RemoveUnits(unitsToRm Dimensions) error { + var revertedUnits Dimensions + for i := Dimension(0); i < FeeDimensions; i++ { + prev, err := safemath.Sub(m.cumulatedUnits[i], unitsToRm[i]) + if err != nil { + return fmt.Errorf("%w: dimension %d", err, i) + } + revertedUnits[i] = prev + } + + m.cumulatedUnits = revertedUnits + return nil +} diff --git a/wallet/chain/x/builder.go b/wallet/chain/x/builder.go index 27932019e1f4..7b9b5e5cde45 100644 --- a/wallet/chain/x/builder.go +++ b/wallet/chain/x/builder.go @@ -306,7 +306,7 @@ func (b *builder) NewOperationTxMintFT( options ...common.Option, ) (*txs.OperationTx, error) { ops := common.NewOptions(options) - operations, err := b.mintFTs(outputs, ops) + operations, err := mintFTs(b.addrs, b.backend, outputs, ops) if err != nil { return nil, err } @@ -320,7 +320,7 @@ func (b *builder) NewOperationTxMintNFT( options ...common.Option, ) (*txs.OperationTx, error) { ops := common.NewOptions(options) - operations, err := b.mintNFTs(assetID, payload, owners, ops) + operations, err := mintNFTs(b.addrs, b.backend, assetID, payload, owners, ops) if err != nil { return nil, err } @@ -333,7 +333,7 @@ func (b *builder) NewOperationTxMintProperty( options ...common.Option, ) (*txs.OperationTx, error) { ops := common.NewOptions(options) - operations, err := b.mintProperty(assetID, owner, ops) + operations, err := mintProperty(b.addrs, b.backend, assetID, owner, ops) if err != nil { return nil, err } @@ -345,7 +345,7 @@ func (b *builder) NewOperationTxBurnProperty( options ...common.Option, ) (*txs.OperationTx, error) { ops := common.NewOptions(options) - operations, err := b.burnProperty(assetID, ops) + operations, err := burnProperty(b.addrs, b.backend, assetID, ops) if err != nil { return nil, err } @@ -635,19 +635,21 @@ func (b *builder) spend( return inputs, outputs, nil } -func (b *builder) mintFTs( +func mintFTs( + addresses set.Set[ids.ShortID], + backend BuilderBackend, outputs map[ids.ID]*secp256k1fx.TransferOutput, options *common.Options, ) ( operations []*txs.Operation, err error, ) { - utxos, err := b.backend.UTXOs(options.Context(), b.backend.BlockchainID()) + utxos, err := backend.UTXOs(options.Context(), backend.BlockchainID()) if err != nil { return nil, err } - addrs := options.Addresses(b.addrs) + addrs := options.Addresses(addresses) minIssuanceTime := options.MinIssuanceTime() for _, utxo := range utxos { @@ -696,7 +698,9 @@ func (b *builder) mintFTs( } // TODO: make this able to generate multiple NFT groups -func (b *builder) mintNFTs( +func mintNFTs( + addresses set.Set[ids.ShortID], + backend BuilderBackend, assetID ids.ID, payload []byte, owners []*secp256k1fx.OutputOwners, @@ -705,12 +709,12 @@ func (b *builder) mintNFTs( operations []*txs.Operation, err error, ) { - utxos, err := b.backend.UTXOs(options.Context(), b.backend.BlockchainID()) + utxos, err := backend.UTXOs(options.Context(), backend.BlockchainID()) if err != nil { return nil, err } - addrs := options.Addresses(b.addrs) + addrs := options.Addresses(addresses) minIssuanceTime := options.MinIssuanceTime() for _, utxo := range utxos { @@ -754,7 +758,9 @@ func (b *builder) mintNFTs( ) } -func (b *builder) mintProperty( +func mintProperty( + addresses set.Set[ids.ShortID], + backend BuilderBackend, assetID ids.ID, owner *secp256k1fx.OutputOwners, options *common.Options, @@ -762,12 +768,12 @@ func (b *builder) mintProperty( operations []*txs.Operation, err error, ) { - utxos, err := b.backend.UTXOs(options.Context(), 
b.backend.BlockchainID()) + utxos, err := backend.UTXOs(options.Context(), backend.BlockchainID()) if err != nil { return nil, err } - addrs := options.Addresses(b.addrs) + addrs := options.Addresses(addresses) minIssuanceTime := options.MinIssuanceTime() for _, utxo := range utxos { @@ -812,19 +818,21 @@ func (b *builder) mintProperty( ) } -func (b *builder) burnProperty( +func burnProperty( + addresses set.Set[ids.ShortID], + backend BuilderBackend, assetID ids.ID, options *common.Options, ) ( operations []*txs.Operation, err error, ) { - utxos, err := b.backend.UTXOs(options.Context(), b.backend.BlockchainID()) + utxos, err := backend.UTXOs(options.Context(), backend.BlockchainID()) if err != nil { return nil, err } - addrs := options.Addresses(b.addrs) + addrs := options.Addresses(addresses) minIssuanceTime := options.MinIssuanceTime() for _, utxo := range utxos { diff --git a/wallet/chain/x/builder_dynamic_fees.go b/wallet/chain/x/builder_dynamic_fees.go new file mode 100644 index 000000000000..04a9244178d3 --- /dev/null +++ b/wallet/chain/x/builder_dynamic_fees.go @@ -0,0 +1,701 @@ +// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package x + +import ( + "fmt" + + "golang.org/x/exp/slices" + + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/utils" + "github.com/ava-labs/avalanchego/utils/constants" + "github.com/ava-labs/avalanchego/utils/math" + "github.com/ava-labs/avalanchego/utils/set" + "github.com/ava-labs/avalanchego/vms/avm/txs" + "github.com/ava-labs/avalanchego/vms/avm/txs/fees" + "github.com/ava-labs/avalanchego/vms/components/avax" + "github.com/ava-labs/avalanchego/vms/components/verify" + "github.com/ava-labs/avalanchego/vms/platformvm/stakeable" + "github.com/ava-labs/avalanchego/vms/secp256k1fx" + "github.com/ava-labs/avalanchego/wallet/subnet/primary/common" + + commonfees "github.com/ava-labs/avalanchego/vms/components/fees" +) + +type DynamicFeesBuilder struct { + addrs set.Set[ids.ShortID] + backend BuilderBackend +} + +func NewDynamicFeesBuilder(addrs set.Set[ids.ShortID], backend BuilderBackend) *DynamicFeesBuilder { + return &DynamicFeesBuilder{ + addrs: addrs, + backend: backend, + } +} + +func (b *DynamicFeesBuilder) NewBaseTx( + outputs []*avax.TransferableOutput, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.BaseTx, error) { + // 1. Build core transaction without utxos + ops := common.NewOptions(options) + + utx := &txs.BaseTx{ + BaseTx: avax.BaseTx{ + NetworkID: b.backend.NetworkID(), + BlockchainID: constants.PlatformChainID, + Memo: ops.Memo(), + Outs: outputs, // not sorted yet, we'll sort later on when we have all the outputs + }, + } + + // 2. Finance the tx by building the utxos (inputs, outputs and stakes) + toBurn := map[ids.ID]uint64{} // fees are calculated in financeTx + for _, out := range outputs { + assetID := out.AssetID() + amountToBurn, err := math.Add64(toBurn[assetID], out.Out.Amount()) + if err != nil { + return nil, err + } + toBurn[assetID] = amountToBurn + } + + feesMan := commonfees.NewManager(unitFees) + feeCalc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: feesMan, + ConsumedUnitsCap: unitCaps, + } + + // feesMan cumulates consumed units. 
Let's init it with utx filled so far + if err := feeCalc.BaseTx(utx); err != nil { + return nil, err + } + + inputs, changeOuts, err := b.financeTx(toBurn, feeCalc, ops) + if err != nil { + return nil, err + } + + outputs = append(outputs, changeOuts...) + avax.SortTransferableOutputs(outputs, Parser.Codec()) + utx.Ins = inputs + utx.Outs = outputs + + return utx, b.initCtx(utx) +} + +func (b *DynamicFeesBuilder) NewCreateAssetTx( + name string, + symbol string, + denomination byte, + initialState map[uint32][]verify.State, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.CreateAssetTx, error) { + // 1. Build core transaction without utxos + ops := common.NewOptions(options) + codec := Parser.Codec() + states := make([]*txs.InitialState, 0, len(initialState)) + for fxIndex, outs := range initialState { + state := &txs.InitialState{ + FxIndex: fxIndex, + FxID: fxIndexToID[fxIndex], + Outs: outs, + } + state.Sort(codec) // sort the outputs + states = append(states, state) + } + + utils.Sort(states) // sort the initial states + + utx := &txs.CreateAssetTx{ + BaseTx: txs.BaseTx{BaseTx: avax.BaseTx{ + NetworkID: b.backend.NetworkID(), + BlockchainID: b.backend.BlockchainID(), + Memo: ops.Memo(), + }}, + Name: name, + Symbol: symbol, + Denomination: denomination, + States: states, + } + + // 2. Finance the tx by building the utxos (inputs, outputs and stakes) + toBurn := map[ids.ID]uint64{} // fees are calculated in financeTx + feesMan := commonfees.NewManager(unitFees) + feeCalc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: feesMan, + ConsumedUnitsCap: unitCaps, + } + + // feesMan cumulates consumed units. Let's init it with utx filled so far + if err := feeCalc.CreateAssetTx(utx); err != nil { + return nil, err + } + + inputs, changeOuts, err := b.financeTx(toBurn, feeCalc, ops) + if err != nil { + return nil, err + } + + utx.Ins = inputs + utx.Outs = changeOuts + + return utx, b.initCtx(utx) +} + +func (b *DynamicFeesBuilder) NewOperationTx( + operations []*txs.Operation, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.OperationTx, error) { + // 1. Build core transaction without utxos + ops := common.NewOptions(options) + codec := Parser.Codec() + txs.SortOperations(operations, codec) + + utx := &txs.OperationTx{ + BaseTx: txs.BaseTx{BaseTx: avax.BaseTx{ + NetworkID: b.backend.NetworkID(), + BlockchainID: b.backend.BlockchainID(), + Memo: ops.Memo(), + }}, + Ops: operations, + } + + // 2. Finance the tx by building the utxos (inputs, outputs and stakes) + toBurn := map[ids.ID]uint64{} // fees are calculated in financeTx + feesMan := commonfees.NewManager(unitFees) + feeCalc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: feesMan, + ConsumedUnitsCap: unitCaps, + } + + // feesMan cumulates consumed units. 
Let's init it with utx filled so far + if err := feeCalc.OperationTx(utx); err != nil { + return nil, err + } + + inputs, changeOuts, err := b.financeTx(toBurn, feeCalc, ops) + if err != nil { + return nil, err + } + + utx.Ins = inputs + utx.Outs = changeOuts + + return utx, b.initCtx(utx) +} + +func (b *DynamicFeesBuilder) NewOperationTxMintFT( + outputs map[ids.ID]*secp256k1fx.TransferOutput, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.OperationTx, error) { + ops := common.NewOptions(options) + operations, err := mintFTs(b.addrs, b.backend, outputs, ops) + if err != nil { + return nil, err + } + return b.NewOperationTx( + operations, + unitFees, + unitCaps, + options..., + ) +} + +func (b *DynamicFeesBuilder) NewOperationTxMintNFT( + assetID ids.ID, + payload []byte, + owners []*secp256k1fx.OutputOwners, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.OperationTx, error) { + ops := common.NewOptions(options) + operations, err := mintNFTs(b.addrs, b.backend, assetID, payload, owners, ops) + if err != nil { + return nil, err + } + return b.NewOperationTx( + operations, + unitFees, + unitCaps, + options..., + ) +} + +func (b *DynamicFeesBuilder) NewOperationTxMintProperty( + assetID ids.ID, + owner *secp256k1fx.OutputOwners, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.OperationTx, error) { + ops := common.NewOptions(options) + operations, err := mintProperty(b.addrs, b.backend, assetID, owner, ops) + if err != nil { + return nil, err + } + return b.NewOperationTx( + operations, + unitFees, + unitCaps, + options..., + ) +} + +func (b *DynamicFeesBuilder) NewOperationTxBurnProperty( + assetID ids.ID, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.OperationTx, error) { + ops := common.NewOptions(options) + operations, err := burnProperty(b.addrs, b.backend, assetID, ops) + if err != nil { + return nil, err + } + return b.NewOperationTx( + operations, + unitFees, + unitCaps, + options..., + ) +} + +func (b *DynamicFeesBuilder) NewImportTx( + sourceChainID ids.ID, + to *secp256k1fx.OutputOwners, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.ImportTx, error) { + ops := common.NewOptions(options) + // 1. Build core transaction + utx := &txs.ImportTx{ + BaseTx: txs.BaseTx{BaseTx: avax.BaseTx{ + NetworkID: b.backend.NetworkID(), + BlockchainID: constants.PlatformChainID, + Memo: ops.Memo(), + }}, + SourceChain: sourceChainID, + } + + // 2. 
Add imported inputs first + utxos, err := b.backend.UTXOs(ops.Context(), sourceChainID) + if err != nil { + return nil, err + } + + var ( + addrs = ops.Addresses(b.addrs) + minIssuanceTime = ops.MinIssuanceTime() + avaxAssetID = b.backend.AVAXAssetID() + + importedInputs = make([]*avax.TransferableInput, 0, len(utxos)) + importedSigIndices = make([][]uint32, 0) + importedAmounts = make(map[ids.ID]uint64) + ) + + for _, utxo := range utxos { + out, ok := utxo.Out.(*secp256k1fx.TransferOutput) + if !ok { + continue + } + + inputSigIndices, ok := common.MatchOwners(&out.OutputOwners, addrs, minIssuanceTime) + if !ok { + // We couldn't spend this UTXO, so we skip to the next one + continue + } + + importedInputs = append(importedInputs, &avax.TransferableInput{ + UTXOID: utxo.UTXOID, + Asset: utxo.Asset, + In: &secp256k1fx.TransferInput{ + Amt: out.Amt, + Input: secp256k1fx.Input{ + SigIndices: inputSigIndices, + }, + }, + }) + + assetID := utxo.AssetID() + newImportedAmount, err := math.Add64(importedAmounts[assetID], out.Amt) + if err != nil { + return nil, err + } + importedAmounts[assetID] = newImportedAmount + importedSigIndices = append(importedSigIndices, inputSigIndices) + } + if len(importedInputs) == 0 { + return nil, fmt.Errorf( + "%w: no UTXOs available to import", + errInsufficientFunds, + ) + } + + utils.Sort(importedInputs) // sort imported inputs + utx.ImportedIns = importedInputs + + // 3. Add an output for all non-avax denominated inputs. + for assetID, amount := range importedAmounts { + if assetID == avaxAssetID { + // Avax-denominated inputs may be used to fully or partially pay fees, + // so we'll handle them later on. + continue + } + + utx.Outs = append(utx.Outs, &avax.TransferableOutput{ + Asset: avax.Asset{ID: assetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: amount, + OutputOwners: *to, + }, + }) // we'll sort them later on + } + + // 3. Finance fees as much as possible with imported, Avax-denominated UTXOs + feesMan := commonfees.NewManager(unitFees) + feeCalc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: feesMan, + ConsumedUnitsCap: unitCaps, + } + + // feesMan cumulates consumed units. 
Let's init it with utx filled so far + if err := feeCalc.ImportTx(utx); err != nil { + return nil, err + } + + for _, sigIndices := range importedSigIndices { + if _, err = financeCredential(feeCalc, sigIndices); err != nil { + return nil, fmt.Errorf("account for credential fees: %w", err) + } + } + + switch importedAVAX := importedAmounts[avaxAssetID]; { + case importedAVAX == feeCalc.Fee: + // imported inputs match exactly the fees to be paid + avax.SortTransferableOutputs(utx.Outs, Parser.Codec()) // sort imported outputs + return utx, b.initCtx(utx) + + case importedAVAX < feeCalc.Fee: + // imported inputs can partially pay fees + feeCalc.Fee -= importedAmounts[avaxAssetID] + + default: + // imported inputs may be enough to pay taxes by themselves + changeOut := &avax.TransferableOutput{ + Asset: avax.Asset{ID: avaxAssetID}, + Out: &secp256k1fx.TransferOutput{ + OutputOwners: *to, // we set amount after considering own fees + }, + } + + // update fees to target given the extra output added + outDimensions, err := commonfees.GetOutputsDimensions(Parser.Codec(), txs.CodecVersion, []*avax.TransferableOutput{changeOut}) + if err != nil { + return nil, fmt.Errorf("failed calculating output size: %w", err) + } + if _, err := feeCalc.AddFeesFor(outDimensions); err != nil { + return nil, fmt.Errorf("account for output fees: %w", err) + } + + switch { + case feeCalc.Fee < importedAVAX: + changeOut.Out.(*secp256k1fx.TransferOutput).Amt = importedAVAX - feeCalc.Fee + utx.Outs = append(utx.Outs, changeOut) + avax.SortTransferableOutputs(utx.Outs, Parser.Codec()) // sort imported outputs + return utx, b.initCtx(utx) + + case feeCalc.Fee == importedAVAX: + // imported fees pays exactly the tx cost. We don't include the outputs + avax.SortTransferableOutputs(utx.Outs, Parser.Codec()) // sort imported outputs + return utx, b.initCtx(utx) + + default: + // imported avax are not enough to pay fees + // Drop the changeOut and finance the tx + if _, err := feeCalc.RemoveFeesFor(outDimensions); err != nil { + return nil, fmt.Errorf("failed reverting change output: %w", err) + } + feeCalc.Fee -= importedAVAX + } + } + + toBurn := map[ids.ID]uint64{} + inputs, changeOuts, err := b.financeTx(toBurn, feeCalc, ops) + if err != nil { + return nil, err + } + + utx.Ins = inputs + utx.Outs = append(utx.Outs, changeOuts...) + avax.SortTransferableOutputs(utx.Outs, Parser.Codec()) // sort imported outputs + return utx, b.initCtx(utx) +} + +func (b *DynamicFeesBuilder) NewExportTx( + chainID ids.ID, + outputs []*avax.TransferableOutput, + unitFees, unitCaps commonfees.Dimensions, + options ...common.Option, +) (*txs.ExportTx, error) { + // 1. Build core transaction without utxos + ops := common.NewOptions(options) + avax.SortTransferableOutputs(outputs, Parser.Codec()) // sort exported outputs + + utx := &txs.ExportTx{ + BaseTx: txs.BaseTx{BaseTx: avax.BaseTx{ + NetworkID: b.backend.NetworkID(), + BlockchainID: constants.PlatformChainID, + Memo: ops.Memo(), + }}, + DestinationChain: chainID, + ExportedOuts: outputs, + } + + // 2. 
Finance the tx by building the utxos (inputs, outputs and stakes) + toBurn := map[ids.ID]uint64{} // fees are calculated in financeTx + for _, out := range outputs { + assetID := out.AssetID() + amountToBurn, err := math.Add64(toBurn[assetID], out.Out.Amount()) + if err != nil { + return nil, err + } + toBurn[assetID] = amountToBurn + } + + feesMan := commonfees.NewManager(unitFees) + feeCalc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: feesMan, + ConsumedUnitsCap: unitCaps, + } + + // feesMan cumulates consumed units. Let's init it with utx filled so far + if err := feeCalc.ExportTx(utx); err != nil { + return nil, err + } + + inputs, changeOuts, err := b.financeTx(toBurn, feeCalc, ops) + if err != nil { + return nil, err + } + + utx.Ins = inputs + utx.Outs = changeOuts + + return utx, b.initCtx(utx) +} + +func (b *DynamicFeesBuilder) financeTx( + amountsToBurn map[ids.ID]uint64, + feeCalc *fees.Calculator, + options *common.Options, +) ( + inputs []*avax.TransferableInput, + changeOutputs []*avax.TransferableOutput, + err error, +) { + avaxAssetID := b.backend.AVAXAssetID() + utxos, err := b.backend.UTXOs(options.Context(), constants.PlatformChainID) + if err != nil { + return nil, nil, err + } + + // we can only pay fees in avax, so we sort avax-denominated UTXOs last + // to maximize probability of being able to pay fees. + slices.SortFunc(utxos, func(lhs, rhs *avax.UTXO) int { + switch { + case lhs.Asset.AssetID() == avaxAssetID && rhs.Asset.AssetID() != avaxAssetID: + return 1 + case lhs.Asset.AssetID() != avaxAssetID && rhs.Asset.AssetID() == avaxAssetID: + return -1 + default: + return 0 + } + }) + + addrs := options.Addresses(b.addrs) + minIssuanceTime := options.MinIssuanceTime() + + addr, ok := addrs.Peek() + if !ok { + return nil, nil, errNoChangeAddress + } + changeOwner := options.ChangeOwner(&secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{addr}, + }) + + amountsToBurn[avaxAssetID] += feeCalc.Fee + + // Iterate over the unlocked UTXOs + for _, utxo := range utxos { + assetID := utxo.AssetID() + + // If we have consumed enough of the asset, then we have no need burn + // more. + if amountsToBurn[assetID] == 0 { + continue + } + + outIntf := utxo.Out + if lockedOut, ok := outIntf.(*stakeable.LockOut); ok { + if lockedOut.Locktime > minIssuanceTime { + // This output is currently locked, so this output can't be + // burned. 
+ continue + } + outIntf = lockedOut.TransferableOut + } + + out, ok := outIntf.(*secp256k1fx.TransferOutput) + if !ok { + return nil, nil, errUnknownOutputType + } + + inputSigIndices, ok := common.MatchOwners(&out.OutputOwners, addrs, minIssuanceTime) + if !ok { + // We couldn't spend this UTXO, so we skip to the next one + continue + } + + input := &avax.TransferableInput{ + UTXOID: utxo.UTXOID, + Asset: utxo.Asset, + In: &secp256k1fx.TransferInput{ + Amt: out.Amt, + Input: secp256k1fx.Input{ + SigIndices: inputSigIndices, + }, + }, + } + + addedFees, err := financeInput(feeCalc, input) + if err != nil { + return nil, nil, fmt.Errorf("account for input fees: %w", err) + } + amountsToBurn[avaxAssetID] += addedFees + + addedFees, err = financeCredential(feeCalc, inputSigIndices) + if err != nil { + return nil, nil, fmt.Errorf("account for credential fees: %w", err) + } + amountsToBurn[avaxAssetID] += addedFees + + inputs = append(inputs, input) + + // Burn any value that should be burned + amountToBurn := math.Min( + amountsToBurn[assetID], // Amount we still need to burn + out.Amt, // Amount available to burn + ) + amountsToBurn[assetID] -= amountToBurn + + // Burn any value that should be burned + if remainingAmount := out.Amt - amountToBurn; remainingAmount > 0 { + // This input had extra value, so some of it must be returned, once fees are removed + changeOut := &avax.TransferableOutput{ + Asset: utxo.Asset, + Out: &secp256k1fx.TransferOutput{ + OutputOwners: *changeOwner, + }, + } + + // update fees to account for the change output + addedFees, err = financeOutput(feeCalc, changeOut) + if err != nil { + return nil, nil, fmt.Errorf("account for output fees: %w", err) + } + + if assetID != avaxAssetID { + changeOut.Out.(*secp256k1fx.TransferOutput).Amt = remainingAmount + amountsToBurn[avaxAssetID] += addedFees + changeOutputs = append(changeOutputs, changeOut) + } else { + // here assetID == b.backend.AVAXAssetID() + switch { + case addedFees < remainingAmount: + changeOut.Out.(*secp256k1fx.TransferOutput).Amt = remainingAmount - addedFees + changeOutputs = append(changeOutputs, changeOut) + case addedFees >= remainingAmount: + amountsToBurn[assetID] += addedFees - remainingAmount + } + } + } + } + + for assetID, amount := range amountsToBurn { + if amount != 0 { + return nil, nil, fmt.Errorf( + "%w: provided UTXOs need %d more units of asset %q", + errInsufficientFunds, + amount, + assetID, + ) + } + } + + utils.Sort(inputs) // sort inputs + avax.SortTransferableOutputs(changeOutputs, Parser.Codec()) // sort the change outputs + return inputs, changeOutputs, nil +} + +func (b *DynamicFeesBuilder) initCtx(tx txs.UnsignedTx) error { + ctx, err := newSnowContext(b.backend) + if err != nil { + return err + } + + tx.InitCtx(ctx) + return nil +} + +func financeInput(feeCalc *fees.Calculator, input *avax.TransferableInput) (uint64, error) { + insDimensions, err := commonfees.GetInputsDimensions(Parser.Codec(), txs.CodecVersion, []*avax.TransferableInput{input}) + if err != nil { + return 0, fmt.Errorf("failed calculating input size: %w", err) + } + addedFees, err := feeCalc.AddFeesFor(insDimensions) + if err != nil { + return 0, fmt.Errorf("account for input fees: %w", err) + } + return addedFees, nil +} + +func financeOutput(feeCalc *fees.Calculator, output *avax.TransferableOutput) (uint64, error) { + outDimensions, err := commonfees.GetOutputsDimensions(Parser.Codec(), txs.CodecVersion, []*avax.TransferableOutput{output}) + if err != nil { + return 0, fmt.Errorf("failed calculating 
changeOut size: %w", err) + } + addedFees, err := feeCalc.AddFeesFor(outDimensions) + if err != nil { + return 0, fmt.Errorf("account for stakedOut fees: %w", err) + } + return addedFees, nil +} + +func financeCredential(feeCalc *fees.Calculator, inputSigIndices []uint32) (uint64, error) { + credsDimensions, err := commonfees.GetCredentialsDimensions(Parser.Codec(), txs.CodecVersion, inputSigIndices) + if err != nil { + return 0, fmt.Errorf("failed calculating input size: %w", err) + } + addedFees, err := feeCalc.AddFeesFor(credsDimensions) + if err != nil { + return 0, fmt.Errorf("account for input fees: %w", err) + } + return addedFees, nil +} diff --git a/wallet/chain/x/builder_dynamic_fees_test.go b/wallet/chain/x/builder_dynamic_fees_test.go new file mode 100644 index 000000000000..b9b1cf566bb7 --- /dev/null +++ b/wallet/chain/x/builder_dynamic_fees_test.go @@ -0,0 +1,475 @@ +// Copyright (C) 2019-2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package x + +import ( + stdcontext "context" + "math" + "testing" + "time" + + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/utils/constants" + "github.com/ava-labs/avalanchego/utils/crypto/secp256k1" + "github.com/ava-labs/avalanchego/utils/set" + "github.com/ava-labs/avalanchego/utils/units" + "github.com/ava-labs/avalanchego/vms/avm/txs/fees" + "github.com/ava-labs/avalanchego/vms/components/avax" + "github.com/ava-labs/avalanchego/vms/components/verify" + "github.com/ava-labs/avalanchego/vms/nftfx" + "github.com/ava-labs/avalanchego/vms/platformvm/stakeable" + "github.com/ava-labs/avalanchego/vms/propertyfx" + "github.com/ava-labs/avalanchego/vms/secp256k1fx" + "github.com/ava-labs/avalanchego/wallet/chain/x/mocks" + + commonfees "github.com/ava-labs/avalanchego/vms/components/fees" +) + +var ( + testKeys = secp256k1.TestKeys() + testUnitFees = commonfees.Dimensions{ + 1 * units.MicroAvax, + 2 * units.MicroAvax, + 3 * units.MicroAvax, + 4 * units.MicroAvax, + } + testBlockMaxConsumedUnits = commonfees.Dimensions{ + math.MaxUint64, + math.MaxUint64, + math.MaxUint64, + math.MaxUint64, + } +) + +// These tests create and sign a tx, then verify that utxos included +// in the tx are exactly necessary to pay fees for it + +func TestBaseTx(t *testing.T) { + require := require.New(t) + ctrl := gomock.NewController(t) + + be := mocks.NewMockBuilderBackend(ctrl) + + var ( + utxosKey = testKeys[1] + utxoAddr = utxosKey.PublicKey().Address() + utxos, avaxAssetID = testUTXOsList(utxosKey) + + outputsToMove = []*avax.TransferableOutput{{ + Asset: avax.Asset{ID: avaxAssetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: 7 * units.Avax, + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{utxosKey.PublicKey().Address()}, + }, + }, + }} + ) + + b := &DynamicFeesBuilder{ + addrs: set.Of(utxoAddr), + backend: be, + } + + be.EXPECT().AVAXAssetID().Return(avaxAssetID).AnyTimes() + be.EXPECT().NetworkID().Return(constants.MainnetID).AnyTimes() + be.EXPECT().BlockchainID().Return(constants.PlatformChainID) + be.EXPECT().UTXOs(gomock.Any(), constants.PlatformChainID).Return(utxos, nil) + + utx, err := b.NewBaseTx( + outputsToMove, + testUnitFees, + testBlockMaxConsumedUnits, + ) + require.NoError(err) + + var ( + kc = secp256k1fx.NewKeychain(utxosKey) + sbe = 
mocks.NewMockSignerBackend(ctrl) + s = NewSigner(kc, sbe) + ) + + for _, utxo := range utxos { + sbe.EXPECT().GetUTXO(gomock.Any(), gomock.Any(), utxo.InputID()).Return(utxo, nil).AnyTimes() + } + + tx, err := s.SignUnsigned(stdcontext.Background(), utx) + require.NoError(err) + + fc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: commonfees.NewManager(testUnitFees), + ConsumedUnitsCap: testBlockMaxConsumedUnits, + Credentials: tx.Creds, + } + require.NoError(utx.Visit(fc)) + require.Equal(5930*units.MicroAvax, fc.Fee) + + ins := utx.Ins + outs := utx.Outs + require.Len(ins, 2) + require.Len(outs, 2) + require.Equal(fc.Fee+outputsToMove[0].Out.Amount(), ins[0].In.Amount()+ins[1].In.Amount()-outs[0].Out.Amount()) + require.Equal(outputsToMove[0], outs[1]) +} + +func TestCreateAssetTx(t *testing.T) { + require := require.New(t) + ctrl := gomock.NewController(t) + + be := mocks.NewMockBuilderBackend(ctrl) + + var ( + utxosKey = testKeys[1] + utxoAddr = utxosKey.PublicKey().Address() + utxos, avaxAssetID = testUTXOsList(utxosKey) + + assetName = "Team Rocket" + symbol = "TR" + denomination uint8 = 0 + initialState = map[uint32][]verify.State{ + 0: { + &secp256k1fx.MintOutput{ + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{testKeys[0].PublicKey().Address()}, + }, + }, &secp256k1fx.MintOutput{ + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{testKeys[0].PublicKey().Address()}, + }, + }, + }, + 1: { + &nftfx.MintOutput{ + GroupID: 1, + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{testKeys[1].PublicKey().Address()}, + }, + }, + &nftfx.MintOutput{ + GroupID: 2, + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{testKeys[1].PublicKey().Address()}, + }, + }, + }, + 2: { + &propertyfx.MintOutput{ + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{testKeys[2].PublicKey().Address()}, + }, + }, + &propertyfx.MintOutput{ + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{testKeys[2].PublicKey().Address()}, + }, + }, + }, + } + ) + + b := &DynamicFeesBuilder{ + addrs: set.Of(utxoAddr), + backend: be, + } + + be.EXPECT().AVAXAssetID().Return(avaxAssetID).AnyTimes() + be.EXPECT().NetworkID().Return(constants.MainnetID).AnyTimes() + be.EXPECT().BlockchainID().Return(constants.PlatformChainID).AnyTimes() + be.EXPECT().UTXOs(gomock.Any(), constants.PlatformChainID).Return(utxos, nil) + + utx, err := b.NewCreateAssetTx( + assetName, + symbol, + denomination, + initialState, + testUnitFees, + testBlockMaxConsumedUnits, + ) + require.NoError(err) + + var ( + kc = secp256k1fx.NewKeychain(utxosKey) + sbe = mocks.NewMockSignerBackend(ctrl) + s = NewSigner(kc, sbe) + ) + + for _, utxo := range utxos { + sbe.EXPECT().GetUTXO(gomock.Any(), gomock.Any(), utxo.InputID()).Return(utxo, nil).AnyTimes() + } + + tx, err := s.SignUnsigned(stdcontext.Background(), utx) + require.NoError(err) + + fc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: commonfees.NewManager(testUnitFees), + ConsumedUnitsCap: testBlockMaxConsumedUnits, + Credentials: tx.Creds, + } + require.NoError(utx.Visit(fc)) + require.Equal(5898*units.MicroAvax, fc.Fee) + + ins := utx.Ins + outs := utx.Outs + require.Len(ins, 2) + require.Len(outs, 1) + require.Equal(fc.Fee, ins[0].In.Amount()+ins[1].In.Amount()-outs[0].Out.Amount()) +} + +func TestImportTx(t *testing.T) { + require := require.New(t) + 
ctrl := gomock.NewController(t) + + be := mocks.NewMockBuilderBackend(ctrl) + + var ( + utxosKey = testKeys[1] + utxoAddr = utxosKey.PublicKey().Address() + sourceChainID = ids.GenerateTestID() + utxos, avaxAssetID = testUTXOsList(utxosKey) + + importKey = testKeys[0] + importTo = &secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{ + importKey.Address(), + }, + } + ) + + importedUtxo := utxos[0] + utxos = utxos[1:] + + b := &DynamicFeesBuilder{ + addrs: set.Of(utxoAddr), + backend: be, + } + be.EXPECT().AVAXAssetID().Return(avaxAssetID).AnyTimes() + be.EXPECT().NetworkID().Return(constants.MainnetID).AnyTimes() + be.EXPECT().BlockchainID().Return(constants.PlatformChainID).AnyTimes() + be.EXPECT().UTXOs(gomock.Any(), sourceChainID).Return([]*avax.UTXO{importedUtxo}, nil) + be.EXPECT().UTXOs(gomock.Any(), constants.PlatformChainID).Return(utxos, nil) + + utx, err := b.NewImportTx( + sourceChainID, + importTo, + testUnitFees, + testBlockMaxConsumedUnits, + ) + require.NoError(err) + + var ( + kc = secp256k1fx.NewKeychain(utxosKey) + sbe = mocks.NewMockSignerBackend(ctrl) + s = NewSigner(kc, sbe) + ) + + sbe.EXPECT().GetUTXO(gomock.Any(), gomock.Any(), importedUtxo.InputID()).Return(importedUtxo, nil).AnyTimes() + for _, utxo := range utxos { + sbe.EXPECT().GetUTXO(gomock.Any(), gomock.Any(), utxo.InputID()).Return(utxo, nil).AnyTimes() + } + + tx, err := s.SignUnsigned(stdcontext.Background(), utx) + require.NoError(err) + + fc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: commonfees.NewManager(testUnitFees), + ConsumedUnitsCap: testBlockMaxConsumedUnits, + Credentials: tx.Creds, + } + require.NoError(utx.Visit(fc)) + require.Equal(5640*units.MicroAvax, fc.Fee) + + ins := utx.Ins + outs := utx.Outs + importedIns := utx.ImportedIns + require.Len(ins, 1) + require.Len(importedIns, 1) + require.Len(outs, 1) + require.Equal(fc.Fee, importedIns[0].In.Amount()+ins[0].In.Amount()-outs[0].Out.Amount()) +} + +func TestExportTx(t *testing.T) { + require := require.New(t) + ctrl := gomock.NewController(t) + + be := mocks.NewMockBuilderBackend(ctrl) + + var ( + utxosKey = testKeys[1] + utxoAddr = utxosKey.PublicKey().Address() + subnetID = ids.GenerateTestID() + utxos, avaxAssetID = testUTXOsList(utxosKey) + + exportedOutputs = []*avax.TransferableOutput{{ + Asset: avax.Asset{ID: avaxAssetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: 7 * units.Avax, + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{utxosKey.PublicKey().Address()}, + }, + }, + }} + ) + + b := &DynamicFeesBuilder{ + addrs: set.Of(utxoAddr), + backend: be, + } + be.EXPECT().AVAXAssetID().Return(avaxAssetID).AnyTimes() + be.EXPECT().NetworkID().Return(constants.MainnetID).AnyTimes() + be.EXPECT().BlockchainID().Return(constants.PlatformChainID) + be.EXPECT().UTXOs(gomock.Any(), constants.PlatformChainID).Return(utxos, nil) + + utx, err := b.NewExportTx( + subnetID, + exportedOutputs, + testUnitFees, + testBlockMaxConsumedUnits, + ) + require.NoError(err) + + var ( + kc = secp256k1fx.NewKeychain(utxosKey) + sbe = mocks.NewMockSignerBackend(ctrl) + s = NewSigner(kc, sbe) + ) + + for _, utxo := range utxos { + sbe.EXPECT().GetUTXO(gomock.Any(), gomock.Any(), utxo.InputID()).Return(utxo, nil).AnyTimes() + } + + tx, err := s.SignUnsigned(stdcontext.Background(), utx) + require.NoError(err) + + fc := &fees.Calculator{ + IsEUpgradeActive: true, + Codec: Parser.Codec(), + FeeManager: commonfees.NewManager(testUnitFees), + ConsumedUnitsCap: 
testBlockMaxConsumedUnits, + Credentials: tx.Creds, + } + require.NoError(utx.Visit(fc)) + require.Equal(5966*units.MicroAvax, fc.Fee) + + ins := utx.Ins + outs := utx.Outs + require.Len(ins, 2) + require.Len(outs, 1) + require.Equal(fc.Fee+exportedOutputs[0].Out.Amount(), ins[0].In.Amount()+ins[1].In.Amount()-outs[0].Out.Amount()) + require.Equal(utx.ExportedOuts, exportedOutputs) +} + +func testUTXOsList(utxosKey *secp256k1.PrivateKey) ( + []*avax.UTXO, + ids.ID, // avaxAssetID, +) { + // Note: we avoid ids.GenerateTestNodeID here to make sure that UTXO IDs won't change + // run by run. This simplifies checking what utxos are included in the built txs. + utxosOffset := uint64(2024) + + var ( + avaxAssetID = ids.Empty.Prefix(utxosOffset) + subnetAssetID = ids.Empty.Prefix(utxosOffset + 1) + ) + + return []*avax.UTXO{ // currently, the wallet scans UTXOs in the order provided here + { // a small UTXO first, which should not be enough to pay fees + UTXOID: avax.UTXOID{ + TxID: ids.Empty.Prefix(utxosOffset), + OutputIndex: uint32(utxosOffset), + }, + Asset: avax.Asset{ID: avaxAssetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: 2 * units.MilliAvax, + OutputOwners: secp256k1fx.OutputOwners{ + Locktime: 0, + Addrs: []ids.ShortID{utxosKey.PublicKey().Address()}, + Threshold: 1, + }, + }, + }, + { // a locked, small UTXO + UTXOID: avax.UTXOID{ + TxID: ids.Empty.Prefix(utxosOffset + 1), + OutputIndex: uint32(utxosOffset + 1), + }, + Asset: avax.Asset{ID: avaxAssetID}, + Out: &stakeable.LockOut{ + Locktime: uint64(time.Now().Add(time.Hour).Unix()), + TransferableOut: &secp256k1fx.TransferOutput{ + Amt: 3 * units.MilliAvax, + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{utxosKey.PublicKey().Address()}, + }, + }, + }, + }, + { // a subnetAssetID denominated UTXO + UTXOID: avax.UTXOID{ + TxID: ids.Empty.Prefix(utxosOffset + 2), + OutputIndex: uint32(utxosOffset + 2), + }, + Asset: avax.Asset{ID: subnetAssetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: 99 * units.MegaAvax, + OutputOwners: secp256k1fx.OutputOwners{ + Locktime: 0, + Addrs: []ids.ShortID{utxosKey.PublicKey().Address()}, + Threshold: 1, + }, + }, + }, + { // a locked, large UTXO + UTXOID: avax.UTXOID{ + TxID: ids.Empty.Prefix(utxosOffset + 3), + OutputIndex: uint32(utxosOffset + 3), + }, + Asset: avax.Asset{ID: avaxAssetID}, + Out: &stakeable.LockOut{ + Locktime: uint64(time.Now().Add(time.Hour).Unix()), + TransferableOut: &secp256k1fx.TransferOutput{ + Amt: 88 * units.Avax, + OutputOwners: secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{utxosKey.PublicKey().Address()}, + }, + }, + }, + }, + { // a large UTXO last, which should be enough to pay any fee by itself + UTXOID: avax.UTXOID{ + TxID: ids.Empty.Prefix(utxosOffset + 4), + OutputIndex: uint32(utxosOffset + 4), + }, + Asset: avax.Asset{ID: avaxAssetID}, + Out: &secp256k1fx.TransferOutput{ + Amt: 9 * units.Avax, + OutputOwners: secp256k1fx.OutputOwners{ + Locktime: 0, + Addrs: []ids.ShortID{utxosKey.PublicKey().Address()}, + Threshold: 1, + }, + }, + }, + }, + avaxAssetID +} diff --git a/wallet/chain/x/mocks/mock_builder_backend.go b/wallet/chain/x/mocks/mock_builder_backend.go new file mode 100644 index 000000000000..9e2a047ae7d1 --- /dev/null +++ b/wallet/chain/x/mocks/mock_builder_backend.go @@ -0,0 +1,127 @@ +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/ava-labs/avalanchego/wallet/chain/x (interfaces: BuilderBackend) +// +// Generated by this command: +// +// mockgen -package=mocks -destination=wallet/chain/x/mocks/mock_builder_backend.go github.com/ava-labs/avalanchego/wallet/chain/x BuilderBackend +// + +// Package mocks is a generated GoMock package. +package mocks + +import ( + context "context" + reflect "reflect" + + ids "github.com/ava-labs/avalanchego/ids" + avax "github.com/ava-labs/avalanchego/vms/components/avax" + gomock "go.uber.org/mock/gomock" +) + +// MockBuilderBackend is a mock of BuilderBackend interface. +type MockBuilderBackend struct { + ctrl *gomock.Controller + recorder *MockBuilderBackendMockRecorder +} + +// MockBuilderBackendMockRecorder is the mock recorder for MockBuilderBackend. +type MockBuilderBackendMockRecorder struct { + mock *MockBuilderBackend +} + +// NewMockBuilderBackend creates a new mock instance. +func NewMockBuilderBackend(ctrl *gomock.Controller) *MockBuilderBackend { + mock := &MockBuilderBackend{ctrl: ctrl} + mock.recorder = &MockBuilderBackendMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockBuilderBackend) EXPECT() *MockBuilderBackendMockRecorder { + return m.recorder +} + +// AVAXAssetID mocks base method. +func (m *MockBuilderBackend) AVAXAssetID() ids.ID { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AVAXAssetID") + ret0, _ := ret[0].(ids.ID) + return ret0 +} + +// AVAXAssetID indicates an expected call of AVAXAssetID. +func (mr *MockBuilderBackendMockRecorder) AVAXAssetID() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AVAXAssetID", reflect.TypeOf((*MockBuilderBackend)(nil).AVAXAssetID)) +} + +// BaseTxFee mocks base method. +func (m *MockBuilderBackend) BaseTxFee() uint64 { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "BaseTxFee") + ret0, _ := ret[0].(uint64) + return ret0 +} + +// BaseTxFee indicates an expected call of BaseTxFee. +func (mr *MockBuilderBackendMockRecorder) BaseTxFee() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BaseTxFee", reflect.TypeOf((*MockBuilderBackend)(nil).BaseTxFee)) +} + +// BlockchainID mocks base method. +func (m *MockBuilderBackend) BlockchainID() ids.ID { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "BlockchainID") + ret0, _ := ret[0].(ids.ID) + return ret0 +} + +// BlockchainID indicates an expected call of BlockchainID. +func (mr *MockBuilderBackendMockRecorder) BlockchainID() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BlockchainID", reflect.TypeOf((*MockBuilderBackend)(nil).BlockchainID)) +} + +// CreateAssetTxFee mocks base method. +func (m *MockBuilderBackend) CreateAssetTxFee() uint64 { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateAssetTxFee") + ret0, _ := ret[0].(uint64) + return ret0 +} + +// CreateAssetTxFee indicates an expected call of CreateAssetTxFee. +func (mr *MockBuilderBackendMockRecorder) CreateAssetTxFee() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAssetTxFee", reflect.TypeOf((*MockBuilderBackend)(nil).CreateAssetTxFee)) +} + +// NetworkID mocks base method. +func (m *MockBuilderBackend) NetworkID() uint32 { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "NetworkID") + ret0, _ := ret[0].(uint32) + return ret0 +} + +// NetworkID indicates an expected call of NetworkID. 
+func (mr *MockBuilderBackendMockRecorder) NetworkID() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NetworkID", reflect.TypeOf((*MockBuilderBackend)(nil).NetworkID)) +} + +// UTXOs mocks base method. +func (m *MockBuilderBackend) UTXOs(arg0 context.Context, arg1 ids.ID) ([]*avax.UTXO, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UTXOs", arg0, arg1) + ret0, _ := ret[0].([]*avax.UTXO) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UTXOs indicates an expected call of UTXOs. +func (mr *MockBuilderBackendMockRecorder) UTXOs(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UTXOs", reflect.TypeOf((*MockBuilderBackend)(nil).UTXOs), arg0, arg1) +} diff --git a/wallet/chain/x/mocks/mock_signer_backend.go b/wallet/chain/x/mocks/mock_signer_backend.go new file mode 100644 index 000000000000..0ac6e768bb70 --- /dev/null +++ b/wallet/chain/x/mocks/mock_signer_backend.go @@ -0,0 +1,57 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/ava-labs/avalanchego/wallet/chain/x (interfaces: SignerBackend) +// +// Generated by this command: +// +// mockgen -package=mocks -destination=wallet/chain/x/mocks/mock_signer_backend.go github.com/ava-labs/avalanchego/wallet/chain/x SignerBackend +// + +// Package mocks is a generated GoMock package. +package mocks + +import ( + context "context" + reflect "reflect" + + ids "github.com/ava-labs/avalanchego/ids" + avax "github.com/ava-labs/avalanchego/vms/components/avax" + gomock "go.uber.org/mock/gomock" +) + +// MockSignerBackend is a mock of SignerBackend interface. +type MockSignerBackend struct { + ctrl *gomock.Controller + recorder *MockSignerBackendMockRecorder +} + +// MockSignerBackendMockRecorder is the mock recorder for MockSignerBackend. +type MockSignerBackendMockRecorder struct { + mock *MockSignerBackend +} + +// NewMockSignerBackend creates a new mock instance. +func NewMockSignerBackend(ctrl *gomock.Controller) *MockSignerBackend { + mock := &MockSignerBackend{ctrl: ctrl} + mock.recorder = &MockSignerBackendMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockSignerBackend) EXPECT() *MockSignerBackendMockRecorder { + return m.recorder +} + +// GetUTXO mocks base method. +func (m *MockSignerBackend) GetUTXO(arg0 context.Context, arg1, arg2 ids.ID) (*avax.UTXO, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetUTXO", arg0, arg1, arg2) + ret0, _ := ret[0].(*avax.UTXO) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetUTXO indicates an expected call of GetUTXO. +func (mr *MockSignerBackendMockRecorder) GetUTXO(arg0, arg1, arg2 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUTXO", reflect.TypeOf((*MockSignerBackend)(nil).GetUTXO), arg0, arg1, arg2) +}
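Usage note (not part of the patch itself): a minimal, self-contained sketch of how the new vms/components/fees.Manager introduced above is meant to be driven when pricing a transaction and cumulating its units into a block. The Manager API (NewManager, CalculateFee, CumulateUnits, RemoveUnits) and the Dimensions type are the ones added in vms/components/fees; the numeric unit fees, unit caps, per-tx units, and the standalone main package are illustrative assumptions rather than the values shipped in config.EUpgradeDynamicFeesConfig.

package main

import (
	"fmt"

	commonfees "github.com/ava-labs/avalanchego/vms/components/fees"
)

func main() {
	// Illustrative unit fees and per-block unit caps, one value per fee
	// dimension (Bandwidth, UTXORead, UTXOWrite, Compute).
	unitFees := commonfees.Dimensions{1, 2, 3, 4}
	blockUnitCaps := commonfees.Dimensions{10000, 10000, 10000, 10000}

	m := commonfees.NewManager(unitFees)

	// Units hypothetically consumed by a single tx, in the same order of
	// dimensions as above.
	txUnits := commonfees.Dimensions{300, 100, 150, 50}

	// Price the tx at the configured unit fees.
	fee, err := m.CalculateFee(txUnits)
	if err != nil {
		panic(err)
	}
	fmt.Println("tx fee:", fee)

	// Try to cumulate the tx's units into the block, respecting the caps.
	// CumulateUnits reports the first dimension whose cap would be breached.
	if breached, dim := m.CumulateUnits(txUnits, blockUnitCaps); breached {
		fmt.Println("would breach cap on dimension", dim)
		return
	}

	// A builder can speculatively back the units out again, e.g. if the tx
	// is later dropped from the block being built.
	if err := m.RemoveUnits(txUnits); err != nil {
		panic(err)
	}
}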