From d4acf1051c487c86d629ad2fe5c0ccd15c6cf798 Mon Sep 17 00:00:00 2001
From: Lev Gorodetskiy
Date: Thu, 29 Apr 2021 14:59:18 +0300
Subject: [PATCH] Multiple contracts in operation index, big_map index (#38)

---
 README.md                                     |  34 +-
 src/demo_hic_et_nunc/dipdup.yml               |   3 +-
 .../handlers/on_cancel_swap.py                |   8 +-
 src/demo_hic_et_nunc/handlers/on_collect.py   |   8 +-
 src/demo_hic_et_nunc/handlers/on_mint.py      |  12 +-
 src/demo_hic_et_nunc/handlers/on_swap.py      |   8 +-
 .../types/hen_minter/parameter/cancel_swap.py |   2 +-
 .../types/hen_minter/parameter/collect.py     |   2 +-
 .../types/hen_minter/parameter/mint_objkt.py  |   2 +-
 .../types/hen_minter/parameter/swap.py        |   2 +-
 .../types/hen_minter/storage.py               |   2 +-
 .../types/hen_objkts/parameter/mint.py        |   2 +-
 .../types/hen_objkts/storage.py               |   4 +-
 src/demo_quipuswap/dipdup.yml                 |   6 +-
 .../handlers/on_fa12_divest_liquidity.py      |  12 +-
 .../handlers/on_fa12_invest_liquidity.py      |  14 +-
 .../handlers/on_fa12_tez_to_token.py          |  12 +-
 .../handlers/on_fa12_token_to_tez.py          |  12 +-
 .../handlers/on_fa12_withdraw_profit.py       |   8 +-
 .../handlers/on_fa20_divest_liquidity.py      |  12 +-
 .../handlers/on_fa20_invest_liquidity.py      |  14 +-
 .../handlers/on_fa20_withdraw_profit.py       |   8 +-
 .../handlers/on_fa2_tez_to_token.py           |  12 +-
 .../handlers/on_fa2_token_to_tez.py           |  12 +-
 .../types/fa12_token/parameter/transfer.py    |   2 +-
 .../types/fa12_token/storage.py               |   4 +-
 .../types/fa2_token/parameter/transfer.py     |   6 +-
 src/demo_quipuswap/types/fa2_token/storage.py |   4 +-
 .../quipu_fa12/parameter/divest_liquidity.py  |   2 +-
 .../quipu_fa12/parameter/invest_liquidity.py  |   2 +-
 .../parameter/tez_to_token_payment.py         |   2 +-
 .../parameter/token_to_tez_payment.py         |   2 +-
 .../quipu_fa12/parameter/withdraw_profit.py   |   2 +-
 .../types/quipu_fa12/storage.py               |   6 +-
 .../quipu_fa2/parameter/divest_liquidity.py   |   2 +-
 .../quipu_fa2/parameter/invest_liquidity.py   |   2 +-
 .../parameter/tez_to_token_payment.py         |   2 +-
 .../parameter/token_to_tez_payment.py         |   2 +-
 .../quipu_fa2/parameter/withdraw_profit.py    |   2 +-
 src/demo_quipuswap/types/quipu_fa2/storage.py |   6 +-
 src/demo_registrydao/dipdup.yml               |   3 +-
 src/demo_registrydao/handlers/on_propose.py   |   8 +-
 .../types/registry/parameter/propose.py       |   2 +-
 .../types/registry/storage.py                 |  84 ++--
 src/demo_tezos_domains/dipdup.yml             |   5 +-
 .../handlers/on_admin_update.py               |   8 +-
 src/demo_tezos_domains/handlers/on_execute.py |   8 +-
 .../handlers/on_storage_diff.py               |   6 +-
 .../name_registry/parameter/admin_update.py   |   2 +-
 .../types/name_registry/parameter/execute.py  |   2 +-
 .../types/name_registry/storage.py            |   2 +-
 src/demo_tezos_domains_big_map/__init__.py    |   0
 .../dipdup-docker.yml                         |  12 +
 src/demo_tezos_domains_big_map/dipdup.yml     |  35 ++
 .../handlers/__init__.py                      |   0
 .../handlers/on_rollback.py                   |  15 +
 .../handlers/on_update.py                     |  53 +++
 src/demo_tezos_domains_big_map/models.py      |  20 +
 .../types/__init__.py                         |   0
 .../types/name_registry/__init__.py           |   0
 .../types/name_registry/big_map/__init__.py   |   0
 .../big_map/store_expiry_map_key.py           |  10 +
 .../big_map/store_expiry_map_value.py         |  10 +
 .../big_map/store_records_key.py              |  10 +
 .../big_map/store_records_value.py            |  18 +
 src/demo_tzcolors/dipdup.yml                  |   3 +-
 src/demo_tzcolors/handlers/on_bid.py          |   8 +-
 .../handlers/on_create_auction.py             |   8 +-
 src/demo_tzcolors/handlers/on_withdraw.py     |   8 +-
 .../types/tzcolors_auction/parameter/bid.py   |   2 +-
 .../parameter/create_auction.py               |   2 +-
 .../tzcolors_auction/parameter/withdraw.py    |   2 +-
 .../types/tzcolors_auction/storage.py         |   2 +-
 src/dipdup/cli.py                             |   4 +-
 src/dipdup/codegen.py                         | 148 +++++--
 src/dipdup/config.py                          | 298 ++++++++-----
 src/dipdup/datasources/tzkt/cache.py          | 169 ++++++--
 src/dipdup/datasources/tzkt/datasource.py     | 390 ++++++++++++++----
 src/dipdup/dipdup.py                          |  16 +-
 src/dipdup/models.py                          |  41 +-
 src/dipdup/templates/big_map_handler.py.j2    |  19 +
 src/dipdup/templates/handler.py.j2            |  16 -
 src/dipdup/templates/operation_handler.py.j2  |  16 +
 src/dipdup/utils.py                           |   6 +-
 tests/integration_tests/hic_et_nunc.yml       |   3 +-
 tests/integration_tests/quipuswap.yml         |   6 +-
 tests/integration_tests/test_codegen.py       |   1 -
 tests/integration_tests/test_demos.py         |  22 +
 tests/integration_tests/tezos_domains.yml     |  40 ++
 .../tezos_domains_big_map.yml                 |  36 ++
 tests/integration_tests/tzcolors.yml          |   3 +-
 tests/test_dipdup/dipdup.yml                  |   3 +-
 .../test_datasources/test_tzkt/test_cache.py  |  15 +-
 .../test_tzkt/test_datasource.py              | 207 +---------
 tests/test_dipdup/test_models.py              |  90 ++++
 95 files changed, 1489 insertions(+), 667 deletions(-)
 create mode 100644 src/demo_tezos_domains_big_map/__init__.py
 create mode 100644 src/demo_tezos_domains_big_map/dipdup-docker.yml
 create mode 100644 src/demo_tezos_domains_big_map/dipdup.yml
 create mode 100644 src/demo_tezos_domains_big_map/handlers/__init__.py
 create mode 100644 src/demo_tezos_domains_big_map/handlers/on_rollback.py
 create mode 100644 src/demo_tezos_domains_big_map/handlers/on_update.py
 create mode 100644 src/demo_tezos_domains_big_map/models.py
 create mode 100644 src/demo_tezos_domains_big_map/types/__init__.py
 create mode 100644 src/demo_tezos_domains_big_map/types/name_registry/__init__.py
 create mode 100644 src/demo_tezos_domains_big_map/types/name_registry/big_map/__init__.py
 create mode 100644 src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_key.py
 create mode 100644 src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_value.py
 create mode 100644 src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_key.py
 create mode 100644 src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_value.py
 create mode 100644 src/dipdup/templates/big_map_handler.py.j2
 delete mode 100644 src/dipdup/templates/handler.py.j2
 create mode 100644 src/dipdup/templates/operation_handler.py.j2
 create mode 100644 tests/integration_tests/tezos_domains.yml
 create mode 100644 tests/integration_tests/tezos_domains_big_map.yml
 create mode 100644 tests/test_dipdup/test_models.py

diff --git a/README.md b/README.md
index 5de18b722..c73d1595e 100644
--- a/README.md
+++ b/README.md
@@ -55,7 +55,8 @@ indexes:
   hen_mainnet:
     kind: operation
     datasource: tzkt_mainnet
-    contract: HEN_minter
+    contracts:
+      - HEN_minter
     handlers:
       - callback: on_mint
         pattern:
@@ -125,19 +126,21 @@ class Token(Model):
 Now take a look at the `handlers` module generated by the `init` command. When an operation group matching the `pattern` block of a handler in the config arrives, that handler's callback is fired.
This example will simply save minted Hic Et Nunc tokens and their owners to the database: ```python -from demo_hic_et_nunc.models import Holder, Token -from demo_hic_et_nunc.types.hen_minter.parameter.mint_objkt import MintOBJKT -from demo_hic_et_nunc.types.hen_objkts.parameter.mint import Mint -from dipdup.models import HandlerContext, OperationContext +import demo_hic_et_nunc.models as models +from demo_hic_et_nunc.types.hen_minter.parameter.mint_objkt import MintOBJKTParameter +from demo_hic_et_nunc.types.hen_minter.storage import HenMinterStorage +from demo_hic_et_nunc.types.hen_objkts.parameter.mint import MintParameter +from demo_hic_et_nunc.types.hen_objkts.storage import HenObjktsStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_mint( - ctx: HandlerContext, - mint_objkt: OperationContext[MintOBJKT], - mint: OperationContext[Mint], + ctx: OperationHandlerContext, + mint_objkt: OperationContext[MintOBJKTParameter, HenMinterStorage], + mint: OperationContext[MintParameter, HenObjktsStorage], ) -> None: - holder, _ = await Holder.get_or_create(address=mint.parameter.address) - token = Token( + holder, _ = await models.Holder.get_or_create(address=mint.parameter.address) + token = models.Token( id=mint.parameter.token_id, creator=holder, supply=mint.parameter.amount, @@ -193,19 +196,20 @@ templates: trades: kind: operation datasource: tzkt_staging - contract: < dex > + contracts: + - handlers: - callback: on_fa12_token_to_tez pattern: - - destination: < dex > + - destination: entrypoint: tokenToTezPayment - - destination: < token > + - destination: entrypoint: transfer - callback: on_fa20_tez_to_token pattern: - - destination: < dex > + - destination: entrypoint: tezToTokenPayment - - destination: < token > + - destination: entrypoint: transfer indexes: diff --git a/src/demo_hic_et_nunc/dipdup.yml b/src/demo_hic_et_nunc/dipdup.yml index 044fc2c0c..1b3b1464b 100644 --- a/src/demo_hic_et_nunc/dipdup.yml +++ b/src/demo_hic_et_nunc/dipdup.yml @@ -22,7 +22,8 @@ indexes: hen_mainnet: kind: operation datasource: tzkt_mainnet - contract: HEN_minter + contracts: + - HEN_minter handlers: - callback: on_mint pattern: diff --git a/src/demo_hic_et_nunc/handlers/on_cancel_swap.py b/src/demo_hic_et_nunc/handlers/on_cancel_swap.py index e86815992..c7836ae54 100644 --- a/src/demo_hic_et_nunc/handlers/on_cancel_swap.py +++ b/src/demo_hic_et_nunc/handlers/on_cancel_swap.py @@ -1,11 +1,11 @@ import demo_hic_et_nunc.models as models -from demo_hic_et_nunc.types.hen_minter.parameter.cancel_swap import CancelSwap as CancelSwapParameter -from demo_hic_et_nunc.types.hen_minter.storage import Storage as HenMinterStorage -from dipdup.models import HandlerContext, OperationContext +from demo_hic_et_nunc.types.hen_minter.parameter.cancel_swap import CancelSwapParameter +from demo_hic_et_nunc.types.hen_minter.storage import HenMinterStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_cancel_swap( - ctx: HandlerContext, + ctx: OperationHandlerContext, cancel_swap: OperationContext[CancelSwapParameter, HenMinterStorage], ) -> None: swap = await models.Swap.filter(id=int(cancel_swap.parameter.__root__)).get() diff --git a/src/demo_hic_et_nunc/handlers/on_collect.py b/src/demo_hic_et_nunc/handlers/on_collect.py index bfc287bb7..288ecb1dc 100644 --- a/src/demo_hic_et_nunc/handlers/on_collect.py +++ b/src/demo_hic_et_nunc/handlers/on_collect.py @@ -1,11 +1,11 @@ import demo_hic_et_nunc.models as models -from 
demo_hic_et_nunc.types.hen_minter.parameter.collect import Collect as CollectParameter -from demo_hic_et_nunc.types.hen_minter.storage import Storage as HenMinterStorage -from dipdup.models import HandlerContext, OperationContext +from demo_hic_et_nunc.types.hen_minter.parameter.collect import CollectParameter +from demo_hic_et_nunc.types.hen_minter.storage import HenMinterStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_collect( - ctx: HandlerContext, + ctx: OperationHandlerContext, collect: OperationContext[CollectParameter, HenMinterStorage], ) -> None: swap = await models.Swap.filter(id=collect.parameter.swap_id).get() diff --git a/src/demo_hic_et_nunc/handlers/on_mint.py b/src/demo_hic_et_nunc/handlers/on_mint.py index 993d23768..e7409091c 100644 --- a/src/demo_hic_et_nunc/handlers/on_mint.py +++ b/src/demo_hic_et_nunc/handlers/on_mint.py @@ -1,13 +1,13 @@ import demo_hic_et_nunc.models as models -from demo_hic_et_nunc.types.hen_minter.parameter.mint_objkt import MintOBJKT as MintOBJKTParameter -from demo_hic_et_nunc.types.hen_minter.storage import Storage as HenMinterStorage -from demo_hic_et_nunc.types.hen_objkts.parameter.mint import Mint as MintParameter -from demo_hic_et_nunc.types.hen_objkts.storage import Storage as HenObjktsStorage -from dipdup.models import HandlerContext, OperationContext +from demo_hic_et_nunc.types.hen_minter.parameter.mint_objkt import MintOBJKTParameter +from demo_hic_et_nunc.types.hen_minter.storage import HenMinterStorage +from demo_hic_et_nunc.types.hen_objkts.parameter.mint import MintParameter +from demo_hic_et_nunc.types.hen_objkts.storage import HenObjktsStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_mint( - ctx: HandlerContext, + ctx: OperationHandlerContext, mint_objkt: OperationContext[MintOBJKTParameter, HenMinterStorage], mint: OperationContext[MintParameter, HenObjktsStorage], ) -> None: diff --git a/src/demo_hic_et_nunc/handlers/on_swap.py b/src/demo_hic_et_nunc/handlers/on_swap.py index 47f530843..7fac41a14 100644 --- a/src/demo_hic_et_nunc/handlers/on_swap.py +++ b/src/demo_hic_et_nunc/handlers/on_swap.py @@ -1,11 +1,11 @@ import demo_hic_et_nunc.models as models -from demo_hic_et_nunc.types.hen_minter.parameter.swap import Swap as SwapParameter -from demo_hic_et_nunc.types.hen_minter.storage import Storage as HenMinterStorage -from dipdup.models import HandlerContext, OperationContext +from demo_hic_et_nunc.types.hen_minter.parameter.swap import SwapParameter +from demo_hic_et_nunc.types.hen_minter.storage import HenMinterStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_swap( - ctx: HandlerContext, + ctx: OperationHandlerContext, swap: OperationContext[SwapParameter, HenMinterStorage], ) -> None: holder, _ = await models.Holder.get_or_create(address=swap.data.sender_address) diff --git a/src/demo_hic_et_nunc/types/hen_minter/parameter/cancel_swap.py b/src/demo_hic_et_nunc/types/hen_minter/parameter/cancel_swap.py index 9a1f3ded3..42a9fbe86 100644 --- a/src/demo_hic_et_nunc/types/hen_minter/parameter/cancel_swap.py +++ b/src/demo_hic_et_nunc/types/hen_minter/parameter/cancel_swap.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class CancelSwap(BaseModel): +class CancelSwapParameter(BaseModel): __root__: str diff --git a/src/demo_hic_et_nunc/types/hen_minter/parameter/collect.py b/src/demo_hic_et_nunc/types/hen_minter/parameter/collect.py index f22c74b5d..ba3c32f0c 100644 --- 
a/src/demo_hic_et_nunc/types/hen_minter/parameter/collect.py +++ b/src/demo_hic_et_nunc/types/hen_minter/parameter/collect.py @@ -6,6 +6,6 @@ from pydantic import BaseModel -class Collect(BaseModel): +class CollectParameter(BaseModel): objkt_amount: str swap_id: str diff --git a/src/demo_hic_et_nunc/types/hen_minter/parameter/mint_objkt.py b/src/demo_hic_et_nunc/types/hen_minter/parameter/mint_objkt.py index 44acb6685..f136ac1e2 100644 --- a/src/demo_hic_et_nunc/types/hen_minter/parameter/mint_objkt.py +++ b/src/demo_hic_et_nunc/types/hen_minter/parameter/mint_objkt.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -class MintOBJKT(BaseModel): +class MintOBJKTParameter(BaseModel): address: str amount: str metadata: str diff --git a/src/demo_hic_et_nunc/types/hen_minter/parameter/swap.py b/src/demo_hic_et_nunc/types/hen_minter/parameter/swap.py index 2117e21e0..2931e67ac 100644 --- a/src/demo_hic_et_nunc/types/hen_minter/parameter/swap.py +++ b/src/demo_hic_et_nunc/types/hen_minter/parameter/swap.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -class Swap(BaseModel): +class SwapParameter(BaseModel): objkt_amount: str objkt_id: str xtz_per_objkt: str diff --git a/src/demo_hic_et_nunc/types/hen_minter/storage.py b/src/demo_hic_et_nunc/types/hen_minter/storage.py index 79eb73e44..58e86fa1f 100644 --- a/src/demo_hic_et_nunc/types/hen_minter/storage.py +++ b/src/demo_hic_et_nunc/types/hen_minter/storage.py @@ -20,7 +20,7 @@ class Swaps(BaseModel): xtz_per_objkt: str -class Storage(BaseModel): +class HenMinterStorage(BaseModel): curate: str genesis: str hdao: str diff --git a/src/demo_hic_et_nunc/types/hen_objkts/parameter/mint.py b/src/demo_hic_et_nunc/types/hen_objkts/parameter/mint.py index cee855bf1..d1b1907ee 100644 --- a/src/demo_hic_et_nunc/types/hen_objkts/parameter/mint.py +++ b/src/demo_hic_et_nunc/types/hen_objkts/parameter/mint.py @@ -8,7 +8,7 @@ from pydantic import BaseModel -class Mint(BaseModel): +class MintParameter(BaseModel): address: str amount: str token_id: str diff --git a/src/demo_hic_et_nunc/types/hen_objkts/storage.py b/src/demo_hic_et_nunc/types/hen_objkts/storage.py index f8734c504..d4e3002d2 100644 --- a/src/demo_hic_et_nunc/types/hen_objkts/storage.py +++ b/src/demo_hic_et_nunc/types/hen_objkts/storage.py @@ -19,8 +19,8 @@ class LedgerItem(BaseModel): class Key1(BaseModel): - owner: str operator: str + owner: str token_id: str @@ -34,7 +34,7 @@ class TokenMetadata(BaseModel): token_info: Dict[str, str] -class Storage(BaseModel): +class HenObjktsStorage(BaseModel): administrator: str all_tokens: str ledger: List[LedgerItem] diff --git a/src/demo_quipuswap/dipdup.yml b/src/demo_quipuswap/dipdup.yml index 4ea0cda9a..f8e398780 100644 --- a/src/demo_quipuswap/dipdup.yml +++ b/src/demo_quipuswap/dipdup.yml @@ -34,7 +34,8 @@ templates: quipuswap_fa12: kind: operation datasource: tzkt_staging_mainnet - contract: + contracts: + - handlers: - callback: on_fa12_token_to_tez pattern: @@ -68,7 +69,8 @@ templates: quipuswap_fa2: kind: operation datasource: tzkt_staging_mainnet - contract: + contracts: + - handlers: - callback: on_fa2_token_to_tez pattern: diff --git a/src/demo_quipuswap/handlers/on_fa12_divest_liquidity.py b/src/demo_quipuswap/handlers/on_fa12_divest_liquidity.py index 0c8421a5e..730426cdd 100644 --- a/src/demo_quipuswap/handlers/on_fa12_divest_liquidity.py +++ b/src/demo_quipuswap/handlers/on_fa12_divest_liquidity.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa12_token.parameter.transfer 
import Transfer as TransferParameter -from demo_quipuswap.types.fa12_token.storage import Storage as Fa12TokenStorage -from demo_quipuswap.types.quipu_fa12.parameter.divest_liquidity import DivestLiquidity as DivestLiquidityParameter -from demo_quipuswap.types.quipu_fa12.storage import Storage as QuipuFa12Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa12_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa12_token.storage import Fa12TokenStorage +from demo_quipuswap.types.quipu_fa12.parameter.divest_liquidity import DivestLiquidityParameter +from demo_quipuswap.types.quipu_fa12.storage import QuipuFa12Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa12_divest_liquidity( - ctx: HandlerContext, + ctx: OperationHandlerContext, divest_liquidity: OperationContext[DivestLiquidityParameter, QuipuFa12Storage], transfer: OperationContext[TransferParameter, Fa12TokenStorage], ) -> None: diff --git a/src/demo_quipuswap/handlers/on_fa12_invest_liquidity.py b/src/demo_quipuswap/handlers/on_fa12_invest_liquidity.py index 9df61b486..c0b5c95f9 100644 --- a/src/demo_quipuswap/handlers/on_fa12_invest_liquidity.py +++ b/src/demo_quipuswap/handlers/on_fa12_invest_liquidity.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa12_token.parameter.transfer import Transfer as TransferParameter -from demo_quipuswap.types.fa12_token.storage import Storage as Fa12TokenStorage -from demo_quipuswap.types.quipu_fa12.parameter.invest_liquidity import InvestLiquidity as InvestLiquidityParameter -from demo_quipuswap.types.quipu_fa12.storage import Storage as QuipuFa12Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa12_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa12_token.storage import Fa12TokenStorage +from demo_quipuswap.types.quipu_fa12.parameter.invest_liquidity import InvestLiquidityParameter +from demo_quipuswap.types.quipu_fa12.storage import QuipuFa12Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa12_invest_liquidity( - ctx: HandlerContext, + ctx: OperationHandlerContext, invest_liquidity: OperationContext[InvestLiquidityParameter, QuipuFa12Storage], transfer: OperationContext[TransferParameter, Fa12TokenStorage], ) -> None: @@ -26,7 +26,7 @@ async def on_fa12_invest_liquidity( tez_qty = Decimal(invest_liquidity.data.amount) / (10 ** 6) token_qty = Decimal(transfer.parameter.value) / (10 ** decimals) - new_shares_qty = int(storage.storage.ledger[trader].balance) + int(storage.storage.ledger[trader].frozen_balance) + new_shares_qty = int(storage.storage.ledger[trader].balance) + int(storage.storage.ledger[trader].frozen_balance) # type: ignore price = (Decimal(storage.storage.tez_pool) / (10 ** 6)) / (Decimal(storage.storage.token_pool) / (10 ** decimals)) value = tez_qty + price * token_qty diff --git a/src/demo_quipuswap/handlers/on_fa12_tez_to_token.py b/src/demo_quipuswap/handlers/on_fa12_tez_to_token.py index faa7ae16f..86ed3b395 100644 --- a/src/demo_quipuswap/handlers/on_fa12_tez_to_token.py +++ b/src/demo_quipuswap/handlers/on_fa12_tez_to_token.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa12_token.parameter.transfer import Transfer as TransferParameter -from demo_quipuswap.types.fa12_token.storage import Storage as Fa12TokenStorage 
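Every regenerated handler in this patch follows the same shape: `OperationContext` is now parameterized by both the entrypoint parameter model and the contract storage model, and the first argument becomes an `OperationHandlerContext`. A minimal sketch of a handler under the new signature (the callback name and body are illustrative, not part of this patch; the imported types are the generated ones shown in these diffs):

```python
from decimal import Decimal

from demo_quipuswap.types.fa12_token.parameter.transfer import TransferParameter
from demo_quipuswap.types.fa12_token.storage import Fa12TokenStorage
from dipdup.models import OperationContext, OperationHandlerContext


async def on_transfer(  # hypothetical callback, for illustration only
    ctx: OperationHandlerContext,
    transfer: OperationContext[TransferParameter, Fa12TokenStorage],
) -> None:
    # `.parameter` and `.storage` are both typed pydantic models now,
    # so attribute access is checked statically instead of being dict lookups.
    sender = transfer.parameter.from_          # `from` is aliased to `from_`
    value = Decimal(transfer.parameter.value)  # token amounts arrive as strings
```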
-from demo_quipuswap.types.quipu_fa12.parameter.tez_to_token_payment import TezToTokenPayment as TezToTokenPaymentParameter -from demo_quipuswap.types.quipu_fa12.storage import Storage as QuipuFa12Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa12_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa12_token.storage import Fa12TokenStorage +from demo_quipuswap.types.quipu_fa12.parameter.tez_to_token_payment import TezToTokenPaymentParameter +from demo_quipuswap.types.quipu_fa12.storage import QuipuFa12Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa12_tez_to_token( - ctx: HandlerContext, + ctx: OperationHandlerContext, tez_to_token_payment: OperationContext[TezToTokenPaymentParameter, QuipuFa12Storage], transfer: OperationContext[TransferParameter, Fa12TokenStorage], ) -> None: diff --git a/src/demo_quipuswap/handlers/on_fa12_token_to_tez.py b/src/demo_quipuswap/handlers/on_fa12_token_to_tez.py index aa2b0af03..ff92ad1d8 100644 --- a/src/demo_quipuswap/handlers/on_fa12_token_to_tez.py +++ b/src/demo_quipuswap/handlers/on_fa12_token_to_tez.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa12_token.parameter.transfer import Transfer as TransferParameter -from demo_quipuswap.types.fa12_token.storage import Storage as Fa12TokenStorage -from demo_quipuswap.types.quipu_fa12.parameter.token_to_tez_payment import TokenToTezPayment as TokenToTezPaymentParameter -from demo_quipuswap.types.quipu_fa12.storage import Storage as QuipuFa12Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa12_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa12_token.storage import Fa12TokenStorage +from demo_quipuswap.types.quipu_fa12.parameter.token_to_tez_payment import TokenToTezPaymentParameter +from demo_quipuswap.types.quipu_fa12.storage import QuipuFa12Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa12_token_to_tez( - ctx: HandlerContext, + ctx: OperationHandlerContext, token_to_tez_payment: OperationContext[TokenToTezPaymentParameter, QuipuFa12Storage], transfer: OperationContext[TransferParameter, Fa12TokenStorage], ) -> None: diff --git a/src/demo_quipuswap/handlers/on_fa12_withdraw_profit.py b/src/demo_quipuswap/handlers/on_fa12_withdraw_profit.py index 5efd2dd77..417f2be28 100644 --- a/src/demo_quipuswap/handlers/on_fa12_withdraw_profit.py +++ b/src/demo_quipuswap/handlers/on_fa12_withdraw_profit.py @@ -1,13 +1,13 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.quipu_fa12.parameter.withdraw_profit import WithdrawProfit as WithdrawProfitParameter -from demo_quipuswap.types.quipu_fa12.storage import Storage as QuipuFa12Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.quipu_fa12.parameter.withdraw_profit import WithdrawProfitParameter +from demo_quipuswap.types.quipu_fa12.storage import QuipuFa12Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa12_withdraw_profit( - ctx: HandlerContext, + ctx: OperationHandlerContext, withdraw_profit: OperationContext[WithdrawProfitParameter, QuipuFa12Storage], ) -> None: diff --git a/src/demo_quipuswap/handlers/on_fa20_divest_liquidity.py b/src/demo_quipuswap/handlers/on_fa20_divest_liquidity.py index e2e0b9e4d..0adb266cf 100644 --- 
a/src/demo_quipuswap/handlers/on_fa20_divest_liquidity.py +++ b/src/demo_quipuswap/handlers/on_fa20_divest_liquidity.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa2_token.parameter.transfer import Transfer as TransferParameter -from demo_quipuswap.types.fa2_token.storage import Storage as Fa2TokenStorage -from demo_quipuswap.types.quipu_fa2.parameter.divest_liquidity import DivestLiquidity as DivestLiquidityParameter -from demo_quipuswap.types.quipu_fa2.storage import Storage as QuipuFa2Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa2_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa2_token.storage import Fa2TokenStorage +from demo_quipuswap.types.quipu_fa2.parameter.divest_liquidity import DivestLiquidityParameter +from demo_quipuswap.types.quipu_fa2.storage import QuipuFa2Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa20_divest_liquidity( - ctx: HandlerContext, + ctx: OperationHandlerContext, divest_liquidity: OperationContext[DivestLiquidityParameter, QuipuFa2Storage], transfer: OperationContext[TransferParameter, Fa2TokenStorage], ) -> None: diff --git a/src/demo_quipuswap/handlers/on_fa20_invest_liquidity.py b/src/demo_quipuswap/handlers/on_fa20_invest_liquidity.py index a8bdd40c1..ef3232f1c 100644 --- a/src/demo_quipuswap/handlers/on_fa20_invest_liquidity.py +++ b/src/demo_quipuswap/handlers/on_fa20_invest_liquidity.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa2_token.parameter.transfer import Transfer as TransferParameter -from demo_quipuswap.types.fa2_token.storage import Storage as Fa2TokenStorage -from demo_quipuswap.types.quipu_fa2.parameter.invest_liquidity import InvestLiquidity as InvestLiquidityParameter -from demo_quipuswap.types.quipu_fa2.storage import Storage as QuipuFa2Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa2_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa2_token.storage import Fa2TokenStorage +from demo_quipuswap.types.quipu_fa2.parameter.invest_liquidity import InvestLiquidityParameter +from demo_quipuswap.types.quipu_fa2.storage import QuipuFa2Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa20_invest_liquidity( - ctx: HandlerContext, + ctx: OperationHandlerContext, invest_liquidity: OperationContext[InvestLiquidityParameter, QuipuFa2Storage], transfer: OperationContext[TransferParameter, Fa2TokenStorage], ) -> None: @@ -27,7 +27,7 @@ async def on_fa20_invest_liquidity( tez_qty = Decimal(invest_liquidity.data.amount) / (10 ** 6) token_qty = Decimal(transfer.parameter.__root__[0].txs[0].amount) / (10 ** decimals) - new_shares_qty = int(storage.storage.ledger[trader].balance) + int(storage.storage.ledger[trader].frozen_balance) + new_shares_qty = int(storage.storage.ledger[trader].balance) + int(storage.storage.ledger[trader].frozen_balance) # type: ignore price = (Decimal(storage.storage.tez_pool) / (10 ** 6)) / (Decimal(storage.storage.token_pool) / (10 ** decimals)) value = tez_qty + price * token_qty diff --git a/src/demo_quipuswap/handlers/on_fa20_withdraw_profit.py b/src/demo_quipuswap/handlers/on_fa20_withdraw_profit.py index 10dd58c63..2f2aa0151 100644 --- a/src/demo_quipuswap/handlers/on_fa20_withdraw_profit.py +++ 
b/src/demo_quipuswap/handlers/on_fa20_withdraw_profit.py @@ -1,13 +1,13 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.quipu_fa2.parameter.withdraw_profit import WithdrawProfit as WithdrawProfitParameter -from demo_quipuswap.types.quipu_fa2.storage import Storage as QuipuFa2Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.quipu_fa2.parameter.withdraw_profit import WithdrawProfitParameter +from demo_quipuswap.types.quipu_fa2.storage import QuipuFa2Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa20_withdraw_profit( - ctx: HandlerContext, + ctx: OperationHandlerContext, withdraw_profit: OperationContext[WithdrawProfitParameter, QuipuFa2Storage], ) -> None: diff --git a/src/demo_quipuswap/handlers/on_fa2_tez_to_token.py b/src/demo_quipuswap/handlers/on_fa2_tez_to_token.py index 3ea8f5541..e6aa6d8bd 100644 --- a/src/demo_quipuswap/handlers/on_fa2_tez_to_token.py +++ b/src/demo_quipuswap/handlers/on_fa2_tez_to_token.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa2_token.parameter.transfer import Transfer as TransferParameter -from demo_quipuswap.types.fa2_token.storage import Storage as Fa2TokenStorage -from demo_quipuswap.types.quipu_fa2.parameter.tez_to_token_payment import TezToTokenPayment as TezToTokenPaymentParameter -from demo_quipuswap.types.quipu_fa2.storage import Storage as QuipuFa2Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa2_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa2_token.storage import Fa2TokenStorage +from demo_quipuswap.types.quipu_fa2.parameter.tez_to_token_payment import TezToTokenPaymentParameter +from demo_quipuswap.types.quipu_fa2.storage import QuipuFa2Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa2_tez_to_token( - ctx: HandlerContext, + ctx: OperationHandlerContext, tez_to_token_payment: OperationContext[TezToTokenPaymentParameter, QuipuFa2Storage], transfer: OperationContext[TransferParameter, Fa2TokenStorage], ) -> None: diff --git a/src/demo_quipuswap/handlers/on_fa2_token_to_tez.py b/src/demo_quipuswap/handlers/on_fa2_token_to_tez.py index 0a66429ae..ac691540c 100644 --- a/src/demo_quipuswap/handlers/on_fa2_token_to_tez.py +++ b/src/demo_quipuswap/handlers/on_fa2_token_to_tez.py @@ -1,15 +1,15 @@ from decimal import Decimal import demo_quipuswap.models as models -from demo_quipuswap.types.fa2_token.parameter.transfer import Transfer as TransferParameter -from demo_quipuswap.types.fa2_token.storage import Storage as Fa2TokenStorage -from demo_quipuswap.types.quipu_fa2.parameter.token_to_tez_payment import TokenToTezPayment as TokenToTezPaymentParameter -from demo_quipuswap.types.quipu_fa2.storage import Storage as QuipuFa2Storage -from dipdup.models import HandlerContext, OperationContext +from demo_quipuswap.types.fa2_token.parameter.transfer import TransferParameter +from demo_quipuswap.types.fa2_token.storage import Fa2TokenStorage +from demo_quipuswap.types.quipu_fa2.parameter.token_to_tez_payment import TokenToTezPaymentParameter +from demo_quipuswap.types.quipu_fa2.storage import QuipuFa2Storage +from dipdup.models import OperationContext, OperationHandlerContext async def on_fa2_token_to_tez( - ctx: HandlerContext, + ctx: OperationHandlerContext, token_to_tez_payment: OperationContext[TokenToTezPaymentParameter, 
QuipuFa2Storage], transfer: OperationContext[TransferParameter, Fa2TokenStorage], ) -> None: diff --git a/src/demo_quipuswap/types/fa12_token/parameter/transfer.py b/src/demo_quipuswap/types/fa12_token/parameter/transfer.py index d568b46e7..0007deb29 100644 --- a/src/demo_quipuswap/types/fa12_token/parameter/transfer.py +++ b/src/demo_quipuswap/types/fa12_token/parameter/transfer.py @@ -6,7 +6,7 @@ from pydantic import BaseModel, Field -class Transfer(BaseModel): +class TransferParameter(BaseModel): from_: str = Field(..., alias='from') to: str value: str diff --git a/src/demo_quipuswap/types/fa12_token/storage.py b/src/demo_quipuswap/types/fa12_token/storage.py index 1131830b3..4257f61d5 100644 --- a/src/demo_quipuswap/types/fa12_token/storage.py +++ b/src/demo_quipuswap/types/fa12_token/storage.py @@ -14,11 +14,11 @@ class Balances(BaseModel): class TokenMetadata(BaseModel): - nat: str map: Dict[str, str] + nat: str -class Storage(BaseModel): +class Fa12TokenStorage(BaseModel): administrator: str balances: Dict[str, Balances] debtCeiling: str diff --git a/src/demo_quipuswap/types/fa2_token/parameter/transfer.py b/src/demo_quipuswap/types/fa2_token/parameter/transfer.py index f85d4e5ec..f634623fd 100644 --- a/src/demo_quipuswap/types/fa2_token/parameter/transfer.py +++ b/src/demo_quipuswap/types/fa2_token/parameter/transfer.py @@ -14,10 +14,10 @@ class Tx(BaseModel): amount: str -class TransferItem(BaseModel): +class TransferParameterItem(BaseModel): from_: str txs: List[Tx] -class Transfer(BaseModel): - __root__: List[TransferItem] +class TransferParameter(BaseModel): + __root__: List[TransferParameterItem] diff --git a/src/demo_quipuswap/types/fa2_token/storage.py b/src/demo_quipuswap/types/fa2_token/storage.py index f8734c504..f772cddfb 100644 --- a/src/demo_quipuswap/types/fa2_token/storage.py +++ b/src/demo_quipuswap/types/fa2_token/storage.py @@ -19,8 +19,8 @@ class LedgerItem(BaseModel): class Key1(BaseModel): - owner: str operator: str + owner: str token_id: str @@ -34,7 +34,7 @@ class TokenMetadata(BaseModel): token_info: Dict[str, str] -class Storage(BaseModel): +class Fa2TokenStorage(BaseModel): administrator: str all_tokens: str ledger: List[LedgerItem] diff --git a/src/demo_quipuswap/types/quipu_fa12/parameter/divest_liquidity.py b/src/demo_quipuswap/types/quipu_fa12/parameter/divest_liquidity.py index 7eee7e7af..b361ef306 100644 --- a/src/demo_quipuswap/types/quipu_fa12/parameter/divest_liquidity.py +++ b/src/demo_quipuswap/types/quipu_fa12/parameter/divest_liquidity.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -class DivestLiquidity(BaseModel): +class DivestLiquidityParameter(BaseModel): min_tez: str min_tokens: str shares: str diff --git a/src/demo_quipuswap/types/quipu_fa12/parameter/invest_liquidity.py b/src/demo_quipuswap/types/quipu_fa12/parameter/invest_liquidity.py index e19262fac..663a3cbf9 100644 --- a/src/demo_quipuswap/types/quipu_fa12/parameter/invest_liquidity.py +++ b/src/demo_quipuswap/types/quipu_fa12/parameter/invest_liquidity.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class InvestLiquidity(BaseModel): +class InvestLiquidityParameter(BaseModel): __root__: str diff --git a/src/demo_quipuswap/types/quipu_fa12/parameter/tez_to_token_payment.py b/src/demo_quipuswap/types/quipu_fa12/parameter/tez_to_token_payment.py index 07bf21a19..2b8553432 100644 --- a/src/demo_quipuswap/types/quipu_fa12/parameter/tez_to_token_payment.py +++ b/src/demo_quipuswap/types/quipu_fa12/parameter/tez_to_token_payment.py @@ -6,6 +6,6 @@ from pydantic import BaseModel 
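The renames applied throughout these generated modules follow a single convention: every parameter class gains a `Parameter` suffix and every storage class is prefixed with the contract typename, which makes the names unique across the package. A sketch of the effect at an import site (valid after this patch):

```python
# Before this patch, generated modules exported generic names, so every
# import site needed an alias:
#
#     from demo_quipuswap.types.quipu_fa12.storage import Storage as QuipuFa12Storage
#
# After it, the generated names are already unique and the aliases disappear:
from demo_quipuswap.types.quipu_fa12.parameter.tez_to_token_payment import TezToTokenPaymentParameter
from demo_quipuswap.types.quipu_fa12.storage import QuipuFa12Storage
```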
-class TezToTokenPayment(BaseModel): +class TezToTokenPaymentParameter(BaseModel): min_out: str receiver: str diff --git a/src/demo_quipuswap/types/quipu_fa12/parameter/token_to_tez_payment.py b/src/demo_quipuswap/types/quipu_fa12/parameter/token_to_tez_payment.py index d5ee080b5..7ac0f6411 100644 --- a/src/demo_quipuswap/types/quipu_fa12/parameter/token_to_tez_payment.py +++ b/src/demo_quipuswap/types/quipu_fa12/parameter/token_to_tez_payment.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -class TokenToTezPayment(BaseModel): +class TokenToTezPaymentParameter(BaseModel): amount: str min_out: str receiver: str diff --git a/src/demo_quipuswap/types/quipu_fa12/parameter/withdraw_profit.py b/src/demo_quipuswap/types/quipu_fa12/parameter/withdraw_profit.py index b2c1b9146..46706b43c 100644 --- a/src/demo_quipuswap/types/quipu_fa12/parameter/withdraw_profit.py +++ b/src/demo_quipuswap/types/quipu_fa12/parameter/withdraw_profit.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class WithdrawProfit(BaseModel): +class WithdrawProfitParameter(BaseModel): __root__: str diff --git a/src/demo_quipuswap/types/quipu_fa12/storage.py b/src/demo_quipuswap/types/quipu_fa12/storage.py index c231d9f1e..53ae43766 100644 --- a/src/demo_quipuswap/types/quipu_fa12/storage.py +++ b/src/demo_quipuswap/types/quipu_fa12/storage.py @@ -26,7 +26,7 @@ class Voters(BaseModel): vote: str -class Storage1(BaseModel): +class Storage(BaseModel): current_candidate: Optional[str] current_delegated: Optional[str] invariant: str @@ -51,8 +51,8 @@ class Storage1(BaseModel): votes: Dict[str, str] -class Storage(BaseModel): +class QuipuFa12Storage(BaseModel): dex_lambdas: Dict[str, str] metadata: Dict[str, str] - storage: Storage1 + storage: Storage token_lambdas: Dict[str, str] diff --git a/src/demo_quipuswap/types/quipu_fa2/parameter/divest_liquidity.py b/src/demo_quipuswap/types/quipu_fa2/parameter/divest_liquidity.py index 7eee7e7af..b361ef306 100644 --- a/src/demo_quipuswap/types/quipu_fa2/parameter/divest_liquidity.py +++ b/src/demo_quipuswap/types/quipu_fa2/parameter/divest_liquidity.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -class DivestLiquidity(BaseModel): +class DivestLiquidityParameter(BaseModel): min_tez: str min_tokens: str shares: str diff --git a/src/demo_quipuswap/types/quipu_fa2/parameter/invest_liquidity.py b/src/demo_quipuswap/types/quipu_fa2/parameter/invest_liquidity.py index e19262fac..663a3cbf9 100644 --- a/src/demo_quipuswap/types/quipu_fa2/parameter/invest_liquidity.py +++ b/src/demo_quipuswap/types/quipu_fa2/parameter/invest_liquidity.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class InvestLiquidity(BaseModel): +class InvestLiquidityParameter(BaseModel): __root__: str diff --git a/src/demo_quipuswap/types/quipu_fa2/parameter/tez_to_token_payment.py b/src/demo_quipuswap/types/quipu_fa2/parameter/tez_to_token_payment.py index 07bf21a19..2b8553432 100644 --- a/src/demo_quipuswap/types/quipu_fa2/parameter/tez_to_token_payment.py +++ b/src/demo_quipuswap/types/quipu_fa2/parameter/tez_to_token_payment.py @@ -6,6 +6,6 @@ from pydantic import BaseModel -class TezToTokenPayment(BaseModel): +class TezToTokenPaymentParameter(BaseModel): min_out: str receiver: str diff --git a/src/demo_quipuswap/types/quipu_fa2/parameter/token_to_tez_payment.py b/src/demo_quipuswap/types/quipu_fa2/parameter/token_to_tez_payment.py index d5ee080b5..7ac0f6411 100644 --- a/src/demo_quipuswap/types/quipu_fa2/parameter/token_to_tez_payment.py +++ b/src/demo_quipuswap/types/quipu_fa2/parameter/token_to_tez_payment.py @@ 
-6,7 +6,7 @@ from pydantic import BaseModel -class TokenToTezPayment(BaseModel): +class TokenToTezPaymentParameter(BaseModel): amount: str min_out: str receiver: str diff --git a/src/demo_quipuswap/types/quipu_fa2/parameter/withdraw_profit.py b/src/demo_quipuswap/types/quipu_fa2/parameter/withdraw_profit.py index b2c1b9146..46706b43c 100644 --- a/src/demo_quipuswap/types/quipu_fa2/parameter/withdraw_profit.py +++ b/src/demo_quipuswap/types/quipu_fa2/parameter/withdraw_profit.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class WithdrawProfit(BaseModel): +class WithdrawProfitParameter(BaseModel): __root__: str diff --git a/src/demo_quipuswap/types/quipu_fa2/storage.py b/src/demo_quipuswap/types/quipu_fa2/storage.py index 0c4c6f8cd..84a43dd4f 100644 --- a/src/demo_quipuswap/types/quipu_fa2/storage.py +++ b/src/demo_quipuswap/types/quipu_fa2/storage.py @@ -26,7 +26,7 @@ class Voters(BaseModel): vote: str -class Storage1(BaseModel): +class Storage(BaseModel): current_candidate: Optional[str] current_delegated: Optional[str] invariant: str @@ -52,8 +52,8 @@ class Storage1(BaseModel): votes: Dict[str, str] -class Storage(BaseModel): +class QuipuFa2Storage(BaseModel): dex_lambdas: Dict[str, str] metadata: Dict[str, str] - storage: Storage1 + storage: Storage token_lambdas: Dict[str, str] diff --git a/src/demo_registrydao/dipdup.yml b/src/demo_registrydao/dipdup.yml index 38c9bc5d8..5250e9bd7 100644 --- a/src/demo_registrydao/dipdup.yml +++ b/src/demo_registrydao/dipdup.yml @@ -20,7 +20,8 @@ templates: registry_dao: kind: operation datasource: tzkt - contract: + contracts: + - handlers: - callback: on_propose pattern: diff --git a/src/demo_registrydao/handlers/on_propose.py b/src/demo_registrydao/handlers/on_propose.py index 4e05b3d66..fee75dc01 100644 --- a/src/demo_registrydao/handlers/on_propose.py +++ b/src/demo_registrydao/handlers/on_propose.py @@ -1,11 +1,11 @@ import demo_registrydao.models as models -from demo_registrydao.types.registry.parameter.propose import Propose as ProposeParameter -from demo_registrydao.types.registry.storage import Storage as RegistryStorage -from dipdup.models import HandlerContext, OperationContext +from demo_registrydao.types.registry.parameter.propose import ProposeParameter +from demo_registrydao.types.registry.storage import RegistryStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_propose( - ctx: HandlerContext, + ctx: OperationHandlerContext, propose: OperationContext[ProposeParameter, RegistryStorage], ) -> None: print(propose.storage) diff --git a/src/demo_registrydao/types/registry/parameter/propose.py b/src/demo_registrydao/types/registry/parameter/propose.py index 86080243f..d043a36d4 100644 --- a/src/demo_registrydao/types/registry/parameter/propose.py +++ b/src/demo_registrydao/types/registry/parameter/propose.py @@ -42,6 +42,6 @@ class ProposalMetadatum3(BaseModel): receivers_1: List[str] -class Propose(BaseModel): +class ProposeParameter(BaseModel): frozen_token: str proposal_metadata: Union[ProposalMetadatum, ProposalMetadatum1, ProposalMetadatum2, ProposalMetadatum3] diff --git a/src/demo_registrydao/types/registry/storage.py b/src/demo_registrydao/types/registry/storage.py index 4de5fe6f1..c78df45b9 100644 --- a/src/demo_registrydao/types/registry/storage.py +++ b/src/demo_registrydao/types/registry/storage.py @@ -8,6 +8,22 @@ from pydantic import BaseModel +class Registry(BaseModel): + affected_proposal_key: str + last_updated: str + value: Optional[str] + + +class Extra(BaseModel): + 
frozen_extra_value: str + frozen_scale_value: str + max_proposal_size: str + proposal_receivers: List[str] + registry: Dict[str, Registry] + slash_division_value: str + slash_scale_value: str + + class Key(BaseModel): address: str nat: str @@ -18,16 +34,6 @@ class LedgerItem(BaseModel): value: str -class Key1(BaseModel): - owner: str - operator: str - - -class Operator(BaseModel): - key: Key1 - value: Dict[str, Any] - - class MigrationStatu(BaseModel): notInMigration: Dict[str, Any] @@ -40,20 +46,19 @@ class MigrationStatu2(BaseModel): migratedTo: str -class Registry(BaseModel): - value: Optional[str] - affected_proposal_key: str - last_updated: str +class Key1(BaseModel): + operator: str + owner: str -class Extra(BaseModel): - registry: Dict[str, Registry] - proposal_receivers: List[str] - frozen_scale_value: str - frozen_extra_value: str - slash_scale_value: str - slash_division_value: str - max_proposal_size: str +class Operator(BaseModel): + key: Key1 + value: Dict[str, Any] + + +class ProposalKeyListSortByDateItem(BaseModel): + bytes: str + timestamp: str class DiffItem(BaseModel): @@ -71,11 +76,11 @@ class Metadatum(BaseModel): class ProposalType1(BaseModel): - frozen_scale_value: Optional[str] frozen_extra_value: Optional[str] - slash_scale_value: Optional[str] - slash_division_value: Optional[str] + frozen_scale_value: Optional[str] max_proposal_size: Optional[str] + slash_division_value: Optional[str] + slash_scale_value: Optional[str] class Metadatum1(BaseModel): @@ -96,32 +101,27 @@ class Voter(BaseModel): class Proposals(BaseModel): - upvotes: str downvotes: str - start_date: str metadata: Union[Metadatum, Metadatum1, Metadatum2, Metadatum3] proposer: str proposer_frozen_token: str + start_date: str + upvotes: str voters: List[Voter] -class ProposalKeyListSortByDateItem(BaseModel): - timestamp: str - bytes: str - - -class Storage(BaseModel): +class RegistryStorage(BaseModel): + admin: str + extra: Extra ledger: List[LedgerItem] + metadata: Dict[str, str] + migration_status: Union[MigrationStatu, MigrationStatu1, MigrationStatu2] operators: List[Operator] - token_address: str - admin: str pending_owner: str - migration_status: Union[MigrationStatu, MigrationStatu1, MigrationStatu2] - voting_period: str - quorum_threshold: str - extra: Extra - proposals: Dict[str, Proposals] - proposal_key_list_sort_by_date: List[ProposalKeyListSortByDateItem] permits_counter: str - metadata: Dict[str, str] + proposal_key_list_sort_by_date: List[ProposalKeyListSortByDateItem] + proposals: Dict[str, Proposals] + quorum_threshold: str + token_address: str total_supply: Dict[str, str] + voting_period: str diff --git a/src/demo_tezos_domains/dipdup.yml b/src/demo_tezos_domains/dipdup.yml index efcc6a32b..e260ace92 100644 --- a/src/demo_tezos_domains/dipdup.yml +++ b/src/demo_tezos_domains/dipdup.yml @@ -19,7 +19,8 @@ templates: tezos_domains: kind: operation datasource: - contract: + contracts: + - handlers: - callback: on_admin_update pattern: @@ -35,4 +36,4 @@ indexes: template: tezos_domains values: datasource: tzkt_staging_edo - name_registry: edo_name_registry \ No newline at end of file + name_registry: edo_name_registry diff --git a/src/demo_tezos_domains/handlers/on_admin_update.py b/src/demo_tezos_domains/handlers/on_admin_update.py index d3214d8fa..2d0e438ad 100644 --- a/src/demo_tezos_domains/handlers/on_admin_update.py +++ b/src/demo_tezos_domains/handlers/on_admin_update.py @@ -1,12 +1,12 @@ import demo_tezos_domains.models as models from demo_tezos_domains.handlers.on_storage_diff 
import on_storage_diff -from demo_tezos_domains.types.name_registry.parameter.admin_update import AdminUpdate as AdminUpdateParameter -from demo_tezos_domains.types.name_registry.storage import Storage as NameRegistryStorage -from dipdup.models import HandlerContext, OperationContext +from demo_tezos_domains.types.name_registry.parameter.admin_update import AdminUpdateParameter +from demo_tezos_domains.types.name_registry.storage import NameRegistryStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_admin_update( - ctx: HandlerContext, + ctx: OperationHandlerContext, admin_update: OperationContext[AdminUpdateParameter, NameRegistryStorage], ) -> None: storage = admin_update.storage diff --git a/src/demo_tezos_domains/handlers/on_execute.py b/src/demo_tezos_domains/handlers/on_execute.py index 62411126f..c036e0bcb 100644 --- a/src/demo_tezos_domains/handlers/on_execute.py +++ b/src/demo_tezos_domains/handlers/on_execute.py @@ -1,12 +1,12 @@ import demo_tezos_domains.models as models from demo_tezos_domains.handlers.on_storage_diff import on_storage_diff -from demo_tezos_domains.types.name_registry.parameter.execute import Execute as ExecuteParameter -from demo_tezos_domains.types.name_registry.storage import Storage as NameRegistryStorage -from dipdup.models import HandlerContext, OperationContext +from demo_tezos_domains.types.name_registry.parameter.execute import ExecuteParameter +from demo_tezos_domains.types.name_registry.storage import NameRegistryStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_execute( - ctx: HandlerContext, + ctx: OperationHandlerContext, execute: OperationContext[ExecuteParameter, NameRegistryStorage], ) -> None: storage = execute.storage diff --git a/src/demo_tezos_domains/handlers/on_storage_diff.py b/src/demo_tezos_domains/handlers/on_storage_diff.py index 3538bc201..3e5a0a791 100644 --- a/src/demo_tezos_domains/handlers/on_storage_diff.py +++ b/src/demo_tezos_domains/handlers/on_storage_diff.py @@ -1,13 +1,13 @@ import logging import demo_tezos_domains.models as models -from demo_tezos_domains.types.name_registry.storage import Storage as NameRegistryStorage +from demo_tezos_domains.types.name_registry.storage import NameRegistryStorage _logger = logging.getLogger(__name__) async def on_storage_diff(storage: NameRegistryStorage) -> None: - for name, item in storage.store.records.items(): + for name, item in storage.store.records.items(): # type: ignore record_name = bytes.fromhex(name).decode() record_path = record_name.split('.') _logger.info('Processing `%s`', record_name) @@ -25,7 +25,7 @@ async def on_storage_diff(storage: NameRegistryStorage) -> None: defaults=dict( tld_id=record_path[-1], owner=item.owner, - expiry=storage.store.expiry_map.get(item.expiry_key) if item.expiry_key else None, + expiry=storage.store.expiry_map.get(item.expiry_key) if item.expiry_key else None, # type: ignore token_id=int(item.tzip12_token_id) if item.tzip12_token_id else None, ), ) diff --git a/src/demo_tezos_domains/types/name_registry/parameter/admin_update.py b/src/demo_tezos_domains/types/name_registry/parameter/admin_update.py index eca079977..a229b2cbc 100644 --- a/src/demo_tezos_domains/types/name_registry/parameter/admin_update.py +++ b/src/demo_tezos_domains/types/name_registry/parameter/admin_update.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class AdminUpdate(BaseModel): +class AdminUpdateParameter(BaseModel): __root__: str diff --git 
a/src/demo_tezos_domains/types/name_registry/parameter/execute.py b/src/demo_tezos_domains/types/name_registry/parameter/execute.py index 4f6b242c2..5b6081934 100644 --- a/src/demo_tezos_domains/types/name_registry/parameter/execute.py +++ b/src/demo_tezos_domains/types/name_registry/parameter/execute.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -class Execute(BaseModel): +class ExecuteParameter(BaseModel): action_name: str original_sender: str payload: str diff --git a/src/demo_tezos_domains/types/name_registry/storage.py b/src/demo_tezos_domains/types/name_registry/storage.py index 1b79c6aba..1784b5eb7 100644 --- a/src/demo_tezos_domains/types/name_registry/storage.py +++ b/src/demo_tezos_domains/types/name_registry/storage.py @@ -35,7 +35,7 @@ class Store(BaseModel): tzip12_tokens: Dict[str, str] -class Storage(BaseModel): +class NameRegistryStorage(BaseModel): actions: Dict[str, str] store: Store trusted_senders: List[str] diff --git a/src/demo_tezos_domains_big_map/__init__.py b/src/demo_tezos_domains_big_map/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/dipdup-docker.yml b/src/demo_tezos_domains_big_map/dipdup-docker.yml new file mode 100644 index 000000000..40b0814b4 --- /dev/null +++ b/src/demo_tezos_domains_big_map/dipdup-docker.yml @@ -0,0 +1,12 @@ +database: + kind: postgres + host: db + port: 5432 + user: ${POSTGRES_USER:-dipdup} + password: ${POSTGRES_PASSWORD:-changeme} + database: ${POSTGRES_DB:-dipdup} + schema_name: tezos_domains_big_map + +hasura: + url: http://hasura:8080 + admin_secret: ${ADMIN_SECRET:-changeme} diff --git a/src/demo_tezos_domains_big_map/dipdup.yml b/src/demo_tezos_domains_big_map/dipdup.yml new file mode 100644 index 000000000..98d28b9e5 --- /dev/null +++ b/src/demo_tezos_domains_big_map/dipdup.yml @@ -0,0 +1,35 @@ +spec_version: 0.1 +package: demo_tezos_domains_big_map + +database: + kind: sqlite + path: tezos_domains_big_map.sqlite3 + +contracts: + edo_name_registry: + address: KT1JJbWfW8CHUY95hG9iq2CEMma1RiKhMHDR + typename: name_registry + +datasources: + tzkt_staging_edo: + kind: tzkt + url: ${TZKT_URL:-https://staging.api.edo2net.tzkt.io} + +templates: + tezos_domains_big_map: + kind: big_map + datasource: + handlers: + - callback: on_update + pattern: + - contract: + path: store.records + - contract: + path: store.expiry_map + +indexes: + tezos_domains_big_map_edo: + template: tezos_domains_big_map + values: + datasource: tzkt_staging_edo + name_registry: edo_name_registry diff --git a/src/demo_tezos_domains_big_map/handlers/__init__.py b/src/demo_tezos_domains_big_map/handlers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/handlers/on_rollback.py b/src/demo_tezos_domains_big_map/handlers/on_rollback.py new file mode 100644 index 000000000..ce12d06f5 --- /dev/null +++ b/src/demo_tezos_domains_big_map/handlers/on_rollback.py @@ -0,0 +1,15 @@ +import logging + +from dipdup.utils import reindex + +_logger = logging.getLogger(__name__) + + +async def on_rollback( + from_level: int, + to_level: int, +) -> None: + if from_level - to_level == 1: + return + _logger.warning('Rollback event received, reindexing') + await reindex() diff --git a/src/demo_tezos_domains_big_map/handlers/on_update.py b/src/demo_tezos_domains_big_map/handlers/on_update.py new file mode 100644 index 000000000..2c6273fb1 --- /dev/null +++ b/src/demo_tezos_domains_big_map/handlers/on_update.py @@ -0,0 +1,53 @@ +import logging +from typing import List, Optional + 
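In the new `big_map` index template above, each `path` entry under `pattern` appears to map positionally to one argument of the callback: the generated handler (shown in full below) receives one `List[BigMapContext[...]]` per path, typed with the matching generated key/value models. Schematically, as a signature-only sketch:

```python
from typing import List

from dipdup.models import BigMapContext, BigMapHandlerContext


# Signature shape only; the concrete key/value type parameters are the
# store.records / store.expiry_map models generated further below.
async def on_update(
    ctx: BigMapHandlerContext,
    store_records: List[BigMapContext],     # pattern path: store.records
    store_expiry_map: List[BigMapContext],  # pattern path: store.expiry_map
) -> None:
    ...
```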
+import demo_tezos_domains_big_map.models as models +from demo_tezos_domains_big_map.types.name_registry.big_map.store_expiry_map_key import StoreExpiryMapKey +from demo_tezos_domains_big_map.types.name_registry.big_map.store_expiry_map_value import StoreExpiryMapValue +from demo_tezos_domains_big_map.types.name_registry.big_map.store_records_key import StoreRecordsKey +from demo_tezos_domains_big_map.types.name_registry.big_map.store_records_value import StoreRecordsValue +from dipdup.models import BigMapAction, BigMapContext, BigMapHandlerContext + +_logger = logging.getLogger(__name__) + + +async def on_update( + ctx: BigMapHandlerContext, + store_records: List[BigMapContext[StoreRecordsKey, StoreRecordsValue]], + store_expiry_map: List[BigMapContext[StoreExpiryMapKey, StoreExpiryMapValue]], +) -> None: + for diff in store_records: + if diff.action in (BigMapAction.ADD, BigMapAction.UPDATE): + assert diff.value + record_name = bytes.fromhex(diff.key.__root__).decode() + record_path = record_name.split('.') + _logger.info('Processing `%s`', record_name) + + if len(record_path) != int(diff.value.level): + _logger.error('Invalid record `%s`: expected %s chunks, got %s', record_name, diff.value.level, len(record_path)) + return + + if diff.value.level == "1": + await models.TLD.update_or_create(id=record_name, defaults=dict(owner=diff.value.owner)) + else: + if diff.value.level == "2": + expiry: Optional[str] + if store_expiry_map: + assert store_expiry_map[0].value + expiry = store_expiry_map[0].value.__root__ + else: + expiry = None + await models.Domain.update_or_create( + id=record_name, + defaults=dict( + tld_id=record_path[-1], + owner=diff.value.owner, + expiry=expiry, + token_id=int(diff.value.tzip12_token_id) if diff.value.tzip12_token_id else None, + ), + ) + + await models.Record.update_or_create( + id=record_name, + defaults=dict(domain_id='.'.join(record_path[-2:]), address=diff.value.address), + ) diff --git a/src/demo_tezos_domains_big_map/models.py b/src/demo_tezos_domains_big_map/models.py new file mode 100644 index 000000000..7cd1cd924 --- /dev/null +++ b/src/demo_tezos_domains_big_map/models.py @@ -0,0 +1,20 @@ +from tortoise import Model, fields + + +class TLD(Model): + id = fields.CharField(max_length=255, pk=True) + owner = fields.CharField(max_length=36) + + +class Domain(Model): + id = fields.CharField(max_length=255, pk=True) + tld = fields.ForeignKeyField('models.TLD', 'domains') + expiry = fields.DatetimeField(null=True) + owner = fields.CharField(max_length=36) + token_id = fields.BigIntField(null=True) + + +class Record(Model): + id = fields.CharField(max_length=255, pk=True) + domain = fields.ForeignKeyField('models.Domain', 'records') + address = fields.CharField(max_length=36, null=True) diff --git a/src/demo_tezos_domains_big_map/types/__init__.py b/src/demo_tezos_domains_big_map/types/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/types/name_registry/__init__.py b/src/demo_tezos_domains_big_map/types/name_registry/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/types/name_registry/big_map/__init__.py b/src/demo_tezos_domains_big_map/types/name_registry/big_map/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_key.py b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_key.py new file mode 100644 index 000000000..3ffd5293b 
--- /dev/null +++ b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_key.py @@ -0,0 +1,10 @@ +# generated by datamodel-codegen: +# filename: store.expiry_map.key.json + +from __future__ import annotations + +from pydantic import BaseModel + + +class StoreExpiryMapKey(BaseModel): + __root__: str diff --git a/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_value.py b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_value.py new file mode 100644 index 000000000..c3b482c98 --- /dev/null +++ b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_expiry_map_value.py @@ -0,0 +1,10 @@ +# generated by datamodel-codegen: +# filename: store.expiry_map.value.json + +from __future__ import annotations + +from pydantic import BaseModel + + +class StoreExpiryMapValue(BaseModel): + __root__: str diff --git a/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_key.py b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_key.py new file mode 100644 index 000000000..f800a1ae6 --- /dev/null +++ b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_key.py @@ -0,0 +1,10 @@ +# generated by datamodel-codegen: +# filename: store.records.key.json + +from __future__ import annotations + +from pydantic import BaseModel + + +class StoreRecordsKey(BaseModel): + __root__: str diff --git a/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_value.py b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_value.py new file mode 100644 index 000000000..d80a288fb --- /dev/null +++ b/src/demo_tezos_domains_big_map/types/name_registry/big_map/store_records_value.py @@ -0,0 +1,18 @@ +# generated by datamodel-codegen: +# filename: store.records.value.json + +from __future__ import annotations + +from typing import Dict, Optional + +from pydantic import BaseModel + + +class StoreRecordsValue(BaseModel): + address: Optional[str] + data: Dict[str, str] + expiry_key: Optional[str] + internal_data: Dict[str, str] + level: str + owner: str + tzip12_token_id: Optional[str] diff --git a/src/demo_tzcolors/dipdup.yml b/src/demo_tzcolors/dipdup.yml index 5600ddb2e..529ece913 100644 --- a/src/demo_tzcolors/dipdup.yml +++ b/src/demo_tzcolors/dipdup.yml @@ -23,7 +23,8 @@ templates: tzcolors_auction: kind: operation datasource: - contract: + contracts: + - handlers: - callback: on_create_auction pattern: diff --git a/src/demo_tzcolors/handlers/on_bid.py b/src/demo_tzcolors/handlers/on_bid.py index 4f241926a..8c436f3a2 100644 --- a/src/demo_tzcolors/handlers/on_bid.py +++ b/src/demo_tzcolors/handlers/on_bid.py @@ -1,11 +1,11 @@ import demo_tzcolors.models as models -from demo_tzcolors.types.tzcolors_auction.parameter.bid import Bid as BidParameter -from demo_tzcolors.types.tzcolors_auction.storage import Storage as TzcolorsAuctionStorage -from dipdup.models import HandlerContext, OperationContext +from demo_tzcolors.types.tzcolors_auction.parameter.bid import BidParameter +from demo_tzcolors.types.tzcolors_auction.storage import TzcolorsAuctionStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_bid( - ctx: HandlerContext, + ctx: OperationHandlerContext, bid: OperationContext[BidParameter, TzcolorsAuctionStorage], ) -> None: auction = await models.Auction.filter( diff --git a/src/demo_tzcolors/handlers/on_create_auction.py b/src/demo_tzcolors/handlers/on_create_auction.py index bb38833f4..aa2af316d 100644 --- 
a/src/demo_tzcolors/handlers/on_create_auction.py +++ b/src/demo_tzcolors/handlers/on_create_auction.py @@ -1,11 +1,11 @@ import demo_tzcolors.models as models -from demo_tzcolors.types.tzcolors_auction.parameter.create_auction import CreateAuction as CreateAuctionParameter -from demo_tzcolors.types.tzcolors_auction.storage import Storage as TzcolorsAuctionStorage -from dipdup.models import HandlerContext, OperationContext +from demo_tzcolors.types.tzcolors_auction.parameter.create_auction import CreateAuctionParameter +from demo_tzcolors.types.tzcolors_auction.storage import TzcolorsAuctionStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_create_auction( - ctx: HandlerContext, + ctx: OperationHandlerContext, create_auction: OperationContext[CreateAuctionParameter, TzcolorsAuctionStorage], ) -> None: diff --git a/src/demo_tzcolors/handlers/on_withdraw.py b/src/demo_tzcolors/handlers/on_withdraw.py index 3c76d97a5..1510676c7 100644 --- a/src/demo_tzcolors/handlers/on_withdraw.py +++ b/src/demo_tzcolors/handlers/on_withdraw.py @@ -1,11 +1,11 @@ import demo_tzcolors.models as models -from demo_tzcolors.types.tzcolors_auction.parameter.withdraw import Withdraw as WithdrawParameter -from demo_tzcolors.types.tzcolors_auction.storage import Storage as TzcolorsAuctionStorage -from dipdup.models import HandlerContext, OperationContext +from demo_tzcolors.types.tzcolors_auction.parameter.withdraw import WithdrawParameter +from demo_tzcolors.types.tzcolors_auction.storage import TzcolorsAuctionStorage +from dipdup.models import OperationContext, OperationHandlerContext async def on_withdraw( - ctx: HandlerContext, + ctx: OperationHandlerContext, withdraw: OperationContext[WithdrawParameter, TzcolorsAuctionStorage], ) -> None: auction = await models.Auction.filter( diff --git a/src/demo_tzcolors/types/tzcolors_auction/parameter/bid.py b/src/demo_tzcolors/types/tzcolors_auction/parameter/bid.py index 804651249..8e4fdf4c6 100644 --- a/src/demo_tzcolors/types/tzcolors_auction/parameter/bid.py +++ b/src/demo_tzcolors/types/tzcolors_auction/parameter/bid.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class Bid(BaseModel): +class BidParameter(BaseModel): __root__: str diff --git a/src/demo_tzcolors/types/tzcolors_auction/parameter/create_auction.py b/src/demo_tzcolors/types/tzcolors_auction/parameter/create_auction.py index a9030970d..688d85f64 100644 --- a/src/demo_tzcolors/types/tzcolors_auction/parameter/create_auction.py +++ b/src/demo_tzcolors/types/tzcolors_auction/parameter/create_auction.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -class CreateAuction(BaseModel): +class CreateAuctionParameter(BaseModel): auction_id: str bid_amount: str end_timestamp: str diff --git a/src/demo_tzcolors/types/tzcolors_auction/parameter/withdraw.py b/src/demo_tzcolors/types/tzcolors_auction/parameter/withdraw.py index 9587c08f0..5120b0cad 100644 --- a/src/demo_tzcolors/types/tzcolors_auction/parameter/withdraw.py +++ b/src/demo_tzcolors/types/tzcolors_auction/parameter/withdraw.py @@ -6,5 +6,5 @@ from pydantic import BaseModel -class Withdraw(BaseModel): +class WithdrawParameter(BaseModel): __root__: str diff --git a/src/demo_tzcolors/types/tzcolors_auction/storage.py b/src/demo_tzcolors/types/tzcolors_auction/storage.py index 9624e47bf..7ae2f4e01 100644 --- a/src/demo_tzcolors/types/tzcolors_auction/storage.py +++ b/src/demo_tzcolors/types/tzcolors_auction/storage.py @@ -6,7 +6,7 @@ from pydantic import BaseModel, Extra -class Storage(BaseModel): +class 
TzcolorsAuctionStorage(BaseModel): pass class Config: diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index f351d32ed..286f2958e 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -54,7 +54,7 @@ async def cli(ctx, config: List[str], logging_config: str): ) -@cli.command(help='Run dipdap') +@cli.command(help='Run existing dipdup project') @click.pass_context @click_async async def run(ctx) -> None: @@ -63,7 +63,7 @@ async def run(ctx) -> None: await dipdup.run() -@cli.command(help='Initialize new dipdap') +@cli.command(help='Initialize new dipdup project') @click.pass_context @click_async async def init(ctx): diff --git a/src/dipdup/codegen.py b/src/dipdup/codegen.py index 70ee5d651..1d51ec4cc 100644 --- a/src/dipdup/codegen.py +++ b/src/dipdup/codegen.py @@ -10,7 +10,16 @@ from jinja2 import Template -from dipdup.config import ROLLBACK_HANDLER, DipDupConfig, OperationIndexConfig +from dipdup.config import ( + ROLLBACK_HANDLER, + BigMapIndexConfig, + ContractConfig, + DipDupConfig, + IndexTemplateConfig, + OperationHandlerConfig, + OperationIndexConfig, + TzktDatasourceConfig, +) from dipdup.datasources.tzkt.datasource import TzktDatasource from dipdup.utils import camel_to_snake, snake_to_camel @@ -19,13 +28,37 @@ def resolve_big_maps(schema: Dict[str, Any]) -> Dict[str, Any]: if 'properties' in schema: - return {**schema, 'properties': {prop: resolve_big_maps(sub_schema) for prop, sub_schema in schema['properties'].items()}} + return { + **schema, + 'properties': {prop: resolve_big_maps(sub_schema) for prop, sub_schema in schema['properties'].items()}, + } elif schema.get('$comment') == 'big_map': return schema['oneOf'][1] else: return schema +class SchemasCache: + def __init__(self) -> None: + self._logger = logging.getLogger(f'{__name__}.{self.__class__.__qualname__}') + self._datasources: Dict[TzktDatasourceConfig, TzktDatasource] = {} + self._schemas: Dict[TzktDatasourceConfig, Dict[str, Dict[str, Any]]] = {} + + async def get( + self, + datasource_config: TzktDatasourceConfig, + contract_config: ContractConfig, + ) -> Dict[str, Any]: + if datasource_config not in self._datasources: + self._datasources[datasource_config] = TzktDatasource(datasource_config.url) + self._schemas[datasource_config] = {} + if contract_config.address not in self._schemas[datasource_config]: + self._logger.info('Fetching schemas for contract `%s`', contract_config.address) + address_schemas_json = await self._datasources[datasource_config].fetch_jsonschemas(contract_config.address) + self._schemas[datasource_config][contract_config.address] = address_schemas_json + return self._schemas[datasource_config][contract_config.address] + + async def create_package(config: DipDupConfig): try: package_path = config.package_path @@ -50,50 +83,81 @@ async def fetch_schemas(config: DipDupConfig): with suppress(FileExistsError): mkdir(schemas_path) - schemas_cache: Dict[str, Dict[str, Any]] = {} + schemas_cache = SchemasCache() for index_name, index_config in config.indexes.items(): if isinstance(index_config, OperationIndexConfig): - datasource = TzktDatasource(index_config.tzkt_config.url) - for handler in index_config.handlers: - for item in handler.pattern: - contract = item.contract_config - if item.contract_config.address in schemas_cache: - address_schemas_json = schemas_cache[contract.address] - else: - _logger.info('Fetching schemas for contract `%s`', contract.address) - address_schemas_json = await datasource.fetch_jsonschemas(contract.address) - schemas_cache[contract.address] = address_schemas_json 
- - contract_schemas_path = join(schemas_path, contract.module_name) + for operation_handler_config in index_config.handlers: + for operation_pattern_config in operation_handler_config.pattern: + contract_config = operation_pattern_config.contract_config + contract_schemas = await schemas_cache.get(index_config.datasource_config, contract_config) + + contract_schemas_path = join(schemas_path, contract_config.module_name) with suppress(FileExistsError): mkdir(contract_schemas_path) storage_schema_path = join(contract_schemas_path, 'storage.json') - storage_schema = resolve_big_maps(address_schemas_json['storageSchema']) + storage_schema = resolve_big_maps(contract_schemas['storageSchema']) if not exists(storage_schema_path): with open(storage_schema_path, 'w') as file: - file.write(json.dumps(storage_schema, indent=4)) - # TODO: check contract.typename + file.write(json.dumps(storage_schema, indent=4, sort_keys=True)) parameter_schemas_path = join(contract_schemas_path, 'parameter') with suppress(FileExistsError): mkdir(parameter_schemas_path) entrypoint_schema = next( - ep['parameterSchema'] for ep in address_schemas_json['entrypoints'] if ep['name'] == item.entrypoint + ep['parameterSchema'] for ep in contract_schemas['entrypoints'] if ep['name'] == operation_pattern_config.entrypoint ) - entrypoint_schema_path = join(parameter_schemas_path, f'{item.entrypoint}.json') + entrypoint_schema_path = join(parameter_schemas_path, f'{operation_pattern_config.entrypoint}.json') if not exists(entrypoint_schema_path): with open(entrypoint_schema_path, 'w') as file: file.write(json.dumps(entrypoint_schema, indent=4)) - elif contract.typename is not None: + elif contract_config.typename is not None: with open(entrypoint_schema_path, 'r') as file: existing_schema = json.loads(file.read()) if entrypoint_schema != existing_schema: - raise ValueError(f'Contract "{contract.address}" falsely claims to be a "{contract.typename}"') + # FIXME: Different field order counts as different schema + # raise ValueError(f'Contract "{contract.address}" falsely claims to be a "{contract.typename}"') + _logger.warning('Contract "%s" falsely claims to be a "%s"', contract_config.address, contract_config.typename) + + elif isinstance(index_config, BigMapIndexConfig): + for big_map_handler_config in index_config.handlers: + for big_map_pattern_config in big_map_handler_config.pattern: + contract_config = big_map_pattern_config.contract_config + + contract_schemas = await schemas_cache.get(index_config.datasource_config, contract_config) + + contract_schemas_path = join(schemas_path, contract_config.module_name) + with suppress(FileExistsError): + mkdir(contract_schemas_path) + + big_map_schemas_path = join(contract_schemas_path, 'big_map') + with suppress(FileExistsError): + mkdir(big_map_schemas_path) + + big_map_schema = next(ep for ep in contract_schemas['bigMaps'] if ep['path'] == big_map_pattern_config.path) + big_map_key_schema = big_map_schema['keySchema'] + big_map_key_schema_path = join(big_map_schemas_path, f'{big_map_pattern_config.path}.key.json') + + if not exists(big_map_key_schema_path): + with open(big_map_key_schema_path, 'w') as file: + file.write(json.dumps(big_map_key_schema, indent=4)) + + big_map_value_schema = big_map_schema['valueSchema'] + big_map_value_schema_path = join(big_map_schemas_path, f'{big_map_pattern_config.path}.value.json') + + if not exists(big_map_value_schema_path): + with open(big_map_value_schema_path, 'w') as file: + file.write(json.dumps(big_map_value_schema, indent=4)) + + elif 
isinstance(index_config, IndexTemplateConfig): + raise RuntimeError('Config is not initialized') + + else: + raise NotImplementedError(f'Index kind `{index_config.kind}` is not supported') async def generate_types(config: DipDupConfig): @@ -123,6 +187,12 @@ async def generate_types(config: DipDupConfig): input_path = join(root, file) output_path = join(types_root, f'{camel_to_snake(name)}.py') + + if name == 'storage': + name = '_'.join([root.split('/')[-1], name]) + if root.split('/')[-1] == 'parameter': + name += '_parameter' + _logger.info('Generating type `%s`', name) subprocess.run( [ @@ -142,8 +212,10 @@ async def generate_types(config: DipDupConfig): async def generate_handlers(config: DipDupConfig): _logger.info('Loading handler templates') - with open(join(dirname(__file__), 'templates', 'handler.py.j2')) as file: - template = Template(file.read()) + with open(join(dirname(__file__), 'templates', 'operation_handler.py.j2')) as file: + operation_handler_template = Template(file.read()) + with open(join(dirname(__file__), 'templates', 'big_map_handler.py.j2')) as file: + big_map_handler_template = Template(file.read()) with open(join(dirname(__file__), 'templates', f'{ROLLBACK_HANDLER}.py.j2')) as file: rollback_template = Template(file.read()) @@ -161,11 +233,28 @@ async def generate_handlers(config: DipDupConfig): with open(handler_path, 'w') as file: file.write(handler_code) - for index in config.indexes.values(): - if isinstance(index, OperationIndexConfig): - for handler in index.handlers: + for index_config in config.indexes.values(): + if isinstance(index_config, OperationIndexConfig): + for handler_config in index_config.handlers: + _logger.info('Generating handler `%s`', handler_config.callback) + handler_code = operation_handler_template.render( + package=config.package, + handler=handler_config.callback, + patterns=handler_config.pattern, + snake_to_camel=snake_to_camel, + camel_to_snake=camel_to_snake, + ) + handler_path = join(handlers_path, f'{handler_config.callback}.py') + if not exists(handler_path): + with open(handler_path, 'w') as file: + file.write(handler_code) + + elif isinstance(index_config, BigMapIndexConfig): + for handler in index_config.handlers: _logger.info('Generating handler `%s`', handler.callback) - handler_code = template.render( + for pattern_config in handler.pattern: + pattern_config.path = pattern_config.path.replace('.', '_') + handler_code = big_map_handler_template.render( package=config.package, handler=handler.callback, patterns=handler.pattern, @@ -177,6 +266,9 @@ async def generate_handlers(config: DipDupConfig): with open(handler_path, 'w') as file: file.write(handler_code) + else: + raise NotImplementedError(f'Index kind `{index_config.kind}` is not supported') + async def cleanup(config: DipDupConfig): _logger.info('Cleaning up') diff --git a/src/dipdup/config.py b/src/dipdup/config.py index 4542b5cb0..b396c86c6 100644 --- a/src/dipdup/config.py +++ b/src/dipdup/config.py @@ -6,19 +6,20 @@ import re import sys from collections import defaultdict +from dataclasses import field from os import environ as env from os.path import dirname -from typing import Any, Callable, Dict, List, Optional, Type, Union +from typing import Any, Callable, Dict, List, Optional, Type, Union, cast from urllib.parse import urlparse -from pydantic import validator +from pydantic import Field, validator from pydantic.dataclasses import dataclass from pydantic.json import pydantic_encoder from ruamel.yaml import YAML from typing_extensions import Literal from 
dipdup.exceptions import ConfigurationError -from dipdup.models import IndexType, State +from dipdup.models import State from dipdup.utils import camel_to_snake, reindex, snake_to_camel ROLLBACK_HANDLER = 'on_rollback' @@ -163,7 +164,7 @@ def contract_config(self) -> ContractConfig: @property def parameter_type_cls(self) -> Type: if self._parameter_type_cls is None: - raise Exception('Parameter type is not registered') + raise RuntimeError('Config is not initialized') return self._parameter_type_cls @parameter_type_cls.setter @@ -173,7 +174,7 @@ def parameter_type_cls(self, typ: Type) -> None: @property def storage_type_cls(self) -> Type: if self._storage_type_cls is None: - raise Exception('Storage type is not registered') + raise RuntimeError('Config is not initialized') return self._storage_type_cls @storage_type_cls.setter @@ -182,15 +183,8 @@ def storage_type_cls(self, typ: Type) -> None: @dataclass -class OperationHandlerConfig: - """Operation handler config - - :param callback: Name of method in `handlers` package - :param pattern: Filters to match operations in group - """ - +class HandlerConfig: callback: str - pattern: List[OperationHandlerPatternConfig] def __post_init_post_parse__(self): self._callback_fn = None @@ -207,26 +201,22 @@ def callback_fn(self, fn: Callable) -> None: @dataclass -class OperationIndexConfig: - """Operation index config +class OperationHandlerConfig(HandlerConfig): + """Operation handler config - :param datasource: Alias of datasource in `datasources` block - :param contract: Alias of contract to fetch operations for - :param first_block: First block to process - :param last_block: Last block to process - :param handlers: List of indexer handlers + :param callback: Name of method in `handlers` package + :param pattern: Filters to match operations in group """ - kind: Literal["operation"] + pattern: List[OperationHandlerPatternConfig] + + +@dataclass +class IndexConfig: datasource: Union[str, TzktDatasourceConfig] - contract: Union[str, ContractConfig] - handlers: List[OperationHandlerConfig] - first_block: int = 0 - last_block: int = 0 def __post_init_post_parse__(self): self._state: Optional[State] = None - self._rollback_fn: Optional[Callable] = None self._template_values: Dict[str, str] = None def hash(self) -> str: @@ -238,16 +228,11 @@ def hash(self) -> str: ).hexdigest() @property - def tzkt_config(self) -> TzktDatasourceConfig: + def datasource_config(self) -> TzktDatasourceConfig: if not isinstance(self.datasource, TzktDatasourceConfig): raise RuntimeError('Config is not initialized') return self.datasource - @property - def contract_config(self) -> ContractConfig: - assert isinstance(self.contract, ContractConfig) - return self.contract - @property def state(self): if not self._state: @@ -258,16 +243,6 @@ def state(self): def state(self, value: State): self._state = value - @property - def rollback_fn(self) -> Callable: - if not self._rollback_fn: - raise RuntimeError('Config is not initialized') - return self._rollback_fn - - @rollback_fn.setter - def rollback_fn(self, value: Callable) -> None: - self._rollback_fn = value - @property def template_values(self) -> Optional[Dict[str, str]]: return self._template_values @@ -278,58 +253,103 @@ def template_values(self, value: Dict[str, str]) -> None: @dataclass -class BigmapdiffHandlerPatternConfig: - name: str - entry_type: str +class OperationIndexConfig(IndexConfig): + """Operation index config + :param datasource: Alias of datasource in `datasources` block + :param contract: Alias of contract 
to fetch operations for + :param first_block: First block to process + :param last_block: Last block to process + :param handlers: List of indexer handlers + """ -@dataclass -class BigmapdiffHandlerConfig: - callback: str - pattern: List[BigmapdiffHandlerPatternConfig] + kind: Literal["operation"] + contracts: List[Union[str, ContractConfig]] + handlers: List[OperationHandlerConfig] + first_block: int = 0 + last_block: int = 0 + + @property + def contract_configs(self) -> List[ContractConfig]: + for contract in self.contracts: + if not isinstance(contract, ContractConfig): + raise RuntimeError('Config is not initialized') + return cast(List[ContractConfig], self.contracts) @dataclass -class BigmapdiffIndexConfig: - kind: Literal['bigmapdiff'] - datasource: Union[str, TzktDatasourceConfig] +class BigMapHandlerPatternConfig: contract: Union[str, ContractConfig] - handlers: List[BigmapdiffHandlerConfig] + path: str + + def __post_init_post_parse__(self): + self._key_type_cls = None + self._value_type_cls = None @property - def tzkt_config(self) -> TzktDatasourceConfig: - if not isinstance(self.datasource, TzktDatasourceConfig): + def contract_config(self) -> ContractConfig: + if not isinstance(self.contract, ContractConfig): raise RuntimeError('Config is not initialized') - return self.datasource + return self.contract + + @property + def key_type_cls(self) -> Type: + if self._key_type_cls is None: + raise RuntimeError('Config is not initialized') + return self._key_type_cls + + @key_type_cls.setter + def key_type_cls(self, typ: Type) -> None: + self._key_type_cls = typ + + @property + def value_type_cls(self) -> Type: + if self._value_type_cls is None: + raise RuntimeError('Config is not initialized') + return self._value_type_cls + + @value_type_cls.setter + def value_type_cls(self, typ: Type) -> None: + self._value_type_cls = typ @dataclass -class BlockHandlerConfig: - callback: str +class BigMapHandlerConfig(HandlerConfig): + pattern: List[BigMapHandlerPatternConfig] + + +@dataclass +class BigMapIndexConfig(IndexConfig): + kind: Literal['big_map'] + datasource: Union[str, TzktDatasourceConfig] + handlers: List[BigMapHandlerConfig] + first_block: int = 0 + last_block: int = 0 + + +@dataclass +class BlockHandlerConfig(HandlerConfig): pattern = None @dataclass -class BlockIndexConfig: +class BlockIndexConfig(IndexConfig): kind: Literal['block'] datasource: Union[str, TzktDatasourceConfig] handlers: List[BlockHandlerConfig] - - @property - def tzkt_config(self) -> TzktDatasourceConfig: - if not isinstance(self.datasource, TzktDatasourceConfig): - raise RuntimeError('Config is not initialized') - return self.datasource + first_block: int = 0 + last_block: int = 0 @dataclass class IndexTemplateConfig: + kind = 'template' template: str values: Dict[str, str] -IndexConfigT = Union[OperationIndexConfig, BigmapdiffIndexConfig, BlockIndexConfig, IndexTemplateConfig] -IndexConfigTemplateT = Union[OperationIndexConfig, BigmapdiffIndexConfig, BlockIndexConfig] +IndexConfigT = Union[OperationIndexConfig, BigMapIndexConfig, BlockIndexConfig, IndexTemplateConfig] +IndexConfigTemplateT = Union[OperationIndexConfig, BigMapIndexConfig, BlockIndexConfig] @dataclass @@ -369,6 +389,9 @@ class DipDupConfig: hasura: Optional[HasuraConfig] = None def __post_init_post_parse__(self): + if isinstance(self.database, SqliteDatabaseConfig) and self.hasura: + raise ConfigurationError('SQLite DB engine is not supported by Hasura') + _logger.info('Substituting index templates') for index_name, index_config in 
self.indexes.items(): if isinstance(index_config, IndexTemplateConfig): @@ -388,15 +411,27 @@ def __post_init_post_parse__(self): if isinstance(index_config, OperationIndexConfig): if isinstance(index_config.datasource, str): index_config.datasource = self.datasources[index_config.datasource] - if isinstance(index_config.contract, str): - index_config.contract = self.contracts[index_config.contract] + + for i, contract in enumerate(index_config.contracts): + if isinstance(contract, str): + index_config.contracts[i] = self.contracts[contract] for handler in index_config.handlers: - if isinstance(handler.pattern, list): - callback_patterns[handler.callback].append(handler.pattern) - for pattern in handler.pattern: - if isinstance(pattern.destination, str): - pattern.destination = self.contracts[pattern.destination] + callback_patterns[handler.callback].append(handler.pattern) + for pattern in handler.pattern: + if isinstance(pattern.destination, str): + pattern.destination = self.contracts[pattern.destination] + + elif isinstance(index_config, BigMapIndexConfig): + if isinstance(index_config.datasource, str): + index_config.datasource = self.datasources[index_config.datasource] + + for handler in index_config.handlers: + callback_patterns[handler.callback].append(handler.pattern) + for pattern in handler.pattern: + if isinstance(pattern.contract, str): + pattern.contract = self.contracts[pattern.contract] + else: raise NotImplementedError(f'Index kind `{index_config.kind}` is not supported') @@ -450,59 +485,102 @@ def load( config = cls(**json_config) return config + async def _initialize_index_state( + self, index_name: str, index_config: Union[OperationIndexConfig, BigMapIndexConfig, BlockIndexConfig] + ): + _logger.info('Getting state for index `%s`', index_name) + index_hash = index_config.hash() + state = await State.get_or_none( + index_name=index_name, + index_type=index_config.kind, + ) + if state is None: + state = State( + index_name=index_name, + index_type=index_config.kind, + hash=index_hash, + level=index_config.first_block - 1, + ) + await state.save() + + elif state.hash != index_hash: + _logger.warning('Config hash mismatch, reindexing') + await reindex() + + index_config.state = state + + async def _initialize_handler_callback(self, handler_config: HandlerConfig) -> None: + _logger.info('Registering handler callback `%s`', handler_config.callback) + handler_module = importlib.import_module(f'{self.package}.handlers.{handler_config.callback}') + callback_fn = getattr(handler_module, handler_config.callback) + handler_config.callback_fn = callback_fn + async def initialize(self) -> None: _logger.info('Setting up handlers and types for package `%s`', self.package) - rollback_fn = getattr(importlib.import_module(f'{self.package}.handlers.{ROLLBACK_HANDLER}'), ROLLBACK_HANDLER) - for index_name, index_config in self.indexes.items(): - if isinstance(index_config, OperationIndexConfig): - _logger.info('Getting state for index `%s`', index_name) - index_config.rollback_fn = rollback_fn - index_hash = index_config.hash() - state = await State.get_or_none( - index_name=index_name, - index_type=IndexType.operation, - ) - if state is None: - state = State( - index_name=index_name, - index_type=IndexType.operation, - hash=index_hash, - level=index_config.first_block - 1, - ) - await state.save() - elif state.hash != index_hash: - _logger.warning('Config hash mismatch, reindexing') - await reindex() + if isinstance(index_config, IndexTemplateConfig): + raise RuntimeError('Config is not 
initialized') - index_config.state = state + await self._initialize_index_state(index_name, index_config) - for handler in index_config.handlers: - _logger.info('Registering handler callback `%s`', handler.callback) - handler_module = importlib.import_module(f'{self.package}.handlers.{handler.callback}') - callback_fn = getattr(handler_module, handler.callback) - handler.callback_fn = callback_fn + if isinstance(index_config, OperationIndexConfig): - for pattern in handler.pattern: - _logger.info('Registering parameter type for entrypoint `%s`', pattern.entrypoint) + for operation_handler_config in index_config.handlers: + await self._initialize_handler_callback(operation_handler_config) + + for operation_pattern_config in operation_handler_config.pattern: + _logger.info('Registering parameter type for entrypoint `%s`', operation_pattern_config.entrypoint) parameter_type_module = importlib.import_module( f'{self.package}' f'.types' - f'.{pattern.contract_config.module_name}' + f'.{operation_pattern_config.contract_config.module_name}' f'.parameter' - f'.{camel_to_snake(pattern.entrypoint)}' + f'.{camel_to_snake(operation_pattern_config.entrypoint)}' ) - parameter_type_cls = getattr(parameter_type_module, snake_to_camel(pattern.entrypoint)) - pattern.parameter_type_cls = parameter_type_cls + parameter_type_cls = getattr( + parameter_type_module, snake_to_camel(operation_pattern_config.entrypoint) + 'Parameter' + ) + operation_pattern_config.parameter_type_cls = parameter_type_cls _logger.info('Registering storage type') storage_type_module = importlib.import_module( - f'{self.package}' f'.types' f'.{pattern.contract_config.module_name}' f'.storage' + f'{self.package}.types.{operation_pattern_config.contract_config.module_name}.storage' + ) + storage_type_cls = getattr( + storage_type_module, snake_to_camel(operation_pattern_config.contract_config.module_name) + 'Storage' + ) + operation_pattern_config.storage_type_cls = storage_type_cls + + elif isinstance(index_config, BigMapIndexConfig): + for big_map_handler_config in index_config.handlers: + await self._initialize_handler_callback(big_map_handler_config) + + for big_map_pattern_config in big_map_handler_config.pattern: + _logger.info('Registering big map types for path `%s`', big_map_pattern_config.path) + key_type_module = importlib.import_module( + f'{self.package}' + f'.types' + f'.{big_map_pattern_config.contract_config.module_name}' + f'.big_map' + f'.{camel_to_snake(big_map_pattern_config.path)}_key' ) - storage_type_cls = getattr(storage_type_module, 'Storage') - pattern.storage_type_cls = storage_type_cls + key_type_cls = getattr(key_type_module, snake_to_camel(big_map_pattern_config.path + '_key')) + big_map_pattern_config.key_type_cls = key_type_cls + + value_type_module = importlib.import_module( + f'{self.package}' + f'.types' + f'.{big_map_pattern_config.contract_config.module_name}' + f'.big_map' + f'.{camel_to_snake(big_map_pattern_config.path)}_value' + ) + value_type_cls = getattr(value_type_module, snake_to_camel(big_map_pattern_config.path + '_value')) + big_map_pattern_config.value_type_cls = value_type_cls + + else: + raise NotImplementedError(f'Index kind `{index_config.kind}` is not supported') @dataclass diff --git a/src/dipdup/datasources/tzkt/cache.py b/src/dipdup/datasources/tzkt/cache.py index c52562a97..27223cf58 100644 --- a/src/dipdup/datasources/tzkt/cache.py +++ b/src/dipdup/datasources/tzkt/cache.py @@ -1,28 +1,48 @@ import logging from collections import namedtuple from copy import copy -from typing 
import Awaitable, Callable, Dict, List +from typing import Awaitable, Callable, Dict, List, Optional -from dipdup.config import OperationHandlerConfig, OperationHandlerPatternConfig, OperationIndexConfig -from dipdup.models import OperationData +from dipdup.config import ( + BigMapHandlerConfig, + BigMapHandlerPatternConfig, + BigMapIndexConfig, + OperationHandlerConfig, + OperationHandlerPatternConfig, + OperationIndexConfig, +) +from dipdup.models import BigMapData, OperationData OperationGroup = namedtuple('OperationGroup', ('hash', 'counter')) +OperationID = int class OperationCache: - def __init__(self, index_config: OperationIndexConfig, level: int) -> None: + def __init__(self) -> None: super().__init__() - self._index_config = index_config - self._level = level self._logger = logging.getLogger(__name__) - self._stopped = False + self._level: Optional[int] = None + self._indexes: Dict[str, OperationIndexConfig] = {} self._operations: Dict[OperationGroup, List[OperationData]] = {} - self._previous_operations: Dict[OperationGroup, List[OperationData]] = {} + + async def add_index(self, index_config: OperationIndexConfig) -> None: + self._logger.debug('Adding index %s to cache', index_config) + for contract in index_config.contract_configs: + if contract.address in self._indexes: + raise RuntimeError(f'Address `{contract.address}` used in multiple indexes') + self._indexes[contract.address] = index_config async def add(self, operation: OperationData): self._logger.debug('Adding operation %s to cache (%s, %s)', operation.id, operation.hash, operation.counter) + self._logger.debug('level=%s operation.level=%s', self._level, operation.level) + + if self._level is not None: + if self._level != operation.level: + raise RuntimeError('Operations must be split by level before caching') + else: + self._level = operation.level + key = OperationGroup(operation.hash, operation.counter) - self._level = max(operation.level, self._level) if key not in self._operations: self._operations[key] = [] self._operations[key].append(operation) @@ -41,29 +61,132 @@ async def process( self, callback: Callable[[OperationIndexConfig, OperationHandlerConfig, List[OperationData], List[OperationData]], Awaitable[None]], ) -> int: + if self._level is None: + raise RuntimeError('Add operations to cache before processing') + keys = list(self._operations.keys()) self._logger.info('Matching %s operation groups', len(keys)) for key, operations in copy(self._operations).items(): self._logger.debug('Processing %s', key) - for handler_config in self._index_config.handlers: - matched_operations = [] - for pattern_config in handler_config.pattern: - for operation in operations: - operation_matched = self.match_operation(pattern_config, operation) - if operation_matched: - matched_operations.append(operation) - - if len(matched_operations) == len(handler_config.pattern): - self._logger.info('Handler `%s` matched! 
%s', handler_config.callback, key) - await callback(self._index_config, handler_config, matched_operations, operations) - if key in self._operations: + matched = False + + for index_config in self._indexes.values(): + if matched: + break + for handler_config in index_config.handlers: + matched_operations = [] + for pattern_config in handler_config.pattern: + for operation in operations: + operation_matched = self.match_operation(pattern_config, operation) + if operation_matched: + matched_operations.append(operation) + + if len(matched_operations) == len(handler_config.pattern): + self._logger.info('Handler `%s` matched! %s', handler_config.callback, key) + matched = True + await callback(index_config, handler_config, matched_operations, operations) + + index_config.state.level = self._level + await index_config.state.save() + + del self._operations[key] + break keys_left = self._operations.keys() self._logger.info('%s operation groups unmatched', len(keys_left)) self._logger.info('Current level: %s', self._level) self._operations = {} + + level = self._level + self._level = None + return level + + @property + def level(self) -> Optional[int]: return self._level - async def stop(self): - self._stopped = True + +class BigMapCache: + def __init__(self) -> None: + super().__init__() + self._logger = logging.getLogger(__name__) + self._level: Optional[int] = None + self._indexes: List[BigMapIndexConfig] = [] + self._big_maps: Dict[OperationID, List[BigMapData]] = {} + + async def add_index(self, index_config: BigMapIndexConfig) -> None: + self._logger.debug('Adding index %s to cache', index_config) + self._indexes.append(index_config) + + async def add(self, big_map: BigMapData): + self._logger.debug('Adding big map %s to cache (%s)', big_map.id, big_map.operation_id) + self._logger.debug('level=%s operation.level=%s', self._level, big_map.level) + + if self._level is not None: + if self._level != big_map.level: + raise RuntimeError('Big maps must be split by level before caching') + else: + self._level = big_map.level + + key = big_map.operation_id + if key not in self._big_maps: + self._big_maps[key] = [] + self._big_maps[key].append(big_map) + + def match_big_map(self, pattern_config: BigMapHandlerPatternConfig, big_map: BigMapData) -> bool: + self._logger.debug('pattern: %s, %s', pattern_config.path, pattern_config.contract_config.address) + self._logger.debug('big_map: %s, %s', big_map.path, big_map.contract_address) + if pattern_config.path != big_map.path: + return False + if pattern_config.contract_config.address != big_map.contract_address: + return False + self._logger.debug('match!') + return True + + async def process( + self, + callback: Callable[[BigMapIndexConfig, BigMapHandlerConfig, List[List[BigMapData]]], Awaitable[None]], + ) -> int: + if self._level is None: + raise RuntimeError('Add big maps to cache before processing') + + keys = list(self._big_maps.keys()) + self._logger.info('Matching %s big map groups', len(keys)) + for key, big_maps in copy(self._big_maps).items(): + self._logger.debug('Processing %s', key) + matched = False + + for index_config in self._indexes: + if matched: + break + for handler_config in index_config.handlers: + matched_big_maps: List[List[BigMapData]] = [[] for _ in range(len(handler_config.pattern))] + for i, pattern_config in enumerate(handler_config.pattern): + for big_map in big_maps: + big_map_matched = self.match_big_map(pattern_config, big_map) + if big_map_matched: + matched_big_maps[i].append(big_map) + + if any([len(big_map_group) for 
big_map_group in matched_big_maps]): + self._logger.info('Handler `%s` matched! %s', handler_config.callback, key) + matched = True + await callback(index_config, handler_config, matched_big_maps) + + index_config.state.level = self._level + await index_config.state.save() + + del self._big_maps[key] + break + + keys_left = self._big_maps.keys() + self._logger.info('%s big map groups unmatched', len(keys_left)) + self._logger.info('Current level: %s', self._level) + self._big_maps = {} + + level = self._level + self._level = None + return level + + @property + def level(self) -> Optional[int]: + return self._level diff --git a/src/dipdup/datasources/tzkt/datasource.py b/src/dipdup/datasources/tzkt/datasource.py index 5b0fd7d44..da6741a05 100644 --- a/src/dipdup/datasources/tzkt/datasource.py +++ b/src/dipdup/datasources/tzkt/datasource.py @@ -1,7 +1,7 @@ import asyncio import logging from functools import partial -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple, Union from aiosignalrcore.hub.base_hub_connection import BaseHubConnection # type: ignore from aiosignalrcore.hub_connection_builder import HubConnectionBuilder # type: ignore @@ -10,10 +10,25 @@ from tortoise.transactions import in_transaction from dipdup import __version__ -from dipdup.config import ROLLBACK_HANDLER, BigmapdiffIndexConfig, BlockIndexConfig, OperationHandlerConfig, OperationIndexConfig -from dipdup.datasources.tzkt.cache import OperationCache +from dipdup.config import ( + ROLLBACK_HANDLER, + BigMapHandlerConfig, + BigMapIndexConfig, + BlockIndexConfig, + OperationHandlerConfig, + OperationIndexConfig, +) +from dipdup.datasources.tzkt.cache import BigMapCache, OperationCache from dipdup.datasources.tzkt.enums import TzktMessageType -from dipdup.models import HandlerContext, OperationContext, OperationData +from dipdup.models import ( + BigMapAction, + BigMapContext, + BigMapData, + BigMapHandlerContext, + OperationContext, + OperationData, + OperationHandlerContext, +) from dipdup.utils import http_request TZKT_HTTP_REQUEST_LIMIT = 10000 @@ -47,31 +62,45 @@ "diffs,", ) +IndexName = str +Address = str +Path = str +OperationType = str class TzktDatasource: def __init__(self, url: str): super().__init__() self._url = url.rstrip('/') - self._operation_index_configs: Dict[str, OperationIndexConfig] = {} - self._synchronized = asyncio.Event() - self._callback_lock = asyncio.Lock() self._logger = logging.getLogger(__name__) - self._subscriptions: Dict[str, List[str]] = {} - self._subscriptions_registered: List[Tuple[str, str]] = [] - self._sync_events: Dict[str, asyncio.Event] = {} + self._operation_index_by_name: Dict[IndexName, OperationIndexConfig] = {} + self._big_map_index_by_name: Dict[IndexName, BigMapIndexConfig] = {} + self._big_map_index_by_address: Dict[Address, BigMapIndexConfig] = {} + self._callback_lock = asyncio.Lock() + self._operation_subscriptions: Dict[Address, List[OperationType]] = {} + self._big_map_subscriptions: Dict[Address, List[Path]] = {} + self._operations_synchronized = asyncio.Event() + self._big_maps_synchronized = asyncio.Event() self._client: Optional[BaseHubConnection] = None - self._caches: Dict[str, OperationCache] = {} - - def add_index(self, config: Union[OperationIndexConfig, BigmapdiffIndexConfig, BlockIndexConfig]): - if isinstance(config, OperationIndexConfig): - self._logger.info('Adding index `%s`', config.state.index_name) - self._operation_index_configs[config.contract_config.address] = 
config - self._sync_events[config.state.index_name] = asyncio.Event() - self._caches[config.contract_config.address] = OperationCache(config, config.state.level) - if len(self._operation_index_configs) > 1: - self._logger.warning('Using more than one operation index. Be careful, indexing is not atomic.') + self._operation_cache = OperationCache() + self._big_map_cache = BigMapCache() + self._rollback_fn: Optional[Callable[[int, int], Awaitable[None]]] = None + + async def add_index(self, index_name: str, index_config: Union[OperationIndexConfig, BigMapIndexConfig, BlockIndexConfig]): + self._logger.info('Adding index `%s`', index_name) + if isinstance(index_config, OperationIndexConfig): + self._operation_index_by_name[index_name] = index_config + await self._operation_cache.add_index(index_config) + + elif isinstance(index_config, BigMapIndexConfig): + self._big_map_index_by_name[index_name] = index_config + await self._big_map_cache.add_index(index_config) + else: - raise NotImplementedError(f'Index kind `{config.kind}` is not supported') + raise NotImplementedError(f'Index kind `{index_config.kind}` is not supported') + + def set_rollback_fn(self, fn: Callable[[int, int], Awaitable[None]]) -> None: + self._rollback_fn = fn def _get_client(self) -> BaseHubConnection: if self._client is None: @@ -90,26 +119,50 @@ def _get_client(self) -> BaseHubConnection: ).build() self._client.on_open(self.on_connect) self._client.on_error(self.on_error) + self._client.on('operations', self.on_operation_message) + self._client.on('bigmaps', self.on_big_map_message) return self._client async def start(self): self._logger.info('Starting datasource') rest_only = False - for operation_index_config in self._operation_index_configs.values(): + + self._logger.info('Initial synchronizing operation indexes') + for operation_index_config in self._operation_index_by_name.values(): if operation_index_config.last_block: await self.fetch_operations(operation_index_config.last_block, initial=True) rest_only = True continue - await self.add_subscription(operation_index_config.contract) + for contract in operation_index_config.contracts: + await self.add_operation_subscription(contract.address) + latest_block = await self.get_latest_block() current_level = latest_block['level'] state_level = operation_index_config.state.level if current_level != state_level: await self.fetch_operations(current_level, initial=True) + self._logger.info('Initial synchronizing big map indexes') + for big_map_index_config in self._big_map_index_by_name.values(): + + if big_map_index_config.last_block: + await self.fetch_big_maps(big_map_index_config.last_block, initial=True) + rest_only = True + continue + + for handler_config in big_map_index_config.handlers: + for pattern_config in handler_config.pattern: + await self.add_big_map_subscription(pattern_config.contract_config.address, pattern_config.path) + + latest_block = await self.get_latest_block() + current_level = latest_block['level'] + state_level = big_map_index_config.state.level + if current_level != state_level: + await self.fetch_big_maps(current_level, initial=True) + if not rest_only: self._logger.info('Starting websocket client') await self._get_client().start() @@ -119,8 +172,11 @@ async def stop(self): async def on_connect(self): self._logger.info('Connected to server') - for contract, subscriptions in self._subscriptions.items(): - await self.subscribe_to_operations(contract.address, subscriptions) + for address, types in self._operation_subscriptions.items(): + await 
self.subscribe_to_operations(address, types) + for address, paths in self._big_map_subscriptions.items(): + for path in paths: + await self.subscribe_to_big_maps(address, path) def on_error(self, message: CompletionMessage): raise Exception(message.error) @@ -128,14 +184,6 @@ async def subscribe_to_operations(self, address: str, types: List[str]) -> None: self._logger.info('Subscribing to %s, %s', address, types) - key = ('operations', address) - if key not in self._subscriptions_registered: - self._subscriptions_registered.append(key) - self._get_client().on( - 'operations', - partial(self.on_operation_message, address=address), - ) - while self._get_client().transport.state != ConnectionState.connected: await asyncio.sleep(0.1) @@ -149,13 +197,17 @@ async def subscribe_to_operations(self, address: str, types: List[str]) -> None: ], ) - async def _fetch_operations(self, address: str, offset: int, first_level: int, last_level: int) -> List[Dict[str, Any]]: + async def subscribe_to_big_maps(self, address: Address, path: Path) -> None: + self._logger.info('Subscribing to %s, %s', address, path) + + async def _fetch_operations(self, addresses: List[str], offset: int, first_level: int, last_level: int) -> List[Dict[str, Any]]: self._logger.info('Fetching levels %s-%s with offset %s', first_level, last_level, offset) + async with http_request( 'get', url=f'{self._url}/v1/operations/transactions', params={ - "anyof.sender.target.initiator": address, + "sender.in": ','.join(addresses), "offset": offset, "limit": TZKT_HTTP_REQUEST_LIMIT, "level.gt": first_level, @@ -165,15 +217,37 @@ async def _fetch_operations(self, address: str, offset: int, first_level: int, l }, ) as resp: operations = await resp.json() + + async with http_request( + 'get', + url=f'{self._url}/v1/operations/transactions', + params={ + "target.in": ','.join(addresses), + "offset": offset, + "limit": TZKT_HTTP_REQUEST_LIMIT, + "level.gt": first_level, + "level.le": last_level, + "select": ','.join(OPERATION_FIELDS), + "status": "applied", + }, + ) as resp: + target_operations = await resp.json() + + sender_operation_keys = {op['id'] for op in operations} + for op in target_operations: + if op['id'] not in sender_operation_keys: + operations.append(op) + + operations = sorted(operations, key=lambda op: op['id']) + self._logger.info('%s operations fetched', len(operations)) self._logger.debug(operations) return operations async def fetch_operations(self, last_level: int, initial: bool = False) -> None: - async def _process_operations(address, operations): + async def _process_level_operations(operations) -> None: self._logger.info('Processing %s operations of level %s', len(operations), operations[0]['level']) await self.on_operation_message( - address=address, message=[ { 'type': TzktMessageType.DATA.value, @@ -184,22 +258,22 @@ async def _process_operations(address, operations): ) self._logger.info('Fetching operations prior to level %s', last_level) - for index_config in self._operation_index_configs.values(): + for index_config in self._operation_index_by_name.values(): - sync_event = self._sync_events[index_config.state.index_name] level = index_config.state.level operations = [] offset = 0 + addresses = [c.address for c in index_config.contract_configs] while True: - fetched_operations = await self._fetch_operations(index_config.contract_config.address, offset, level, last_level) + fetched_operations = await self._fetch_operations(addresses, offset, level, last_level)
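# NOTE (editor's comment, not part of the original patch): the loop below flushes
# operations one complete level at a time. OperationCache.add() raises on mixed
# levels, so each fetched batch is scanned for level boundaries, every complete
# level is processed immediately, and the unfinished tail of the batch is carried
# over into the next fetch iteration.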
operations += fetched_operations while True: for i in range(len(operations) - 1): if operations[i]['level'] != operations[i + 1]['level']: - await _process_operations(index_config.contract_config.address, operations[: i + 1]) + await _process_level_operations(operations[: i + 1]) operations = operations[i + 1 :] break else: @@ -213,14 +287,85 @@ async def _process_operations(address, operations): await asyncio.sleep(TZKT_HTTP_REQUEST_SLEEP) if operations: - await _process_operations(index_config.contract_config.address, operations) + await _process_level_operations(operations) + + if not initial: + self._operations_synchronized.set() + + async def _fetch_big_maps( + self, addresses: List[Address], paths: List[Path], offset: int, first_level: int, last_level: int + ) -> List[Dict[str, Any]]: + self._logger.info('Fetching levels %s-%s with offset %s', first_level, last_level, offset) + + async with http_request( + 'get', + url=f'{self._url}/v1/bigmaps/updates', + params={ + "contract.in": ",".join(addresses), + "paths.in": ",".join(paths), + "offset": offset, + "limit": TZKT_HTTP_REQUEST_LIMIT, + "level.gt": first_level, + "level.le": last_level, + }, + ) as resp: + big_maps = await resp.json() + + self._logger.info('%s big map updates fetched', len(big_maps)) + self._logger.debug(big_maps) + return big_maps + + async def fetch_big_maps(self, last_level: int, initial: bool = False) -> None: + async def _process_level_big_maps(big_maps): + self._logger.info('Processing %s big map updates of level %s', len(big_maps), big_maps[0]['level']) + await self.on_big_map_message( + message=[ + { + 'type': TzktMessageType.DATA.value, + 'data': big_maps, + }, + ], + sync=True, + ) - if not initial: - sync_event.set() + self._logger.info('Fetching big map updates prior to level %s', last_level) + for index_config in self._big_map_index_by_name.values(): + + level = index_config.state.level + + big_maps = [] + offset = 0 + addresses, paths = set(), set() + for handler_config in index_config.handlers: + for pattern_config in handler_config.pattern: + addresses.add(pattern_config.contract_config.address) + paths.add(pattern_config.path) + + while True: + fetched_big_maps = await self._fetch_big_maps(list(addresses), list(paths), offset, level, last_level) + big_maps += fetched_big_maps + + while True: + for i in range(len(big_maps) - 1): + if big_maps[i]['level'] != big_maps[i + 1]['level']: + await _process_level_big_maps(big_maps[: i + 1]) + big_maps = big_maps[i + 1 :] + break + else: + break + + if len(fetched_big_maps) < TZKT_HTTP_REQUEST_LIMIT: + break + + offset += TZKT_HTTP_REQUEST_LIMIT + self._logger.info('Sleeping %s seconds before fetching next batch', TZKT_HTTP_REQUEST_SLEEP) + await asyncio.sleep(TZKT_HTTP_REQUEST_SLEEP) + + if big_maps: + await _process_level_big_maps(big_maps) if not initial: - self._logger.info('Synchronization finished') - self._synchronized.set() + self._big_maps_synchronized.set() async def fetch_jsonschemas(self, address: str) -> Dict[str, Any]: self._logger.info('Fetching jsonschemas for address `%s', address) @@ -235,25 +380,23 @@ async def fetch_jsonschemas(self, address: str) -> Dict[str, Any]: async def on_operation_message( self, message: List[Dict[str, Any]], - address: str, sync=False, ) -> None: - self._logger.info('Got operation message on %s', address) + self._logger.info('Got operation message') self._logger.debug('%s', message) - index_config = self._operation_index_configs[address] + for item in message: message_type = TzktMessageType(item['type']) if 
message_type == TzktMessageType.STATE: level = item['state'] - self._logger.info('Got state message, current level %s, index level %s', level, index_config.state.level) + self._logger.info('Got state message, current level %s, index level %s', level, self._operation_cache.level) await self.fetch_operations(level) elif message_type == TzktMessageType.DATA: - sync_event = self._sync_events[index_config.state.index_name] - if not sync and not sync_event.is_set(): + if not sync and not self._operations_synchronized.is_set(): self._logger.info('Waiting until synchronization is complete') - await sync_event.wait() + await self._operations_synchronized.wait() self._logger.info('Synchronization is complete, processing websocket message') self._logger.info('Acquiring callback lock') @@ -262,27 +405,78 @@ async def on_operation_message( operation = self.convert_operation(operation_json) if operation.type != 'transaction': continue - await self._caches[address].add(operation) + if operation.status != 'applied': + continue + await self._operation_cache.add(operation) async with in_transaction(): - last_level = await self._caches[address].process(self.on_operation_match) - index_config.state.level = last_level # type: ignore - await index_config.state.save() + await self._operation_cache.process(self.on_operation_match) elif message_type == TzktMessageType.REORG: + if self._rollback_fn is None: + raise RuntimeError('rollback_fn is not set') self._logger.info('Got reorg message, calling `%s` handler', ROLLBACK_HANDLER) - from_level = self._operation_index_configs[address].state.level + # NOTE: It doesn't matter which index to get + from_level = list(self._operation_index_by_name.values())[0].state.level to_level = item['state'] - await self._operation_index_configs[address].rollback_fn(from_level, to_level) + await self._rollback_fn(from_level, to_level) else: self._logger.warning('%s is not supported', message_type) - async def add_subscription(self, address: str, types: Optional[List[str]] = None) -> None: + async def on_big_map_message( + self, + message: List[Dict[str, Any]], + sync=False, + ) -> None: + self._logger.info('Got big map message') + self._logger.debug('%s', message) + + for item in message: + message_type = TzktMessageType(item['type']) + + if message_type == TzktMessageType.STATE: + level = item['state'] + self._logger.info('Got state message, current level %s, index level %s', level, self._big_map_cache.level) + await self.fetch_big_maps(level) + + elif message_type == TzktMessageType.DATA: + if not sync and not self._big_maps_synchronized.is_set(): + self._logger.info('Waiting until synchronization is complete') + await self._big_maps_synchronized.wait() + self._logger.info('Synchronization is complete, processing websocket message') + + self._logger.info('Acquiring callback lock') + async with self._callback_lock: + for big_map_json in item['data']: + big_map = self.convert_big_map(big_map_json) + await self._big_map_cache.add(big_map) + + async with in_transaction(): + await self._big_map_cache.process(self.on_big_map_match) + + elif message_type == TzktMessageType.REORG: + if self._rollback_fn is None: + raise RuntimeError('rollback_fn is not set') + self._logger.info('Got reorg message, calling `%s` handler', ROLLBACK_HANDLER) + # NOTE: It doesn't matter which index to get + from_level = list(self._big_map_index_by_name.values())[0].state.level + to_level = item['state'] + await self._rollback_fn(from_level, to_level) + + else: + self._logger.warning('%s is not supported', 
message_type) + + async def add_operation_subscription(self, address: str, types: Optional[List[str]] = None) -> None: if types is None: types = ['transaction'] - if address not in self._subscriptions: - self._subscriptions[address] = types + if address not in self._operation_subscriptions: + self._operation_subscriptions[address] = types + + async def add_big_map_subscription(self, address: str, path: str) -> None: + if address not in self._big_map_subscriptions: + self._big_map_subscriptions[address] = [] + self._big_map_subscriptions[address].append(path) async def on_operation_match( self, index_config: OperationIndexConfig, handler_config: OperationHandlerConfig, matched_operations: List[OperationData], operations: List[OperationData], ): - handler_context = HandlerContext( + handler_context = OperationHandlerContext( operations=operations, template_values=index_config.template_values, ) - args: List[Union[OperationContext, HandlerContext]] = [handler_context] + args: List[Union[OperationHandlerContext, OperationContext]] = [handler_context] for pattern_config, operation in zip(handler_config.pattern, matched_operations): parameter_type = pattern_config.parameter_type_cls @@ -313,6 +507,46 @@ async def on_operation_match( await handler_config.callback_fn(*args) + async def on_big_map_match( + self, + index_config: BigMapIndexConfig, + handler_config: BigMapHandlerConfig, + matched_big_maps: List[List[BigMapData]], + ): + handler_context = BigMapHandlerContext( + template_values=index_config.template_values, + ) + args: List[Union[BigMapHandlerContext, List[BigMapContext]]] = [handler_context] + for matched_big_map_group, pattern_config in zip(matched_big_maps, handler_config.pattern): + big_map_contexts = [] + for big_map in matched_big_map_group: + + try: + action = BigMapAction(big_map.action) + except ValueError: + continue + + key_type = pattern_config.key_type_cls + key = key_type.parse_obj(big_map.key) + + if action == BigMapAction.REMOVE: + value = None + else: + value_type = pattern_config.value_type_cls + value = value_type.parse_obj(big_map.value) + + big_map_context = BigMapContext( # type: ignore + action=action, + key=key, + value=value, + ) + + big_map_contexts.append(big_map_context) + + args.append(big_map_contexts) + + await handler_config.callback_fn(*args) + @classmethod def convert_operation(cls, operation_json: Dict[str, Any]) -> OperationData: storage = operation_json.get('storage') @@ -345,16 +579,28 @@ def convert_operation(cls, operation_json: Dict[str, Any]) -> OperationData: diffs=operation_json.get('diffs'), ) + @classmethod + def convert_big_map(cls, big_map_json: Dict[str, Any]) -> BigMapData: + return BigMapData( + id=big_map_json['id'], + level=big_map_json['level'], + # FIXME: operation_id field in API + operation_id=big_map_json['level'], + timestamp=big_map_json['timestamp'], + bigmap=big_map_json['bigmap'], + contract_address=big_map_json['contract']['address'], + path=big_map_json['path'], + action=big_map_json['action'], + key=big_map_json.get('content', {}).get('key'), + value=big_map_json.get('content', {}).get('value'), + ) + async def get_latest_block(self) -> Dict[str, Any]: self._logger.info('Fetching latest block') async with http_request( 'get', - url=f'{self._url}/v1/blocks', - params={ - "limit": 1, - "sort.desc": "id", - }, + url=f'{self._url}/v1/head', ) as resp: - blocks = await resp.json() - self._logger.debug(blocks) - return blocks[0] + block = await resp.json() + self._logger.debug(block) + return block diff --git a/src/dipdup/dipdup.py 
b/src/dipdup/dipdup.py index 5cb5960e7..3348071e7 100644 --- a/src/dipdup/dipdup.py +++ b/src/dipdup/dipdup.py @@ -1,5 +1,6 @@ import asyncio import hashlib +import importlib import logging from typing import Dict @@ -9,7 +10,7 @@ import dipdup.codegen as codegen from dipdup import __version__ -from dipdup.config import DipDupConfig, IndexTemplateConfig, PostgresDatabaseConfig, TzktDatasourceConfig +from dipdup.config import ROLLBACK_HANDLER, DipDupConfig, IndexTemplateConfig, PostgresDatabaseConfig, TzktDatasourceConfig from dipdup.datasources.tzkt.datasource import TzktDatasource from dipdup.hasura import configure_hasura from dipdup.models import IndexType, State @@ -31,6 +32,8 @@ async def init(self) -> None: async def run(self) -> None: url = self._config.database.connection_string models = f'{self._config.package}.models' + rollback_fn = getattr(importlib.import_module(f'{self._config.package}.handlers.{ROLLBACK_HANDLER}'), ROLLBACK_HANDLER) + async with tortoise_wrapper(url, models): await self.initialize_database() @@ -39,12 +42,15 @@ async def run(self) -> None: datasources: Dict[TzktDatasourceConfig, TzktDatasource] = {} for index_name, index_config in self._config.indexes.items(): - assert not isinstance(index_config, IndexTemplateConfig) + if isinstance(index_config, IndexTemplateConfig): + raise RuntimeError('Config is not initialized') + self._logger.info('Processing index `%s`', index_name) if isinstance(index_config.datasource, TzktDatasourceConfig): - if index_config.tzkt_config not in datasources: - datasources[index_config.tzkt_config] = TzktDatasource(index_config.tzkt_config.url) - datasources[index_config.tzkt_config].add_index(index_config) + if index_config.datasource_config not in datasources: + datasources[index_config.datasource_config] = TzktDatasource(index_config.datasource_config.url) + datasources[index_config.datasource_config].set_rollback_fn(rollback_fn) + await datasources[index_config.datasource_config].add_index(index_name, index_config) else: raise NotImplementedError(f'Datasource `{index_config.datasource}` is not supported') diff --git a/src/dipdup/models.py b/src/dipdup/models.py index 813980fa2..d3751c8ac 100644 --- a/src/dipdup/models.py +++ b/src/dipdup/models.py @@ -8,8 +8,10 @@ from pydantic.dataclasses import dataclass from tortoise import Model, fields -ParameterType = TypeVar('ParameterType') +ParameterType = TypeVar('ParameterType', bound=BaseModel) StorageType = TypeVar('StorageType', bound=BaseModel) +KeyType = TypeVar('KeyType', bound=BaseModel) +ValueType = TypeVar('ValueType', bound=BaseModel) _logger = logging.getLogger(__name__) @@ -17,7 +19,7 @@ class IndexType(Enum): operation = 'operation' - bigmapdiff = 'bigmapdiff' + big_map = 'big_map' block = 'block' schema = 'schema' @@ -86,6 +88,7 @@ def _process_storage(self, storage_type: Type[StorageType], storage: Dict, prefi # NOTE: TzKT could return bigmaps as object or as array of key-value objects. We need to guess this from storage. # TODO: This code should be a part of datasource module. 
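# NOTE (editor's illustration, not part of the original patch): for a storage field
# declared as a big map, TzKT may return either a plain integer pointer, e.g.
#     {"ledger": 123}
# or the actual key-value contents, e.g.
#     {"ledger": [{"key": "tz1...", "value": "42"}]}
# The isinstance() check below detects the pointer form so the real contents can be
# substituted from the operation's `diffs`.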
if field.type_ not in (int, bool) and isinstance(storage[key], int): + _logger.debug(field.type_) if hasattr(field.type_, '__fields__') and 'key' in field.type_.__fields__ and 'value' in field.type_.__fields__: storage[key] = [] if self.diffs: @@ -122,7 +125,39 @@ class OperationContext(Generic[ParameterType, StorageType]): storage: StorageType +class BigMapAction(Enum): + ADD = 'add_key' + UPDATE = 'update_key' + REMOVE = 'remove_key' + + +@dataclass +class BigMapContext(Generic[KeyType, ValueType]): + action: BigMapAction + key: KeyType + value: Optional[ValueType] + + @dataclass -class HandlerContext: +class BigMapData: + id: int + level: int + operation_id: int + timestamp: datetime + bigmap: int + contract_address: str + path: str + action: str + key: Optional[Any] = None + value: Optional[Any] = None + + +@dataclass +class OperationHandlerContext: operations: List[OperationData] template_values: Optional[Dict[str, str]] + + +@dataclass +class BigMapHandlerContext: + template_values: Optional[Dict[str, str]] diff --git a/src/dipdup/templates/big_map_handler.py.j2 b/src/dipdup/templates/big_map_handler.py.j2 new file mode 100644 index 000000000..f2f0d4de5 --- /dev/null +++ b/src/dipdup/templates/big_map_handler.py.j2 @@ -0,0 +1,19 @@ +from typing import List + +from dipdup.models import BigMapHandlerContext, BigMapContext + +import {{ package }}.models as models +{% for pattern in patterns %} +from {{ package }}.types.{{ pattern.contract.module_name }}.big_map.{{ camel_to_snake(pattern.path) }}_key import {{ snake_to_camel(pattern.path) }}Key +from {{ package }}.types.{{ pattern.contract.module_name }}.big_map.{{ camel_to_snake(pattern.path) }}_value import {{ snake_to_camel(pattern.path) }}Value +{%- endfor %} + + +async def {{ handler }}( + ctx: BigMapHandlerContext, +{%- for pattern in patterns %} + {{ camel_to_snake(pattern.path) }}: List[BigMapContext[{{ snake_to_camel(pattern.path) }}Key, {{ snake_to_camel(pattern.path) }}Value]], +{%- endfor %} +) -> None: + ... + \ No newline at end of file diff --git a/src/dipdup/templates/handler.py.j2 b/src/dipdup/templates/handler.py.j2 deleted file mode 100644 index 2d4675d99..000000000 --- a/src/dipdup/templates/handler.py.j2 +++ /dev/null @@ -1,16 +0,0 @@ -from dipdup.models import HandlerContext, OperationContext - -import {{ package }}.models as models -{% for pattern in patterns %} -from {{ package }}.types.{{ pattern.destination.module_name }}.parameter.{{ camel_to_snake(pattern.entrypoint) }} import {{ snake_to_camel(pattern.entrypoint) }} as {{ snake_to_camel(pattern.entrypoint) }}Parameter -from {{ package }}.types.{{ pattern.destination.module_name }}.storage import Storage as {{ snake_to_camel(pattern.destination.module_name) }}Storage -{%- endfor %} - - -async def {{ handler }}( - ctx: HandlerContext, -{%- for pattern in patterns %} - {{ camel_to_snake(pattern.entrypoint) }}: OperationContext[{{ snake_to_camel(pattern.entrypoint) }}Parameter, {{ snake_to_camel(pattern.destination.module_name) }}Storage], -{%- endfor %} -) -> None: - ... 
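For reference, the new big_map_handler.py.j2 template above renders to roughly the following for the demo_tezos_domains_big_map package added by this patch (a sketch assembled from the template, the tezos_domains_big_map.yml pattern, and the generated type modules in the file list; the checked-in handlers/on_update.py, which also contains the model logic, is authoritative):

from typing import List

from dipdup.models import BigMapHandlerContext, BigMapContext

import demo_tezos_domains_big_map.models as models

from demo_tezos_domains_big_map.types.name_registry.big_map.store_records_key import StoreRecordsKey
from demo_tezos_domains_big_map.types.name_registry.big_map.store_records_value import StoreRecordsValue
from demo_tezos_domains_big_map.types.name_registry.big_map.store_expiry_map_key import StoreExpiryMapKey
from demo_tezos_domains_big_map.types.name_registry.big_map.store_expiry_map_value import StoreExpiryMapValue


async def on_update(
    ctx: BigMapHandlerContext,
    # One argument per pattern item; `store.records` becomes `store_records` via camel_to_snake.
    store_records: List[BigMapContext[StoreRecordsKey, StoreRecordsValue]],
    store_expiry_map: List[BigMapContext[StoreExpiryMapKey, StoreExpiryMapValue]],
) -> None:
    ...

Note how the dots in big map paths are folded into underscores by the updated snake_to_camel/camel_to_snake helpers (see the utils.py hunk below), so `store.records` yields both the `store_records_key` module and the `StoreRecordsKey` class name.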
diff --git a/src/dipdup/templates/operation_handler.py.j2 b/src/dipdup/templates/operation_handler.py.j2 new file mode 100644 index 000000000..61153892b --- /dev/null +++ b/src/dipdup/templates/operation_handler.py.j2 @@ -0,0 +1,16 @@ +from dipdup.models import OperationHandlerContext, OperationContext + +import {{ package }}.models as models +{% for pattern in patterns %} +from {{ package }}.types.{{ pattern.destination.module_name }}.parameter.{{ camel_to_snake(pattern.entrypoint) }} import {{ snake_to_camel(pattern.entrypoint) }}Parameter +from {{ package }}.types.{{ pattern.destination.module_name }}.storage import {{ snake_to_camel(pattern.destination.module_name) }}Storage +{%- endfor %} + + +async def {{ handler }}( + ctx: OperationHandlerContext, +{%- for pattern in patterns %} + {{ camel_to_snake(pattern.entrypoint) }}: OperationContext[{{ snake_to_camel(pattern.entrypoint) }}Parameter, {{ snake_to_camel(pattern.destination.module_name) }}Storage], +{%- endfor %} +) -> None: + ... diff --git a/src/dipdup/utils.py b/src/dipdup/utils.py index 72cde60ae..592683a16 100644 --- a/src/dipdup/utils.py +++ b/src/dipdup/utils.py @@ -17,11 +17,11 @@ def snake_to_camel(value: str) -> str: - return ''.join(map(lambda x: x[0].upper() + x[1:], value.split('_'))) + return ''.join(map(lambda x: x[0].upper() + x[1:], value.replace('.', '_').split('_'))) def camel_to_snake(name): - name = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) + name = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name.replace('.', '_')) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', name).lower() @@ -51,7 +51,7 @@ async def http_request(method: str, **kwargs): async with aiohttp.ClientSession() as session: headers = { **kwargs.pop('headers', {}), - 'User-Agent': f'dupdup/{__version__}', + 'User-Agent': f'dipdup/{__version__}', } async with getattr(session, method)( skip_auto_headers={'User-Agent'}, diff --git a/tests/integration_tests/hic_et_nunc.yml b/tests/integration_tests/hic_et_nunc.yml index 4f239786d..42a13e245 100644 --- a/tests/integration_tests/hic_et_nunc.yml +++ b/tests/integration_tests/hic_et_nunc.yml @@ -22,7 +22,8 @@ indexes: hen_mainnet: kind: operation datasource: tzkt_mainnet - contract: HEN_minter + contracts: + - HEN_minter handlers: - callback: on_mint pattern: diff --git a/tests/integration_tests/quipuswap.yml b/tests/integration_tests/quipuswap.yml index df6e55b5d..f01637619 100644 --- a/tests/integration_tests/quipuswap.yml +++ b/tests/integration_tests/quipuswap.yml @@ -34,7 +34,8 @@ templates: quipuswap_fa12: kind: operation datasource: tzkt_staging_mainnet - contract: + contracts: + - handlers: - callback: on_fa12_token_to_tez pattern: @@ -66,7 +67,8 @@ templates: quipuswap_fa2: kind: operation datasource: tzkt_staging_mainnet - contract: + contracts: + - handlers: - callback: on_fa2_token_to_tez pattern: diff --git a/tests/integration_tests/test_codegen.py b/tests/integration_tests/test_codegen.py index f43a0c41b..dce540d3e 100644 --- a/tests/integration_tests/test_codegen.py +++ b/tests/integration_tests/test_codegen.py @@ -44,7 +44,6 @@ async def test_codegen(self): await dipdup.init() import_submodules(config.package) - except Exception as exc: rmtree('tmp_test_dipdup') raise exc diff --git a/tests/integration_tests/test_demos.py b/tests/integration_tests/test_demos.py index c3e33cabe..c93e746d9 100644 --- a/tests/integration_tests/test_demos.py +++ b/tests/integration_tests/test_demos.py @@ -6,6 +6,8 @@ import demo_hic_et_nunc.models import demo_quipuswap.models +import demo_tezos_domains.models +import 
demo_tezos_domains_big_map.models import demo_tzcolors.models from dipdup.utils import tortoise_wrapper @@ -67,3 +69,23 @@ async def test_tzcolors(self): self.assertEqual(14, tokens) self.assertEqual(14, auctions) self.assertEqual(44, bids) + + async def test_tezos_domains(self): + self.run_dipdup('tezos_domains.yml') + + async with tortoise_wrapper('sqlite:///tmp/dipdup/db.sqlite3', 'demo_tezos_domains.models'): + tlds = await demo_tezos_domains.models.TLD.filter().count() + domains = await demo_tezos_domains.models.Domain.filter().count() + + self.assertEqual(5, tlds) + self.assertEqual(237, domains) + + async def test_tezos_domains_big_map(self): + self.run_dipdup('tezos_domains_big_map.yml') + + async with tortoise_wrapper('sqlite:///tmp/dipdup/db.sqlite3', 'demo_tezos_domains_big_map.models'): + tlds = await demo_tezos_domains_big_map.models.TLD.filter().count() + domains = await demo_tezos_domains_big_map.models.Domain.filter().count() + + self.assertEqual(5, tlds) + self.assertEqual(237, domains) diff --git a/tests/integration_tests/tezos_domains.yml b/tests/integration_tests/tezos_domains.yml new file mode 100644 index 000000000..a1da95d41 --- /dev/null +++ b/tests/integration_tests/tezos_domains.yml @@ -0,0 +1,40 @@ +spec_version: 0.1 +package: demo_tezos_domains + +database: + kind: sqlite + path: db.sqlite3 + +contracts: + edo_name_registry: + address: KT1JJbWfW8CHUY95hG9iq2CEMma1RiKhMHDR + typename: name_registry + +datasources: + tzkt_staging_edo: + kind: tzkt + url: ${TZKT_URL:-https://staging.api.edo2net.tzkt.io} + +templates: + tezos_domains: + kind: operation + datasource: + contracts: + - + handlers: + - callback: on_admin_update + pattern: + - destination: + entrypoint: admin_update + - callback: on_execute + pattern: + - destination: + entrypoint: execute + last_block: 55363 + +indexes: + tezos_domains_edo: + template: tezos_domains + values: + datasource: tzkt_staging_edo + name_registry: edo_name_registry diff --git a/tests/integration_tests/tezos_domains_big_map.yml b/tests/integration_tests/tezos_domains_big_map.yml new file mode 100644 index 000000000..5a5fe0657 --- /dev/null +++ b/tests/integration_tests/tezos_domains_big_map.yml @@ -0,0 +1,36 @@ +spec_version: 0.1 +package: demo_tezos_domains_big_map + +database: + kind: sqlite + path: db.sqlite3 + +contracts: + edo_name_registry: + address: KT1JJbWfW8CHUY95hG9iq2CEMma1RiKhMHDR + typename: name_registry + +datasources: + tzkt_staging_edo: + kind: tzkt + url: ${TZKT_URL:-https://staging.api.edo2net.tzkt.io} + +templates: + tezos_domains_big_map: + kind: big_map + datasource: + handlers: + - callback: on_update + pattern: + - contract: + path: store.records + - contract: + path: store.expiry_map + last_block: 55363 + +indexes: + tezos_domains_big_map_edo: + template: tezos_domains_big_map + values: + datasource: tzkt_staging_edo + name_registry: edo_name_registry diff --git a/tests/integration_tests/tzcolors.yml b/tests/integration_tests/tzcolors.yml index 11de1c139..20ae4e48a 100644 --- a/tests/integration_tests/tzcolors.yml +++ b/tests/integration_tests/tzcolors.yml @@ -23,7 +23,8 @@ templates: tzcolors_auction: kind: operation datasource: - contract: + contracts: + - handlers: - callback: on_create_auction pattern: diff --git a/tests/test_dipdup/dipdup.yml b/tests/test_dipdup/dipdup.yml index 9cfc83978..a898b4d58 100644 --- a/tests/test_dipdup/dipdup.yml +++ b/tests/test_dipdup/dipdup.yml @@ -22,7 +22,8 @@ indexes: hen_mainnet: kind: operation datasource: tzkt_mainnet - contract: HEN_minter + contracts: + - 
HEN_minter handlers: - callback: on_mint pattern: diff --git a/tests/test_dipdup/test_datasources/test_tzkt/test_cache.py b/tests/test_dipdup/test_datasources/test_tzkt/test_cache.py index beb551d93..668f39acd 100644 --- a/tests/test_dipdup/test_datasources/test_tzkt/test_cache.py +++ b/tests/test_dipdup/test_datasources/test_tzkt/test_cache.py @@ -1,20 +1,20 @@ import json from os.path import dirname, join from unittest.async_case import IsolatedAsyncioTestCase # type: ignore -from unittest.mock import ANY, AsyncMock # type: ignore +from unittest.mock import ANY, AsyncMock, MagicMock # type: ignore from dipdup.config import ContractConfig, OperationHandlerConfig, OperationHandlerPatternConfig, OperationIndexConfig from dipdup.datasources.tzkt.cache import OperationCache, OperationGroup from dipdup.datasources.tzkt.datasource import TzktDatasource -from dipdup.models import OperationData +from dipdup.models import OperationData, State -class TzktDatasourceTest(IsolatedAsyncioTestCase): +class TzktOperationCacheTest(IsolatedAsyncioTestCase): async def asyncSetUp(self): - self.config = OperationIndexConfig( + self.index_config = OperationIndexConfig( kind='operation', datasource='', - contract='', + contracts=[ContractConfig(address='KT1AFA2mwNUMNd4SsujE1YYp29vd8BZejyKW')], handlers=[ OperationHandlerConfig( callback='', @@ -26,7 +26,10 @@ async def asyncSetUp(self): ) ], ) - self.cache = OperationCache(self.config, 0) + self.index_config.state = MagicMock() + self.index_config.state.save = AsyncMock() + self.cache = OperationCache() + await self.cache.add_index(self.index_config) async def test_add(self): with open(join(dirname(__file__), 'operations.json')) as file: diff --git a/tests/test_dipdup/test_datasources/test_tzkt/test_datasource.py b/tests/test_dipdup/test_datasources/test_tzkt/test_datasource.py index 56b5f46dd..b55222e7a 100644 --- a/tests/test_dipdup/test_datasources/test_tzkt/test_datasource.py +++ b/tests/test_dipdup/test_datasources/test_tzkt/test_datasource.py @@ -1,189 +1,27 @@ import json from os.path import dirname, join -from typing import Any, Dict, List, Optional, Union from unittest import IsolatedAsyncioTestCase from unittest.mock import ANY, AsyncMock, MagicMock, call, patch from aiosignalrcore.hub.base_hub_connection import BaseHubConnection # type: ignore from aiosignalrcore.transport.websockets.connection import ConnectionState # type: ignore -from pydantic import BaseModel, Extra from tortoise import Tortoise +from demo_hic_et_nunc.types.hen_minter.parameter.collect import CollectParameter +from demo_registrydao.types.registry.parameter.propose import ProposeParameter +from demo_registrydao.types.registry.storage import Proposals, RegistryStorage from dipdup.config import ContractConfig, OperationHandlerConfig, OperationHandlerPatternConfig, OperationIndexConfig from dipdup.datasources.tzkt.datasource import TzktDatasource -from dipdup.models import HandlerContext, IndexType, OperationContext, OperationData, State +from dipdup.models import IndexType, OperationContext, OperationData, OperationHandlerContext, State from dipdup.utils import tortoise_wrapper -class Key(BaseModel): - address: str - nat: str - - -class LedgerItem(BaseModel): - key: Key - value: str - - -class Key1(BaseModel): - owner: str - operator: str - - -class Operator(BaseModel): - key: Key1 - value: Dict[str, Any] - - -class MigrationStatu(BaseModel): - notInMigration: Dict[str, Any] - - -class MigrationStatu1(BaseModel): - migratingTo: str - - -class MigrationStatu2(BaseModel): - 
migratedTo: str - - -class RegistryItem(BaseModel): - pass - - class Config: - extra = Extra.allow - - -class ExtraModel(BaseModel): - registry: Union[int, RegistryItem] - proposal_receivers: List[str] - frozen_scale_value: str - frozen_extra_value: str - slash_scale_value: str - slash_division_value: str - max_proposal_size: str - - -class DiffItem(BaseModel): - key: str - new_value: Optional[str] - - -class ProposalType0(BaseModel): - agora_post_id: str - diff: List[DiffItem] - - -class Metadatum(BaseModel): - proposal_type_0: ProposalType0 - - -class ProposalType1(BaseModel): - frozen_scale_value: Optional[str] - frozen_extra_value: Optional[str] - slash_scale_value: Optional[str] - slash_division_value: Optional[str] - max_proposal_size: Optional[str] - - -class Metadatum1(BaseModel): - proposal_type_1: ProposalType1 - - -class Metadatum2(BaseModel): - receivers_0: List[str] - - -class Metadatum3(BaseModel): - receivers_1: List[str] - - -class Voter(BaseModel): - address: str - nat: str - - -class Proposals(BaseModel): - class Config: - extra = Extra.allow - - upvotes: str - downvotes: str - start_date: str - metadata: Union[Metadatum, Metadatum1, Metadatum2, Metadatum3] - proposer: str - proposer_frozen_token: str - voters: List[Voter] - - -class ProposalKeyListSortByDateItem(BaseModel): - timestamp: str - bytes: str - - -class Metadata(BaseModel): - class Config: - extra = Extra.allow - - __root__: str - - -class TotalSupply(BaseModel): - class Config: - extra = Extra.allow - - __root__: str - - -class Storage(BaseModel): - ledger: List[LedgerItem] - operators: List[Operator] - token_address: str - admin: str - pending_owner: str - migration_status: Union[MigrationStatu, MigrationStatu1, MigrationStatu2] - voting_period: str - quorum_threshold: str - extra: ExtraModel - proposals: Dict[str, Proposals] - proposal_key_list_sort_by_date: List[ProposalKeyListSortByDateItem] - permits_counter: str - metadata: Dict[str, Metadata] - total_supply: Dict[str, TotalSupply] - - -class ProposalMetadatum(BaseModel): - proposal_type_0: ProposalType0 - - -class ProposalMetadatum1(BaseModel): - proposal_type_1: ProposalType1 - - -class ProposalMetadatum2(BaseModel): - receivers_0: List[str] - - -class ProposalMetadatum3(BaseModel): - receivers_1: List[str] - - -class Propose(BaseModel): - frozen_token: str - proposal_metadata: Union[ProposalMetadatum, ProposalMetadatum1, ProposalMetadatum2, ProposalMetadatum3] - - -class Collect(BaseModel): - objkt_amount: str - swap_id: str - - class TzktDatasourceTest(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.index_config = OperationIndexConfig( kind='operation', datasource='tzkt', - contract=ContractConfig(address='KT1Hkg5qeNhfwpKW4fXvq7HGZB9z2EnmCCA9'), + contracts=[ContractConfig(address='KT1Hkg5qeNhfwpKW4fXvq7HGZB9z2EnmCCA9')], handlers=[ OperationHandlerConfig( callback='', @@ -196,9 +34,9 @@ async def asyncSetUp(self): ], ) self.index_config.state = State(index_name='test', index_type=IndexType.operation, hash='') - self.index_config.handlers[0].pattern[0].parameter_type_cls = Collect + self.index_config.handlers[0].pattern[0].parameter_type_cls = CollectParameter self.datasource = TzktDatasource('tzkt.test') - self.datasource.add_index(self.index_config) + await self.datasource.add_index('test', self.index_config) async def test_convert_operation(self): with open(join(dirname(__file__), 'operations.json')) as file: @@ -221,13 +59,13 @@ async def test_start(self): self.datasource.fetch_operations = fetch_operations_mock get_mock = MagicMock() 
- get_mock.return_value.__aenter__.return_value.json.return_value = [{'level': 1337}] + get_mock.return_value.__aenter__.return_value.json.return_value = {'level': 1337} with patch('aiohttp.ClientSession.get', get_mock): await self.datasource.start() fetch_operations_mock.assert_awaited_with(1337, initial=True) - self.assertEqual({self.index_config.contract: ['transaction']}, self.datasource._subscriptions) + self.assertEqual({self.index_config.contracts[0].address: ['transaction']}, self.datasource._operation_subscriptions) client.start.assert_awaited() async def test_on_connect_subscribe_to_operations(self): @@ -235,21 +73,21 @@ async def test_on_connect_subscribe_to_operations(self): client = self.datasource._get_client() client.send = send_mock client.transport.state = ConnectionState.connected - self.datasource._subscriptions = { - self.index_config.contract: ['transaction'], + self.datasource._operation_subscriptions = { + self.index_config.contracts[0].address: ['transaction'], } await self.datasource.on_connect() send_mock.assert_has_awaits( [ - call('SubscribeToOperations', [{'address': self.index_config.contract.address, 'types': 'transaction'}]), + call('SubscribeToOperations', [{'address': self.index_config.contracts[0].address, 'types': 'transaction'}]), ] ) - self.assertEqual(1, len(client.handlers)) + self.assertEqual(2, len(client.handlers)) async def test_on_fetch_operations(self): - self.datasource._subscriptions = {self.index_config.contract.address: ['transaction']} + self.datasource._operation_subscriptions = {self.index_config.contracts[0].address: ['transaction']} with open(join(dirname(__file__), 'operations.json')) as file: operations_message = json.load(file) del operations_message['state'] @@ -265,7 +103,6 @@ async def test_on_fetch_operations(self): await self.datasource.fetch_operations(1337) on_operation_message_mock.assert_awaited_with( - address=self.index_config.contract.address, message=[operations_message], sync=True, ) @@ -274,7 +111,7 @@ async def test_on_operation_message_state(self): fetch_operations_mock = AsyncMock() self.datasource.fetch_operations = fetch_operations_mock - await self.datasource.on_operation_message([{'type': 0, 'state': 123}], self.index_config.contract.address) + await self.datasource.on_operation_message([{'type': 0, 'state': 123}], self.index_config) fetch_operations_mock.assert_awaited_with(123) async def test_on_operation_message_data(self): @@ -289,7 +126,7 @@ async def test_on_operation_message_data(self): async with tortoise_wrapper('sqlite://:memory:'): await Tortoise.generate_schemas() - await self.datasource.on_operation_message([operations_message], self.index_config.contract.address, sync=True) + await self.datasource.on_operation_message([operations_message], sync=True) on_operation_match_mock.assert_awaited_with( self.index_config, @@ -314,18 +151,17 @@ async def test_on_operation_match(self): self.index_config.handlers[0].callback_fn = callback_mock self.index_config.handlers[0].pattern[0].storage_type_cls = storage_type_mock - self.datasource._synchronized.set() await self.datasource.on_operation_match(self.index_config, self.index_config.handlers[0], [matched_operation], operations) - self.assertIsInstance(callback_mock.await_args[0][0], HandlerContext) + self.assertIsInstance(callback_mock.await_args[0][0], OperationHandlerContext) self.assertIsInstance(callback_mock.await_args[0][1], OperationContext) - self.assertIsInstance(callback_mock.await_args[0][1].parameter, Collect) + 
self.assertIsInstance(callback_mock.await_args[0][1].parameter, CollectParameter) self.assertIsInstance(callback_mock.await_args[0][1].data, OperationData) async def test_on_operation_match_with_storage(self): with open(join(dirname(__file__), 'operations-storage.json')) as file: operations_message = json.load(file) - self.index_config.handlers[0].pattern[0].parameter_type_cls = Propose + self.index_config.handlers[0].pattern[0].parameter_type_cls = ProposeParameter operations = [TzktDatasource.convert_operation(op) for op in operations_message['data']] matched_operation = operations[0] @@ -336,12 +172,11 @@ async def test_on_operation_match_with_storage(self): callback_mock = AsyncMock() self.index_config.handlers[0].callback_fn = callback_mock - self.index_config.handlers[0].pattern[0].storage_type_cls = Storage + self.index_config.handlers[0].pattern[0].storage_type_cls = RegistryStorage - self.datasource._synchronized.set() await self.datasource.on_operation_match(self.index_config, self.index_config.handlers[0], [matched_operation], operations) - self.assertIsInstance(callback_mock.await_args[0][1].storage, Storage) + self.assertIsInstance(callback_mock.await_args[0][1].storage, RegistryStorage) self.assertIsInstance(callback_mock.await_args[0][1].storage.ledger, list) self.assertIsInstance( callback_mock.await_args[0][1].storage.proposals['e710c1a066bbbf73692168e783607996785260cec4d60930579827298493b8b9'], diff --git a/tests/test_dipdup/test_models.py b/tests/test_dipdup/test_models.py new file mode 100644 index 000000000..41526feed --- /dev/null +++ b/tests/test_dipdup/test_models.py @@ -0,0 +1,90 @@ +from unittest import TestCase +from unittest.mock import MagicMock + +from demo_tezos_domains.types.name_registry.storage import NameRegistryStorage +from dipdup.models import OperationData + + +class ModelsTest(TestCase): + def test_merged_storage(self): + storage = { + 'store': { + 'data': 15023, + 'owner': 'tz1VBLpuDKMoJuHRLZ4HrCgRuiLpEr7zZx2E', + 'records': 15026, + 'metadata': 15025, + 'expiry_map': 15024, + 'tzip12_tokens': 15028, + 'reverse_records': 15027, + 'next_tzip12_token_id': '18', + }, + 'actions': 15022, + 'trusted_senders': [ + 'KT19fHFeGecCBRePPMoRjMthJ9YZCJkB5MsN', + 'KT1A84aNsVCG7EsZyKHSyqZacVVSN1zcQzS7', + 'KT1AQmVzLnNWtCmksbCGg7np9dmAU5CKYH72', + 'KT1EeRLdEPJPFx96tDM1VgRka2V6ZyKV4vRg', + 'KT1FpHyP8vUd7p2aq7DLRccUVPixoGVB4fJE', + 'KT1HKtJxcr8dMTJMUiiFhttA6rk4v6xqTkmH', + 'KT1KP2Yy6MNkYKkHqroGBZ7KFN5NdNfnUHHv', + 'KT1LE3iTYfJNWkmPoa3KzN45y1QFKF6GA42Q', + 'KT1Mq1zd986PxK4C2y9S7UaJkhTBbY15AU32', + ], + } + diffs = [ + { + 'bigmap': 15028, + 'path': 'store.tzip12_tokens', + 'action': 'add_key', + 'content': { + 'hash': 'expruh5diuJb6Vu4B127cxWhiJ3927mvmG9oZ1pYKSNERPpefM4KBg', + 'key': '17', + 'value': '6672657175656e742d616e616c7973742e65646f', + }, + }, + { + 'bigmap': 15026, + 'path': 'store.records', + 'action': 'add_key', + 'content': { + 'hash': 'expruDKynBfQW5KFzPfKyRxNTfFzTJGrHUU4FpzBZcoRYXjyhdPPrM', + 'key': '6672657175656e742d616e616c7973742e65646f', + 'value': { + 'data': {}, + 'level': '2', + 'owner': 'tz1SUrXU6cxioeyURSxTgaxmpSWgQq4PMSov', + 'address': 'tz1SUrXU6cxioeyURSxTgaxmpSWgQq4PMSov', + 'expiry_key': '6672657175656e742d616e616c7973742e65646f', + 'internal_data': {}, + 'tzip12_token_id': '17', + }, + }, + }, + { + 'bigmap': 15024, + 'path': 'store.expiry_map', + 'action': 'add_key', + 'content': { + 'hash': 'expruDKynBfQW5KFzPfKyRxNTfFzTJGrHUU4FpzBZcoRYXjyhdPPrM', + 'key': '6672657175656e742d616e616c7973742e65646f', + 'value': '2024-02-29T15:45:49Z', 
+ }, + }, + ] + operation_data = OperationData( + storage=storage, + diffs=diffs, + type=None, + id=0, + level=0, + timestamp=0, + hash='', + counter=0, + sender_address='', + target_address='', + amount=0, + status='', + has_internals=False, + ) + merged_storage = OperationData.get_merged_storage(operation_data, NameRegistryStorage) + self.assertTrue('6672657175656e742d616e616c7973742e65646f' in merged_storage.store.records)
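To make the fixture above easier to follow: conceptually, get_merged_storage substitutes each integer big map pointer in operation storage with the entries collected from the operation's diffs, matched by dotted path. The standalone sketch below illustrates the idea only; it is not DipDup's implementation (the real code walks typed pydantic fields in _process_storage), and the helper name is invented:

from typing import Any, Dict, List


def merge_big_map_diffs(storage: Dict[str, Any], diffs: List[Dict[str, Any]], prefix: str = '') -> Dict[str, Any]:
    """Illustrative only: swap integer big map pointers for entries gathered from diffs."""
    merged: Dict[str, Any] = {}
    for key, value in storage.items():
        path = f'{prefix}{key}'
        if isinstance(value, dict):
            # Nested storage section, e.g. the `store` field in the fixture above.
            merged[key] = merge_big_map_diffs(value, diffs, f'{path}.')
        elif isinstance(value, int) and not isinstance(value, bool):
            # Bare big map pointer, e.g. `records: 15026`; collect its diffs by path.
            merged[key] = {
                d['content']['key']: d['content']['value']
                for d in diffs
                if d['path'] == path and d['action'] in ('add_key', 'update_key')
            }
        else:
            merged[key] = value
    return merged


# With the fixture above, the new domain record lands under its bytes-encoded name:
# merge_big_map_diffs(storage, diffs)['store']['records']['6672657175656e742d616e616c7973742e65646f']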