From 6a7aa5824d34117a4241493ee6550f38f40bb9bd Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Fri, 9 Jul 2021 16:17:15 +0300 Subject: [PATCH] Hasura v2: user limits and camelcase (#80) --- docker-compose.yml | 2 +- poetry.lock | 94 +-- pyproject.toml | 1 + scripts/init_demos.sh | 5 + scripts/migrate_demos.sh | 5 + src/demo_hic_et_nunc/dipdup-local.yml | 13 + src/demo_hic_et_nunc/graphql/.keep | 0 src/demo_hic_et_nunc/graphql/token.graphql | 9 + src/demo_hic_et_nunc/sql/on_reindex/.keep | 0 src/demo_hic_et_nunc/sql/on_restart/.keep | 0 src/demo_quipuswap/graphql/.keep | 0 src/demo_quipuswap/sql/on_reindex/.keep | 0 src/demo_quipuswap/sql/on_restart/.keep | 0 src/demo_registrydao/graphql/.keep | 0 src/demo_registrydao/sql/on_reindex/.keep | 0 src/demo_registrydao/sql/on_restart/.keep | 0 src/demo_tezos_domains/graphql/.keep | 0 src/demo_tezos_domains/sql/on_reindex/.keep | 0 src/demo_tezos_domains/sql/on_restart/.keep | 0 src/demo_tezos_domains_big_map/graphql/.keep | 0 .../sql/on_reindex/.keep | 0 .../sql/on_restart/.keep | 0 src/demo_tzbtc/graphql/.keep | 0 src/demo_tzbtc/sql/on_reindex/.keep | 0 src/demo_tzbtc/sql/on_restart/.keep | 0 src/demo_tzcolors/graphql/.keep | 0 src/demo_tzcolors/sql/on_reindex/.keep | 0 src/demo_tzcolors/sql/on_restart/.keep | 0 src/dipdup/cli.py | 46 +- src/dipdup/codegen.py | 11 + src/dipdup/config.py | 44 +- src/dipdup/datasources/bcd/datasource.py | 17 +- src/dipdup/datasources/coinbase/datasource.py | 17 +- src/dipdup/datasources/proxy.py | 41 +- src/dipdup/datasources/tzkt/datasource.py | 32 +- src/dipdup/dipdup.py | 39 +- src/dipdup/hasura.py | 606 +++++++++++++----- src/dipdup/index.py | 10 +- src/dipdup/utils.py | 6 +- tests/integration_tests/hasura/empty.json | 25 + .../hasura/query_dipdup_state.json | 70 ++ .../hasura/query_holder.json | 118 ++++ .../integration_tests/hasura/query_root.json | 175 +++++ .../integration_tests/hasura/query_swap.json | 142 ++++ .../integration_tests/hasura/query_token.json | 82 +++ 
.../integration_tests/hasura/query_trade.json | 130 ++++ .../hasura/replace_metadata_request.json | 344 ++++++++++ tests/integration_tests/test_hasura.py | 57 ++ tests/test_dipdup/hasura-metadata.json | 192 ------ tests/test_dipdup/test_hasura.py | 21 - 50 files changed, 1840 insertions(+), 514 deletions(-) create mode 100755 scripts/init_demos.sh create mode 100755 scripts/migrate_demos.sh create mode 100644 src/demo_hic_et_nunc/dipdup-local.yml create mode 100644 src/demo_hic_et_nunc/graphql/.keep create mode 100644 src/demo_hic_et_nunc/graphql/token.graphql create mode 100644 src/demo_hic_et_nunc/sql/on_reindex/.keep create mode 100644 src/demo_hic_et_nunc/sql/on_restart/.keep create mode 100644 src/demo_quipuswap/graphql/.keep create mode 100644 src/demo_quipuswap/sql/on_reindex/.keep create mode 100644 src/demo_quipuswap/sql/on_restart/.keep create mode 100644 src/demo_registrydao/graphql/.keep create mode 100644 src/demo_registrydao/sql/on_reindex/.keep create mode 100644 src/demo_registrydao/sql/on_restart/.keep create mode 100644 src/demo_tezos_domains/graphql/.keep create mode 100644 src/demo_tezos_domains/sql/on_reindex/.keep create mode 100644 src/demo_tezos_domains/sql/on_restart/.keep create mode 100644 src/demo_tezos_domains_big_map/graphql/.keep create mode 100644 src/demo_tezos_domains_big_map/sql/on_reindex/.keep create mode 100644 src/demo_tezos_domains_big_map/sql/on_restart/.keep create mode 100644 src/demo_tzbtc/graphql/.keep create mode 100644 src/demo_tzbtc/sql/on_reindex/.keep create mode 100644 src/demo_tzbtc/sql/on_restart/.keep create mode 100644 src/demo_tzcolors/graphql/.keep create mode 100644 src/demo_tzcolors/sql/on_reindex/.keep create mode 100644 src/demo_tzcolors/sql/on_restart/.keep create mode 100644 tests/integration_tests/hasura/empty.json create mode 100644 tests/integration_tests/hasura/query_dipdup_state.json create mode 100644 tests/integration_tests/hasura/query_holder.json create mode 100644 
tests/integration_tests/hasura/query_root.json create mode 100644 tests/integration_tests/hasura/query_swap.json create mode 100644 tests/integration_tests/hasura/query_token.json create mode 100644 tests/integration_tests/hasura/query_trade.json create mode 100644 tests/integration_tests/hasura/replace_metadata_request.json create mode 100644 tests/integration_tests/test_hasura.py delete mode 100644 tests/test_dipdup/hasura-metadata.json delete mode 100644 tests/test_dipdup/test_hasura.py diff --git a/docker-compose.yml b/docker-compose.yml index 5864b6e78..5bce721c7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -72,7 +72,7 @@ services: retries: 5 hasura: - image: hasura/graphql-engine:v1.3.3 + image: hasura/graphql-engine:v2.0.1 ports: - 127.0.0.1:8080:8080 depends_on: diff --git a/poetry.lock b/poetry.lock index 478340bdb..edae06433 100644 --- a/poetry.lock +++ b/poetry.lock @@ -645,6 +645,14 @@ category = "dev" optional = false python-versions = ">=3.5" +[[package]] +name = "pyhumps" +version = "3.0.2" +description = "🐫 Convert strings (and dictionary keys) between snake case, camel case and pascal case in Python. Inspired by Humps for Node" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "pypika-tortoise" version = "0.1.1" @@ -722,7 +730,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "regex" -version = "2021.4.4" +version = "2021.7.1" description = "Alternative regular expression module, to replace re." 
category = "main" optional = false @@ -905,7 +913,7 @@ multidict = ">=4.0" [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "07fa7c998b750bb6d719190d0c24558aab27b8173f0d0c1fa46886d1cc61a164" +content-hash = "8a278d87e1b138551bcbdf69833b6632a381cc08398dc130d1c7ab346b6e4333" [metadata.files] aiohttp = [ @@ -1351,6 +1359,10 @@ pygments = [ {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, ] +pyhumps = [ + {file = "pyhumps-3.0.2-py3-none-any.whl", hash = "sha256:367b1aadcaa64f8196a3cd14f56559a5602950aeb8486f49318e7394f5e18052"}, + {file = "pyhumps-3.0.2.tar.gz", hash = "sha256:042b4b6eec6c1f862f8310c0eebbae19293e9edab8cafb030ff78c890ef1aa34"}, +] pypika-tortoise = [ {file = "pypika-tortoise-0.1.1.tar.gz", hash = "sha256:6831d0a56e5e0ecefac3307dd9bdb3e5073fdac5d617401601d3a6713e059f3c"}, {file = "pypika_tortoise-0.1.1-py3-none-any.whl", hash = "sha256:860020094e01058ea80602c90d4a843d0a42cffefcf4f3cb1a7f2c18b880c638"}, @@ -1426,47 +1438,43 @@ pyyaml = [ {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] regex = [ - {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, - {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, - {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, - {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, - {file = "regex-2021.4.4-cp37-cp37m-win32.whl", 
hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, - {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, - {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, - {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, - {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, - {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, - {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, - {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, - {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, + {file = "regex-2021.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:494d0172774dc0beeea984b94c95389143db029575f7ca908edd74469321ea99"}, + {file = "regex-2021.7.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:8cf6728f89b071bd3ab37cb8a0e306f4de897553a0ed07442015ee65fbf53d62"}, + {file = "regex-2021.7.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1806370b2bef4d4193eebe8ee59a9fd7547836a34917b7badbe6561a8594d9cb"}, + {file = "regex-2021.7.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d0cf2651a8804f6325747c7e55e3be0f90ee2848e25d6b817aa2728d263f9abb"}, + {file = "regex-2021.7.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:268fe9dd1deb4a30c8593cabd63f7a241dfdc5bd9dd0233906c718db22cdd49a"}, + {file = "regex-2021.7.1-cp36-cp36m-manylinux2014_i686.whl", hash = 
"sha256:7743798dfb573d006f1143d745bf17efad39775a5190b347da5d83079646be56"}, + {file = "regex-2021.7.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:0e46c1191b2eb293a6912269ed08b4512e7e241bbf591f97e527492e04c77e93"}, + {file = "regex-2021.7.1-cp36-cp36m-win32.whl", hash = "sha256:b1dbeef938281f240347d50f28ae53c4b046a23389cd1fc4acec5ea0eae646a1"}, + {file = "regex-2021.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6c72ebb72e64e9bd195cb35a9b9bbfb955fd953b295255b8ae3e4ad4a146b615"}, + {file = "regex-2021.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bf819c5b77ff44accc9a24e31f1f7ceaaf6c960816913ed3ef8443b9d20d81b6"}, + {file = "regex-2021.7.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e80d2851109e56420b71f9702ad1646e2f0364528adbf6af85527bc61e49f394"}, + {file = "regex-2021.7.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a1b6a3f600d6aff97e3f28c34192c9ed93fee293bd96ef327b64adb51a74b2f6"}, + {file = "regex-2021.7.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ed77b97896312bc2deafe137ca2626e8b63808f5bedb944f73665c68093688a7"}, + {file = "regex-2021.7.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a548bb51c4476332ce4139df8e637386730f79a92652a907d12c696b6252b64d"}, + {file = "regex-2021.7.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:210c359e6ee5b83f7d8c529ba3c75ba405481d50f35a420609b0db827e2e3bb5"}, + {file = "regex-2021.7.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:1d386402ae7f3c9b107ae5863f7ecccb0167762c82a687ae6526b040feaa5ac6"}, + {file = "regex-2021.7.1-cp37-cp37m-win32.whl", hash = "sha256:5049d00dbb78f9d166d1c704e93934d42cce0570842bb1a61695123d6b01de09"}, + {file = "regex-2021.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:361be4d311ac995a8c7ad577025a3ae3a538531b1f2cf32efd8b7e5d33a13e5a"}, + {file = "regex-2021.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f32f47fb22c988c0b35756024b61d156e5c4011cb8004aa53d93b03323c45657"}, + {file = "regex-2021.7.1-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:b024ee43ee6b310fad5acaee23e6485b21468718cb792a9d1693eecacc3f0b7e"}, + {file = "regex-2021.7.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:b092754c06852e8a8b022004aff56c24b06310189186805800d09313c37ce1f8"}, + {file = "regex-2021.7.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a8a5826d8a1b64e2ff9af488cc179e1a4d0f144d11ce486a9f34ea38ccedf4ef"}, + {file = "regex-2021.7.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:444723ebaeb7fa8125f29c01a31101a3854ac3de293e317944022ae5effa53a4"}, + {file = "regex-2021.7.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:fdad3122b69cdabdb3da4c2a4107875913ac78dab0117fc73f988ad589c66b66"}, + {file = "regex-2021.7.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4b1999ef60c45357598935c12508abf56edbbb9c380df6f336de38a6c3a294ae"}, + {file = "regex-2021.7.1-cp38-cp38-win32.whl", hash = "sha256:e07e92935040c67f49571779d115ecb3e727016d42fb36ee0d8757db4ca12ee0"}, + {file = "regex-2021.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6b8b629f93246e507287ee07e26744beaffb4c56ed520576deac8b615bd76012"}, + {file = "regex-2021.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56bef6b414949e2c9acf96cb5d78de8b529c7b99752619494e78dc76f99fd005"}, + {file = "regex-2021.7.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:78a2a885345a2d60b5e68099e877757d5ed12e46ba1e87507175f14f80892af3"}, + {file = "regex-2021.7.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3f7a92e60930f8fca2623d9e326c173b7cf2c8b7e4fdcf984b75a1d2fb08114d"}, + {file = "regex-2021.7.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4fc86b729ab88fe8ac3ec92287df253c64aa71560d76da5acd8a2e245839c629"}, + {file = "regex-2021.7.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:59845101de68fd5d3a1145df9ea022e85ecd1b49300ea68307ad4302320f6f61"}, + {file = "regex-2021.7.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:ce269e903b00d1ab4746793e9c50a57eec5d5388681abef074d7b9a65748fca5"}, + {file = "regex-2021.7.1-cp39-cp39-manylinux2014_x86_64.whl", hash = 
"sha256:c11f2fca544b5e30a0e813023196a63b1cb9869106ef9a26e9dae28bce3e4e26"}, + {file = "regex-2021.7.1-cp39-cp39-win32.whl", hash = "sha256:1ccbd41dbee3a31e18938096510b7d4ee53aa9fce2ee3dcc8ec82ae264f6acfd"}, + {file = "regex-2021.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:18040755606b0c21281493ec309214bd61e41a170509e5014f41d6a5a586e161"}, + {file = "regex-2021.7.1.tar.gz", hash = "sha256:849802379a660206277675aa5a5c327f5c910c690649535863ddf329b0ba8c87"}, ] requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, diff --git a/pyproject.toml b/pyproject.toml index 625cb1688..6aa6de0de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ click = "^8.0.1" pyee = "^8.1.0" APScheduler = "^3.7.0" sentry-sdk = "^1.1.0" +pyhumps = "^3.0.2" aiolimiter = "^1.0.0-beta.1" [tool.poetry.dev-dependencies] diff --git a/scripts/init_demos.sh b/scripts/init_demos.sh new file mode 100755 index 000000000..35f183e99 --- /dev/null +++ b/scripts/init_demos.sh @@ -0,0 +1,5 @@ +#$/bin/bash +for name in `ls src | grep demo` +do + dipdup -c src/$name/dipdup.yml init +done \ No newline at end of file diff --git a/scripts/migrate_demos.sh b/scripts/migrate_demos.sh new file mode 100755 index 000000000..df4068b96 --- /dev/null +++ b/scripts/migrate_demos.sh @@ -0,0 +1,5 @@ +#$/bin/bash +for name in `ls src | grep demo` +do + dipdup -c src/$name/dipdup.yml migrate +done \ No newline at end of file diff --git a/src/demo_hic_et_nunc/dipdup-local.yml b/src/demo_hic_et_nunc/dipdup-local.yml new file mode 100644 index 000000000..ba3a897b2 --- /dev/null +++ b/src/demo_hic_et_nunc/dipdup-local.yml @@ -0,0 +1,13 @@ +database: + kind: postgres + host: 127.0.0.1 + port: 6423 + user: ${POSTGRES_USER:-dipdup} + password: ${POSTGRES_PASSWORD:-changeme} + database: ${POSTGRES_DB:-dipdup} + schema_name: hic_et_nunc + +hasura: + url: http://127.0.0.1:8080 + admin_secret: ${ADMIN_SECRET:-changeme} + camel_case: 
True \ No newline at end of file diff --git a/src/demo_hic_et_nunc/graphql/.keep b/src/demo_hic_et_nunc/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_hic_et_nunc/graphql/token.graphql b/src/demo_hic_et_nunc/graphql/token.graphql new file mode 100644 index 000000000..f2d236742 --- /dev/null +++ b/src/demo_hic_et_nunc/graphql/token.graphql @@ -0,0 +1,9 @@ +query tokens($id: bigint) { + hicEtNuncToken(where: {id: {_eq: $id}}) { + creatorId + id + level + supply + timestamp + } +} diff --git a/src/demo_hic_et_nunc/sql/on_reindex/.keep b/src/demo_hic_et_nunc/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_hic_et_nunc/sql/on_restart/.keep b/src/demo_hic_et_nunc/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_quipuswap/graphql/.keep b/src/demo_quipuswap/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_quipuswap/sql/on_reindex/.keep b/src/demo_quipuswap/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_quipuswap/sql/on_restart/.keep b/src/demo_quipuswap/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_registrydao/graphql/.keep b/src/demo_registrydao/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_registrydao/sql/on_reindex/.keep b/src/demo_registrydao/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_registrydao/sql/on_restart/.keep b/src/demo_registrydao/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains/graphql/.keep b/src/demo_tezos_domains/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains/sql/on_reindex/.keep b/src/demo_tezos_domains/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git 
a/src/demo_tezos_domains/sql/on_restart/.keep b/src/demo_tezos_domains/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/graphql/.keep b/src/demo_tezos_domains_big_map/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/sql/on_reindex/.keep b/src/demo_tezos_domains_big_map/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tezos_domains_big_map/sql/on_restart/.keep b/src/demo_tezos_domains_big_map/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tzbtc/graphql/.keep b/src/demo_tzbtc/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tzbtc/sql/on_reindex/.keep b/src/demo_tzbtc/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tzbtc/sql/on_restart/.keep b/src/demo_tzbtc/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tzcolors/graphql/.keep b/src/demo_tzcolors/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tzcolors/sql/on_reindex/.keep b/src/demo_tzcolors/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_tzcolors/sql/on_restart/.keep b/src/demo_tzcolors/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index a5114e005..f66db9c49 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from functools import wraps from os.path import dirname, join -from typing import List, NoReturn +from typing import List, NoReturn, cast import click import sentry_sdk @@ -13,9 +13,11 @@ from sentry_sdk.integrations.aiohttp import AioHttpIntegration from dipdup import __spec_version__, __version__ -from dipdup.config import DipDupConfig, LoggingConfig +from dipdup.config import 
DipDupConfig, LoggingConfig, PostgresDatabaseConfig from dipdup.dipdup import DipDup from dipdup.exceptions import ConfigurationError +from dipdup.hasura import HasuraManager +from dipdup.utils import tortoise_wrapper _logger = logging.getLogger(__name__) @@ -153,3 +155,43 @@ def _bump_spec_version(spec_version: str): @click_async async def clear_cache(ctx): FileCache('dipdup', flag='cs').clear() + + +@cli.command(help='Configure Hasura GraphQL Engine') +@click.option('--reset', is_flag=True, help='Reset metadata before configuring') +@click.pass_context +@click_async +async def configure_hasura(ctx, reset: bool): + config: DipDupConfig = ctx.obj.config + url = config.database.connection_string + models = f'{config.package}.models' + if not config.hasura: + _logger.error('`hasura` config section is empty') + return + hasura = HasuraManager(config.package, config.hasura, cast(PostgresDatabaseConfig, config.database)) + + async with tortoise_wrapper(url, models): + try: + await hasura.configure(reset) + finally: + await hasura.close_session() + + +@cli.command(help='Configure Hasura GraphQL Engine') +@click.option('--reset', is_flag=True, help='Reset metadata before configuring') +@click.pass_context +@click_async +async def cache(ctx, reset: bool): + config: DipDupConfig = ctx.obj.config + url = config.database.connection_string + models = f'{config.package}.models' + if not config.hasura: + _logger.error('`hasura` config section is empty') + return + hasura = HasuraManager(config.package, config.hasura, cast(PostgresDatabaseConfig, config.database)) + + async with tortoise_wrapper(url, models): + try: + await hasura.configure(reset) + finally: + await hasura.close_session() diff --git a/src/dipdup/codegen.py b/src/dipdup/codegen.py index 9b5817968..23f1bbd55 100644 --- a/src/dipdup/codegen.py +++ b/src/dipdup/codegen.py @@ -95,9 +95,20 @@ async def create_package(self) -> None: sql_on_restart_path = join(sql_path, 'on_restart') with suppress(FileExistsError): 
mkdir(sql_on_restart_path) + with open(join(sql_on_restart_path, '.keep'), 'w'): + pass sql_on_reindex_path = join(sql_path, 'on_reindex') with suppress(FileExistsError): mkdir(sql_on_reindex_path) + with open(join(sql_on_reindex_path, '.keep'), 'w'): + pass + + self._logger.info('Creating `%s/graphql` directory', self._config.package) + graphql_path = join(self._config.package_path, 'graphql') + with suppress(FileExistsError): + mkdir(graphql_path) + with open(join(graphql_path, '.keep'), 'w'): + pass async def fetch_schemas(self) -> None: """Fetch JSONSchemas for all contracts used in config""" diff --git a/src/dipdup/config.py b/src/dipdup/config.py index d6ebbdc3d..6c157a588 100644 --- a/src/dipdup/config.py +++ b/src/dipdup/config.py @@ -89,6 +89,15 @@ def valid_immune_tables(cls, v): return v +@dataclass +class HTTPConfig: + cache: bool = True + retry_count: int = DEFAULT_RETRY_COUNT + retry_sleep: int = DEFAULT_RETRY_SLEEP + ratelimit_rate: Optional[int] = None + ratelimit_period: Optional[int] = None + + @dataclass class ContractConfig: """Contract config @@ -141,10 +150,7 @@ class TzktDatasourceConfig(NameMixin): kind: Literal['tzkt'] url: str - - cache: Optional[bool] = None - retry_count: int = DEFAULT_RETRY_COUNT - retry_sleep: int = DEFAULT_RETRY_SLEEP + http: Optional[HTTPConfig] = None def __hash__(self): return hash(self.url) @@ -167,10 +173,7 @@ class BcdDatasourceConfig(NameMixin): kind: Literal['bcd'] url: str network: str - - cache: Optional[bool] = None - retry_count: int = DEFAULT_RETRY_COUNT - retry_sleep: int = DEFAULT_RETRY_SLEEP + http: Optional[HTTPConfig] = None def __hash__(self): return hash(self.url + self.network) @@ -189,10 +192,7 @@ class CoinbaseDatasourceConfig(NameMixin): api_key: Optional[str] = None secret_key: Optional[str] = None passphrase: Optional[str] = None - - cache: Optional[bool] = None - retry_count: int = DEFAULT_RETRY_COUNT - retry_sleep: int = DEFAULT_RETRY_SLEEP + http: Optional[HTTPConfig] = None def 
__hash__(self): return hash(self.kind) @@ -630,13 +630,31 @@ class StaticTemplateConfig: class HasuraConfig: url: str admin_secret: Optional[str] = None + source: str = 'default' + select_limit: int = 100 + allow_aggregations: bool = True + camel_case: bool = False + connection_timeout: int = 5 + rest: bool = True + http: Optional[HTTPConfig] = None @validator('url', allow_reuse=True) def valid_url(cls, v): parsed_url = urlparse(v) if not (parsed_url.scheme and parsed_url.netloc): raise ConfigurationError(f'`{v}` is not a valid Hasura URL') - return v + return v.rstrip('/') + + @validator('source', allow_reuse=True) + def valid_source(cls, v): + if v != 'default': + raise NotImplementedError('Multiple Hasura sources are not supported at the moment') + return v + @property + def headers(self) -> Dict[str, str]: + if self.admin_secret: + return {'X-Hasura-Admin-Secret': self.admin_secret} + return {} @dataclass diff --git a/src/dipdup/datasources/bcd/datasource.py b/src/dipdup/datasources/bcd/datasource.py index c03157fdb..6648be5a5 100644 --- a/src/dipdup/datasources/bcd/datasource.py +++ b/src/dipdup/datasources/bcd/datasource.py @@ -1,16 +1,25 @@ import logging -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional -from dipdup.datasources.proxy import DatasourceRequestProxy +from dipdup.config import HTTPConfig +from dipdup.datasources.proxy import HTTPRequestProxy Address = str class BcdDatasource: - def __init__(self, url: str, network: str, proxy=DatasourceRequestProxy()) -> None: + def __init__( + self, + url: str, + network: str, + http_config: Optional[HTTPConfig] = None, + ) -> None: + if http_config is None: + http_config = HTTPConfig() + self._url = url.rstrip('/') self._network = network - self._proxy = proxy + self._proxy = HTTPRequestProxy(http_config) self._logger = logging.getLogger('dipdup.bcd') async def close_session(self) -> None: diff --git a/src/dipdup/datasources/coinbase/datasource.py 
b/src/dipdup/datasources/coinbase/datasource.py index 66e558bc6..e72519e84 100644 --- a/src/dipdup/datasources/coinbase/datasource.py +++ b/src/dipdup/datasources/coinbase/datasource.py @@ -1,9 +1,10 @@ import logging from datetime import datetime, timedelta, timezone -from typing import Any, Dict, List, Tuple +from typing import Any, Dict, List, Optional, Tuple +from dipdup.config import HTTPConfig from dipdup.datasources.coinbase.models import CandleData, CandleInterval -from dipdup.datasources.proxy import DatasourceRequestProxy +from dipdup.datasources.proxy import HTTPRequestProxy CANDLES_REQUEST_LIMIT = 300 REST_API_URL = 'https://api.pro.coinbase.com' @@ -11,9 +12,16 @@ class CoinbaseDatasource: - def __init__(self, proxy: DatasourceRequestProxy) -> None: + def __init__(self, http_config: Optional[HTTPConfig] = None) -> None: + if http_config is None: + http_config = HTTPConfig( + cache=True, + ratelimit_rate=10, + ratelimit_period=1, + ) + self._logger = logging.getLogger('dipdup.coinbase') - self._proxy = proxy + self._proxy = HTTPRequestProxy(http_config) async def close_session(self) -> None: await self._proxy.close_session() @@ -41,6 +49,7 @@ async def get_candles(self, since: datetime, until: datetime, interval: CandleIn 'end': _until.replace(tzinfo=timezone.utc).isoformat(), 'granularity': interval.seconds, }, + cache=True, ) candles += [CandleData.from_json(c) for c in candles_json] return sorted(candles, key=lambda c: c.timestamp) diff --git a/src/dipdup/datasources/proxy.py b/src/dipdup/datasources/proxy.py index f298e3c20..abb9f83b1 100644 --- a/src/dipdup/datasources/proxy.py +++ b/src/dipdup/datasources/proxy.py @@ -8,42 +8,41 @@ from aiolimiter import AsyncLimiter from fcache.cache import FileCache # type: ignore -from dipdup.config import DEFAULT_RETRY_COUNT, DEFAULT_RETRY_SLEEP # type: ignore +from dipdup.config import HTTPConfig # type: ignore from dipdup.utils import http_request -class DatasourceRequestProxy: - """Wrapper for datasource 
HTTP requests. +class HTTPRequestProxy: + """Wrapper for aiohttp HTTP requests. Covers caching, retrying failed requests and ratelimiting""" - def __init__( - self, - cache: bool = False, - retry_count: int = DEFAULT_RETRY_COUNT, - retry_sleep: int = DEFAULT_RETRY_SLEEP, - ratelimiter: Optional[AsyncLimiter] = None, - ) -> None: + def __init__(self, config: Optional[HTTPConfig] = None) -> None: + if config is None: + config = HTTPConfig() self._logger = logging.getLogger(__name__) - self._cache = FileCache('dipdup', flag='cs') if cache else None - self._retry_count = retry_count - self._retry_sleep = retry_sleep - self._ratelimiter = ratelimiter + self._config = config + self._cache = FileCache('dipdup', flag='cs') + self._ratelimiter = ( + AsyncLimiter(max_rate=config.ratelimit_rate, time_period=config.ratelimit_period) + if config.ratelimit_rate and config.ratelimit_period + else None + ) self._session = aiohttp.ClientSession() async def _wrapped_request(self, method: str, **kwargs): - for attempt in range(self._retry_count): - self._logger.debug('Datasource request attempt %s/%s', attempt + 1, self._retry_count) + for attempt in range(self._config.retry_count): + self._logger.debug('HTTP request attempt %s/%s', attempt + 1, self._config.retry_count) try: return await http_request(self._session, method, **kwargs) except (aiohttp.ClientConnectionError, aiohttp.ClientConnectorError) as e: - if attempt + 1 == self._retry_count: + if attempt + 1 == self._config.retry_count: raise e - self._logger.warning('Datasource request failed: %s', e) - await asyncio.sleep(self._retry_sleep) + self._logger.warning('HTTP request failed: %s', e) + await asyncio.sleep(self._config.retry_sleep) - async def http_request(self, method: str, skip_cache: bool = False, weight: int = 1, **kwargs): - if self._cache is not None and not skip_cache: + async def http_request(self, method: str, cache: bool = False, weight: int = 1, **kwargs): + if self._config.cache and cache: key = 
hashlib.sha256(pickle.dumps([method, kwargs])).hexdigest() try: return self._cache[key] diff --git a/src/dipdup/datasources/tzkt/datasource.py b/src/dipdup/datasources/tzkt/datasource.py index 6c755f68d..f8cc4b979 100644 --- a/src/dipdup/datasources/tzkt/datasource.py +++ b/src/dipdup/datasources/tzkt/datasource.py @@ -11,12 +11,13 @@ from dipdup.config import ( BigMapIndexConfig, ContractConfig, + HTTPConfig, IndexConfigTemplateT, OperationHandlerOriginationPatternConfig, OperationIndexConfig, ) from dipdup.datasources.datasource import IndexDatasource -from dipdup.datasources.proxy import DatasourceRequestProxy +from dipdup.datasources.proxy import HTTPRequestProxy from dipdup.datasources.tzkt.enums import TzktMessageType from dipdup.models import BigMapAction, BigMapData, OperationData from dipdup.utils import split_by_chunks @@ -80,12 +81,14 @@ def __init__( last_level: int, transaction_addresses: Set[str], origination_addresses: Set[str], + cache: bool = False, ) -> None: self._datasource = datasource self._first_level = first_level self._last_level = last_level self._transaction_addresses = transaction_addresses self._origination_addresses = origination_addresses + self._cache = cache self._logger = logging.getLogger('dipdup.tzkt.fetcher') self._head: int = 0 @@ -117,6 +120,7 @@ async def _fetch_originations(self) -> None: offset=self._offsets[key], first_level=self._first_level, last_level=self._last_level, + cache=self._cache, ) for op in originations: @@ -151,6 +155,7 @@ async def _fetch_transactions(self, field: str) -> None: offset=self._offsets[key], first_level=self._first_level, last_level=self._last_level, + cache=self._cache, ) for op in transactions: @@ -211,12 +216,14 @@ def __init__( last_level: int, big_map_addresses: Set[str], big_map_paths: Set[str], + cache: bool = False, ) -> None: self._datasource = datasource self._first_level = first_level self._last_level = last_level self._big_map_addresses = big_map_addresses self._big_map_paths = 
big_map_paths + self._cache = cache self._logger = logging.getLogger('dipdup.tzkt.fetcher') @@ -233,6 +240,7 @@ async def fetch_big_maps_by_level(self) -> AsyncGenerator[Tuple[int, List[BigMap offset, self._first_level, self._last_level, + cache=self._cache, ) big_maps += fetched_big_maps @@ -266,11 +274,13 @@ class TzktDatasource(IndexDatasource): def __init__( self, url: str, - proxy: DatasourceRequestProxy, + http_config: Optional[HTTPConfig] = None, ) -> None: super().__init__() + if http_config is None: + http_config = HTTPConfig() self._url = url.rstrip('/') - self._proxy = proxy + self._proxy = HTTPRequestProxy(http_config) self._logger = logging.getLogger('dipdup.tzkt') self._transaction_subscriptions: Set[str] = set() @@ -321,7 +331,6 @@ async def get_originated_contracts(self, address: str) -> List[str]: params=dict( limit=self.request_limit, ), - skip_cache=True, ) return [c['address'] for c in contracts] @@ -347,6 +356,7 @@ async def get_jsonschemas(self, address: str) -> Dict[str, Any]: jsonschemas = await self._proxy.http_request( 'get', url=f'{self._url}/v1/contracts/{address}/interface', + cache=True, ) self._logger.debug(jsonschemas) return jsonschemas @@ -357,12 +367,13 @@ async def get_latest_block(self) -> Dict[str, Any]: block = await self._proxy.http_request( 'get', url=f'{self._url}/v1/head', - skip_cache=True, ) self._logger.debug(block) return block - async def get_originations(self, addresses: Set[str], offset: int, first_level: int, last_level: int) -> List[OperationData]: + async def get_originations( + self, addresses: Set[str], offset: int, first_level: int, last_level: int, cache: bool = False + ) -> List[OperationData]: raw_originations = [] # NOTE: TzKT may hit URL length limit with hundreds of originations in a single request. # NOTE: Chunk of 100 addresses seems like a reasonable choice - URL of ~3971 characters. 
@@ -380,6 +391,7 @@ async def get_originations(self, addresses: Set[str], offset: int, first_level: "select": ','.join(ORIGINATION_OPERATION_FIELDS), "status": "applied", }, + cache=cache, ) originations = [] @@ -390,7 +402,7 @@ async def get_originations(self, addresses: Set[str], offset: int, first_level: return originations async def get_transactions( - self, field: str, addresses: Set[str], offset: int, first_level: int, last_level: int + self, field: str, addresses: Set[str], offset: int, first_level: int, last_level: int, cache: bool = False ) -> List[OperationData]: raw_transactions = await self._proxy.http_request( 'get', @@ -404,6 +416,7 @@ async def get_transactions( "select": ','.join(TRANSACTION_OPERATION_FIELDS), "status": "applied", }, + cache=cache, ) transactions = [] for op in raw_transactions: @@ -412,7 +425,9 @@ async def get_transactions( transactions.append(self.convert_operation(op)) return transactions - async def get_big_maps(self, addresses: Set[str], paths: Set[str], offset: int, first_level: int, last_level: int) -> List[BigMapData]: + async def get_big_maps( + self, addresses: Set[str], paths: Set[str], offset: int, first_level: int, last_level: int, cache: bool = False + ) -> List[BigMapData]: raw_big_maps = await self._proxy.http_request( 'get', url=f'{self._url}/v1/bigmaps/updates', @@ -424,6 +439,7 @@ async def get_big_maps(self, addresses: Set[str], paths: Set[str], offset: int, "level.gt": first_level, "level.le": last_level, }, + cache=cache, ) big_maps = [] for bm in raw_big_maps: diff --git a/src/dipdup/dipdup.py b/src/dipdup/dipdup.py index e9066d528..1e4f61dbc 100644 --- a/src/dipdup/dipdup.py +++ b/src/dipdup/dipdup.py @@ -4,9 +4,8 @@ from contextlib import suppress from os.path import join from posix import listdir -from typing import Dict, List, cast +from typing import Dict, List, Optional, cast -from aiolimiter import AsyncLimiter from apscheduler.schedulers import SchedulerNotRunningError # type: ignore from genericpath 
import exists from tortoise import Tortoise @@ -34,10 +33,9 @@ from dipdup.datasources.bcd.datasource import BcdDatasource from dipdup.datasources.coinbase.datasource import CoinbaseDatasource from dipdup.datasources.datasource import IndexDatasource -from dipdup.datasources.proxy import DatasourceRequestProxy from dipdup.datasources.tzkt.datasource import TzktDatasource from dipdup.exceptions import ConfigurationError -from dipdup.hasura import configure_hasura +from dipdup.hasura import HasuraManager from dipdup.index import BigMapIndex, Index, OperationIndex from dipdup.models import BigMapData, IndexType, OperationData, State from dipdup.scheduler import add_job, create_scheduler @@ -186,8 +184,14 @@ async def run(self, reindex: bool, oneshot: bool) -> None: datasource_tasks = [] if oneshot else [asyncio.create_task(d.run()) for d in self._datasources.values()] worker_tasks = [] + hasura_manager: Optional[HasuraManager] if self._config.hasura: - worker_tasks.append(asyncio.create_task(configure_hasura(self._config))) + if not isinstance(self._config.database, PostgresDatabaseConfig): + raise RuntimeError + hasura_manager = HasuraManager(self._config.package, self._config.hasura, self._config.database) + worker_tasks.append(asyncio.create_task(hasura_manager.configure())) + else: + hasura_manager = None if self._config.jobs and not oneshot: for job_name, job_config in self._config.jobs.items(): @@ -203,6 +207,8 @@ async def run(self, reindex: bool, oneshot: bool) -> None: finally: self._logger.info('Closing datasource sessions') await asyncio.gather(*[d.close_session() for d in self._datasources.values()]) + if hasura_manager: + await hasura_manager.close_session() # FIXME: AttributeError: 'NoneType' object has no attribute 'call_soon_threadsafe' with suppress(AttributeError, SchedulerNotRunningError): self._scheduler.shutdown(wait=True) @@ -230,37 +236,20 @@ async def _create_datasources(self) -> None: if name in self._datasources: continue - cache = 
self._config.cache_enabled if datasource_config.cache is None else datasource_config.cache if isinstance(datasource_config, TzktDatasourceConfig): - proxy = DatasourceRequestProxy( - cache=cache, - retry_count=datasource_config.retry_count, - retry_sleep=datasource_config.retry_sleep, - ) datasource = TzktDatasource( url=datasource_config.url, - proxy=proxy, + http_config=datasource_config.http, ) elif isinstance(datasource_config, BcdDatasourceConfig): - proxy = DatasourceRequestProxy( - cache=cache, - retry_count=datasource_config.retry_count, - retry_sleep=datasource_config.retry_sleep, - ) datasource = BcdDatasource( url=datasource_config.url, network=datasource_config.network, - proxy=proxy, + http_config=datasource_config.http, ) elif isinstance(datasource_config, CoinbaseDatasourceConfig): - proxy = DatasourceRequestProxy( - cache=cache, - retry_count=datasource_config.retry_count, - retry_sleep=datasource_config.retry_sleep, - ratelimiter=AsyncLimiter(max_rate=10, time_period=1), - ) datasource = CoinbaseDatasource( - proxy=proxy, + http_config=datasource_config.http, ) else: raise NotImplementedError diff --git a/src/dipdup/hasura.py b/src/dipdup/hasura.py index 10de706a4..7098673b6 100644 --- a/src/dipdup/hasura.py +++ b/src/dipdup/hasura.py @@ -1,230 +1,486 @@ import asyncio import importlib -import json import logging from contextlib import suppress -from typing import Any, Dict, Iterator, List, Tuple, Type +from os import listdir +from os.path import dirname, join +from types import ModuleType +from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, Type -import aiohttp +import humps # type: ignore from aiohttp import ClientConnectorError, ClientOSError +from genericpath import exists +from pydantic.dataclasses import dataclass from tortoise import Model, fields from tortoise.transactions import get_connection -from dipdup.config import DipDupConfig, PostgresDatabaseConfig, pascal_to_snake +from dipdup.config import HasuraConfig, 
HTTPConfig, PostgresDatabaseConfig, pascal_to_snake +from dipdup.datasources.proxy import HTTPRequestProxy from dipdup.exceptions import ConfigurationError -from dipdup.utils import http_request -_logger = logging.getLogger(__name__) +@dataclass +class Field: + name: str + type: Optional[str] -class HasuraError(RuntimeError): - ... + def camelize(self) -> 'Field': + return Field( + name=humps.camelize(self.name), + type=self.type, + ) -def _is_model_class(obj) -> bool: +def _is_model_class(obj: Any) -> bool: """Is subclass of tortoise.Model, but not the base class""" return isinstance(obj, type) and issubclass(obj, Model) and obj != Model and not getattr(obj.Meta, 'abstract', False) -def _format_array_relationship( - related_name: str, - table: str, - column: str, - schema: str = 'public', -) -> Dict[str, Any]: - return { - "name": related_name, - "using": { - "foreign_key_constraint_on": { - "column": column, - "table": { - "schema": schema, - "name": table, - }, - }, - }, - } +def _iter_models(modules: Iterable[ModuleType]) -> Iterator[Tuple[str, Type[Model]]]: + """Iterate over built-in and project's models""" + for models in modules: + for attr in dir(models): + model = getattr(models, attr) + if _is_model_class(model): + app = 'int_models' if models.__name__ == 'dipdup.models' else 'models' + yield app, model -def _format_object_relationship(name: str, column: str) -> Dict[str, Any]: - return { - "name": name, - "using": { - "foreign_key_constraint_on": column, - }, - } +class HasuraError(RuntimeError): + ... 
-def _format_select_permissions() -> Dict[str, Any]: - return { - "role": "user", - "permission": { - "columns": "*", - "filter": {}, - "allow_aggregations": True, - }, - } +class HasuraManager: + def __init__( + self, package: str, hasura_config: HasuraConfig, database_config: PostgresDatabaseConfig, proxy: Optional[HTTPRequestProxy] = None + ) -> None: + if proxy is None: + proxy = HTTPRequestProxy(HTTPConfig(cache=False)) + self._logger = logging.getLogger(__name__) + self._package = package + self._hasura_config = hasura_config + self._database_config = database_config + self._proxy = proxy + async def configure(self, reset: bool = False) -> None: + """Generate Hasura metadata and apply to instance with credentials from `hasura` config section.""" -def _format_table(name: str, schema: str = 'public') -> Dict[str, Any]: - return { - "table": { - "schema": schema, - "name": name, - }, - "object_relationships": [], - "array_relationships": [], - "select_permissions": [], - } + self._logger.info('Configuring Hasura') + await self._healthcheck() + if reset: + await self._reset_metadata() -def _format_metadata(tables: List[Dict[str, Any]]) -> Dict[str, Any]: - return { - "version": 2, - "tables": tables, - } + metadata = await self._fetch_metadata() + self._logger.info('Generating metadata') -def _iter_models(*modules) -> Iterator[Tuple[str, Type[Model]]]: - for models in modules: - for attr in dir(models): - model = getattr(models, attr) - if _is_model_class(model): - app = 'int_models' if models.__name__ == 'dipdup.models' else 'models' - yield app, model + # NOTE: Hasura metadata updated in three steps, order matters: + # NOTE: 1. Generate and apply tables metadata. 
+ # FIXME: Existing select permissions are lost + source_tables_metadata = await self._generate_source_tables_metadata() + metadata['sources'][0]['tables'] = self._merge_metadata( + existing=metadata['sources'][0]['tables'], + generated=source_tables_metadata, + key=lambda m: m['table']['name'], + ) + await self._replace_metadata(metadata) + + # NOTE: 2. Apply camelcase and refresh metadata + if self._hasura_config.camel_case: + await self._apply_camelcase() + metadata = await self._fetch_metadata() + + # NOTE: 3. Generate and apply queries and rest endpoints + query_collections_metadata = await self._generate_query_collections_metadata() + query_names = [q['name'] for q in query_collections_metadata] + rest_endpoints_metadata = await self._generate_rest_endpoints_metadata(query_names) + + try: + metadata['query_collections'][0]['definition']['queries'] = self._merge_metadata( + # TODO: Separate collection? + existing=metadata['query_collections'][0]['definition']['queries'], + generated=query_collections_metadata, + key=lambda m: m['name'], + ) + except KeyError: + metadata['query_collections'] = [{"name": "allowed-queries", "definition": {"queries": query_collections_metadata}}] + metadata['rest_endpoints'] = self._merge_metadata( + existing=metadata.get('rest_endpoints', []), + generated=rest_endpoints_metadata, + key=lambda m: m['name'], + ) -async def generate_hasura_metadata(config: DipDupConfig, views: List[str]) -> Dict[str, Any]: - """Generate metadata based on dapp models. 
+ await self._replace_metadata(metadata) - Includes tables and their relations (but not entities created during execution of snippets from `sql` package directory) - """ - if not isinstance(config.database, PostgresDatabaseConfig): - raise RuntimeError - _logger.info('Generating Hasura metadata') - metadata_tables = {} - model_tables = {} + self._logger.info('Hasura instance has been configured') - int_models = importlib.import_module('dipdup.models') - models = importlib.import_module(f'{config.package}.models') + async def close_session(self) -> None: + await self._proxy.close_session() - for app, model in _iter_models(models, int_models): - table_name = model._meta.db_table or pascal_to_snake(model.__name__) # pylint: disable=protected-access - model_tables[f'{app}.{model.__name__}'] = table_name - metadata_tables[table_name] = _format_table( - name=table_name, - schema=config.database.schema_name, + async def _hasura_http_request(self, endpoint: str, json: Dict[str, Any]) -> Dict[str, Any]: + self._logger.debug('Sending `%s` request: %s', endpoint, json) + result = await self._proxy.http_request( + method='post', + cache=False, + url=f'{self._hasura_config.url}/v1/{endpoint}', + json=json, + headers=self._hasura_config.headers, ) + self._logger.debug('Response: %s', result) + if 'error' in result or 'errors' in result: + raise HasuraError('Can\'t configure Hasura instance', result) + return result - for view in views: - metadata_tables[view] = _format_table( - name=view, - schema=config.database.schema_name, - ) - metadata_tables[view]['select_permissions'].append( - _format_select_permissions(), + async def _healthcheck(self) -> None: + self._logger.info('Waiting for Hasura instance to be ready') + for _ in range(self._hasura_config.connection_timeout): + with suppress(ClientConnectorError, ClientOSError): + response = await self._proxy._session.get(f'{self._hasura_config.url}/healthz') + if response.status == 200: + break + await asyncio.sleep(1) + else: + 
raise HasuraError(f'Hasura instance not responding for {self._hasura_config.connection_timeout} seconds') + + async def _reset_metadata(self) -> None: + self._logger.info('Resetting metadata') + await self._hasura_http_request( + endpoint='metadata', + json={ + "type": "clear_metadata", + "args": {}, + }, ) - for app, model in _iter_models(models, int_models): - table_name = model_tables[f'{app}.{model.__name__}'] + async def _fetch_metadata(self) -> Dict[str, Any]: + self._logger.info('Fetching existing metadata') + return await self._hasura_http_request( + endpoint='metadata', + json={ + "type": "export_metadata", + "args": {}, + }, + ) - metadata_tables[table_name]['select_permissions'].append( - _format_select_permissions(), + async def _replace_metadata(self, metadata: Dict[str, Any]) -> None: + self._logger.info('Replacing metadata') + await self._hasura_http_request( + endpoint='query', + json={ + "type": "replace_metadata", + "args": metadata, + }, ) - for field in model._meta.fields_map.values(): - if isinstance(field, fields.relational.ForeignKeyFieldInstance): - if not isinstance(field.related_name, str): - raise HasuraError(f'`related_name` of `{field}` must be set') - related_table_name = model_tables[field.model_name] - metadata_tables[table_name]['object_relationships'].append( - _format_object_relationship( - name=field.model_field_name, - column=field.model_field_name + '_id', - ) + async def _get_views(self) -> List[str]: + return [ + row[0] + for row in ( + await get_connection(None).execute_query( + f"SELECT table_name FROM information_schema.views WHERE table_schema = '{self._database_config.schema_name}'" ) - metadata_tables[related_table_name]['array_relationships'].append( - _format_array_relationship( - related_name=field.related_name, - table=table_name, - column=field.model_field_name + '_id', - schema=config.database.schema_name, + )[1] + ] + + def _get_model_modules(self) -> Tuple[ModuleType, ModuleType]: + int_models = 
importlib.import_module('dipdup.models') + models = importlib.import_module(f'{self._package}.models') + return int_models, models + + async def _generate_source_tables_metadata(self) -> List[Dict[str, Any]]: + """Generate source tables metadata based on project models and views. + + Includes tables and their relations. + """ + + self._logger.info('Generating Hasura metadata') + views = await self._get_views() + + metadata_tables = {} + model_tables = {} + model_modules = self._get_model_modules() + + for app, model in _iter_models(model_modules): + table_name = model._meta.db_table or pascal_to_snake(model.__name__) + model_tables[f'{app}.{model.__name__}'] = table_name + metadata_tables[table_name] = self._format_table(table_name) + + for view in views: + metadata_tables[view] = self._format_table(view) + + for app, model in _iter_models(model_modules): + table_name = model_tables[f'{app}.{model.__name__}'] + + for field in model._meta.fields_map.values(): + if isinstance(field, fields.relational.ForeignKeyFieldInstance): + if not isinstance(field.related_name, str): + raise HasuraError(f'`related_name` of `{field}` must be set') + related_table_name = model_tables[field.model_name] + metadata_tables[table_name]['object_relationships'].append( + self._format_object_relationship( + name=field.model_field_name, + column=field.model_field_name + '_id', + ) + ) + metadata_tables[related_table_name]['array_relationships'].append( + self._format_array_relationship( + related_name=field.related_name, + table=table_name, + column=field.model_field_name + '_id', + ) ) + + return list(metadata_tables.values()) + + def _iterate_graphql_queries(self) -> Iterator[Tuple[str, str]]: + package = importlib.import_module(self._package) + package_path = dirname(package.__file__) + graphql_path = join(package_path, 'graphql') + if not exists(graphql_path): + return + for filename in sorted(listdir(graphql_path)): + if not filename.endswith('.graphql'): + continue + + with 
open(join(graphql_path, filename)) as file: + yield filename[:-8], file.read() + + async def _generate_query_collections_metadata(self) -> List[Dict[str, Any]]: + queries = [] + model_modules = self._get_model_modules() + for _, model in _iter_models(model_modules): + table_name = model._meta.db_table or pascal_to_snake(model.__name__) + + for field_name, field in model._meta.fields_map.items(): + if field.pk: + filter = field_name + break + else: + raise RuntimeError(f'Table `{table_name}` has no primary key. How is that possible?') + + table_name = f'{self._database_config.schema_name}_{table_name}' + fields = await self._get_fields(table_name) + queries.append(self._format_rest_query(table_name, table_name, filter, fields)) + + for query_name, query in self._iterate_graphql_queries(): + queries.append(dict(name=query_name, query=query)) + + return queries + + async def _generate_rest_endpoints_metadata(self, query_names: List[str]) -> List[Dict[str, Any]]: + rest_endpoints = [] + for query_name in query_names: + rest_endpoints.append(self._format_rest_endpoint(query_name)) + return rest_endpoints + + def _merge_metadata(self, existing: List[Dict[str, Any]], generated: List[Dict[str, Any]], key) -> List[Dict[str, Any]]: + existing_dict = {key(t): t for t in existing} + generated_dict = {key(t): t for t in generated} + return list({**existing_dict, **generated_dict}.values()) + + async def _get_fields(self, name: str = 'query_root') -> List[Field]: + query = ''' +query introspectionQuery($name: String!) 
{ + __type(name: $name) { + kind + name + fields { + name + description + type { + name + kind + ofType { + name + kind + } + } + } + } +} + '''.replace( + '\n', ' ' + ).replace( + ' ', '' + ) + result = await self._hasura_http_request( + endpoint='graphql', + json={ + 'query': query, + 'variables': {'name': name}, + }, + ) + try: + fields_json = result['data']['__type']['fields'] + except TypeError as e: + raise HasuraError(f'Unknown table `{name}`') from e + + fields = [] + for field_json in fields_json: + # NOTE: Exclude autogenerated aggregate and pk fields + ignore_postfixes = ('_aggregate', '_by_pk', 'Aggregate', 'ByPk') + if any(map(lambda postfix: field_json['name'].endswith(postfix), ignore_postfixes)): + continue + + # NOTE: Exclude relations. Not reliable enough but ok for now. + if (field_json['description'] or '').endswith('relationship'): + continue + + # TODO: More precise matching + try: + type_ = field_json['type']['ofType']['name'] + except TypeError: + type_ = field_json['type']['name'] + fields.append( + Field( + name=field_json['name'], + type=type_, ) + ) - return _format_metadata(tables=list(metadata_tables.values())) + return fields + async def _apply_camelcase(self) -> None: + """Convert table and column names to camelCase. 
-async def configure_hasura(config: DipDupConfig): - """Generate Hasura metadata and apply to instance with credentials from `hasura` config section.""" + Based on https://github.com/m-rgba/hasura-snake-to-camel + """ - if config.hasura is None: - raise ConfigurationError('`hasura` config section missing') - if not isinstance(config.database, PostgresDatabaseConfig): - raise RuntimeError + tables = await self._get_fields() - _logger.info('Configuring Hasura') - url = config.hasura.url.rstrip("/") - views = [ - row[0] - for row in ( - await get_connection(None).execute_query( - f"SELECT table_name FROM information_schema.views WHERE table_schema = '{config.database.schema_name}'" - ) - )[1] - ] + for table in tables: + decamelized_table = humps.decamelize(table.name) - hasura_metadata = await generate_hasura_metadata(config, views) + # NOTE: Skip tables from different schemas + if not decamelized_table.startswith(self._database_config.schema_name): + continue - async with aiohttp.ClientSession() as session: - _logger.info('Waiting for Hasura instance to be healthy') - for _ in range(60): - with suppress(ClientConnectorError, ClientOSError): - response = await session.get(f'{url}/healthz') - if response.status == 200: - break - await asyncio.sleep(1) - else: - raise HasuraError('Hasura instance not responding for 60 seconds') - - headers = {} - if config.hasura.admin_secret: - headers['X-Hasura-Admin-Secret'] = config.hasura.admin_secret - - _logger.info('Fetching existing metadata') - existing_hasura_metadata = await http_request( - session, - 'post', - url=f'{url}/v1/query', - data=json.dumps( - { - "type": "export_metadata", - "args": {}, + custom_root_fields = self._format_custom_root_fields(decamelized_table) + columns = await self._get_fields(decamelized_table) + custom_column_names = self._format_custom_column_names(columns) + args: Dict[str, Any] = { + 'table': { + # NOTE: Remove schema prefix from table name + 'name': 
decamelized_table.replace(self._database_config.schema_name, '')[1:], + 'schema': self._database_config.schema_name, }, - ), - headers=headers, - ) + 'source': self._hasura_config.source, + 'configuration': { + 'identifier': custom_root_fields['select_by_pk'], + 'custom_root_fields': custom_root_fields, + 'custom_column_names': custom_column_names, + }, + } - _logger.info('Merging existing metadata') - hasura_metadata_tables = [table['table'] for table in hasura_metadata['tables']] - for table in existing_hasura_metadata['tables']: - if table['table'] not in hasura_metadata_tables: - hasura_metadata['tables'].append(table) - - _logger.info('Sending replace metadata request') - result = await http_request( - session, - 'post', - url=f'{url}/v1/query', - data=json.dumps( - { - "type": "replace_metadata", - "args": hasura_metadata, + await self._hasura_http_request( + endpoint='metadata', + json={ + 'type': 'pg_set_table_customization', + 'args': args, }, - ), - headers=headers, - ) - if result.get('message') != 'success': - raise HasuraError('Can\'t configure Hasura instance', result) + ) - _logger.info('Hasura instance has been configured') + def _format_rest_query(self, name: str, table: str, filter: str, fields: List[Field]) -> Dict[str, Any]: + if not table.endswith('_by_pk'): + table += '_by_pk' + name = humps.camelize(name) if self._hasura_config.camel_case else name + filter = humps.camelize(filter) if self._hasura_config.camel_case else filter + table = humps.camelize(table) if self._hasura_config.camel_case else table + fields = [f.camelize() for f in fields] if self._hasura_config.camel_case else fields + + try: + filter_field = next(f for f in fields if f.name == filter) + except StopIteration as e: + raise ConfigurationError(f'Table `{table}` has no column `{filter}`') from e + + query_arg = f'${filter_field.name}: {filter_field.type}!' 
+ query_filter = filter_field.name + ': $' + filter_field.name + query_fields = ' '.join(f.name for f in fields) + return { + 'name': name, + 'query': 'query ' + name + ' (' + query_arg + ') {' + table + '(' + query_filter + ') {' + query_fields + '}}', + } + + def _format_rest_endpoint(self, query_name: str) -> Dict[str, Any]: + return { + "definition": { + "query": { + "collection_name": "allowed-queries", + "query_name": query_name, + }, + }, + "url": query_name, + "methods": ["GET", "POST"], + "name": query_name, + "comment": None, + } + + def _format_custom_root_fields(self, table: str) -> Dict[str, Any]: + # NOTE: Do not change original Hasura format, REST endpoints generation will be broken otherwise + return { + 'select': humps.camelize(table), + 'select_by_pk': humps.camelize(f'{table}_by_pk'), + 'select_aggregate': humps.camelize(f'{table}_aggregate'), + 'insert': humps.camelize(f'insert_{table}'), + 'insert_one': humps.camelize(f'insert_{table}_one'), + 'update': humps.camelize(f'update_{table}'), + 'update_by_pk': humps.camelize(f'update_{table}_by_pk'), + 'delete': humps.camelize(f'delete_{table}'), + 'delete_by_pk': humps.camelize(f'delete_{table}_by_pk'), + } + + def _format_custom_column_names(self, fields: List[Field]) -> Dict[str, Any]: + return {humps.decamelize(f.name): humps.camelize(f.name) for f in fields} + + def _format_table(self, name: str) -> Dict[str, Any]: + return { + "table": { + "schema": self._database_config.schema_name, + "name": name, + }, + "object_relationships": [], + "array_relationships": [], + "select_permissions": [ + self._format_select_permissions(), + ], + } + + def _format_array_relationship( + self, + related_name: str, + table: str, + column: str, + ) -> Dict[str, Any]: + return { + "name": related_name if not self._hasura_config.camel_case else humps.camelize(related_name), + "using": { + "foreign_key_constraint_on": { + "column": column, + "table": { + "schema": self._database_config.schema_name, + "name": table, 
+ }, + }, + }, + } + + def _format_object_relationship(self, name: str, column: str) -> Dict[str, Any]: + return { + "name": name if not self._hasura_config.camel_case else humps.camelize(name), + "using": { + "foreign_key_constraint_on": column, + }, + } + + def _format_select_permissions(self) -> Dict[str, Any]: + return { + "role": "user", + "permission": { + "columns": "*", + "filter": {}, + "allow_aggregations": self._hasura_config.allow_aggregations, + "limit": self._hasura_config.select_limit, + }, + } diff --git a/src/dipdup/index.py b/src/dipdup/index.py index 90c80fc28..e7e67f3b1 100644 --- a/src/dipdup/index.py +++ b/src/dipdup/index.py @@ -47,7 +47,7 @@ async def process(self) -> None: state = await self.get_state() if self._config.last_block: last_level = self._config.last_block - await self._synchronize(last_level) + await self._synchronize(last_level, cache=True) elif self._datasource.sync_level is None: self._logger.info('Datasource is not active, sync to the latest block') last_level = (await self._datasource.get_latest_block())['level'] @@ -60,7 +60,7 @@ async def process(self) -> None: await self._process_queue() @abstractmethod - async def _synchronize(self, last_level: int) -> None: + async def _synchronize(self, last_level: int, cache: bool = False) -> None: ... 
@abstractmethod @@ -111,7 +111,7 @@ async def _process_queue(self) -> None: level, operations = self._queue.popleft() await self._process_level_operations(level, operations) - async def _synchronize(self, last_level: int) -> None: + async def _synchronize(self, last_level: int, cache: bool = False) -> None: """Fetch operations via Fetcher and pass to message callback""" state = await self.get_state() first_level = state.level @@ -131,6 +131,7 @@ async def _synchronize(self, last_level: int) -> None: last_level=last_level, transaction_addresses=transaction_addresses, origination_addresses=origination_addresses, + cache=cache, ) async for level, operations in fetcher.fetch_operations_by_level(): @@ -346,7 +347,7 @@ async def _process_queue(self): level, big_maps = self._queue.popleft() await self._process_level_big_maps(level, big_maps) - async def _synchronize(self, last_level: int) -> None: + async def _synchronize(self, last_level: int, cache: bool = False) -> None: """Fetch operations via Fetcher and pass to message callback""" state = await self.get_state() first_level = state.level @@ -366,6 +367,7 @@ async def _synchronize(self, last_level: int) -> None: last_level=last_level, big_map_addresses=big_map_addresses, big_map_paths=big_map_paths, + cache=cache, ) async for level, big_maps in fetcher.fetch_big_maps_by_level(): diff --git a/src/dipdup/utils.py b/src/dipdup/utils.py index 2ebe330fe..b059d08ec 100644 --- a/src/dipdup/utils.py +++ b/src/dipdup/utils.py @@ -29,6 +29,7 @@ async def slowdown(seconds: int): await asyncio.sleep(seconds - time_spent) +# NOTE: These two helpers are not the same as humps.camelize/decamelize as could be used with Python module paths def snake_to_pascal(value: str) -> str: """method_name -> MethodName""" return ''.join(map(lambda x: x[0].upper() + x[1:], value.replace('.', '_').split('_'))) @@ -106,7 +107,10 @@ async def http_request(session: aiohttp.ClientSession, method: str, **kwargs): **kwargs.pop('headers', {}), 
'User-Agent': f'dipdup/{__version__}', } - request_string = kwargs['url'] + '?' + '&'.join([f'{key}={value}' for key, value in kwargs.get('params', {}).items()]) + url = kwargs['url'] + params = kwargs.get('params', {}) + params_string = '&'.join([f'{k}={v}' for k, v in params.items()]) + request_string = f'{url}?{params_string}'.rstrip('?') _logger.debug('Calling `%s`', request_string) async with getattr(session, method)( skip_auto_headers={'User-Agent'}, diff --git a/tests/integration_tests/hasura/empty.json b/tests/integration_tests/hasura/empty.json new file mode 100644 index 000000000..766abc0a6 --- /dev/null +++ b/tests/integration_tests/hasura/empty.json @@ -0,0 +1,25 @@ +{ + "version": 3, + "sources": [ + { + "name": "default", + "kind": "postgres", + "tables": [], + "configuration": { + "connection_info": { + "use_prepared_statements": true, + "database_url": { + "from_env": "HASURA_GRAPHQL_DATABASE_URL" + }, + "isolation_level": "read-committed", + "pool_settings": { + "connection_lifetime": 600, + "retries": 1, + "idle_timeout": 180, + "max_connections": 50 + } + } + } + } + ] +} \ No newline at end of file diff --git a/tests/integration_tests/hasura/query_dipdup_state.json b/tests/integration_tests/hasura/query_dipdup_state.json new file mode 100644 index 000000000..814a63ec6 --- /dev/null +++ b/tests/integration_tests/hasura/query_dipdup_state.json @@ -0,0 +1,70 @@ +{ + "data": { + "__type": { + "kind": "OBJECT", + "name": "hic_et_nunc_dipdup_state", + "fields": [ + { + "name": "hash", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": "id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "Int", + "kind": "SCALAR" + } + } + }, + { + "name": "index_name", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": 
"index_type", + "description": "operation: operation\\nbig_map: big_map\\nblock: block\\nschema: schema", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": "level", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "Int", + "kind": "SCALAR" + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/tests/integration_tests/hasura/query_holder.json b/tests/integration_tests/hasura/query_holder.json new file mode 100644 index 000000000..97a2b36cd --- /dev/null +++ b/tests/integration_tests/hasura/query_holder.json @@ -0,0 +1,118 @@ +{ + "data": { + "__type": { + "kind": "OBJECT", + "name": "hic_et_nunc_holder", + "fields": [ + { + "name": "address", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": "purchases", + "description": "An array relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "purchases_aggregate", + "description": "An aggregate relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_trade_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "sales", + "description": "An array relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "sales_aggregate", + "description": "An aggregate relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_trade_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "swaps", + "description": "An array relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "swaps_aggregate", + "description": "An aggregate relationship", + "type": { + "name": 
null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_swap_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "tokens", + "description": "An array relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "tokens_aggregate", + "description": "An aggregate relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_token_aggregate", + "kind": "OBJECT" + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/tests/integration_tests/hasura/query_root.json b/tests/integration_tests/hasura/query_root.json new file mode 100644 index 000000000..be9291817 --- /dev/null +++ b/tests/integration_tests/hasura/query_root.json @@ -0,0 +1,175 @@ +{ + "data": { + "__type": { + "kind": "OBJECT", + "name": "query_root", + "fields": [ + { + "name": "hic_et_nunc_dipdup_state", + "description": "fetch data from the table: \"hic_et_nunc.dipdup_state\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "hic_et_nunc_dipdup_state_aggregate", + "description": "fetch aggregated fields from the table: \"hic_et_nunc.dipdup_state\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_dipdup_state_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "hic_et_nunc_dipdup_state_by_pk", + "description": "fetch data from the table: \"hic_et_nunc.dipdup_state\" using primary key columns", + "type": { + "name": "hic_et_nunc_dipdup_state", + "kind": "OBJECT", + "ofType": null + } + }, + { + "name": "hic_et_nunc_holder", + "description": "fetch data from the table: \"hic_et_nunc.holder\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "hic_et_nunc_holder_aggregate", + "description": "fetch aggregated fields from the table: \"hic_et_nunc.holder\"", + "type": { + 
"name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_holder_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "hic_et_nunc_holder_by_pk", + "description": "fetch data from the table: \"hic_et_nunc.holder\" using primary key columns", + "type": { + "name": "hic_et_nunc_holder", + "kind": "OBJECT", + "ofType": null + } + }, + { + "name": "hic_et_nunc_swap", + "description": "fetch data from the table: \"hic_et_nunc.swap\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "hic_et_nunc_swap_aggregate", + "description": "fetch aggregated fields from the table: \"hic_et_nunc.swap\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_swap_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "hic_et_nunc_swap_by_pk", + "description": "fetch data from the table: \"hic_et_nunc.swap\" using primary key columns", + "type": { + "name": "hic_et_nunc_swap", + "kind": "OBJECT", + "ofType": null + } + }, + { + "name": "hic_et_nunc_token", + "description": "fetch data from the table: \"hic_et_nunc.token\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "hic_et_nunc_token_aggregate", + "description": "fetch aggregated fields from the table: \"hic_et_nunc.token\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_token_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "hic_et_nunc_token_by_pk", + "description": "fetch data from the table: \"hic_et_nunc.token\" using primary key columns", + "type": { + "name": "hic_et_nunc_token", + "kind": "OBJECT", + "ofType": null + } + }, + { + "name": "hic_et_nunc_trade", + "description": "fetch data from the table: \"hic_et_nunc.trade\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": 
"hic_et_nunc_trade_aggregate", + "description": "fetch aggregated fields from the table: \"hic_et_nunc.trade\"", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_trade_aggregate", + "kind": "OBJECT" + } + } + }, + { + "name": "hic_et_nunc_trade_by_pk", + "description": "fetch data from the table: \"hic_et_nunc.trade\" using primary key columns", + "type": { + "name": "hic_et_nunc_trade", + "kind": "OBJECT", + "ofType": null + } + } + ] + } + } +} \ No newline at end of file diff --git a/tests/integration_tests/hasura/query_swap.json b/tests/integration_tests/hasura/query_swap.json new file mode 100644 index 000000000..33e636133 --- /dev/null +++ b/tests/integration_tests/hasura/query_swap.json @@ -0,0 +1,142 @@ +{ + "data": { + "__type": { + "kind": "OBJECT", + "name": "hic_et_nunc_swap", + "fields": [ + { + "name": "amount", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "amount_left", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "creator", + "description": "An object relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_holder", + "kind": "OBJECT" + } + } + }, + { + "name": "creator_id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": "id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "level", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "price", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + 
"kind": "SCALAR" + } + } + }, + { + "name": "status", + "description": "ACTIVE: 0\\nFINISHED: 1\\nCANCELED: 2", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "smallint", + "kind": "SCALAR" + } + } + }, + { + "name": "timestamp", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "timestamptz", + "kind": "SCALAR" + } + } + }, + { + "name": "trades", + "description": "An array relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": null, + "kind": "LIST" + } + } + }, + { + "name": "trades_aggregate", + "description": "An aggregate relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_trade_aggregate", + "kind": "OBJECT" + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/tests/integration_tests/hasura/query_token.json b/tests/integration_tests/hasura/query_token.json new file mode 100644 index 000000000..0aad53ba0 --- /dev/null +++ b/tests/integration_tests/hasura/query_token.json @@ -0,0 +1,82 @@ +{ + "data": { + "__type": { + "kind": "OBJECT", + "name": "hic_et_nunc_token", + "fields": [ + { + "name": "creator", + "description": "An object relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_holder", + "kind": "OBJECT" + } + } + }, + { + "name": "creator_id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": "id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "level", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "supply", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": 
"SCALAR" + } + } + }, + { + "name": "timestamp", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "timestamptz", + "kind": "SCALAR" + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/tests/integration_tests/hasura/query_trade.json b/tests/integration_tests/hasura/query_trade.json new file mode 100644 index 000000000..14e3cfad7 --- /dev/null +++ b/tests/integration_tests/hasura/query_trade.json @@ -0,0 +1,130 @@ +{ + "data": { + "__type": { + "kind": "OBJECT", + "name": "hic_et_nunc_trade", + "fields": [ + { + "name": "amount", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "buyer", + "description": "An object relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_holder", + "kind": "OBJECT" + } + } + }, + { + "name": "buyer_id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": "id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "level", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "seller", + "description": "An object relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_holder", + "kind": "OBJECT" + } + } + }, + { + "name": "seller_id", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "String", + "kind": "SCALAR" + } + } + }, + { + "name": "swap", + "description": "An object relationship", + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "hic_et_nunc_swap", + "kind": "OBJECT" + } + } + }, + { + "name": "swap_id", + 
"description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "bigint", + "kind": "SCALAR" + } + } + }, + { + "name": "timestamp", + "description": null, + "type": { + "name": null, + "kind": "NON_NULL", + "ofType": { + "name": "timestamptz", + "kind": "SCALAR" + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/tests/integration_tests/hasura/replace_metadata_request.json b/tests/integration_tests/hasura/replace_metadata_request.json new file mode 100644 index 000000000..e20726037 --- /dev/null +++ b/tests/integration_tests/hasura/replace_metadata_request.json @@ -0,0 +1,344 @@ +{ + "type": "replace_metadata", + "args": { + "version": 3, + "sources": [ + { + "name": "default", + "kind": "postgres", + "tables": [ + { + "table": { + "schema": "hic_et_nunc", + "name": "dipdup_state" + }, + "object_relationships": [], + "array_relationships": [], + "select_permissions": [ + { + "role": "user", + "permission": { + "columns": "*", + "filter": {}, + "allow_aggregations": true, + "limit": 100 + } + } + ] + }, + { + "table": { + "schema": "hic_et_nunc", + "name": "holder" + }, + "object_relationships": [], + "array_relationships": [ + { + "name": "swaps", + "using": { + "foreign_key_constraint_on": { + "column": "creator_id", + "table": { + "schema": "hic_et_nunc", + "name": "swap" + } + } + } + }, + { + "name": "tokens", + "using": { + "foreign_key_constraint_on": { + "column": "creator_id", + "table": { + "schema": "hic_et_nunc", + "name": "token" + } + } + } + }, + { + "name": "sales", + "using": { + "foreign_key_constraint_on": { + "column": "seller_id", + "table": { + "schema": "hic_et_nunc", + "name": "trade" + } + } + } + }, + { + "name": "purchases", + "using": { + "foreign_key_constraint_on": { + "column": "buyer_id", + "table": { + "schema": "hic_et_nunc", + "name": "trade" + } + } + } + } + ], + "select_permissions": [ + { + "role": "user", + "permission": { + "columns": "*", + "filter": {}, + "allow_aggregations": 
true, + "limit": 100 + } + } + ] + }, + { + "table": { + "schema": "hic_et_nunc", + "name": "swap" + }, + "object_relationships": [ + { + "name": "creator", + "using": { + "foreign_key_constraint_on": "creator_id" + } + } + ], + "array_relationships": [ + { + "name": "trades", + "using": { + "foreign_key_constraint_on": { + "column": "swap_id", + "table": { + "schema": "hic_et_nunc", + "name": "trade" + } + } + } + } + ], + "select_permissions": [ + { + "role": "user", + "permission": { + "columns": "*", + "filter": {}, + "allow_aggregations": true, + "limit": 100 + } + } + ] + }, + { + "table": { + "schema": "hic_et_nunc", + "name": "token" + }, + "object_relationships": [ + { + "name": "creator", + "using": { + "foreign_key_constraint_on": "creator_id" + } + } + ], + "array_relationships": [], + "select_permissions": [ + { + "role": "user", + "permission": { + "columns": "*", + "filter": {}, + "allow_aggregations": true, + "limit": 100 + } + } + ] + }, + { + "table": { + "schema": "hic_et_nunc", + "name": "trade" + }, + "object_relationships": [ + { + "name": "swap", + "using": { + "foreign_key_constraint_on": "swap_id" + } + }, + { + "name": "seller", + "using": { + "foreign_key_constraint_on": "seller_id" + } + }, + { + "name": "buyer", + "using": { + "foreign_key_constraint_on": "buyer_id" + } + } + ], + "array_relationships": [], + "select_permissions": [ + { + "role": "user", + "permission": { + "columns": "*", + "filter": {}, + "allow_aggregations": true, + "limit": 100 + } + } + ] + } + ], + "configuration": { + "connection_info": { + "use_prepared_statements": true, + "database_url": { + "from_env": "HASURA_GRAPHQL_DATABASE_URL" + }, + "isolation_level": "read-committed", + "pool_settings": { + "connection_lifetime": 600, + "retries": 1, + "idle_timeout": 180, + "max_connections": 50 + } + } + } + } + ], + "query_collections": [ + { + "name": "allowed-queries", + "definition": { + "queries": [ + { + "name": "hic_et_nunc_dipdup_state", + "query": "query 
hic_et_nunc_dipdup_state ($id: Int!) {hic_et_nunc_dipdup_state_by_pk(id: $id) {hash id index_name index_type level}}" + }, + { + "name": "hic_et_nunc_holder", + "query": "query hic_et_nunc_holder ($address: String!) {hic_et_nunc_holder_by_pk(address: $address) {address}}" + }, + { + "name": "hic_et_nunc_swap", + "query": "query hic_et_nunc_swap ($id: bigint!) {hic_et_nunc_swap_by_pk(id: $id) {amount amount_left creator_id id level price status timestamp}}" + }, + { + "name": "hic_et_nunc_token", + "query": "query hic_et_nunc_token ($id: bigint!) {hic_et_nunc_token_by_pk(id: $id) {creator_id id level supply timestamp}}" + }, + { + "name": "hic_et_nunc_trade", + "query": "query hic_et_nunc_trade ($id: bigint!) {hic_et_nunc_trade_by_pk(id: $id) {amount buyer_id id level seller_id swap_id timestamp}}" + }, + { + "name": "token", + "query": "query tokens($id: bigint) {\n hicEtNuncToken(where: {id: {_eq: $id}}) {\n creatorId\n id\n level\n supply\n timestamp\n }\n}\n" + } + ] + } + } + ], + "rest_endpoints": [ + { + "definition": { + "query": { + "collection_name": "allowed-queries", + "query_name": "hic_et_nunc_dipdup_state" + } + }, + "url": "hic_et_nunc_dipdup_state", + "methods": [ + "GET", + "POST" + ], + "name": "hic_et_nunc_dipdup_state", + "comment": null + }, + { + "definition": { + "query": { + "collection_name": "allowed-queries", + "query_name": "hic_et_nunc_holder" + } + }, + "url": "hic_et_nunc_holder", + "methods": [ + "GET", + "POST" + ], + "name": "hic_et_nunc_holder", + "comment": null + }, + { + "definition": { + "query": { + "collection_name": "allowed-queries", + "query_name": "hic_et_nunc_swap" + } + }, + "url": "hic_et_nunc_swap", + "methods": [ + "GET", + "POST" + ], + "name": "hic_et_nunc_swap", + "comment": null + }, + { + "definition": { + "query": { + "collection_name": "allowed-queries", + "query_name": "hic_et_nunc_token" + } + }, + "url": "hic_et_nunc_token", + "methods": [ + "GET", + "POST" + ], + "name": "hic_et_nunc_token", + "comment": 
null + }, + { + "definition": { + "query": { + "collection_name": "allowed-queries", + "query_name": "hic_et_nunc_trade" + } + }, + "url": "hic_et_nunc_trade", + "methods": [ + "GET", + "POST" + ], + "name": "hic_et_nunc_trade", + "comment": null + }, + { + "definition": { + "query": { + "collection_name": "allowed-queries", + "query_name": "token" + } + }, + "url": "token", + "methods": [ + "GET", + "POST" + ], + "name": "token", + "comment": null + } + ] + } +} \ No newline at end of file diff --git a/tests/integration_tests/test_hasura.py b/tests/integration_tests/test_hasura.py new file mode 100644 index 000000000..49d0ade3b --- /dev/null +++ b/tests/integration_tests/test_hasura.py @@ -0,0 +1,57 @@ +import json +from os.path import dirname, join +from unittest import IsolatedAsyncioTestCase +from unittest.mock import AsyncMock, Mock + +from tortoise import Tortoise + +from dipdup.config import HasuraConfig, PostgresDatabaseConfig +from dipdup.hasura import HasuraManager +from dipdup.utils import tortoise_wrapper + + +class HasuraTest(IsolatedAsyncioTestCase): + async def test_configure_hasura(self): + + with open(join(dirname(__file__), 'hasura', 'empty.json')) as file: + empty_metadata = json.load(file) + with open(join(dirname(__file__), 'hasura', 'query_dipdup_state.json')) as file: + query_dipdup_state = json.load(file) + with open(join(dirname(__file__), 'hasura', 'query_holder.json')) as file: + query_holder = json.load(file) + with open(join(dirname(__file__), 'hasura', 'query_swap.json')) as file: + query_swap = json.load(file) + with open(join(dirname(__file__), 'hasura', 'query_token.json')) as file: + query_token = json.load(file) + with open(join(dirname(__file__), 'hasura', 'query_trade.json')) as file: + query_trade = json.load(file) + with open(join(dirname(__file__), 'hasura', 'replace_metadata_request.json')) as file: + replace_metadata_request = json.load(file) + + async with tortoise_wrapper('sqlite://:memory:', 'demo_hic_et_nunc.models'): + 
await Tortoise.generate_schemas() + + database_config = PostgresDatabaseConfig(kind='postgres', host='', port=0, user='', database='', schema_name='hic_et_nunc') + hasura_config = HasuraConfig('http://localhost') + + hasura_manager = HasuraManager('demo_hic_et_nunc', hasura_config, database_config) + hasura_manager._get_views = AsyncMock(return_value=[]) + await hasura_manager._proxy._session.close() + hasura_manager._proxy = Mock() + hasura_manager._proxy.http_request = AsyncMock( + side_effect=[ + empty_metadata, + {}, + query_dipdup_state, + query_holder, + query_swap, + query_token, + query_trade, + {}, + ] + ) + hasura_manager._healthcheck = AsyncMock() + + await hasura_manager.configure() + + self.assertEqual(hasura_manager._proxy.http_request.call_args[-1]['json'], replace_metadata_request) diff --git a/tests/test_dipdup/hasura-metadata.json b/tests/test_dipdup/hasura-metadata.json deleted file mode 100644 index fda0b893e..000000000 --- a/tests/test_dipdup/hasura-metadata.json +++ /dev/null @@ -1,192 +0,0 @@ -{ - "version": 2, - "tables": [ - { - "table": { - "schema": "public", - "name": "holder" - }, - "object_relationships": [], - "array_relationships": [ - { - "name": "swaps", - "using": { - "foreign_key_constraint_on": { - "column": "creator_id", - "table": { - "schema": "public", - "name": "swap" - } - } - } - }, - { - "name": "tokens", - "using": { - "foreign_key_constraint_on": { - "column": "creator_id", - "table": { - "schema": "public", - "name": "token" - } - } - } - }, - { - "name": "sales", - "using": { - "foreign_key_constraint_on": { - "column": "seller_id", - "table": { - "schema": "public", - "name": "trade" - } - } - } - }, - { - "name": "purchases", - "using": { - "foreign_key_constraint_on": { - "column": "buyer_id", - "table": { - "schema": "public", - "name": "trade" - } - } - } - } - ], - "select_permissions": [ - { - "role": "user", - "permission": { - "columns": [ - "address" - ], - "filter": {}, - "allow_aggregations": true - } - } 
- ] - }, - { - "table": { - "schema": "public", - "name": "swap" - }, - "object_relationships": [ - { - "name": "creator", - "using": { - "foreign_key_constraint_on": "creator_id" - } - } - ], - "array_relationships": [ - { - "name": "trades", - "using": { - "foreign_key_constraint_on": { - "column": "swap_id", - "table": { - "schema": "public", - "name": "trade" - } - } - } - } - ], - "select_permissions": [ - { - "role": "user", - "permission": { - "columns": [ - "amount", - "amount_left", - "id", - "level", - "price", - "status", - "timestamp" - ], - "filter": {}, - "allow_aggregations": true - } - } - ] - }, - { - "table": { - "schema": "public", - "name": "token" - }, - "object_relationships": [ - { - "name": "creator", - "using": { - "foreign_key_constraint_on": "creator_id" - } - } - ], - "array_relationships": [], - "select_permissions": [ - { - "role": "user", - "permission": { - "columns": [ - "id", - "level", - "supply", - "timestamp" - ], - "filter": {}, - "allow_aggregations": true - } - } - ] - }, - { - "table": { - "schema": "public", - "name": "trade" - }, - "object_relationships": [ - { - "name": "swap", - "using": { - "foreign_key_constraint_on": "swap_id" - } - }, - { - "name": "seller", - "using": { - "foreign_key_constraint_on": "seller_id" - } - }, - { - "name": "buyer", - "using": { - "foreign_key_constraint_on": "buyer_id" - } - } - ], - "array_relationships": [], - "select_permissions": [ - { - "role": "user", - "permission": { - "columns": [ - "amount", - "id", - "level", - "timestamp" - ], - "filter": {}, - "allow_aggregations": true - } - } - ] - } - ] -} \ No newline at end of file diff --git a/tests/test_dipdup/test_hasura.py b/tests/test_dipdup/test_hasura.py deleted file mode 100644 index ef508ece6..000000000 --- a/tests/test_dipdup/test_hasura.py +++ /dev/null @@ -1,21 +0,0 @@ -import json -from os.path import dirname, join -from unittest import IsolatedAsyncioTestCase, skip - -from dipdup.config import DipDupConfig -from 
dipdup.hasura import generate_hasura_metadata - - -class ConfigTest(IsolatedAsyncioTestCase): - maxDiff = None - - async def asyncSetUp(self): - self.path = join(dirname(__file__), 'dipdup.yml') - - @skip('FIXME: Foreign key fields skipped, not reproducing on single test run') - async def test_generate_hasura_metadata(self): - with open(join(dirname(__file__), 'hasura-metadata.json')) as file: - expected_metadata = json.load(file) - config = DipDupConfig.load(self.path) - metadata = await generate_hasura_metadata(config) - self.assertDictEqual(expected_metadata, metadata)