diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 441c7e91b5..20c25f2ce4 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -23,7 +23,7 @@ ENV LLVM_SYS_170_PREFIX=/usr/lib/llvm-17 ENV TABLEGEN_170_PREFIX=/usr/lib/llvm-17 # To allow independent workflow of the container, the rust-toolchain is explicitely given. -RUN echo "1.74.0" > rust_toolchain_version +RUN echo "1.76.0" > rust_toolchain_version # Install cargo-binstall RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash @@ -60,10 +60,11 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm64" ] ; then \ rustup target add x86_64-fortanix-unknown-sgx --toolchain nightly; \ fi +ARG DOJO_VERSION=stable RUN curl -L https://install.dojoengine.org | bash RUN curl --proto '=https' --tlsv1.2 -sSf https://docs.swmansion.com/scarb/install.sh | bash ENV PATH=${PATH}:/root/.dojo/bin -RUN dojoup +RUN dojoup -v $DOJO_VERSION RUN chown -R root:root /usr/local/cargo RUN chmod -R 700 /usr/local/cargo diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 8c85ce685b..00beccd3b5 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,7 +2,7 @@ // https://github.com/microsoft/vscode-dev-containers/tree/v0.245.2/containers/rust { "name": "Rust", - "image": "ghcr.io/dojoengine/dojo-dev:5d61184", + "image": "ghcr.io/dojoengine/dojo-dev:3153a80", "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 7562d587c0..1812bd6a9b 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -14,7 +14,7 @@ jobs: build: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:5d61184 + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - run: git config --global --add safe.directory "$GITHUB_WORKSPACE" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d9414adf04..66f013059b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,22 +8,22 @@ on: env: CARGO_TERM_COLOR: always - RUST_VERSION: 1.74.0 + RUST_VERSION: 1.76.0 jobs: test: runs-on: ubuntu-latest-16-cores container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 - - run: | + - run: | cargo llvm-cov nextest --no-report --all-features --workspace --exclude katana --build-jobs 10 cargo llvm-cov nextest --no-report -p katana cargo llvm-cov nextest --no-report -p katana --no-default-features --features sir cargo llvm-cov report --lcov --output-path lcov.info - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 with: token: ${{ secrets.CODECOV_TOKEN }} files: lcov.info @@ -31,7 +31,7 @@ jobs: ensure-wasm: runs-on: ubuntu-latest container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -43,12 +43,12 @@ jobs: - uses: actions/checkout@v3 - uses: actions-rs/toolchain@v1 with: - toolchain: ${{ env.RUST_VERSION }} + toolchain: ${{ env.rust_version }} target: x86_64-pc-windows-msvc - - uses: Swatinem/rust-cache@v2 + - uses: swatinem/rust-cache@v2 - uses: arduino/setup-protoc@v2 with: - repo-token: ${{ secrets.GITHUB_TOKEN }} + repo-token: ${{ secrets.github_token }} - run: cargo build --target x86_64-pc-windows-msvc 
--bins # cairofmt: @@ -66,7 +66,7 @@ jobs: dojo-core-test: runs-on: ubuntu-latest container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -75,7 +75,7 @@ jobs: dojo-spawn-and-move-example-test: runs-on: ubuntu-latest container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -84,7 +84,7 @@ jobs: dojo-world-bindings-check: runs-on: ubuntu-latest container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -93,7 +93,7 @@ jobs: clippy: runs-on: ubuntu-latest container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -102,7 +102,7 @@ jobs: fmt: runs-on: ubuntu-latest container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -111,7 +111,7 @@ jobs: docs: runs-on: ubuntu-latest container: - image: nondeterministickari/dojo-dev:cc6554da + image: ghcr.io/dojoengine/dojo-dev:3153a80 steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 diff --git a/.github/workflows/devcontainer.yml b/.github/workflows/devcontainer.yml index 8ab6643801..e7537194f6 100644 --- a/.github/workflows/devcontainer.yml +++ b/.github/workflows/devcontainer.yml @@ -1,11 +1,19 @@ name: devcontainer on: + workflow_dispatch: + inputs: + docker_tag: + description: Descriptive name of the devcontainer for the Docker tag + required: true + type: string push: paths: - ".devcontainer/**" - ".github/workflows/devcontainer.yml" - "!.devcontainer/devcontainer.json" + release: + types: [created] jobs: build-and-push: @@ -43,6 +51,11 @@ jobs: SHORT_SHA=$(echo "${{ github.sha }}" | cut -c 1-7) echo "DOCKER_TAG=$SHORT_SHA" >> $GITHUB_ENV + - name: Set Docker tag for workflow_dispatch event + if: github.event_name == 'workflow_dispatch' + run: | + echo "DOCKER_TAG=${{ inputs.docker_tag }}" >> $GITHUB_ENV + - name: Set outputs id: release_info run: | @@ -56,6 +69,7 @@ jobs: tags: ghcr.io/${{ github.repository }}-dev:latest,ghcr.io/${{ github.repository }}-dev:${{ env.DOCKER_TAG }} build-args: | VARIANT=bookworm + DOJO_VERSION=${{ github.event_name == 'release' && github.event.release.tag_name || 'stable' }} platforms: linux/amd64,linux/arm64 cache-from: type=registry,ref=ghcr.io/${{ github.repository }}-dev:latest @@ -85,8 +99,8 @@ jobs: with: # We have to use a PAT in order to trigger ci token: ${{ secrets.CREATE_PR_TOKEN }} - title: "Update devcontainer image hash: ${{ needs.build-and-push.outputs.tag_name }}" - commit-message: "Update devcontainer image hash: ${{ needs.build-and-push.outputs.tag_name }}" + title: "Update devcontainer image: ${{ needs.build-and-push.outputs.tag_name }}" + commit-message: "Update devcontainer image: ${{ needs.build-and-push.outputs.tag_name }}" branch: bump-devcontainer base: main delete-branch: true diff --git a/.github/workflows/release-dispatch.yml b/.github/workflows/release-dispatch.yml index f060c096ae..58f0991ba5 100644 --- a/.github/workflows/release-dispatch.yml +++ b/.github/workflows/release-dispatch.yml @@ -14,12 +14,17 @@ jobs: contents: write runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:5d61184 + 
image: ghcr.io/dojoengine/dojo-dev:3153a80 + env: + VERSION: "" steps: # Workaround described here: https://github.com/actions/checkout/issues/760 - uses: actions/checkout@v3 - run: git config --global --add safe.directory "$GITHUB_WORKSPACE" - - run: cargo release version ${{ inputs.version }} --execute --no-confirm && cargo release replace --execute --no-confirm + - run: | + VERSION=${{ inputs.version }} + VERSION=${VERSION#v} + cargo release version $VERSION --execute --no-confirm && cargo release replace --execute --no-confirm - id: version_info run: | cargo install cargo-get diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5fd3f3870b..70c88bd2c0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,7 +8,7 @@ on: env: CARGO_TERM_COLOR: always - RUST_VERSION: 1.74.0 + RUST_VERSION: 1.76.0 REGISTRY_IMAGE: ghcr.io/${{ github.repository }} jobs: diff --git a/.gitignore b/.gitignore index c6be219244..6498958892 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ output.txt crates/benches/bench_results.txt **/generated .vscode +bindings diff --git a/.gitmodules b/.gitmodules index 4882156aee..5c2ae58272 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,6 @@ [submodule "crates/katana/primitives/contracts/messaging/solidity/lib/forge-std"] path = crates/katana/primitives/contracts/messaging/solidity/lib/forge-std url = https://github.com/foundry-rs/forge-std +[submodule "crates/katana/contracts/messaging/solidity/lib/forge-std"] + path = crates/katana/contracts/messaging/solidity/lib/forge-std + url = https://github.com/foundry-rs/forge-std diff --git a/Cargo.lock b/Cargo.lock index 80b3dce833..ba6b1f6805 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -116,16 +116,367 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +[[package]] +name = "alloy-consensus" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "c-kzg", + "serde", + "sha2 0.10.8", +] + +[[package]] +name = "alloy-contract" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types", + "alloy-sol-types", + "alloy-transport", + "futures", + "futures-util", + "thiserror", +] + +[[package]] +name = "alloy-dyn-abi" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ab339ca7b4ea9115f0578c941abc80a171edf8e5eadd01e6c4237b68db8083" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-type-parser", + "alloy-sol-types", + "const-hex", + "itoa", + "serde", + "serde_json", + "winnow 0.6.1", +] + +[[package]] +name = "alloy-eips" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "c-kzg", + "once_cell", + "serde", +] + +[[package]] +name = "alloy-genesis" +version = "0.1.0" +source = 
"git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-primitives", + "alloy-serde", + "serde", +] + +[[package]] +name = "alloy-json-abi" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44294729c145cf7ae65feab544b5b81fb2bb7e2fd060214842eb3989a1e9d882" +dependencies = [ + "alloy-primitives", + "alloy-sol-type-parser", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-json-rpc" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "alloy-network" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-primitives", + "alloy-rpc-types", + "alloy-signer", + "async-trait", + "futures-utils-wasm", + "thiserror", +] + +[[package]] +name = "alloy-primitives" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50c715249705afa1e32be79dabfd35e2ef0f1cc02ad2cf48c9d1e20026ee637b" +dependencies = [ + "alloy-rlp", + "bytes", + "cfg-if", + "const-hex", + "derive_more", + "hex-literal", + "itoa", + "k256", + "keccak-asm", + "proptest", + "rand", + "ruint", + "serde", + "tiny-keccak", +] + +[[package]] +name = "alloy-provider" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-eips", + "alloy-json-rpc", + "alloy-network", + "alloy-primitives", + "alloy-rpc-client", + "alloy-rpc-types", + "alloy-rpc-types-trace", + "alloy-transport", + "alloy-transport-http", + "async-stream", + "async-trait", + "auto_impl", + "dashmap", + "futures", + "futures-utils-wasm", + "lru 0.12.2", + "reqwest 0.12.4", + "serde_json", + "tokio", + "tracing", + "url", +] + [[package]] name = "alloy-rlp" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d58d9f5da7b40e9bfff0b7e7816700be4019db97d4b6359fe7f94a9e22e42ac" dependencies = [ + "alloy-rlp-derive", "arrayvec", "bytes", ] +[[package]] +name = "alloy-rlp-derive" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a047897373be4bbb0224c1afdabca92648dc57a9c9ef6e7b0be3aff7a859c83" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.55", +] + +[[package]] +name = "alloy-rpc-client" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-json-rpc", + "alloy-transport", + "alloy-transport-http", + "futures", + "pin-project", + "reqwest 0.12.4", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower", + "tracing", + "url", +] + +[[package]] +name = "alloy-rpc-types" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + 
"alloy-sol-types", + "itertools 0.12.1", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "alloy-rpc-types-trace" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types", + "alloy-serde", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-serde" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-signer" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-primitives", + "async-trait", + "auto_impl", + "elliptic-curve", + "k256", + "thiserror", +] + +[[package]] +name = "alloy-signer-wallet" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives", + "alloy-signer", + "async-trait", + "k256", + "rand", + "thiserror", +] + +[[package]] +name = "alloy-sol-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef9a94a27345fb31e3fcb5f5e9f592bb4847493b07fa1e47dd9fde2222f2e28" +dependencies = [ + "alloy-json-abi", + "alloy-sol-macro-input", + "const-hex", + "heck 0.4.1", + "indexmap 2.2.5", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.55", + "syn-solidity", + "tiny-keccak", +] + +[[package]] +name = "alloy-sol-macro-input" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31fe73cd259527e24dc2dbfe64bc95e5ddfcd2b2731f670a11ff72b2be2c25b" +dependencies = [ + "alloy-json-abi", + "const-hex", + "dunce", + "heck 0.5.0", + "proc-macro2", + "quote", + "serde_json", + "syn 2.0.55", + "syn-solidity", +] + +[[package]] +name = "alloy-sol-type-parser" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c8d6e74e4feeaa2bcfdecfd3da247ab53c67bd654ba1907270c32e02b142331" +dependencies = [ + "winnow 0.6.1", +] + +[[package]] +name = "alloy-sol-types" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afaffed78bfb17526375754931e045f96018aa810844b29c7aef823266dd4b4b" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-macro", + "const-hex", + "serde", +] + +[[package]] +name = "alloy-transport" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-json-rpc", + "base64 0.22.0", + "futures-util", + "futures-utils-wasm", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower", + "url", + "wasm-bindgen-futures", +] + +[[package]] +name = "alloy-transport-http" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=d68a6b787b2904061f0ae7fcc02ece8513e3c500#d68a6b787b2904061f0ae7fcc02ece8513e3c500" +dependencies = [ + "alloy-json-rpc", + "alloy-transport", + "reqwest 0.12.4", + "serde_json", + "tower", + "url", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -569,7 +920,7 
@@ dependencies = [ "fnv", "futures-util", "handlebars", - "http", + "http 0.2.11", "indexmap 2.2.5", "mime", "multer", @@ -594,11 +945,11 @@ dependencies = [ "Inflector", "async-graphql-parser", "darling 0.20.6", - "proc-macro-crate 1.1.3", + "proc-macro-crate", "proc-macro2", "quote", "strum 0.25.0", - "syn 2.0.49", + "syn 2.0.55", "thiserror", ] @@ -705,7 +1056,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -753,7 +1104,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -770,18 +1121,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", -] - -[[package]] -name = "async_io_stream" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" -dependencies = [ - "futures", - "pharos", - "rustc_version 0.4.0", + "syn 2.0.55", ] [[package]] @@ -828,7 +1168,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d9a9bf8b79a749ee0b911b91b671cc2b6c670bdbc7e3dfd537576ddc94bb2a2" dependencies = [ - "http", + "http 0.2.11", "log", "url", ] @@ -841,7 +1181,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -861,9 +1201,9 @@ dependencies = [ "bitflags 1.3.2", "bytes", "futures-util", - "http", - "http-body", - "hyper", + "http 0.2.11", + "http-body 0.4.6", + "hyper 0.14.28", "itoa", "matchit", "memchr", @@ -887,8 +1227,8 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", - "http-body", + "http 0.2.11", + "http-body 0.4.6", "mime", "rustversion", "tower-layer", @@ -934,6 +1274,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + [[package]] name = "base64ct" version = "1.6.0" @@ -957,7 +1303,7 @@ dependencies = [ [[package]] name = "benches" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "clap", @@ -970,7 +1316,7 @@ dependencies = [ "katana-runner", "lazy_static", "proptest", - "reqwest", + "reqwest 0.11.24", "scarb", "serde", "serde_json", @@ -992,6 +1338,19 @@ dependencies = [ "serde", ] +[[package]] +name = "bigdecimal" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9324c8014cd04590682b34f1e9448d38f0674d0f7b2dc553331016ef0e4e9ebc" +dependencies = [ + "autocfg", + "libm", + "num-bigint", + "num-integer", + "num-traits 0.2.18", +] + [[package]] name = "bincode" version = "1.3.3" @@ -1027,7 +1386,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -1173,6 +1532,18 @@ dependencies = [ "thiserror", ] +[[package]] +name = "blst" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94087b935a822949d3291a9989ad2b2051ea141eda0fd4e478a75f6aa3e604b" +dependencies 
= [ + "cc", + "glob", + "threadpool", + "zeroize", +] + [[package]] name = "brotli" version = "3.4.0" @@ -1200,7 +1571,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5353f36341f7451062466f0b755b96ac3a9547e4d7f6b70d603fc721a7d7896" dependencies = [ - "sha2 0.10.8", "tinyvec", ] @@ -1248,7 +1618,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] -name = "byteorder" +name = "bytemuck" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" + +[[package]] +name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" @@ -1289,6 +1665,20 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "c-kzg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3130f3d8717cc02e668a896af24984d5d5d4e8bf12e278e982e0f1bd88a0f9af" +dependencies = [ + "blst", + "cc", + "glob", + "hex", + "libc", + "serde", +] + [[package]] name = "cached" version = "0.44.0" @@ -1399,7 +1789,7 @@ dependencies = [ "quote", "serde_json", "starknet 0.8.0", - "syn 2.0.49", + "syn 2.0.55", "thiserror", ] @@ -1412,7 +1802,7 @@ dependencies = [ "quote", "serde_json", "starknet 0.9.0", - "syn 2.0.49", + "syn 2.0.55", "thiserror", ] @@ -1428,7 +1818,7 @@ dependencies = [ "quote", "serde_json", "starknet 0.8.0", - "syn 2.0.49", + "syn 2.0.55", "thiserror", ] @@ -1444,7 +1834,7 @@ dependencies = [ "quote", "serde_json", "starknet 0.9.0", - "syn 2.0.49", + "syn 2.0.55", "thiserror", ] @@ -1461,7 +1851,7 @@ dependencies = [ "quote", "serde_json", "starknet 0.9.0", - "syn 2.0.49", + "syn 2.0.55", "thiserror", ] @@ -1697,7 +2087,7 @@ checksum = "fef002aac874d76492eb9577dab663f9a84fe4584b4215c7ebfda7d025fcadae" dependencies = [ "cairo-lang-debug", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -2023,6 +2413,19 @@ dependencies = [ "time", ] +[[package]] +name = "cairo-proof-parser" +version = "0.1.2" +source = "git+https://github.com/cartridge-gg/cairo-proof-parser.git#4ced00edb19b75f7bd1990de36aa44fe4e51236a" +dependencies = [ + "anyhow", + "clap", + "num-bigint", + "regex", + "serde", + "serde_json", +] + [[package]] name = "cairo-vm" version = "0.9.2" @@ -2178,7 +2581,7 @@ checksum = "4f4c948ab3cd9562d256b752d874d573c836ec8b200bba87d1154bbf662d3a00" dependencies = [ "async-trait", "celestia-types", - "http", + "http 0.2.11", "jsonrpsee 0.20.3", "serde", "thiserror", @@ -2430,7 +2833,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -2451,58 +2854,6 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" -[[package]] -name = "coins-bip32" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b6be4a5df2098cd811f3194f64ddb96c267606bffd9689ac7b0160097b01ad3" -dependencies = [ - "bs58", - "coins-core", - "digest 0.10.7", - "hmac", - "k256", - "serde", - "sha2 0.10.8", - "thiserror", -] - -[[package]] -name = "coins-bip39" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3db8fba409ce3dc04f7d804074039eb68b960b0829161f8e06c95fea3f122528" -dependencies = [ - "bitvec", - "coins-bip32", - "hmac", - "once_cell", - "pbkdf2 0.12.2", - "rand", - "sha2 0.10.8", - "thiserror", -] - -[[package]] -name = "coins-core" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5286a0843c21f8367f7be734f89df9b822e0321d8bcce8d6e735aadff7d74979" -dependencies = [ - "base64 0.21.7", - "bech32", - "bs58", - "digest 0.10.7", - "generic-array", - "hex", - "ripemd", - "serde", - "serde_derive", - "sha2 0.10.8", - "sha3", - "thiserror", -] - [[package]] name = "colorchoice" version = "1.0.0" @@ -2519,6 +2870,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "colored_json" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74cb9ce6b86f6e54bfa9518df2eeeef65d424ec7244d083ed97229185e366a91" +dependencies = [ + "is-terminal", + "serde", + "serde_json", + "yansi", +] + [[package]] name = "combine" version = "4.6.6" @@ -2531,10 +2894,10 @@ dependencies = [ [[package]] name = "common" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", - "reqwest", + "reqwest 0.11.24", "thiserror", ] @@ -2547,7 +2910,7 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", + "http 0.2.11", "mime", "mime_guess", "rand", @@ -2569,7 +2932,7 @@ version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ - "encode_unicode", + "encode_unicode 0.3.6", "lazy_static", "libc", "unicode-width", @@ -2843,6 +3206,27 @@ dependencies = [ "typenum", ] +[[package]] +name = "csv" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" +dependencies = [ + "memchr", +] + [[package]] name = "ctor" version = "0.2.6" @@ -2850,7 +3234,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30d2b3721e861707777e3195b0158f950ae6dc4a27e4d02ff9f67e3eb3de199e" dependencies = [ "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -2897,7 +3281,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -2958,7 +3342,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -2980,7 +3364,7 @@ checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ "darling_core 0.20.6", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -3084,34 +3468,13 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "derive_builder" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d67778784b508018359cbc8696edb3db78160bab2c2a28ba7f56ef6932997f8" -dependencies = [ - "derive_builder_macro 0.12.0", -] - [[package]] name = "derive_builder" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0350b5cb0331628a5916d6c5c0b72e97393b8b6b03b47a9284f4e7f5a405ffd7" dependencies = [ - "derive_builder_macro 0.20.0", -] - -[[package]] -name = "derive_builder_core" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f" -dependencies = [ - "darling 0.14.4", - "proc-macro2", - "quote", - "syn 1.0.109", + "derive_builder_macro", ] [[package]] @@ -3123,17 +3486,7 @@ dependencies = [ "darling 0.20.6", "proc-macro2", "quote", - "syn 2.0.49", -] - -[[package]] -name = "derive_builder_macro" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebcda35c7a396850a55ffeac740804b40ffec779b98fffbb1738f4033f0ee79e" -dependencies = [ - "derive_builder_core 0.12.0", - "syn 1.0.109", + "syn 2.0.55", ] [[package]] @@ -3142,8 +3495,8 @@ version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "206868b8242f27cecce124c19fd88157fbd0dd334df2587f36417bafbc85097b" dependencies = [ - "derive_builder_core 0.20.0", - "syn 2.0.49", + "derive_builder_core", + "syn 2.0.55", ] [[package]] @@ -3280,7 +3633,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -3291,13 +3644,14 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dojo-bindgen" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "async-trait", "cainome 0.1.5", "camino", "chrono", "convert_case 0.6.0", + "dojo-world", "serde", "serde_json", "starknet 0.9.0", @@ -3306,15 +3660,15 @@ dependencies = [ [[package]] name = "dojo-core" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" [[package]] name = "dojo-examples-spawn-and-move" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" [[package]] name = "dojo-lang" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3364,7 +3718,7 @@ dependencies = [ [[package]] name = "dojo-language-server" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3384,9 +3738,27 @@ dependencies = [ "tower-lsp", ] +[[package]] +name = "dojo-metrics" +version = "0.6.1-alpha.4" +dependencies = [ + "anyhow", + "hyper 0.14.28", + "jemalloc-ctl", + "jemallocator", + "metrics", + "metrics-exporter-prometheus", + "metrics-process", + "metrics-util", + "reth-metrics-derive", + "thiserror", + "tokio", + "tracing", +] + [[package]] name = "dojo-test-utils" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "assert_fs", @@ -3420,7 +3792,7 @@ dependencies = [ [[package]] name = "dojo-types" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "crypto-bigint", "hex", @@ -3435,7 +3807,7 @@ dependencies = [ [[package]] name = "dojo-world" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "assert_fs", @@ -3451,7 +3823,7 @@ dependencies = [ "dojo-test-utils", "dojo-types", "futures", - "http", + "http 0.2.11", "ipfs-api-backend-hyper", "scarb", "serde", @@ -3461,6 +3833,7 @@ dependencies = [ "smol_str", "starknet 0.9.0", "starknet-crypto 0.6.1", + "tempfile", "thiserror", "tokio", "toml 0.7.8", @@ -3470,7 +3843,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "cairo-lang-starknet", 
"camino", @@ -3607,6 +3980,12 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + [[package]] name = "encoding_rs" version = "0.8.33" @@ -3622,24 +4001,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" -[[package]] -name = "enr" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe81b5c06ecfdbc71dd845216f225f53b62a10cb8a16c946836a3467f701d05b" -dependencies = [ - "base64 0.21.7", - "bytes", - "hex", - "k256", - "log", - "rand", - "rlp", - "serde", - "sha3", - "zeroize", -] - [[package]] name = "enum-as-inner" version = "0.6.0" @@ -3649,7 +4010,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -3661,7 +4022,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -3698,337 +4059,68 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "etcetera" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" -dependencies = [ - "cfg-if", - "home", - "windows-sys 0.48.0", -] - -[[package]] -name = "eth-keystore" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" -dependencies = [ - "aes", - "ctr", - "digest 0.10.7", - "hex", - "hmac", - "pbkdf2 0.11.0", - "rand", - "scrypt", - "serde", - "serde_json", - "sha2 0.10.8", - "sha3", - "thiserror", - "uuid 0.8.2", -] - -[[package]] -name = "ethabi" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "primitive-types", - "scale-info", - "uint", -] - -[[package]] -name = "ethers" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c7cd562832e2ff584fa844cd2f6e5d4f35bbe11b28c7c9b8df957b2e1d0c701" -dependencies = [ - "ethers-addressbook", - "ethers-contract", - "ethers-core", - 
"ethers-etherscan", - "ethers-middleware", - "ethers-providers", - "ethers-signers", - "ethers-solc", -] - -[[package]] -name = "ethers-addressbook" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35dc9a249c066d17e8947ff52a4116406163cf92c7f0763cb8c001760b26403f" -dependencies = [ - "ethers-core", - "once_cell", - "serde", - "serde_json", -] - -[[package]] -name = "ethers-contract" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43304317c7f776876e47f2f637859f6d0701c1ec7930a150f169d5fbe7d76f5a" -dependencies = [ - "const-hex", - "ethers-contract-abigen", - "ethers-contract-derive", - "ethers-core", - "ethers-providers", - "futures-util", - "once_cell", - "pin-project", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "ethers-contract-abigen" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9f96502317bf34f6d71a3e3d270defaa9485d754d789e15a8e04a84161c95eb" -dependencies = [ - "Inflector", - "const-hex", - "dunce", - "ethers-core", - "ethers-etherscan", - "eyre", - "prettyplease 0.2.16", - "proc-macro2", - "quote", - "regex", - "reqwest", - "serde", - "serde_json", - "syn 2.0.49", - "toml 0.8.10", - "walkdir", -] - -[[package]] -name = "ethers-contract-derive" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "452ff6b0a64507ce8d67ffd48b1da3b42f03680dcf5382244e9c93822cbbf5de" -dependencies = [ - "Inflector", - "const-hex", - "ethers-contract-abigen", - "ethers-core", - "proc-macro2", - "quote", - "serde_json", - "syn 2.0.49", -] - -[[package]] -name = "ethers-core" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aab3cef6cc1c9fd7f787043c81ad3052eff2b96a3878ef1526aa446311bdbfc9" -dependencies = [ - "arrayvec", - "bytes", - "cargo_metadata", - "chrono", - "const-hex", - "elliptic-curve", - "ethabi", - "generic-array", - "k256", - "num_enum", - "once_cell", - "open-fastrlp", - "rand", - "rlp", - "serde", - "serde_json", - "strum 0.25.0", - "syn 2.0.49", - "tempfile", - "thiserror", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "ethers-etherscan" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d45b981f5fa769e1d0343ebc2a44cfa88c9bc312eb681b676318b40cef6fb1" -dependencies = [ - "chrono", - "ethers-core", - "reqwest", - "semver 1.0.22", - "serde", - "serde_json", - "thiserror", - "tracing", -] - -[[package]] -name = "ethers-middleware" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145211f34342487ef83a597c1e69f0d3e01512217a7c72cc8a25931854c7dca0" -dependencies = [ - "async-trait", - "auto_impl", - "ethers-contract", - "ethers-core", - "ethers-etherscan", - "ethers-providers", - "ethers-signers", - "futures-channel", - "futures-locks", - "futures-util", - "instant", - "reqwest", - "serde", - "serde_json", - "thiserror", - "tokio", - "tracing", - "tracing-futures", - "url", + "libc", + "windows-sys 0.52.0", ] [[package]] -name = "ethers-providers" -version = "2.0.13" +name = "etcetera" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb6b15393996e3b8a78ef1332d6483c11d839042c17be58decc92fa8b1c3508a" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ - "async-trait", - 
"auto_impl", - "base64 0.21.7", - "bytes", - "const-hex", - "enr", - "ethers-core", - "futures-core", - "futures-timer", - "futures-util", - "hashers", - "http", - "instant", - "jsonwebtoken", - "once_cell", - "pin-project", - "reqwest", - "serde", - "serde_json", - "thiserror", - "tokio", - "tokio-tungstenite", - "tracing", - "tracing-futures", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "ws_stream_wasm", + "cfg-if", + "home", + "windows-sys 0.48.0", ] [[package]] -name = "ethers-signers" -version = "2.0.13" +name = "eth-keystore" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3b125a103b56aef008af5d5fb48191984aa326b50bfd2557d231dc499833de3" +checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" dependencies = [ - "async-trait", - "coins-bip32", - "coins-bip39", - "const-hex", - "elliptic-curve", - "eth-keystore", - "ethers-core", + "aes", + "ctr", + "digest 0.10.7", + "hex", + "hmac", + "pbkdf2", "rand", + "scrypt", + "serde", + "serde_json", "sha2 0.10.8", + "sha3", "thiserror", - "tracing", + "uuid 0.8.2", ] [[package]] -name = "ethers-solc" -version = "2.0.13" +name = "ethbloom" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d21df08582e0a43005018a858cc9b465c5fff9cf4056651be64f844e57d1f55f" +checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" dependencies = [ - "cfg-if", - "const-hex", - "dirs 5.0.1", - "dunce", - "ethers-core", - "glob", - "home", - "md-5", - "num_cpus", - "once_cell", - "path-slash", - "rayon", - "regex", - "semver 1.0.22", - "serde", - "serde_json", - "solang-parser", - "svm-rs", - "thiserror", + "crunchy", + "fixed-hash", + "impl-rlp", + "impl-serde", "tiny-keccak", - "tokio", - "tracing", - "walkdir", - "yansi", +] + +[[package]] +name = "ethereum-types" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" +dependencies = [ + "ethbloom", + "fixed-hash", + "impl-rlp", + "impl-serde", + "primitive-types", + "uint", ] [[package]] @@ -4246,16 +4338,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "fs4" version = "0.7.0" @@ -4386,16 +4468,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "futures-locks" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06" -dependencies = [ - "futures-channel", - "futures-task", -] - [[package]] name = "futures-macro" version = "0.3.30" @@ -4404,7 +4476,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -4469,13 +4541,10 @@ dependencies = [ ] [[package]] -name = "fxhash" -version = "0.2.1" +name = "futures-utils-wasm" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] +checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" [[package]] name = "genco" @@ 
-4496,7 +4565,7 @@ checksum = "d4cf186fea4af17825116f72932fe52cce9a13bae39ff63b4dc0cfdb3fb4bde1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -4941,7 +5010,7 @@ checksum = "d75e7ab728059f595f6ddc1ad8771b8d6a231971ae493d9d5948ecad366ee8bb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -5444,7 +5513,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.11", "indexmap 2.2.5", "slab", "tokio", @@ -5505,15 +5574,6 @@ dependencies = [ "serde", ] -[[package]] -name = "hashers" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2bca93b15ea5a746f220e56587f71e73c6165eab783df9e26590069953e3c30" -dependencies = [ - "fxhash", -] - [[package]] name = "hashlink" version = "0.8.4" @@ -5542,7 +5602,7 @@ dependencies = [ "base64 0.21.7", "bytes", "headers-core", - "http", + "http 0.2.11", "httpdate", "mime", "sha1", @@ -5554,7 +5614,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" dependencies = [ - "http", + "http 0.2.11", ] [[package]] @@ -5575,6 +5635,12 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" version = "0.3.6" @@ -5586,6 +5652,15 @@ name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] + +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" [[package]] name = "hex_fmt" @@ -5688,6 +5763,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http-body" version = "0.4.6" @@ -5695,7 +5781,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.11", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +dependencies = [ + "bytes", + "futures-core", + "http 1.1.0", + "http-body 1.0.0", "pin-project-lite", ] @@ -5740,8 +5849,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 0.2.11", + "http-body 0.4.6", "httparse", "httpdate", "itoa", @@ -5753,6 +5862,25 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + [[package]] name = "hyper-multipart-rfc7578" version = "0.8.0" @@ -5762,8 +5890,8 @@ dependencies = [ "bytes", "common-multipart-rfc7578", "futures-core", - "http", - "hyper", + "http 0.2.11", + "hyper 0.14.28", ] [[package]] @@ -5771,7 +5899,7 @@ name = "hyper-reverse-proxy" version = "0.5.2-dev" source = "git+https://github.com/tarrencev/hyper-reverse-proxy#4bfaf98b7ae61a49c6238ee1bd38ad030e7fa7f6" dependencies = [ - "hyper", + "hyper 0.14.28", "lazy_static", "tokio", "tracing", @@ -5783,8 +5911,8 @@ version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" dependencies = [ - "http", - "hyper", + "http 0.2.11", + "hyper 0.14.28", "log", "rustls 0.20.9", "rustls-native-certs", @@ -5799,8 +5927,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", - "http", - "hyper", + "http 0.2.11", + "hyper 0.14.28", "log", "rustls 0.21.10", "rustls-native-certs", @@ -5815,7 +5943,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper", + "hyper 0.14.28", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -5828,12 +5956,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper", + "hyper 0.14.28", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "hyper-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.3.1", + "pin-project-lite", + "socket2 0.5.5", + "tokio", + "tower", + "tower-service", + "tracing", +] + [[package]] name = "iana-time-zone" version = "0.1.60" @@ -5928,8 +6076,8 @@ dependencies = [ "attohttpc", "bytes", "futures", - "http", - "hyper", + "http 0.2.11", + "hyper 0.14.28", "log", "rand", "tokio", @@ -6170,7 +6318,7 @@ dependencies = [ "socket2 0.5.5", "widestring", "windows-sys 0.48.0", - "winreg", + "winreg 0.50.0", ] [[package]] @@ -6182,8 +6330,8 @@ dependencies = [ "base64 0.13.1", "bytes", "futures", - "http", - "hyper", + "http 0.2.11", + "hyper 0.14.28", "hyper-multipart-rfc7578", "hyper-rustls 0.23.2", "ipfs-api-prelude", @@ -6201,7 +6349,7 @@ dependencies = [ "common-multipart-rfc7578", "dirs 4.0.0", "futures", - "http", + "http 0.2.11", "multiaddr 0.17.1", "multibase", "serde", @@ -6387,7 +6535,7 @@ dependencies = [ "futures-timer", "futures-util", "gloo-net", - "http", + "http 0.2.11", "jsonrpsee-core 0.16.3", "jsonrpsee-types 0.16.3", "pin-project", @@ -6408,7 +6556,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b005c793122d03217da09af68ba9383363caa950b90d3436106df8cabce935" dependencies = [ "futures-util", - "http", + "http 
0.2.11", "jsonrpsee-core 0.20.3", "pin-project", "rustls-native-certs", @@ -6436,7 +6584,7 @@ dependencies = [ "futures-timer", "futures-util", "globset", - "hyper", + "hyper 0.14.28", "jsonrpsee-types 0.16.3", "parking_lot 0.12.1", "rand", @@ -6462,7 +6610,7 @@ dependencies = [ "beef", "futures-timer", "futures-util", - "hyper", + "hyper 0.14.28", "jsonrpsee-types 0.20.3", "rustc-hash", "serde", @@ -6479,7 +6627,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e5f9fabdd5d79344728521bb65e3106b49ec405a78b66fbff073b72b389fa43" dependencies = [ "async-trait", - "hyper", + "hyper 0.14.28", "hyper-rustls 0.24.2", "jsonrpsee-core 0.16.3", "jsonrpsee-types 0.16.3", @@ -6498,7 +6646,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f80c17f62c7653ce767e3d7288b793dfec920f97067ceb189ebdd3570f2bc20" dependencies = [ "async-trait", - "hyper", + "hyper 0.14.28", "hyper-rustls 0.24.2", "jsonrpsee-core 0.20.3", "jsonrpsee-types 0.20.3", @@ -6518,7 +6666,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44e8ab85614a08792b9bff6c8feee23be78c98d0182d4c622c05256ab553892a" dependencies = [ "heck 0.4.1", - "proc-macro-crate 1.1.3", + "proc-macro-crate", "proc-macro2", "quote", "syn 1.0.109", @@ -6531,7 +6679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29110019693a4fa2dbda04876499d098fa16d70eba06b1e6e2b3f1b251419515" dependencies = [ "heck 0.4.1", - "proc-macro-crate 1.1.3", + "proc-macro-crate", "proc-macro2", "quote", "syn 1.0.109", @@ -6545,8 +6693,8 @@ checksum = "cf4d945a6008c9b03db3354fb3c83ee02d2faa9f2e755ec1dfb69c3551b8f4ba" dependencies = [ "futures-channel", "futures-util", - "http", - "hyper", + "http 0.2.11", + "hyper 0.14.28", "jsonrpsee-core 0.16.3", "jsonrpsee-types 0.16.3", "serde", @@ -6604,7 +6752,7 @@ version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e1b3975ed5d73f456478681a417128597acd6a2487855fdb7b4a3d4d195bf5e" dependencies = [ - "http", + "http 0.2.11", "jsonrpsee-client-transport 0.16.3", "jsonrpsee-core 0.16.3", "jsonrpsee-types 0.16.3", @@ -6616,27 +6764,13 @@ version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bca9cb3933ccae417eb6b08c3448eb1cb46e39834e5b503e395e5e5bd08546c0" dependencies = [ - "http", + "http 0.2.11", "jsonrpsee-client-transport 0.20.3", "jsonrpsee-core 0.20.3", "jsonrpsee-types 0.20.3", "url", ] -[[package]] -name = "jsonwebtoken" -version = "8.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" -dependencies = [ - "base64 0.21.7", - "pem 1.1.1", - "ring 0.16.20", - "serde", - "serde_json", - "simple_asn1", -] - [[package]] name = "jwalk" version = "0.8.1" @@ -6663,8 +6797,9 @@ dependencies = [ [[package]] name = "katana" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ + "alloy-primitives", "anyhow", "assert_matches", "cfg-if", @@ -6672,13 +6807,12 @@ dependencies = [ "clap_complete", "common", "console", + "dojo-metrics", "katana-core", "katana-executor", "katana-primitives", "katana-rpc", "katana-rpc-api", - "metrics 0.6.0-alpha.8", - "metrics-process", "serde_json", "shellexpand", "starknet_api", @@ -6690,7 +6824,7 @@ dependencies = [ [[package]] name = "katana-codecs" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "bytes", "katana-primitives", 
@@ -6698,18 +6832,27 @@ dependencies = [ [[package]] name = "katana-codecs-derive" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] name = "katana-core" -version = "0.6.0-alpha.8" -dependencies = [ +version = "0.6.1-alpha.4" +dependencies = [ + "alloy-contract", + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types", + "alloy-signer", + "alloy-signer-wallet", + "alloy-sol-types", + "alloy-transport", "anyhow", "assert_matches", "async-trait", @@ -6718,19 +6861,21 @@ dependencies = [ "cairo-vm 0.9.2", "convert_case 0.6.0", "derive_more", - "ethers", + "dojo-metrics", "flate2", "futures", "hex", + "k256", "katana-db", "katana-executor", "katana-primitives", "katana-provider", "katana-tasks", "lazy_static", + "metrics", "parking_lot 0.12.1", - "primitive-types", "rand", + "reqwest 0.11.24", "serde", "serde_json", "serde_with", @@ -6745,7 +6890,7 @@ dependencies = [ [[package]] name = "katana-db" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "cairo-lang-starknet", @@ -6756,6 +6901,7 @@ dependencies = [ "parking_lot 0.12.1", "postcard", "reth-libmdbx", + "roaring", "serde", "serde_json", "starknet 0.9.0", @@ -6766,8 +6912,9 @@ dependencies = [ [[package]] name = "katana-executor" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ + "alloy-primitives", "anyhow", "blockifier", "cairo-lang-sierra", @@ -6793,15 +6940,15 @@ dependencies = [ [[package]] name = "katana-primitives" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ + "alloy-primitives", "anyhow", "base64 0.21.7", "cairo-lang-sierra", "cairo-lang-starknet", "cairo-vm 0.9.2", "derive_more", - "ethers", "flate2", "lazy_static", "rand", @@ -6819,7 +6966,7 @@ dependencies = [ [[package]] name = "katana-provider" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "auto_impl", @@ -6844,16 +6991,17 @@ dependencies = [ [[package]] name = "katana-rpc" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "assert_matches", "cairo-lang-starknet", + "dojo-metrics", "dojo-test-utils", "flate2", "futures", "hex", - "hyper", + "hyper 0.14.28", "jsonrpsee 0.16.3", "katana-core", "katana-executor", @@ -6863,6 +7011,7 @@ dependencies = [ "katana-rpc-types", "katana-rpc-types-builder", "katana-tasks", + "metrics", "serde", "serde_json", "serde_with", @@ -6878,7 +7027,7 @@ dependencies = [ [[package]] name = "katana-rpc-api" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "jsonrpsee 0.16.3", "katana-core", @@ -6889,17 +7038,18 @@ dependencies = [ [[package]] name = "katana-rpc-types" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ + "alloy-primitives", "anyhow", "derive_more", - "ethers", "futures", "jsonrpsee 0.16.3", "katana-core", "katana-executor", "katana-primitives", "katana-provider", + "rstest", "serde", "serde_json", "serde_with", @@ -6909,7 +7059,7 @@ dependencies = [ [[package]] name = "katana-rpc-types-builder" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "katana-executor", @@ -6921,7 +7071,7 @@ dependencies = [ [[package]] name = "katana-runner" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "chrono", @@ -6940,7 +7090,7 @@ dependencies = [ [[package]] name = "katana-tasks" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "futures", "rayon", @@ -6957,6 
+7107,16 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "keccak-asm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb8515fff80ed850aea4a1595f2e519c003e2a00a82fe168ebf5269196caf444" +dependencies = [ + "digest 0.10.7", + "sha3-asm", +] + [[package]] name = "kqueue" version = "1.0.8" @@ -7504,7 +7664,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -7833,23 +7993,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "metrics" -version = "0.6.0-alpha.8" -dependencies = [ - "anyhow", - "hyper", - "jemalloc-ctl", - "jemallocator", - "metrics 0.21.1", - "metrics-exporter-prometheus", - "metrics-process", - "metrics-util", - "thiserror", - "tokio", - "tracing", -] - [[package]] name = "metrics" version = "0.21.1" @@ -7861,16 +8004,6 @@ dependencies = [ "portable-atomic", ] -[[package]] -name = "metrics" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77b9e10a211c839210fd7f99954bda26e5f8e26ec686ad68da6a32df7c80e782" -dependencies = [ - "ahash 0.8.8", - "portable-atomic", -] - [[package]] name = "metrics-exporter-prometheus" version = "0.12.2" @@ -7878,10 +8011,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d4fa7ce7c4862db464a37b0b31d89bca874562f034bd7993895572783d02950" dependencies = [ "base64 0.21.7", - "hyper", + "hyper 0.14.28", "indexmap 1.9.3", "ipnet", - "metrics 0.21.1", + "metrics", "metrics-util", "quanta", "thiserror", @@ -7897,18 +8030,18 @@ checksum = "38b4faf00617defe497754acde3024865bc143d44a86799b24e191ecff91354f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] name = "metrics-process" -version = "1.2.0" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ab55aa892047d9fa19d390afc5318492f956de3ec88a098adb4c0e663b4914" +checksum = "6aa2a67e2580fbeba4d5a96e659945981e700a383b4cea1432e0cfc18f58c5da" dependencies = [ "libproc", "mach2", - "metrics 0.22.0", + "metrics", "once_cell", "procfs", "rlimit", @@ -7926,7 +8059,7 @@ dependencies = [ "crossbeam-utils", "hashbrown 0.13.1", "indexmap 1.9.3", - "metrics 0.21.1", + "metrics", "num_cpus", "ordered-float 3.9.2", "quanta", @@ -8011,7 +8144,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-util", - "http", + "http 0.2.11", "httparse", "log", "memchr", @@ -8096,7 +8229,7 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d6d4752e6230d8ef7adf7bd5d8c4b1f6561c1014c5ba9a37445ccefe18aa1db" dependencies = [ - "proc-macro-crate 1.1.3", + "proc-macro-crate", "proc-macro-error", "proc-macro2", "quote", @@ -8494,27 +8627,6 @@ dependencies = [ "libc", ] -[[package]] -name = "num_enum" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" -dependencies = [ - "num_enum_derive", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" -dependencies = [ - "proc-macro-crate 3.1.0", - "proc-macro2", - "quote", - "syn 2.0.49", -] - [[package]] name = "num_threads" version = "0.1.7" @@ -8565,40 +8677,15 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = 
"oorandom" -version = "11.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" - -[[package]] -name = "opaque-debug" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" - -[[package]] -name = "open-fastrlp" -version = "0.1.4" +version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", - "ethereum-types", - "open-fastrlp-derive", -] +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] -name = "open-fastrlp-derive" -version = "0.1.1" +name = "opaque-debug" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" -dependencies = [ - "bytes", - "proc-macro2", - "quote", - "syn 1.0.109", -] +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" @@ -8623,7 +8710,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -8738,7 +8825,7 @@ version = "3.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "312270ee71e1cd70289dacf597cab7b207aa107d2f28191c2ae45b2ece18a260" dependencies = [ - "proc-macro-crate 1.1.3", + "proc-macro-crate", "proc-macro2", "quote", "syn 1.0.109", @@ -8830,12 +8917,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "path-slash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" - [[package]] name = "pathdiff" version = "0.2.1" @@ -8857,31 +8938,12 @@ dependencies = [ "sha2 0.10.8", ] -[[package]] -name = "pbkdf2" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" -dependencies = [ - "digest 0.10.7", - "hmac", -] - [[package]] name = "peeking_take_while" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" -[[package]] -name = "pem" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" -dependencies = [ - "base64 0.13.1", -] - [[package]] name = "pem" version = "3.0.3" @@ -8938,7 +9000,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -8962,16 +9024,6 @@ dependencies = [ "indexmap 2.2.5", ] -[[package]] -name = "pharos" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" -dependencies = [ - "futures", - "rustc_version 0.4.0", -] - [[package]] name = "phf" version = "0.11.2" @@ -9002,7 +9054,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -9046,7 +9098,7 @@ checksum = 
"266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -9275,7 +9327,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5" dependencies = [ "proc-macro2", - "syn 2.0.49", + "syn 2.0.55", +] + +[[package]] +name = "prettytable-rs" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eea25e07510aa6ab6547308ebe3c036016d162b8da920dbb079e3ba8acf3d95a" +dependencies = [ + "csv", + "encode_unicode 1.0.0", + "is-terminal", + "lazy_static", + "term", + "unicode-width", ] [[package]] @@ -9297,7 +9363,6 @@ dependencies = [ "impl-codec", "impl-rlp", "impl-serde", - "scale-info", "uint", ] @@ -9311,15 +9376,6 @@ dependencies = [ "toml 0.5.11", ] -[[package]] -name = "proc-macro-crate" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" -dependencies = [ - "toml_edit 0.21.1", -] - [[package]] name = "proc-macro-error" version = "1.0.4" @@ -9346,9 +9402,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] @@ -9406,7 +9462,7 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -9488,7 +9544,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.49", + "syn 2.0.55", "tempfile", "which 4.4.2", ] @@ -9516,7 +9572,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -9752,7 +9808,7 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52c4f3084aa3bc7dfbba4eff4fab2a54db4324965d8872ab933565e6fbd83bc6" dependencies = [ - "pem 3.0.3", + "pem", "ring 0.16.20", "time", "x509-parser", @@ -9866,9 +9922,9 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", - "hyper", + "http 0.2.11", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-rustls 0.24.2", "hyper-tls", "ipnet", @@ -9898,7 +9954,42 @@ dependencies = [ "wasm-streams 0.4.0", "web-sys", "webpki-roots", - "winreg", + "winreg 0.50.0", +] + +[[package]] +name = "reqwest" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" +dependencies = [ + "base64 0.22.0", + "bytes", + "futures-core", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.3.1", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg 0.52.0", ] [[package]] @@ -9936,6 +10027,18 @@ dependencies = [ "libc", ] +[[package]] +name = "reth-metrics-derive" +version = "0.2.0-beta.4" +source = 
"git+https://github.com/paradigmxyz/reth.git?tag=v0.2.0-beta.4#c04dbe6e9bd05be5da3a5d541adbf76166c14a08" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "regex", + "syn 2.0.55", +] + [[package]] name = "rfc6979" version = "0.4.0" @@ -9976,15 +10079,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest 0.10.7", -] - [[package]] name = "rlimit" version = "0.10.1" @@ -10001,19 +10095,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" dependencies = [ "bytes", - "rlp-derive", "rustc-hex", ] [[package]] -name = "rlp-derive" -version = "0.1.0" +name = "roaring" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" +checksum = "a1c77081a55300e016cb86f2864415b7518741879db925b8d488a0ee0d2da6bf" dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", + "bytemuck", + "byteorder", + "serde", +] + +[[package]] +name = "rpassword" +version = "7.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" +dependencies = [ + "libc", + "rtoolbox", + "windows-sys 0.48.0", ] [[package]] @@ -10061,7 +10165,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.49", + "syn 2.0.55", "unicode-ident", ] @@ -10074,7 +10178,7 @@ dependencies = [ "quote", "rand", "rustc_version 0.4.0", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -10103,6 +10207,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "rtoolbox" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "rtp" version = "0.9.0" @@ -10148,10 +10262,10 @@ checksum = "e666a5496a0b2186dbcd0ff6106e29e093c15591bde62c20d3842007c6978a09" [[package]] name = "runner-macro" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -10375,7 +10489,7 @@ dependencies = [ [[package]] name = "saya" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "clap", @@ -10394,10 +10508,12 @@ dependencies = [ [[package]] name = "saya-core" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "async-trait", + "cairo-felt", + "cairo-proof-parser", "cairo-vm 0.9.2", "celestia-rpc", "celestia-types", @@ -10410,6 +10526,7 @@ dependencies = [ "katana-provider", "katana-rpc-types", "lazy_static", + "num-bigint", "parking_lot 0.12.1", "rand", "saya-provider", @@ -10427,13 +10544,13 @@ dependencies = [ [[package]] name = "saya-provider" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ + "alloy-primitives", "anyhow", "async-trait", "auto_impl", "convert_case 0.6.0", - "ethers", "flate2", "futures", "jsonrpsee 0.16.3", @@ -10455,30 +10572,6 @@ dependencies = [ "url", ] -[[package]] -name = "scale-info" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7f7d66a1128282b7ef025a8ead62a4a9fcf017382ec53b8ffbf4d7bf77bd3c60" -dependencies = [ - "cfg-if", - "derive_more", - "parity-scale-codec", - "scale-info-derive", -] - -[[package]] -name = "scale-info-derive" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf2c68b89cafb3b8d918dd07b42be0da66ff202cf1155c5739a4e0c1ea0dc19" -dependencies = [ - "proc-macro-crate 1.1.3", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "scarb" version = "2.5.4" @@ -10503,7 +10596,7 @@ dependencies = [ "create-output-dir", "data-encoding", "deno_task_shell", - "derive_builder 0.20.0", + "derive_builder", "directories", "dunce", "fs4", @@ -10519,7 +10612,7 @@ dependencies = [ "pathdiff", "petgraph", "redb", - "reqwest", + "reqwest 0.11.24", "scarb-build-metadata", "scarb-macro-interface", "scarb-metadata 1.11.1 (git+https://github.com/software-mansion/scarb?tag=v2.5.4)", @@ -10589,7 +10682,7 @@ version = "1.11.1" source = "git+https://github.com/software-mansion/scarb?tag=v2.5.4#28dee92c87e97bacefb2a300e7a102455936eeca" dependencies = [ "camino", - "derive_builder 0.12.0", + "derive_builder", "semver 1.0.22", "serde", "serde_json", @@ -10664,7 +10757,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" dependencies = [ "hmac", - "pbkdf2 0.11.0", + "pbkdf2", "salsa20", "sha2 0.10.8", ] @@ -10772,9 +10865,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] @@ -10810,13 +10903,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -10861,7 +10954,7 @@ checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -10910,7 +11003,7 @@ dependencies = [ "darling 0.20.6", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -10945,7 +11038,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -11012,6 +11105,16 @@ dependencies = [ "keccak", ] +[[package]] +name = "sha3-asm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bac61da6b35ad76b195eb4771210f947734321a8d81d7738e1580d953bc7a15e" +dependencies = [ + "cc", + "cfg-if", +] + [[package]] name = "sharded-slab" version = "0.1.7" @@ -11091,18 +11194,6 @@ dependencies = [ "similar", ] -[[package]] -name = "simple_asn1" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" -dependencies = [ - "num-bigint", - "num-traits 0.2.18", - "thiserror", - "time", -] - [[package]] name = "siphasher" version = "0.3.11" @@ 
-11207,34 +11298,21 @@ dependencies = [ "base64 0.13.1", "bytes", "futures", - "http", + "http 0.2.11", "httparse", "log", "rand", "sha-1", ] -[[package]] -name = "solang-parser" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c425ce1c59f4b154717592f0bdf4715c3a1d55058883622d3157e1f0908a5b26" -dependencies = [ - "itertools 0.11.0", - "lalrpop", - "lalrpop-util", - "phf", - "thiserror", - "unicode-xid", -] - [[package]] name = "sozo" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "assert_fs", "async-trait", + "bigdecimal 0.4.3", "cainome 0.1.5", "cairo-lang-compiler", "cairo-lang-defs", @@ -11258,9 +11336,13 @@ dependencies = [ "dojo-types", "dojo-world", "futures", + "katana-rpc-api", "katana-runner", "notify", "notify-debouncer-mini", + "num-integer", + "prettytable-rs", + "rpassword", "scarb", "scarb-ui", "semver 1.0.22", @@ -11280,7 +11362,7 @@ dependencies = [ [[package]] name = "sozo-ops" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "assert_fs", @@ -11301,6 +11383,8 @@ dependencies = [ "clap", "clap-verbosity-flag", "clap_complete", + "colored", + "colored_json", "console", "dojo-bindgen", "dojo-lang", @@ -11308,14 +11392,17 @@ dependencies = [ "dojo-types", "dojo-world", "futures", + "ipfs-api-backend-hyper", "katana-runner", "notify", "notify-debouncer-mini", + "rpassword", "scarb", "scarb-ui", "semver 1.0.22", "serde", "serde_json", + "serde_with", "smol_str", "snapbox", "starknet 0.9.0", @@ -11329,7 +11416,7 @@ dependencies = [ [[package]] name = "sozo-signers" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "starknet 0.9.0", @@ -11828,7 +11915,7 @@ checksum = "af6527b845423542c8a16e060ea1bc43f67229848e7cd4c4d80be994a84220ce" dependencies = [ "starknet-curve 0.4.1", "starknet-ff", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -11856,7 +11943,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "067419451efdea1ee968df8438369960c167e0e905c05b84afd074f50e1d6f3d" dependencies = [ "ark-ff 0.4.2", - "bigdecimal", + "bigdecimal 0.3.1", "crypto-bigint", "getrandom", "hex", @@ -11871,7 +11958,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c5d2964612f0ccd0a700279e33cfc98d6db04f64645ff834f3b7ec422142d7a" dependencies = [ "starknet-core 0.9.0", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -11885,7 +11972,7 @@ dependencies = [ "ethereum-types", "flate2", "log", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "serde_with", @@ -11905,7 +11992,7 @@ dependencies = [ "ethereum-types", "flate2", "log", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "serde_with", @@ -11925,7 +12012,7 @@ dependencies = [ "ethereum-types", "flate2", "log", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "serde_with", @@ -12139,7 +12226,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -12192,30 +12279,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" [[package]] -name = "svm-rs" -version = "0.3.5" +name = "syn" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11297baafe5fa0c99d5722458eac6a5e25c01eb1b8e5cd137f54079093daa7a4" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "dirs 
5.0.1", - "fs2", - "hex", - "once_cell", - "reqwest", - "semver 1.0.22", - "serde", - "serde_json", - "sha2 0.10.8", - "thiserror", - "url", - "zip", + "proc-macro2", + "quote", + "unicode-ident", ] [[package]] name = "syn" -version = "1.0.109" +version = "2.0.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +checksum = "002a1b3dbf967edfafc32655d0f377ab0bb7b994aa1d32c8cc7e9b8bf3ebb8f0" dependencies = [ "proc-macro2", "quote", @@ -12223,14 +12301,15 @@ dependencies = [ ] [[package]] -name = "syn" -version = "2.0.49" +name = "syn-solidity" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496" +checksum = "70aba06097b6eda3c15f6eebab8a6339e121475bcf08bbe6758807e716c372a1" dependencies = [ + "paste", "proc-macro2", "quote", - "unicode-ident", + "syn 2.0.55", ] [[package]] @@ -12345,7 +12424,7 @@ checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -12365,7 +12444,7 @@ checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -12398,6 +12477,15 @@ dependencies = [ "once_cell", ] +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + [[package]] name = "time" version = "0.3.34" @@ -12502,7 +12590,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -12545,6 +12633,7 @@ dependencies = [ "futures-core", "pin-project-lite", "tokio", + "tokio-util", ] [[package]] @@ -12555,11 +12644,8 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", - "rustls 0.21.10", "tokio", - "tokio-rustls 0.24.1", "tungstenite", - "webpki-roots", ] [[package]] @@ -12632,17 +12718,6 @@ dependencies = [ "winnow 0.5.40", ] -[[package]] -name = "toml_edit" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" -dependencies = [ - "indexmap 2.2.5", - "toml_datetime", - "winnow 0.5.40", -] - [[package]] name = "toml_edit" version = "0.22.6" @@ -12668,8 +12743,8 @@ dependencies = [ "flate2", "futures-core", "futures-util", - "http", - "http-body", + "http 0.2.11", + "http-body 0.4.6", "percent-encoding", "pin-project", "prost 0.11.9", @@ -12691,9 +12766,9 @@ dependencies = [ "base64 0.21.7", "bytes", "h2", - "http", - "http-body", - "hyper", + "http 0.2.11", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-timeout", "percent-encoding", "pin-project", @@ -12729,7 +12804,7 @@ dependencies = [ "proc-macro2", "prost-build 0.12.3", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -12753,9 +12828,9 @@ checksum = "0fddb2a37b247e6adcb9f239f4e5cefdcc5ed526141a416b943929f13aea2cce" dependencies = [ "base64 0.21.7", "bytes", - "http", - "http-body", - "hyper", + "http 0.2.11", + "http-body 0.4.6", + "hyper 0.14.28", "pin-project", "tokio-stream", "tonic 0.10.2", @@ -12775,8 +12850,8 @@ dependencies = [ 
"byteorder", "bytes", "futures-util", - "http", - "http-body", + "http 0.2.11", + "http-body 0.4.6", "httparse", "js-sys", "pin-project", @@ -12791,7 +12866,7 @@ dependencies = [ [[package]] name = "torii" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "async-trait", @@ -12801,18 +12876,17 @@ dependencies = [ "clap", "common", "ctrlc", + "dojo-metrics", "dojo-types", "dojo-world", "either", "futures", - "http", - "http-body", - "hyper", + "http 0.2.11", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-reverse-proxy", "indexmap 1.9.3", "lazy_static", - "metrics 0.6.0-alpha.8", - "metrics-process", "scarb", "serde", "serde_json", @@ -12837,7 +12911,7 @@ dependencies = [ [[package]] name = "torii-client" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "async-trait", "camino", @@ -12865,7 +12939,7 @@ dependencies = [ [[package]] name = "torii-core" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "async-trait", @@ -12883,7 +12957,7 @@ dependencies = [ "lazy_static", "log", "once_cell", - "reqwest", + "reqwest 0.11.24", "scarb", "scarb-ui", "serde", @@ -12904,7 +12978,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "async-graphql", @@ -12945,22 +13019,26 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "bytes", "crypto-bigint", + "dojo-test-utils", "dojo-types", + "dojo-world", "futures", "futures-util", "hex", - "hyper", + "hyper 0.14.28", "parking_lot 0.12.1", "prost 0.11.9", "prost 0.12.3", "rand", "rayon", + "scarb", "serde", "serde_json", + "sozo-ops", "sqlx", "starknet 0.9.0", "starknet-crypto 0.6.1", @@ -12984,12 +13062,13 @@ dependencies = [ [[package]] name = "torii-relay" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "async-trait", "chrono", "crypto-bigint", + "dojo-test-utils", "dojo-types", "dojo-world", "futures", @@ -13002,9 +13081,8 @@ dependencies = [ "serde", "serde_json", "sqlx", - "starknet-core 0.9.0", + "starknet 0.9.0", "starknet-crypto 0.6.1", - "starknet-ff", "tempfile", "thiserror", "tokio", @@ -13019,7 +13097,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" dependencies = [ "anyhow", "async-trait", @@ -13028,18 +13106,17 @@ dependencies = [ "chrono", "clap", "ctrlc", + "dojo-metrics", "dojo-types", "dojo-world", "either", "futures", - "http", - "http-body", - "hyper", + "http 0.2.11", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-reverse-proxy", "indexmap 1.9.3", "lazy_static", - "metrics 0.6.0-alpha.8", - "metrics-process", "scarb", "serde", "serde_json", @@ -13092,8 +13169,8 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", - "http-body", + "http 0.2.11", + "http-body 0.4.6", "http-range-header", "httpdate", "iri-string", @@ -13147,7 +13224,7 @@ checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -13176,7 +13253,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -13189,16 +13266,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.1.4" @@ -13278,11 +13345,10 @@ dependencies = [ "byteorder", "bytes", "data-encoding", - "http", + "http 0.2.11", "httparse", "log", "rand", - "rustls 0.21.10", "sha1", "thiserror", "url", @@ -13325,7 +13391,7 @@ checksum = "563b3b88238ec95680aef36bdece66896eaa7ce3c0f1b4f39d38fb2435261352" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -13336,7 +13402,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" [[package]] name = "ucd-trie" @@ -13606,8 +13672,8 @@ dependencies = [ "futures-channel", "futures-util", "headers", - "http", - "hyper", + "http 0.2.11", + "hyper 0.14.28", "log", "mime", "mime_guess", @@ -13654,7 +13720,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", "wasm-bindgen-shared", ] @@ -13688,7 +13754,7 @@ checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -13721,7 +13787,7 @@ checksum = "a5211b7550606857312bba1d978a8ec75692eae187becc5e680444fffc5e6f89" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -13822,7 +13888,7 @@ dependencies = [ "interceptor", "lazy_static", "log", - "pem 3.0.3", + "pem", "rand", "rcgen", "regex", @@ -13885,7 +13951,7 @@ dependencies = [ "log", "p256", "p384", - "pem 3.0.3", + "pem", "rand", "rand_core", "rcgen", @@ -14347,22 +14413,13 @@ dependencies = [ ] [[package]] -name = "ws_stream_wasm" -version = "0.7.4" +name = "winreg" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ - "async_io_stream", - "futures", - "js-sys", - "log", - "pharos", - "rustc_version 0.4.0", - "send_wrapper 0.6.0", - "thiserror", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", + "cfg-if", + "windows-sys 0.48.0", ] [[package]] @@ -14514,7 +14571,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -14534,7 +14591,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.55", ] [[package]] @@ -14551,7 +14608,7 @@ dependencies = [ "crossbeam-utils", "flate2", "hmac", - "pbkdf2 0.11.0", + "pbkdf2", "sha1", "time", "zstd 0.11.2+zstd.1.5.2", diff --git a/Cargo.toml b/Cargo.toml index e6b35536f0..0353a53c89 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" [profile.performance] codegen-units = 1 @@ -57,7 +57,7 @@ lto = "fat" common = { path = "crates/common" } # metrics -metrics = { path = "crates/metrics" } +dojo-metrics = { path = "crates/metrics" } # dojo-lang dojo-bindgen = { path = "crates/dojo-bindgen" } @@ -95,10 +95,10 @@ saya-core = { path = "crates/saya/core" } saya-provider = { path = "crates/saya/provider" 
} # sozo -sozo-signers = { path = "crates/sozo/signers" } sozo-ops = { path = "crates/sozo/ops" } +sozo-signers = { path = "crates/sozo/signers" } -anyhow = "1.0.75" +anyhow = "1.0.80" assert_matches = "1.5.0" async-trait = "0.1.68" base64 = "0.21.2" @@ -123,10 +123,11 @@ cairo-lang-test-plugin = "=2.5.4" cairo-lang-test-runner = "=2.5.4" cairo-lang-test-utils = "=2.5.4" cairo-lang-utils = "=2.5.4" +cairo-proof-parser = { git = "https://github.com/cartridge-gg/cairo-proof-parser.git", version = "0.1.2" } cairo-vm = "0.9.2" camino = { version = "1.1.2", features = [ "serde1" ] } chrono = { version = "0.4.24", features = [ "serde" ] } -clap = { version = "4.2", features = [ "derive" ] } +clap = { version = "4.5", features = [ "derive" ] } clap_complete = "4.3" console = "0.15.7" convert_case = "0.6.0" @@ -139,17 +140,19 @@ indoc = "1.0.7" itertools = "0.10.3" jsonrpsee = { version = "0.16.2", default-features = false } lazy_static = "1.4.0" -metrics-process = "1.0.9" +metrics = "0.21.1" num-traits = { version = "0.2", default-features = false } once_cell = "1.0" parking_lot = "0.12.1" pretty_assertions = "1.2.1" rayon = "1.8.0" +regex = "1.10.3" +rpassword = "7.2.0" salsa = "0.16.1" scarb = { git = "https://github.com/software-mansion/scarb", tag = "v2.5.4" } scarb-ui = { git = "https://github.com/software-mansion/scarb", tag = "v2.5.4" } semver = "1.0.5" -serde = { version = "1.0.192", features = [ "derive" ] } +serde = { version = "1.0.197", features = [ "derive" ] } serde_json = { version = "1.0", features = [ "arbitrary_precision" ] } serde_with = "2.3.1" similar-asserts = "1.5.0" @@ -166,7 +169,6 @@ tokio = { version = "1.32.0", features = [ "full" ] } toml = "0.7.4" tracing = "0.1.34" tracing-subscriber = { version = "0.3.16", features = [ "env-filter", "json" ] } -regex = "1.10.3" url = { version = "2.4.0", features = [ "serde" ] } rstest = "0.18.2" @@ -189,6 +191,18 @@ wasm-prost = { version = "0.11.9", package = "prost" } wasm-tonic = { version = "0.9.2", default-features = false, features = [ "codegen", "gzip", "prost" ], package = "tonic" } wasm-tonic-build = { version = "0.9.2", default-features = false, features = [ "prost" ], package = "tonic-build" } +alloy-primitives = { version = "0.7.0", default-features = false } +alloy-sol-types = { version = "0.7.0", default-features = false } + [patch.crates-io] cairo-felt = { git = "https://github.com/dojoengine/cairo-rs.git", rev = "1031381" } cairo-vm = { git = "https://github.com/dojoengine/cairo-rs.git", rev = "1031381" } + +alloy-contract = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500" } +alloy-json-rpc = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500" } +alloy-network = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500" } +alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500"} +alloy-rpc-types = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500" } +alloy-signer = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500" } +alloy-signer-wallet = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500" } +alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "d68a6b787b2904061f0ae7fcc02ece8513e3c500" } diff 
--git a/bin/katana/Cargo.toml b/bin/katana/Cargo.toml index 774a438211..8edb17a859 100644 --- a/bin/katana/Cargo.toml +++ b/bin/katana/Cargo.toml @@ -13,13 +13,12 @@ clap.workspace = true clap_complete.workspace = true common.workspace = true console.workspace = true +dojo-metrics.workspace = true katana-core.workspace = true katana-executor.workspace = true katana-primitives.workspace = true katana-rpc-api.workspace = true katana-rpc.workspace = true -metrics-process.workspace = true -metrics.workspace = true serde_json.workspace = true shellexpand = "3.1.0" starknet_api.workspace = true @@ -27,6 +26,7 @@ tokio.workspace = true tracing-subscriber.workspace = true tracing.workspace = true url.workspace = true +alloy-primitives.workspace = true [dev-dependencies] assert_matches = "1.5.0" @@ -37,6 +37,6 @@ default = [ "blockifier", "jemalloc", "messaging" ] blockifier = [ "katana-executor/blockifier" ] sir = [ "katana-executor/sir" ] -jemalloc = [ "metrics/jemalloc" ] +jemalloc = [ "dojo-metrics/jemalloc" ] messaging = [ "katana-core/messaging" ] starknet-messaging = [ "katana-core/starknet-messaging", "messaging" ] diff --git a/bin/katana/src/args.rs b/bin/katana/src/args.rs index c583434afe..2ad0aa1e8f 100644 --- a/bin/katana/src/args.rs +++ b/bin/katana/src/args.rs @@ -13,6 +13,7 @@ use std::net::SocketAddr; use std::path::PathBuf; +use alloy_primitives::U256; use clap::{Args, Parser, Subcommand}; use clap_complete::Shell; use common::parse::parse_socket_address; @@ -174,19 +175,25 @@ pub struct EnvironmentOptions { #[arg(long)] #[arg(help = "The maximum number of steps available for the account validation logic.")] - pub validate_max_steps: Option, + #[arg(default_value_t = DEFAULT_VALIDATE_MAX_STEPS)] + pub validate_max_steps: u32, #[arg(long)] #[arg(help = "The maximum number of steps available for the account execution logic.")] - pub invoke_max_steps: Option, + #[arg(default_value_t = DEFAULT_INVOKE_MAX_STEPS)] + pub invoke_max_steps: u32, #[arg(long = "eth-gas-price")] - #[arg(help = "The L1 ETH gas price.")] - pub l1_eth_gas_price: Option, + #[arg(conflicts_with = "genesis")] + #[arg(help = "The L1 ETH gas price. (denominated in wei)")] + #[arg(default_value_t = DEFAULT_ETH_L1_GAS_PRICE)] + pub l1_eth_gas_price: u128, #[arg(long = "strk-gas-price")] - #[arg(help = "The L1 STRK gas price.")] - pub l1_strk_gas_price: Option, + #[arg(conflicts_with = "genesis")] + #[arg(help = "The L1 STRK gas price. 
(denominated in fri)")] + #[arg(default_value_t = DEFAULT_STRK_L1_GAS_PRICE)] + pub l1_strk_gas_price: u128, } impl KatanaArgs { @@ -233,21 +240,21 @@ impl KatanaArgs { } pub fn starknet_config(&self) -> StarknetConfig { - let gas_price = GasPrices { - eth: self.starknet.environment.l1_eth_gas_price.unwrap_or(DEFAULT_ETH_L1_GAS_PRICE), - strk: self.starknet.environment.l1_strk_gas_price.unwrap_or(DEFAULT_STRK_L1_GAS_PRICE), - }; - let genesis = match self.starknet.genesis.clone() { Some(genesis) => genesis, None => { + let gas_prices = GasPrices { + eth: self.starknet.environment.l1_eth_gas_price, + strk: self.starknet.environment.l1_strk_gas_price, + }; + let accounts = DevAllocationsGenerator::new(self.starknet.total_accounts) .with_seed(parse_seed(&self.starknet.seed)) - .with_balance(DEFAULT_PREFUNDED_ACCOUNT_BALANCE) + .with_balance(U256::from(DEFAULT_PREFUNDED_ACCOUNT_BALANCE)) .generate(); let mut genesis = Genesis { - gas_prices: gas_price.clone(), + gas_prices, sequencer_address: *DEFAULT_SEQUENCER_ADDRESS, ..Default::default() }; @@ -263,18 +270,9 @@ impl KatanaArgs { fork_rpc_url: self.rpc_url.clone(), fork_block_number: self.fork_block_number, env: Environment { - gas_price, chain_id: self.starknet.environment.chain_id, - invoke_max_steps: self - .starknet - .environment - .invoke_max_steps - .unwrap_or(DEFAULT_INVOKE_MAX_STEPS), - validate_max_steps: self - .starknet - .environment - .validate_max_steps - .unwrap_or(DEFAULT_VALIDATE_MAX_STEPS), + invoke_max_steps: self.starknet.environment.invoke_max_steps, + validate_max_steps: self.starknet.environment.validate_max_steps, }, db_dir: self.db_dir.clone(), genesis, @@ -287,32 +285,51 @@ mod test { use super::*; #[test] - fn default_block_context_from_args() { + fn test_starknet_config_default() { let args = KatanaArgs::parse_from(["katana"]); - let block_context = args.starknet_config().block_env(); - assert_eq!(block_context.l1_gas_prices.eth, DEFAULT_ETH_L1_GAS_PRICE); - assert_eq!(block_context.l1_gas_prices.strk, DEFAULT_STRK_L1_GAS_PRICE); + let config = args.starknet_config(); + + assert!(!config.disable_fee); + assert!(!config.disable_validate); + assert_eq!(config.fork_rpc_url, None); + assert_eq!(config.fork_block_number, None); + assert_eq!(config.env.chain_id, ChainId::parse("KATANA").unwrap()); + assert_eq!(config.env.invoke_max_steps, DEFAULT_INVOKE_MAX_STEPS); + assert_eq!(config.env.validate_max_steps, DEFAULT_VALIDATE_MAX_STEPS); + assert_eq!(config.db_dir, None); + assert_eq!(config.genesis.gas_prices.eth, DEFAULT_ETH_L1_GAS_PRICE); + assert_eq!(config.genesis.gas_prices.strk, DEFAULT_STRK_L1_GAS_PRICE); + assert_eq!(config.genesis.sequencer_address, *DEFAULT_SEQUENCER_ADDRESS); } #[test] - fn custom_block_context_from_args() { + fn test_starknet_config_custom() { let args = KatanaArgs::parse_from([ "katana", - "--eth-gas-price", - "10", - "--strk-gas-price", - "20", + "--disable-fee", + "--disable-validate", "--chain-id", "SN_GOERLI", - "--validate-max-steps", - "100", "--invoke-max-steps", "200", + "--validate-max-steps", + "100", + "--db-dir", + "/path/to/db", + "--eth-gas-price", + "10", + "--strk-gas-price", + "20", ]); - - let block_context = args.starknet_config().block_env(); - - assert_eq!(block_context.l1_gas_prices.eth, 10); - assert_eq!(block_context.l1_gas_prices.strk, 20); + let config = args.starknet_config(); + + assert!(config.disable_fee); + assert!(config.disable_validate); + assert_eq!(config.env.chain_id, ChainId::GOERLI); + assert_eq!(config.env.invoke_max_steps, 200); + 
assert_eq!(config.env.validate_max_steps, 100); + assert_eq!(config.db_dir, Some(PathBuf::from("/path/to/db"))); + assert_eq!(config.genesis.gas_prices.eth, 10); + assert_eq!(config.genesis.gas_prices.strk, 20); } } diff --git a/bin/katana/src/main.rs b/bin/katana/src/main.rs index 0c62ce949a..958fc9f217 100644 --- a/bin/katana/src/main.rs +++ b/bin/katana/src/main.rs @@ -5,6 +5,7 @@ use std::sync::Arc; use clap::{CommandFactory, Parser}; use clap_complete::{generate, Shell}; use console::Style; +use dojo_metrics::{metrics_process, prometheus_exporter}; use katana_core::constants::MAX_RECURSION_DEPTH; use katana_core::env::get_default_vm_resource_fee_cost; use katana_core::sequencer::KatanaSequencer; @@ -15,7 +16,6 @@ use katana_primitives::env::{CfgEnv, FeeTokenAddressses}; use katana_primitives::genesis::allocation::GenesisAccountAlloc; use katana_primitives::genesis::Genesis; use katana_rpc::{spawn, NodeHandle}; -use metrics::prometheus_exporter; use tokio::signal::ctrl_c; use tracing::info; @@ -25,6 +25,8 @@ mod utils; use args::Commands::Completions; use args::KatanaArgs; +pub(crate) const LOG_TARGET: &str = "katana::cli"; + #[tokio::main] async fn main() -> Result<(), Box> { let args = KatanaArgs::parse(); @@ -75,19 +77,10 @@ async fn main() -> Result<(), Box> { } } - let sequencer = - Arc::new(KatanaSequencer::new(executor_factory, sequencer_config, starknet_config).await?); - let NodeHandle { addr, handle, .. } = spawn(Arc::clone(&sequencer), server_config).await?; - - if !args.silent { - let genesis = &sequencer.backend().config.genesis; - print_intro(&args, genesis, addr); - } - if let Some(listen_addr) = args.metrics { let prometheus_handle = prometheus_exporter::install_recorder("katana")?; - info!(target: "katana::cli", addr = %listen_addr, "Starting metrics endpoint"); + info!(target: LOG_TARGET, addr = %listen_addr, "Starting metrics endpoint."); prometheus_exporter::serve( listen_addr, prometheus_handle, @@ -96,6 +89,15 @@ async fn main() -> Result<(), Box> { .await?; } + let sequencer = + Arc::new(KatanaSequencer::new(executor_factory, sequencer_config, starknet_config).await?); + let NodeHandle { addr, handle, .. 
} = spawn(Arc::clone(&sequencer), server_config).await?; + + if !args.silent { + let genesis = &sequencer.backend().config.genesis; + print_intro(&args, genesis, addr); + } + // Wait until Ctrl + C is pressed, then shutdown ctrl_c().await?; handle.stop()?; @@ -116,6 +118,7 @@ fn print_intro(args: &KatanaArgs, genesis: &Genesis, address: SocketAddr) { if args.json_log { info!( + target: LOG_TARGET, "{}", serde_json::json!({ "accounts": accounts.map(|a| serde_json::json!(a)).collect::>(), diff --git a/bin/saya/src/args/mod.rs b/bin/saya/src/args/mod.rs index 48e73a9d97..f23142b5e1 100644 --- a/bin/saya/src/args/mod.rs +++ b/bin/saya/src/args/mod.rs @@ -12,8 +12,10 @@ use tracing_subscriber::{fmt, EnvFilter}; use url::Url; use crate::args::data_availability::{DataAvailabilityChain, DataAvailabilityOptions}; +use crate::args::proof::ProofOptions; mod data_availability; +mod proof; #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] @@ -45,6 +47,10 @@ pub struct SayaArgs { #[command(flatten)] #[command(next_help_heading = "Data availability options")] pub data_availability: DataAvailabilityOptions, + + #[command(flatten)] + #[command(next_help_heading = "Choose the proof pipeline configuration")] + pub proof: ProofOptions, } impl SayaArgs { @@ -109,6 +115,8 @@ impl TryFrom for SayaConfig { katana_rpc: args.rpc_url, start_block: args.start_block, data_availability: da_config, + prover: args.proof.prover.into(), + verifier: args.proof.verifier.into(), }) } } @@ -140,7 +148,9 @@ mod tests { celestia_namespace: None, }, }, + proof: ProofOptions { prover: Default::default(), verifier: Default::default() }, }; + let config: SayaConfig = args.try_into().unwrap(); assert_eq!(config.katana_rpc.as_str(), "http://localhost:5050/"); diff --git a/bin/saya/src/args/proof.rs b/bin/saya/src/args/proof.rs new file mode 100644 index 0000000000..604e774bc9 --- /dev/null +++ b/bin/saya/src/args/proof.rs @@ -0,0 +1,135 @@ +//! Selecting prover and verifier. +use std::fmt::Display; +use std::str::FromStr; + +use anyhow::Result; +use clap::builder::PossibleValue; +use clap::{Args, ValueEnum}; +use saya_core::prover::ProverIdentifier; +use saya_core::verifier::VerifierIdentifier; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Prover { + Stone, +} + +impl From for ProverIdentifier { + fn from(p: Prover) -> Self { + match p { + Prover::Stone => ProverIdentifier::Stone, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Verifier { + StoneLocal, + HerodotusStarknetSepolia, +} + +impl From for VerifierIdentifier { + fn from(p: Verifier) -> Self { + match p { + Verifier::StoneLocal => VerifierIdentifier::StoneLocal, + Verifier::HerodotusStarknetSepolia => VerifierIdentifier::HerodotusStarknetSepolia, + } + } +} + +#[derive(Debug, Args, Clone)] +pub struct ProofOptions { + #[arg(long)] + #[arg(help = "Prover used to generate the proof from the provable program.")] + pub prover: Prover, + + #[arg(long)] + #[arg(help = "Verifier to which the proof should be sent.")] + pub verifier: Verifier, +} + +// -- Prover.
+impl Default for Prover { + fn default() -> Self { + Self::Stone + } +} + +impl ValueEnum for Prover { + fn value_variants<'a>() -> &'a [Self] { + &[Self::Stone] + } + + fn to_possible_value(&self) -> Option { + match self { + Self::Stone => Some(PossibleValue::new("stone").alias("Stone")), + } + } +} + +impl FromStr for Prover { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + match s { + "stone" | "Stone" => Ok(Self::Stone), + _ => Err(anyhow::anyhow!("unknown prover: {}", s)), + } + } +} + +impl Display for Prover { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Prover::Stone => write!(f, "stone"), + } + } +} + +// -- Verifier. +impl Default for Verifier { + fn default() -> Self { + Self::StoneLocal + } +} + +impl ValueEnum for Verifier { + fn value_variants<'a>() -> &'a [Self] { + &[Self::StoneLocal, Self::HerodotusStarknetSepolia] + } + + fn to_possible_value(&self) -> Option { + match self { + Self::StoneLocal => { + Some(PossibleValue::new("stone-local").alias("stone_local").alias("StoneLocal")) + } + Self::HerodotusStarknetSepolia => Some( + PossibleValue::new("herodotus_starknet_sepolia") + .alias("herodotus-starknet-sepolia") + .alias("HerodotusStarknetSepolia"), + ), + } + } +} + +impl FromStr for Verifier { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + match s { + "stone-local" | "stone_local" | "StoneLocal" => Ok(Self::StoneLocal), + "herodotus-starknet-sepolia" + | "herodotus_starknet_sepolia" + | "HerodotusStarknetSepolia" => Ok(Self::HerodotusStarknetSepolia), + _ => Err(anyhow::anyhow!("unknown verifier: {}", s)), + } + } +} + +impl Display for Verifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Verifier::StoneLocal => write!(f, "local-stone"), + Verifier::HerodotusStarknetSepolia => write!(f, "herodotus-starknet-sepolia"), + } + } +} diff --git a/bin/saya/src/args/test_saya_config_file.json b/bin/saya/src/args/test_saya_config_file.json index 7fb85a942d..d0d18a7c4f 100644 --- a/bin/saya/src/args/test_saya_config_file.json +++ b/bin/saya/src/args/test_saya_config_file.json @@ -7,5 +7,7 @@ "node_auth_token": "your_auth_token", "namespace": "katana" } - } + }, + "prover": "Stone", + "verifier": "StoneLocal" } diff --git a/bin/sozo/Cargo.toml b/bin/sozo/Cargo.toml index fecb6807cc..e1b56eabd4 100644 --- a/bin/sozo/Cargo.toml +++ b/bin/sozo/Cargo.toml @@ -6,8 +6,10 @@ version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +prettytable-rs = "0.10.0" anyhow.workspace = true async-trait.workspace = true +bigdecimal = "0.4.1" cairo-lang-compiler.workspace = true cairo-lang-defs.workspace = true cairo-lang-filesystem.workspace = true @@ -31,12 +33,15 @@ dojo-world = { workspace = true, features = [ "contracts", "metadata", "migratio futures.workspace = true notify = "6.0.1" notify-debouncer-mini = "0.3.0" +num-integer = "0.1.45" +rpassword.workspace = true scarb-ui.workspace = true scarb.workspace = true semver.workspace = true serde.workspace = true serde_json.workspace = true smol_str.workspace = true +sozo-ops.workspace = true starknet-crypto.workspace = true starknet.workspace = true thiserror.workspace = true @@ -44,7 +49,7 @@ tokio.workspace = true tracing-log = "0.1.3" tracing.workspace = true url.workspace = true -sozo-ops.workspace = true +katana-rpc-api.workspace = true cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } diff --git 
a/bin/sozo/README.md b/bin/sozo/README.md index 5c465a1296..c88c71a7e2 100644 --- a/bin/sozo/README.md +++ b/bin/sozo/README.md @@ -5,3 +5,6 @@ curl -L https://install.dojoengine.org | bash ``` [Documentation](https://book.dojoengine.org/toolchain/sozo/overview) + +Some parts of sozo were inspired by [starkli](https://github.com/xJonathanLEI/starkli) created by Jonathan LEI. +It is licensed under Apache 2.0 / MIT License. diff --git a/bin/sozo/src/args.rs b/bin/sozo/src/args.rs index fc3c7c5f5b..142d1ee90f 100644 --- a/bin/sozo/src/args.rs +++ b/bin/sozo/src/args.rs @@ -1,27 +1,17 @@ use anyhow::Result; use camino::Utf8PathBuf; -use clap::{Parser, Subcommand}; +use clap::Parser; use scarb::compiler::Profile; use scarb_ui::Verbosity; use smol_str::SmolStr; use tracing::level_filters::LevelFilter; use tracing_log::AsTrace; -use crate::commands::auth::AuthArgs; -use crate::commands::build::BuildArgs; -use crate::commands::clean::CleanArgs; -use crate::commands::completions::CompletionsArgs; -use crate::commands::dev::DevArgs; -use crate::commands::events::EventsArgs; -use crate::commands::execute::ExecuteArgs; -use crate::commands::init::InitArgs; -use crate::commands::migrate::MigrateArgs; -use crate::commands::model::ModelArgs; -use crate::commands::register::RegisterArgs; -use crate::commands::test::TestArgs; +use crate::commands::Commands; +use crate::utils::generate_version; #[derive(Parser)] -#[command(author, version, about, long_about = None)] +#[command(author, version=generate_version(), about, long_about = None)] #[command(propagate_version = true)] pub struct SozoArgs { #[arg(long)] @@ -49,35 +39,6 @@ pub struct SozoArgs { pub command: Commands, } -#[derive(Subcommand)] -pub enum Commands { - #[command(about = "Build the world, generating the necessary artifacts for deployment")] - Build(BuildArgs), - #[command(about = "Initialize a new project")] - Init(InitArgs), - #[command(about = "Remove generated artifacts, manifests and abis")] - Clean(CleanArgs), - #[command(about = "Run a migration, declaring and deploying contracts as necessary to \ - update the world")] - Migrate(Box), - #[command(about = "Developer mode: watcher for building and migration")] - Dev(DevArgs), - #[command(about = "Test the project's smart contracts")] - Test(TestArgs), - #[command(about = "Execute a world's system")] - Execute(ExecuteArgs), - #[command(about = "Interact with a worlds models")] - Model(ModelArgs), - #[command(about = "Register new models")] - Register(RegisterArgs), - #[command(about = "Queries world events")] - Events(EventsArgs), - #[command(about = "Manage world authorization")] - Auth(AuthArgs), - #[command(about = "Generate shell completion file for specified shell")] - Completions(CompletionsArgs), -} - impl SozoArgs { pub fn ui_verbosity(&self) -> Verbosity { let filter = self.verbose.log_level_filter().as_trace(); diff --git a/bin/sozo/src/commands/account.rs b/bin/sozo/src/commands/account.rs new file mode 100644 index 0000000000..fd99c9756b --- /dev/null +++ b/bin/sozo/src/commands/account.rs @@ -0,0 +1,126 @@ +use std::path::PathBuf; + +use anyhow::Result; +use clap::{Args, Subcommand}; +use scarb::core::Config; +use sozo_ops::account; +use starknet::signers::LocalWallet; +use starknet_crypto::FieldElement; + +use super::options::fee::FeeOptions; +use super::options::signer::SignerOptions; +use super::options::starknet::StarknetOptions; +use crate::utils; + +#[derive(Debug, Args)] +pub struct AccountArgs { + #[clap(subcommand)] + command: AccountCommand, +} + 
+#[allow(clippy::large_enum_variant)] +#[derive(Debug, Subcommand)] +pub enum AccountCommand { + #[clap(about = "Create a new account configuration without actually deploying.")] + New { + #[clap(flatten)] + signer: SignerOptions, + + #[clap(long, short, help = "Overwrite the account config file if it already exists")] + force: bool, + + #[clap(help = "Path to save the account config file")] + file: PathBuf, + }, + + #[clap(about = "Deploy account contract with a DeployAccount transaction.")] + Deploy { + #[clap(flatten)] + starknet: StarknetOptions, + + #[clap(flatten)] + signer: SignerOptions, + + #[clap(flatten)] + fee: FeeOptions, + + #[clap(long, help = "Simulate the transaction only")] + simulate: bool, + + #[clap(long, help = "Provide transaction nonce manually")] + nonce: Option, + + #[clap( + long, + env = "STARKNET_POLL_INTERVAL", + default_value = "1000", + help = "Transaction result poll interval in milliseconds" + )] + poll_interval: u64, + + #[clap(help = "Path to the account config file")] + file: PathBuf, + + #[clap(long, help = "Don't wait for user confirmation")] + no_confirmation: bool, + }, + + #[clap(about = "Fetch account config from an already deployed account contract.")] + Fetch { + #[clap(flatten)] + starknet: StarknetOptions, + + #[clap(long, help = "Overwrite the file if it already exists")] + force: bool, + + #[clap(long, help = "Path to save the account config file")] + output: PathBuf, + + #[clap(help = "Contract address")] + address: FieldElement, + }, +} + +impl AccountArgs { + pub fn run(self, config: &Config) -> Result<()> { + let env_metadata = utils::load_metadata_from_config(config)?; + + config.tokio_handle().block_on(async { + match self.command { + AccountCommand::New { signer, force, file } => { + let signer: LocalWallet = signer.signer(env_metadata.as_ref(), false)?; + account::new(signer, force, file).await + } + AccountCommand::Deploy { + starknet, + signer, + fee, + simulate, + nonce, + poll_interval, + file, + no_confirmation, + } => { + let provider = starknet.provider(env_metadata.as_ref())?; + let signer = signer.signer(env_metadata.as_ref(), false)?; + let fee_setting = fee.into_setting()?; + account::deploy( + provider, + signer, + fee_setting, + simulate, + nonce, + poll_interval, + file, + no_confirmation, + ) + .await + } + AccountCommand::Fetch { starknet, force, output, address } => { + let provider = starknet.provider(env_metadata.as_ref())?; + account::fetch(provider, force, output, address).await + } + } + }) + } +} diff --git a/bin/sozo/src/commands/auth.rs b/bin/sozo/src/commands/auth.rs index c07157077d..434e8c0313 100644 --- a/bin/sozo/src/commands/auth.rs +++ b/bin/sozo/src/commands/auth.rs @@ -1,11 +1,8 @@ use anyhow::Result; use clap::{Args, Subcommand}; -use dojo_world::contracts::WorldContractReader; use dojo_world::metadata::Environment; use scarb::core::Config; use sozo_ops::auth; -use starknet::accounts::ConnectedAccount; -use starknet::core::types::{BlockId, BlockTag}; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; @@ -19,56 +16,6 @@ pub struct AuthArgs { pub command: AuthCommand, } -#[derive(Debug, Subcommand)] -pub enum AuthKind { - #[command(about = "Grant a contract permission to write to a model.")] - Writer { - #[arg(num_args = 1..)] - #[arg(required = true)] - #[arg(value_name = "model,contract_address")] - #[arg(help = "A list of models and contract address to grant write access to. Comma \ - separated values to indicate model name and contract address e.g. 
\ - model_name,path::to::contract model_name,contract_address ")] - models_contracts: Vec, - }, - #[command(about = "Grant ownership of a resource.")] - Owner { - #[arg(num_args = 1..)] - #[arg(required = true)] - #[arg(value_name = "resource,owner_address")] - #[arg(help = "A list of owners and resources to grant ownership to. Comma separated \ - values to indicate owner address and resouce e.g. \ - contract:path::to::contract,0x1234 contract:contract_address,0x1111, \ - model:model_name,0xbeef")] - owners_resources: Vec, - }, -} - -pub async fn grant( - world: WorldOptions, - account: AccountOptions, - starknet: StarknetOptions, - env_metadata: Option, - kind: AuthKind, - transaction: TransactionOptions, -) -> Result<()> { - let world_address = world.world_address.unwrap_or_default(); - let world = - utils::world_from_env_metadata(world, account, starknet, &env_metadata).await.unwrap(); - let provider = world.account.provider(); - let world_reader = WorldContractReader::new(world_address, &provider) - .with_block(BlockId::Tag(BlockTag::Pending)); - - match kind { - AuthKind::Writer { models_contracts } => { - auth::grant_writer(&world, models_contracts, world_reader, transaction.into()).await - } - AuthKind::Owner { owners_resources } => { - auth::grant_owner(world, owners_resources, transaction.into()).await - } - } -} - #[derive(Debug, Subcommand)] pub enum AuthCommand { #[command(about = "Grant an auth role.")] @@ -115,7 +62,75 @@ impl AuthArgs { AuthCommand::Grant { kind, world, starknet, account, transaction } => config .tokio_handle() .block_on(grant(world, account, starknet, env_metadata, kind, transaction)), - _ => todo!(), + AuthCommand::Revoke { kind, world, starknet, account, transaction } => config + .tokio_handle() + .block_on(revoke(world, account, starknet, env_metadata, kind, transaction)), + } + } +} + +#[derive(Debug, Subcommand)] +pub enum AuthKind { + #[command(about = "Grant a contract permission to write to a model.")] + Writer { + #[arg(num_args = 1..)] + #[arg(required = true)] + #[arg(value_name = "model,contract_address")] + #[arg(help = "A list of models and contract address to grant write access to. Comma \ + separated values to indicate model name and contract address e.g. \ + model_name,path::to::contract model_name,contract_address ")] + models_contracts: Vec, + }, + #[command(about = "Grant ownership of a resource.")] + Owner { + #[arg(num_args = 1..)] + #[arg(required = true)] + #[arg(value_name = "resource,owner_address")] + #[arg(help = "A list of owners and resources to grant ownership to. Comma separated \ + values to indicate owner address and resouce e.g. 
\ + contract:path::to::contract,0x1234 contract:contract_address,0x1111, \ + model:model_name,0xbeef")] + owners_resources: Vec, + }, +} + +pub async fn grant( + world: WorldOptions, + account: AccountOptions, + starknet: StarknetOptions, + env_metadata: Option, + kind: AuthKind, + transaction: TransactionOptions, +) -> Result<()> { + let world = + utils::world_from_env_metadata(world, account, starknet, &env_metadata).await.unwrap(); + + match kind { + AuthKind::Writer { models_contracts } => { + auth::grant_writer(&world, models_contracts, transaction.into()).await + } + AuthKind::Owner { owners_resources } => { + auth::grant_owner(&world, owners_resources, transaction.into()).await + } + } +} + +pub async fn revoke( + world: WorldOptions, + account: AccountOptions, + starknet: StarknetOptions, + env_metadata: Option, + kind: AuthKind, + transaction: TransactionOptions, +) -> Result<()> { + let world = + utils::world_from_env_metadata(world, account, starknet, &env_metadata).await.unwrap(); + match kind { + AuthKind::Writer { models_contracts } => { + auth::revoke_writer(&world, models_contracts, transaction.into()).await + } + AuthKind::Owner { owners_resources } => { + auth::revoke_owner(&world, owners_resources, transaction.into()).await } } } diff --git a/bin/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs index cd7db4f1d9..0ed7661a30 100644 --- a/bin/sozo/src/commands/build.rs +++ b/bin/sozo/src/commands/build.rs @@ -1,16 +1,23 @@ -use anyhow::Result; +use anyhow::{Context, Result}; use clap::Args; use dojo_bindgen::{BuiltinPlugins, PluginManager}; use dojo_lang::scarb_internal::compile_workspace; +use prettytable::format::consts::FORMAT_NO_LINESEP_WITH_TITLE; +use prettytable::{format, Cell, Row, Table}; use scarb::core::{Config, TargetKind}; use scarb::ops::CompileOpts; +use sozo_ops::statistics::{get_contract_statistics_for_dir, ContractStatistics}; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct BuildArgs { #[arg(long)] #[arg(help = "Generate Typescript bindings.")] pub typescript: bool, + #[arg(long)] + #[arg(help = "Generate Typescript bindings.")] + pub typescript_v2: bool, + #[arg(long)] #[arg(help = "Generate Unity bindings.")] pub unity: bool, @@ -18,6 +25,9 @@ pub struct BuildArgs { #[arg(long)] #[arg(help = "Output directory.", default_value = "bindings")] pub bindings_output: String, + + #[arg(long, help = "Display statistics about the compiled contracts")] + pub stats: bool, } impl BuildArgs { @@ -32,14 +42,30 @@ impl BuildArgs { builtin_plugins.push(BuiltinPlugins::Typescript); } + if self.typescript_v2 { + builtin_plugins.push(BuiltinPlugins::TypeScriptV2); + } + if self.unity { builtin_plugins.push(BuiltinPlugins::Unity); } + if self.stats { + let target_dir = &compile_info.target_dir; + let contracts_statistics = get_contract_statistics_for_dir(target_dir) + .context("Error getting contracts stats")?; + let table = create_stats_table(contracts_statistics); + table.printstd() + } + // Custom plugins are always empty for now. 
let bindgen = PluginManager { + profile_name: compile_info.profile_name, output_path: self.bindings_output.into(), - artifacts_path: compile_info.target_dir, + manifest_path: compile_info.manifest_path, + root_package_name: compile_info + .root_package_name + .unwrap_or("NO_ROOT_PACKAGE".to_string()), plugins: vec![], builtin_plugins, }; @@ -53,19 +79,105 @@ impl BuildArgs { } } +fn create_stats_table(contracts_statistics: Vec) -> Table { + let mut table = Table::new(); + table.set_format(*FORMAT_NO_LINESEP_WITH_TITLE); + + // Add table headers + table.set_titles(Row::new(vec![ + Cell::new_align("Contract", format::Alignment::CENTER), + Cell::new_align("Bytecode size (felts)", format::Alignment::CENTER), + Cell::new_align("Class size (bytes)", format::Alignment::CENTER), + ])); + + for contract_stats in contracts_statistics { + // Add table rows + let contract_name = contract_stats.contract_name; + let number_felts = contract_stats.number_felts; + let file_size = contract_stats.file_size; + + table.add_row(Row::new(vec![ + Cell::new_align(&contract_name, format::Alignment::LEFT), + Cell::new_align(format!("{}", number_felts).as_str(), format::Alignment::RIGHT), + Cell::new_align(format!("{}", file_size).as_str(), format::Alignment::RIGHT), + ])); + } + + table +} + #[cfg(test)] mod tests { use dojo_test_utils::compiler::build_test_config; + use prettytable::format::consts::FORMAT_NO_LINESEP_WITH_TITLE; + use prettytable::{format, Cell, Row, Table}; + use sozo_ops::statistics::ContractStatistics; - use super::BuildArgs; + use super::{create_stats_table, BuildArgs}; #[test] fn build_example_with_typescript_and_unity_bindings() { let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); - let build_args = - BuildArgs { bindings_output: "generated".to_string(), typescript: true, unity: true }; + let build_args = BuildArgs { + bindings_output: "generated".to_string(), + typescript: true, + unity: true, + typescript_v2: true, + stats: true, + }; let result = build_args.run(&config); assert!(result.is_ok()); } + + #[test] + fn test_create_stats_table() { + // Arrange + let contracts_statistics = vec![ + ContractStatistics { + contract_name: "Test1".to_string(), + number_felts: 33, + file_size: 33, + }, + ContractStatistics { + contract_name: "Test2".to_string(), + number_felts: 43, + file_size: 24, + }, + ContractStatistics { + contract_name: "Test3".to_string(), + number_felts: 36, + file_size: 12, + }, + ]; + + let mut expected_table = Table::new(); + expected_table.set_format(*FORMAT_NO_LINESEP_WITH_TITLE); + expected_table.set_titles(Row::new(vec![ + Cell::new_align("Contract", format::Alignment::CENTER), + Cell::new_align("Bytecode size (felts)", format::Alignment::CENTER), + Cell::new_align("Class size (bytes)", format::Alignment::CENTER), + ])); + expected_table.add_row(Row::new(vec![ + Cell::new_align("Test1", format::Alignment::LEFT), + Cell::new_align(format!("{}", 33).as_str(), format::Alignment::RIGHT), + Cell::new_align(format!("{}", 33).as_str(), format::Alignment::RIGHT), + ])); + expected_table.add_row(Row::new(vec![ + Cell::new_align("Test2", format::Alignment::LEFT), + Cell::new_align(format!("{}", 43).as_str(), format::Alignment::RIGHT), + Cell::new_align(format!("{}", 24).as_str(), format::Alignment::RIGHT), + ])); + expected_table.add_row(Row::new(vec![ + Cell::new_align("Test3", format::Alignment::LEFT), + Cell::new_align(format!("{}", 36).as_str(), format::Alignment::RIGHT), + Cell::new_align(format!("{}", 12).as_str(), format::Alignment::RIGHT), 
+ ])); + + // Act + let table = create_stats_table(contracts_statistics); + + // Assert + assert_eq!(table, expected_table, "Tables mismatch") + } } diff --git a/bin/sozo/src/commands/call.rs b/bin/sozo/src/commands/call.rs new file mode 100644 index 0000000000..109f8af41c --- /dev/null +++ b/bin/sozo/src/commands/call.rs @@ -0,0 +1,56 @@ +use anyhow::Result; +use clap::Args; +use scarb::core::Config; +use starknet::core::types::FieldElement; + +use super::options::starknet::StarknetOptions; +use super::options::world::WorldOptions; +use crate::utils; + +#[derive(Debug, Args)] +#[command(about = "Call a system with the given calldata.")] +pub struct CallArgs { + #[arg(help = "The address or the fully qualified name of the contract to call.")] + pub contract: String, + + #[arg(help = "The name of the entrypoint to call.")] + pub entrypoint: String, + + #[arg(short, long)] + #[arg(value_delimiter = ',')] + #[arg(help = "The calldata to be passed to the entrypoint. Comma separated values e.g., \ + 0x12345,0x69420.")] + pub calldata: Vec, + + #[arg(short, long)] + #[arg(help = "The block ID (could be a hash, a number, 'pending' or 'latest')")] + pub block_id: Option, + + #[command(flatten)] + pub starknet: StarknetOptions, + + #[command(flatten)] + pub world: WorldOptions, +} + +impl CallArgs { + pub fn run(self, config: &Config) -> Result<()> { + let env_metadata = utils::load_metadata_from_config(config)?; + + config.tokio_handle().block_on(async { + let world_reader = + utils::world_reader_from_env_metadata(self.world, self.starknet, &env_metadata) + .await + .unwrap(); + + sozo_ops::call::call( + world_reader, + self.contract, + self.entrypoint, + self.calldata, + self.block_id, + ) + .await + }) + } +} diff --git a/bin/sozo/src/commands/clean.rs b/bin/sozo/src/commands/clean.rs index 0398c545a7..da6245c488 100644 --- a/bin/sozo/src/commands/clean.rs +++ b/bin/sozo/src/commands/clean.rs @@ -9,22 +9,19 @@ use scarb::core::Config; #[derive(Debug, Args)] pub struct CleanArgs { #[arg(short, long)] - #[arg(help = "Remove manifests and abis only.")] - #[arg(long_help = "Remove manifests and abis only.")] - pub manifests_abis: bool, - - #[arg(short, long)] - #[arg(help = "Remove artifacts only.")] - #[arg(long_help = "Remove artifacts only.")] - pub artifacts: bool, + #[arg(help = "Removes all the generated files, including scarb artifacts and ALL the \ + manifests files.")] + pub all: bool, } impl CleanArgs { - pub fn clean_manifests_abis(&self, root_dir: &Utf8PathBuf) -> Result<()> { - let dirs = vec![ - root_dir.join(MANIFESTS_DIR).join(BASE_DIR), - root_dir.join(ABIS_DIR).join(BASE_DIR), - ]; + /// Cleans the manifests and abis files that are generated at build time. + /// + /// # Arguments + /// + /// * `profile_dir` - The directory where the profile files are located. 
+ pub fn clean_manifests(&self, profile_dir: &Utf8PathBuf) -> Result<()> { + let dirs = vec![profile_dir.join(BASE_DIR), profile_dir.join(ABIS_DIR).join(BASE_DIR)]; for d in dirs { if d.exists() { @@ -38,18 +35,99 @@ impl CleanArgs { pub fn run(self, config: &Config) -> Result<()> { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - let clean_manifests_abis = self.manifests_abis || !self.artifacts; - let clean_artifacts = self.artifacts || !self.manifests_abis; + let profile_name = + ws.current_profile().expect("Scarb profile is expected at this point.").to_string(); - if clean_manifests_abis { - let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); - self.clean_manifests_abis(&manifest_dir)?; - } + // Manifest path is always a file, we can unwrap safely to get the + // parent folder. + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + + let profile_dir = manifest_dir.join(MANIFESTS_DIR).join(profile_name); + + // By default, this command cleans the build manifests and scarb artifacts. + scarb::ops::clean(config)?; + self.clean_manifests(&profile_dir)?; - if clean_artifacts { - scarb::ops::clean(config)?; + if self.all && profile_dir.exists() { + fs::remove_dir_all(profile_dir)?; } Ok(()) } } + +#[cfg(test)] +mod tests { + use dojo_test_utils::compiler; + use dojo_world::migration::TxnConfig; + use katana_runner::KatanaRunner; + use sozo_ops::migration; + + use super::*; + + #[test] + fn test_clean() { + let source_project = "../../examples/spawn-and-move/Scarb.toml"; + + // Build a completely new project in it's own directory. + let (temp_project_dir, config, _) = compiler::copy_build_project_temp(source_project, true); + + let runner = KatanaRunner::new().expect("Fail to set runner"); + + let ws = scarb::ops::read_workspace(config.manifest_path(), &config).unwrap(); + + // Plan the migration to generate some manifests other than base. 
+ config.tokio_handle().block_on(async { + migration::migrate( + &ws, + None, + runner.endpoint(), + &runner.account(0), + "dojo_examples", + true, + TxnConfig::default(), + ) + .await + .unwrap() + }); + + let clean_cmd = CleanArgs { all: false }; + clean_cmd.run(&config).unwrap(); + + let profile_name = config.profile().to_string(); + + let target_dev_dir = temp_project_dir.join("target").join(&profile_name); + let profile_manifests_dir = temp_project_dir.join("manifests").join(&profile_name); + let manifests_dev_base_dir = profile_manifests_dir.join("base"); + let manifests_dev_abis_base_dir = profile_manifests_dir.join("abis").join("base"); + let manifests_dev_abis_depl_dir = profile_manifests_dir.join("abis").join("deployments"); + let manifest_toml = profile_manifests_dir.join("manifest").with_extension("toml"); + let manifest_json = profile_manifests_dir.join("manifest").with_extension("json"); + + assert!(fs::read_dir(target_dev_dir).is_err(), "Expected 'target/dev' to be empty"); + assert!( + fs::read_dir(manifests_dev_base_dir).is_err(), + "Expected 'manifests/dev/base' to be empty" + ); + assert!( + fs::read_dir(manifests_dev_abis_base_dir).is_err(), + "Expected 'manifests/dev/abis/base' to be empty" + ); + assert!( + fs::read_dir(&manifests_dev_abis_depl_dir).is_ok(), + "Expected 'manifests/dev/abis/deployments' to not be empty" + ); + assert!(manifest_toml.exists(), "Expected 'manifest.toml' to exist"); + assert!(manifest_json.exists(), "Expected 'manifest.json' to exist"); + + let clean_cmd = CleanArgs { all: true }; + clean_cmd.run(&config).unwrap(); + + assert!( + fs::read_dir(&manifests_dev_abis_depl_dir).is_err(), + "Expected 'manifests/dev/abis/deployments' to be empty" + ); + assert!(!manifest_toml.exists(), "Expected 'manifest.toml' to not exist"); + assert!(!manifest_json.exists(), "Expected 'manifest.json' to not exist"); + } +} diff --git a/bin/sozo/src/commands/completions.rs b/bin/sozo/src/commands/completions.rs index 65c99ac2bb..1f71098822 100644 --- a/bin/sozo/src/commands/completions.rs +++ b/bin/sozo/src/commands/completions.rs @@ -6,7 +6,7 @@ use clap_complete::{generate, Shell}; use crate::args::SozoArgs; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct CompletionsArgs { shell: Shell, } diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index c5dc207510..772ad87fcc 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -13,23 +13,26 @@ use dojo_lang::scarb_internal::build_scarb_root_database; use dojo_world::manifest::{BaseManifest, DeploymentManifest}; use dojo_world::metadata::dojo_metadata_from_workspace; use dojo_world::migration::world::WorldDiff; +use dojo_world::migration::TxnConfig; use notify_debouncer_mini::notify::RecursiveMode; use notify_debouncer_mini::{new_debouncer, DebouncedEvent, DebouncedEventKind}; use scarb::compiler::CompilationUnit; use scarb::core::{Config, Workspace}; -use sozo_ops::migration::{self, prepare_migration}; +use sozo_ops::migration; use starknet::accounts::SingleOwnerAccount; use starknet::core::types::FieldElement; use starknet::providers::Provider; use starknet::signers::Signer; -use tracing_log::log; +use tracing::error; use super::migrate::setup_env; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; use super::options::world::WorldOptions; -#[derive(Args)] +pub(crate) const LOG_TARGET: &str = "sozo::cli::commands::dev"; + +#[derive(Debug, Args)] pub struct DevArgs { #[arg(long)] #[arg(help = "Name of the World.")] @@ 
-47,6 +50,108 @@ pub struct DevArgs { pub account: AccountOptions, } +impl DevArgs { + pub fn run(self, config: &Config) -> Result<()> { + let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; + + let env_metadata = if config.manifest_path().exists() { + dojo_metadata_from_workspace(&ws).env().cloned() + } else { + None + }; + + let mut context = load_context(config)?; + let (tx, rx) = channel(); + let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx)?; + + debouncer.watcher().watch( + config.manifest_path().parent().unwrap().as_std_path(), + RecursiveMode::Recursive, + )?; + + let name = self.name.unwrap_or_else(|| ws.root_package().unwrap().id.name.to_string()); + + let mut previous_manifest: Option = Option::None; + let result = build(&mut context); + + let Some((mut world_address, account, _)) = context + .ws + .config() + .tokio_handle() + .block_on(setup_env( + &context.ws, + self.account, + self.starknet, + self.world, + &name, + env_metadata.as_ref(), + )) + .ok() + else { + return Err(anyhow!("Failed to setup environment")); + }; + + match context.ws.config().tokio_handle().block_on(migrate( + world_address, + &account, + &name, + &context.ws, + previous_manifest.clone(), + )) { + Ok((manifest, address)) => { + previous_manifest = Some(manifest); + world_address = address; + } + Err(error) => { + error!( + target: LOG_TARGET, + error = ?error, + address = ?world_address, + "Migrating world." + ); + } + } + loop { + let action = match rx.recv() { + Ok(Ok(events)) => events + .iter() + .map(|event| process_event(event, &mut context)) + .last() + .unwrap_or(DevAction::None), + Ok(Err(_)) => DevAction::None, + Err(error) => { + error!(target: LOG_TARGET, error = ?error, "Receiving dev action."); + break; + } + }; + + if action != DevAction::None && build(&mut context).is_ok() { + match context.ws.config().tokio_handle().block_on(migrate( + world_address, + &account, + &name, + &context.ws, + previous_manifest.clone(), + )) { + Ok((manifest, address)) => { + previous_manifest = Some(manifest); + world_address = address; + } + Err(error) => { + error!( + target: LOG_TARGET, + error = ?error, + address = ?world_address, + "Migrating world.", + ); + } + } + } + } + result + } +} + #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] enum DevAction { None, @@ -112,7 +217,7 @@ fn build(context: &mut DevContext<'_>) -> Result<()> { async fn migrate( mut world_address: Option, account: &SingleOwnerAccount, - name: Option, + name: &str, ws: &Workspace<'_>, previous_manifest: Option, ) -> Result<(DeploymentManifest, Option)> @@ -142,9 +247,9 @@ where } let ui = ws.config().ui(); - let strategy = prepare_migration(&target_dir, diff, name, world_address, &ui)?; + let mut strategy = migration::prepare_migration(&target_dir, diff, name, world_address, &ui)?; - match migration::apply_diff(ws, account, None, &strategy).await { + match migration::apply_diff(ws, account, TxnConfig::default(), &mut strategy).await { Ok(migration_output) => { config.ui().print(format!( "🎉 World at address {} updated!", @@ -191,93 +296,3 @@ fn handle_reload_action(context: &mut DevContext<'_>) { let new_context = load_context(config).expect("Failed to load context"); let _ = mem::replace(context, new_context); } - -impl DevArgs { - pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = if config.manifest_path().exists() { - let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) - } else { - 
None - }; - - let mut context = load_context(config)?; - let (tx, rx) = channel(); - let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx)?; - - debouncer.watcher().watch( - config.manifest_path().parent().unwrap().as_std_path(), - RecursiveMode::Recursive, - )?; - let name = self.name.clone(); - let mut previous_manifest: Option = Option::None; - let result = build(&mut context); - - let Some((mut world_address, account, _)) = context - .ws - .config() - .tokio_handle() - .block_on(setup_env( - &context.ws, - self.account, - self.starknet, - self.world, - name.as_ref(), - env_metadata.as_ref(), - )) - .ok() - else { - return Err(anyhow!("Failed to setup environment")); - }; - - match context.ws.config().tokio_handle().block_on(migrate( - world_address, - &account, - name.clone(), - &context.ws, - previous_manifest.clone(), - )) { - Ok((manifest, address)) => { - previous_manifest = Some(manifest); - world_address = address; - } - Err(error) => { - log::error!("Error: {error:?}"); - } - } - loop { - let action = match rx.recv() { - Ok(Ok(events)) => events - .iter() - .map(|event| process_event(event, &mut context)) - .last() - .unwrap_or(DevAction::None), - Ok(Err(_)) => DevAction::None, - Err(error) => { - log::error!("Error: {error:?}"); - break; - } - }; - - if action != DevAction::None && build(&mut context).is_ok() { - match context.ws.config().tokio_handle().block_on(migrate( - world_address, - &account, - name.clone(), - &context.ws, - previous_manifest.clone(), - )) { - Ok((manifest, address)) => { - previous_manifest = Some(manifest); - world_address = address; - } - Err(error) => { - log::error!("Error: {error:?}"); - } - } - } - } - result - } -} diff --git a/bin/sozo/src/commands/events.rs b/bin/sozo/src/commands/events.rs index 95eb001ac1..d08a3a74d3 100644 --- a/bin/sozo/src/commands/events.rs +++ b/bin/sozo/src/commands/events.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use clap::Parser; +use clap::Args; use scarb::core::Config; use sozo_ops::events; @@ -7,7 +7,7 @@ use super::options::starknet::StarknetOptions; use super::options::world::WorldOptions; use crate::utils; -#[derive(Parser, Debug)] +#[derive(Debug, Args)] pub struct EventsArgs { #[arg(help = "List of specific events to be filtered")] #[arg(value_delimiter = ',')] @@ -54,6 +54,9 @@ impl EventsArgs { self.world.world_address, ); + let profile_name = + ws.current_profile().expect("Scarb profile expected at this point.").to_string(); + config.tokio_handle().block_on(async { events::parse( self.chunk_size, @@ -62,6 +65,7 @@ impl EventsArgs { event_filter, self.json, &manifest_dir, + &profile_name, ) .await }) diff --git a/bin/sozo/src/commands/execute.rs b/bin/sozo/src/commands/execute.rs index ed3f1b11c8..b53ffa9691 100644 --- a/bin/sozo/src/commands/execute.rs +++ b/bin/sozo/src/commands/execute.rs @@ -22,7 +22,7 @@ pub struct ExecuteArgs { #[arg(short, long)] #[arg(value_delimiter = ',')] - #[arg(help = "The calldata to be passed to the system. Comma seperated values e.g., \ + #[arg(help = "The calldata to be passed to the system. 
Comma separated values e.g., \ 0x12345,0x69420.")] pub calldata: Vec, @@ -54,7 +54,8 @@ impl ExecuteArgs { .unwrap(); let tx_config = self.transaction.into(); - execute::execute(self.contract, self.entrypoint, self.calldata, world, tx_config).await + execute::execute(self.contract, self.entrypoint, self.calldata, &world, &tx_config) + .await }) } } diff --git a/bin/sozo/src/commands/init.rs b/bin/sozo/src/commands/init.rs index 7fffbbb2e9..f37bc70036 100644 --- a/bin/sozo/src/commands/init.rs +++ b/bin/sozo/src/commands/init.rs @@ -7,7 +7,7 @@ use anyhow::{ensure, Result}; use clap::Args; use scarb::core::Config; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct InitArgs { #[arg(help = "Target directory")] path: Option, diff --git a/bin/sozo/src/commands/keystore.rs b/bin/sozo/src/commands/keystore.rs new file mode 100644 index 0000000000..ac5d567b53 --- /dev/null +++ b/bin/sozo/src/commands/keystore.rs @@ -0,0 +1,83 @@ +use std::path::PathBuf; + +use anyhow::Result; +use clap::{Args, Subcommand}; +use scarb::core::Config; +use sozo_ops::keystore; + +#[derive(Debug, Args)] +pub struct KeystoreArgs { + #[clap(subcommand)] + command: KeystoreCommand, +} + +#[derive(Debug, Subcommand)] +pub enum KeystoreCommand { + #[clap(about = "Randomly generate a new keystore.")] + New { + #[clap(long, help = "Supply password from command line option instead of prompt")] + password: Option, + + #[clap(long, help = "Overwrite the file if it already exists")] + force: bool, + + #[clap(help = "Path to save the JSON keystore")] + file: PathBuf, + }, + + #[clap(about = "Create a keystore file from an existing private key.")] + FromKey { + #[clap(long, help = "Overwrite the file if it already exists")] + force: bool, + + #[clap(long, help = "Supply private key from command line option instead of prompt")] + private_key: Option, + + #[clap(long, help = "Supply password from command line option instead of prompt")] + password: Option, + + #[clap(help = "Path to save the JSON keystore")] + file: PathBuf, + }, + + #[clap(about = "Check the public key of an existing keystore file.")] + Inspect { + #[clap(long, help = "Supply password from command line option instead of prompt")] + password: Option, + + #[clap(long, help = "Print the public key only")] + raw: bool, + + #[clap(help = "Path to the JSON keystore")] + file: PathBuf, + }, + + #[clap(about = "Check the private key of an existing keystore file.")] + InspectPrivate { + #[clap(long, help = "Supply password from command line option instead of prompt")] + password: Option, + + #[clap(long, help = "Print the private key only")] + raw: bool, + + #[clap(help = "Path to the JSON keystore")] + file: PathBuf, + }, +} + +impl KeystoreArgs { + pub fn run(self, _config: &Config) -> Result<()> { + match self.command { + KeystoreCommand::New { password, force, file } => keystore::new(password, force, file), + KeystoreCommand::FromKey { force, private_key, password, file } => { + keystore::from_key(force, private_key, password, file) + } + KeystoreCommand::Inspect { password, raw, file } => { + keystore::inspect(password, raw, file) + } + KeystoreCommand::InspectPrivate { password, raw, file } => { + keystore::inspect_private(password, raw, file) + } + } + } +} diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index 5690a21936..538de0fba8 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -1,7 +1,9 @@ use anyhow::{anyhow, Context, Result}; -use clap::Args; +use clap::{Args, Subcommand}; use 
dojo_lang::compiler::MANIFESTS_DIR; use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; +use dojo_world::migration::TxnConfig; +use katana_rpc_api::starknet::RPC_SPEC_VERSION; use scarb::core::{Config, Workspace}; use sozo_ops::migration; use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; @@ -16,29 +18,85 @@ use super::options::starknet::StarknetOptions; use super::options::transaction::TransactionOptions; use super::options::world::WorldOptions; -#[derive(Args)] +#[derive(Debug, Args)] pub struct MigrateArgs { - #[arg(short, long)] - #[arg(help = "Perform a dry run and outputs the plan to be executed.")] - pub dry_run: bool, + #[command(subcommand)] + pub command: MigrateCommand, - #[arg(long)] + #[arg(long, global = true)] #[arg(help = "Name of the World.")] #[arg(long_help = "Name of the World. It's hash will be used as a salt when deploying the \ - contract to avoid address conflicts.")] - pub name: Option, + contract to avoid address conflicts. If not provided root package's name \ + will be used.")] + name: Option, #[command(flatten)] - pub world: WorldOptions, + world: WorldOptions, #[command(flatten)] - pub starknet: StarknetOptions, + starknet: StarknetOptions, #[command(flatten)] - pub account: AccountOptions, + account: AccountOptions, +} - #[command(flatten)] - pub transaction: TransactionOptions, +#[derive(Debug, Subcommand)] +pub enum MigrateCommand { + #[command(about = "Plan the migration and output the manifests.")] + Plan, + #[command(about = "Apply the migration on-chain.")] + Apply { + #[command(flatten)] + transaction: TransactionOptions, + }, +} + +impl MigrateArgs { + pub fn run(self, config: &Config) -> Result<()> { + let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; + + let env_metadata = if config.manifest_path().exists() { + dojo_metadata_from_workspace(&ws).env().cloned() + } else { + None + }; + + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + if !manifest_dir.join(MANIFESTS_DIR).exists() { + return Err(anyhow!("Build project using `sozo build` first")); + } + + let MigrateArgs { name, world, starknet, account, .. 
} = self; + + let name = name.unwrap_or_else(|| { + ws.root_package().expect("Root package to be present").id.name.to_string() + }); + + let (world_address, account, rpc_url) = config.tokio_handle().block_on(async { + setup_env(&ws, account, starknet, world, &name, env_metadata.as_ref()).await + })?; + + match self.command { + MigrateCommand::Plan => config.tokio_handle().block_on(async { + migration::migrate( + &ws, + world_address, + rpc_url, + &account, + &name, + true, + TxnConfig::default(), + ) + .await + }), + MigrateCommand::Apply { transaction } => config.tokio_handle().block_on(async { + let txn_config: TxnConfig = transaction.into(); + + migration::migrate(&ws, world_address, rpc_url, &account, &name, false, txn_config) + .await + }), + } + } } pub async fn setup_env<'a>( @@ -46,7 +104,7 @@ pub async fn setup_env<'a>( account: AccountOptions, starknet: StarknetOptions, world: WorldOptions, - name: Option<&'a String>, + name: &str, env: Option<&'a Environment>, ) -> Result<( Option, @@ -57,8 +115,21 @@ pub async fn setup_env<'a>( let world_address = world.address(env).ok(); - let (account, chain_id) = { + let (account, rpc_url) = { let provider = starknet.provider(env)?; + + let spec_version = provider.spec_version().await?; + + if spec_version != RPC_SPEC_VERSION { + return Err(anyhow!( + "Unsupported Starknet RPC version: {}, expected {}.", + spec_version, + RPC_SPEC_VERSION + )); + } + + let rpc_url = starknet.url(env)?; + let chain_id = provider.chain_id().await?; let chain_id = parse_cairo_short_string(&chain_id) .with_context(|| "Cannot parse chain_id as string")?; @@ -69,12 +140,13 @@ pub async fn setup_env<'a>( let address = account.address(); ui.print(format!("\nMigration account: {address:#x}")); - if let Some(name) = name { - ui.print(format!("\nWorld name: {name}\n")); - } + + ui.print(format!("\nWorld name: {name}")); + + ui.print(format!("\nChain ID: {chain_id}\n")); match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { - Ok(_) => Ok((account, chain_id)), + Ok(_) => Ok((account, rpc_url)), Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) } @@ -83,44 +155,5 @@ pub async fn setup_env<'a>( } .with_context(|| "Problem initializing account for migration.")?; - Ok((world_address, account, chain_id)) -} - -impl MigrateArgs { - pub fn run(mut self, config: &Config) -> Result<()> { - let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - - // If `name` was not specified use package name from `Scarb.toml` file if it exists - if self.name.is_none() { - if let Some(root_package) = ws.root_package() { - self.name = Some(root_package.id.name.to_string()); - } - } - - let env_metadata = if config.manifest_path().exists() { - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) - } else { - None - }; - - let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); - if !manifest_dir.join(MANIFESTS_DIR).exists() { - return Err(anyhow!("Build project using `sozo build` first")); - } - - config.tokio_handle().block_on(async { - let (world_address, account, chain_id) = setup_env( - &ws, - self.account, - self.starknet, - self.world, - self.name.as_ref(), - env_metadata.as_ref(), - ) - .await?; - - migration::migrate(&ws, world_address, chain_id, &account, self.name, self.dry_run) - .await - }) - } + Ok((world_address, account, rpc_url.to_string())) } diff --git a/bin/sozo/src/commands/mod.rs 
b/bin/sozo/src/commands/mod.rs index fce588da4f..cff0a2aed9 100644 --- a/bin/sozo/src/commands/mod.rs +++ b/bin/sozo/src/commands/mod.rs @@ -1,24 +1,79 @@ use anyhow::Result; +use clap::{command, Subcommand}; use scarb::core::Config; -use crate::args::Commands; - +pub(crate) mod account; pub(crate) mod auth; pub(crate) mod build; +pub(crate) mod call; pub(crate) mod clean; pub(crate) mod completions; pub(crate) mod dev; pub(crate) mod events; pub(crate) mod execute; pub(crate) mod init; +pub(crate) mod keystore; pub(crate) mod migrate; pub(crate) mod model; pub(crate) mod options; pub(crate) mod register; pub(crate) mod test; +use account::AccountArgs; +use auth::AuthArgs; +use build::BuildArgs; +use call::CallArgs; +use clean::CleanArgs; +use completions::CompletionsArgs; +use dev::DevArgs; +use events::EventsArgs; +use execute::ExecuteArgs; +use init::InitArgs; +use keystore::KeystoreArgs; +use migrate::MigrateArgs; +use model::ModelArgs; +use register::RegisterArgs; +use test::TestArgs; + +#[derive(Subcommand)] +pub enum Commands { + #[command(about = "Manage accounts")] + Account(AccountArgs), + #[command(about = "Manage keystore files")] + Keystore(KeystoreArgs), + #[command(about = "Build the world, generating the necessary artifacts for deployment")] + Build(BuildArgs), + #[command(about = "Initialize a new project")] + Init(InitArgs), + #[command(about = "Remove generated artifacts, manifests and abis")] + Clean(CleanArgs), + #[command(about = "Run a migration, declaring and deploying contracts as necessary to \ + update the world")] + Migrate(Box), + #[command(about = "Developer mode: watcher for building and migration")] + Dev(DevArgs), + #[command(about = "Test the project's smart contracts")] + Test(TestArgs), + #[command(about = "Execute a world's system")] + Execute(ExecuteArgs), + #[command(about = "Call a world's system")] + Call(CallArgs), + #[command(about = "Interact with a worlds models")] + Model(ModelArgs), + #[command(about = "Register new models")] + Register(RegisterArgs), + #[command(about = "Queries world events")] + Events(EventsArgs), + #[command(about = "Manage world authorization")] + Auth(AuthArgs), + #[command(about = "Generate shell completion file for specified shell")] + Completions(CompletionsArgs), +} + pub fn run(command: Commands, config: &Config) -> Result<()> { match command { + Commands::Account(args) => args.run(config), + Commands::Keystore(args) => args.run(config), Commands::Init(args) => args.run(config), Commands::Clean(args) => args.run(config), Commands::Test(args) => args.run(config), @@ -27,6 +82,7 @@ pub fn run(command: Commands, config: &Config) -> Result<()> { Commands::Dev(args) => args.run(config), Commands::Auth(args) => args.run(config), Commands::Execute(args) => args.run(config), + Commands::Call(args) => args.run(config), Commands::Model(args) => args.run(config), Commands::Register(args) => args.run(config), Commands::Events(args) => args.run(config), diff --git a/bin/sozo/src/commands/model.rs b/bin/sozo/src/commands/model.rs index 1c69bbe3fe..e5e0aae244 100644 --- a/bin/sozo/src/commands/model.rs +++ b/bin/sozo/src/commands/model.rs @@ -11,11 +11,11 @@ use crate::utils; #[derive(Debug, Args)] pub struct ModelArgs { #[command(subcommand)] - command: ModelCommands, + command: ModelCommand, } #[derive(Debug, Subcommand)] -pub enum ModelCommands { +pub enum ModelCommand { #[command(about = "Retrieve the class hash of a model")] ClassHash { #[arg(help = "The name of the model")] @@ -80,22 +80,22 @@ impl ModelArgs { 
config.tokio_handle().block_on(async { match self.command { - ModelCommands::ClassHash { name, starknet, world } => { + ModelCommand::ClassHash { name, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_class_hash(name, world_address, provider).await } - ModelCommands::ContractAddress { name, starknet, world } => { + ModelCommand::ContractAddress { name, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_contract_address(name, world_address, provider).await } - ModelCommands::Schema { name, to_json, starknet, world } => { + ModelCommand::Schema { name, to_json, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_schema(name, world_address, provider, to_json).await } - ModelCommands::Get { name, keys, starknet, world } => { + ModelCommand::Get { name, keys, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_get(name, keys, world_address, provider).await diff --git a/bin/sozo/src/commands/options/account.rs b/bin/sozo/src/commands/options/account.rs index 1538e8f106..53a0193380 100644 --- a/bin/sozo/src/commands/options/account.rs +++ b/bin/sozo/src/commands/options/account.rs @@ -4,46 +4,32 @@ use anyhow::{anyhow, Context, Result}; use clap::Args; use dojo_world::metadata::Environment; use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; -use starknet::core::types::FieldElement; +use starknet::core::types::{BlockId, BlockTag, FieldElement}; use starknet::providers::Provider; -use starknet::signers::{LocalWallet, SigningKey}; +use starknet::signers::LocalWallet; -use super::{ - DOJO_ACCOUNT_ADDRESS_ENV_VAR, DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_KEYSTORE_PATH_ENV_VAR, - DOJO_PRIVATE_KEY_ENV_VAR, -}; +use super::signer::SignerOptions; +use super::DOJO_ACCOUNT_ADDRESS_ENV_VAR; -#[derive(Debug, Args)] -#[command(next_help_heading = "Account options")] // INVARIANT: // - For commandline: we can either specify `private_key` or `keystore_path` along with // `keystore_password`. This is enforced by Clap. // - For `Scarb.toml`: if both private_key and keystore are specified in `Scarb.toml` private_key // will take priority +#[derive(Debug, Args)] +#[command(next_help_heading = "Account options")] pub struct AccountOptions { #[arg(long, env = DOJO_ACCOUNT_ADDRESS_ENV_VAR)] + #[arg(global = true)] pub account_address: Option, - #[arg(long, env = DOJO_PRIVATE_KEY_ENV_VAR)] - #[arg(conflicts_with = "keystore_path")] - #[arg(help_heading = "Signer options - RAW")] - #[arg(help = "The raw private key associated with the account contract.")] - pub private_key: Option, - - #[arg(long = "keystore", env = DOJO_KEYSTORE_PATH_ENV_VAR)] - #[arg(value_name = "PATH")] - #[arg(help_heading = "Signer options - KEYSTORE")] - #[arg(help = "Use the keystore in the given folder or file.")] - pub keystore_path: Option, - - #[arg(long = "password", env = DOJO_KEYSTORE_PASSWORD_ENV_VAR)] - #[arg(value_name = "PASSWORD")] - #[arg(help_heading = "Signer options - KEYSTORE")] - #[arg(help = "The keystore password. 
Used with --keystore.")] - pub keystore_password: Option, + #[command(flatten)] + #[command(next_help_heading = "Signer options")] + pub signer: SignerOptions, #[arg(long)] #[arg(help = "Use legacy account (cairo0 account)")] + #[arg(global = true)] pub legacy: bool, } @@ -57,47 +43,21 @@ impl AccountOptions { P: Provider + Send + Sync, { let account_address = self.account_address(env_metadata)?; - let signer = self.signer(env_metadata)?; + let signer = self.signer.signer(env_metadata, false)?; let chain_id = provider.chain_id().await.with_context(|| "Failed to retrieve network chain id.")?; let encoding = if self.legacy { ExecutionEncoding::Legacy } else { ExecutionEncoding::New }; - Ok(SingleOwnerAccount::new(provider, signer, account_address, chain_id, encoding)) - } - - fn signer(&self, env_metadata: Option<&Environment>) -> Result { - if let Some(private_key) = - self.private_key.as_deref().or_else(|| env_metadata.and_then(|env| env.private_key())) - { - return Ok(LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - FieldElement::from_str(private_key)?, - ))); - } + let mut account = + SingleOwnerAccount::new(provider, signer, account_address, chain_id, encoding); - if let Some(path) = &self - .keystore_path - .as_deref() - .or_else(|| env_metadata.and_then(|env| env.keystore_path())) - { - if let Some(password) = self - .keystore_password - .as_deref() - .or_else(|| env_metadata.and_then(|env| env.keystore_password())) - { - return Ok(LocalWallet::from_signing_key(SigningKey::from_keystore( - path, password, - )?)); - } else { - return Err(anyhow!("Keystore path is specified but password is not.")); - } - } + // The default is `Latest` in starknet-rs, which does not reflect + // the nonce changes in the pending block. + account.set_block_id(BlockId::Tag(BlockTag::Pending)); - Err(anyhow!( - "Could not find private key. Please specify the private key or path to the keystore \ - file." 
- )) + Ok(account) } fn account_address(&self, env_metadata: Option<&Environment>) -> Result { @@ -116,17 +76,11 @@ impl AccountOptions { #[cfg(test)] mod tests { - use std::str::FromStr; - use clap::Parser; use starknet::accounts::{Call, ExecutionEncoder}; - use starknet::signers::{LocalWallet, Signer, SigningKey}; use starknet_crypto::FieldElement; - use super::{ - AccountOptions, DOJO_ACCOUNT_ADDRESS_ENV_VAR, DOJO_KEYSTORE_PASSWORD_ENV_VAR, - DOJO_PRIVATE_KEY_ENV_VAR, - }; + use super::{AccountOptions, DOJO_ACCOUNT_ADDRESS_ENV_VAR}; #[derive(clap::Parser, Debug)] struct Command { @@ -142,22 +96,6 @@ mod tests { assert_eq!(cmd.account.account_address, Some(FieldElement::from_hex_be("0x0").unwrap())); } - #[test] - fn private_key_read_from_env_variable() { - std::env::set_var(DOJO_PRIVATE_KEY_ENV_VAR, "private_key"); - - let cmd = Command::parse_from(["sozo", "--account-address", "0x0"]); - assert_eq!(cmd.account.private_key, Some("private_key".to_owned())); - } - - #[test] - fn keystore_path_read_from_env_variable() { - std::env::set_var(DOJO_KEYSTORE_PASSWORD_ENV_VAR, "keystore_password"); - - let cmd = Command::parse_from(["sozo", "--keystore", "./some/path"]); - assert_eq!(cmd.account.keystore_password, Some("keystore_password".to_owned())); - } - #[test] fn account_address_from_args() { let cmd = Command::parse_from(["sozo", "--account-address", "0x0"]); @@ -201,122 +139,6 @@ mod tests { assert!(cmd.account.account_address(None).is_err()); } - #[tokio::test] - async fn private_key_from_args() { - let private_key = "0x1"; - - let cmd = - Command::parse_from(["sozo", "--account-address", "0x0", "--private-key", private_key]); - let result_wallet = cmd.account.signer(None).unwrap(); - let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - FieldElement::from_str(private_key).unwrap(), - )); - - let result_public_key = result_wallet.get_public_key().await.unwrap(); - let expected_public_key = expected_wallet.get_public_key().await.unwrap(); - assert!(result_public_key.scalar() == expected_public_key.scalar()); - } - - #[tokio::test] - async fn private_key_from_env_metadata() { - let private_key = "0x1"; - let env_metadata = dojo_world::metadata::Environment { - private_key: Some(private_key.to_owned()), - ..Default::default() - }; - - let cmd = Command::parse_from(["sozo", "--account-address", "0x0"]); - let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); - let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - FieldElement::from_str(private_key).unwrap(), - )); - - let result_public_key = result_wallet.get_public_key().await.unwrap(); - let expected_public_key = expected_wallet.get_public_key().await.unwrap(); - assert!(result_public_key.scalar() == expected_public_key.scalar()); - } - - #[tokio::test] - async fn keystore_path_and_keystore_password_from_args() { - let keystore_path = "./tests/test_data/keystore/test.json"; - let keystore_password = "dojoftw"; - let private_key = "0x1"; - - let cmd = Command::parse_from([ - "sozo", - "--keystore", - keystore_path, - "--password", - keystore_password, - ]); - let result_wallet = cmd.account.signer(None).unwrap(); - let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - FieldElement::from_str(private_key).unwrap(), - )); - - let result_public_key = result_wallet.get_public_key().await.unwrap(); - let expected_public_key = expected_wallet.get_public_key().await.unwrap(); - assert!(result_public_key.scalar() == 
expected_public_key.scalar()); - } - - #[tokio::test] - async fn keystore_path_from_env_metadata() { - let keystore_path = "./tests/test_data/keystore/test.json"; - let keystore_password = "dojoftw"; - - let private_key = "0x1"; - let env_metadata = dojo_world::metadata::Environment { - keystore_path: Some(keystore_path.to_owned()), - ..Default::default() - }; - - let cmd = Command::parse_from(["sozo", "--password", keystore_password]); - let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); - let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - FieldElement::from_str(private_key).unwrap(), - )); - - let result_public_key = result_wallet.get_public_key().await.unwrap(); - let expected_public_key = expected_wallet.get_public_key().await.unwrap(); - assert!(result_public_key.scalar() == expected_public_key.scalar()); - } - - #[tokio::test] - async fn keystore_password_from_env_metadata() { - let keystore_path = "./tests/test_data/keystore/test.json"; - let keystore_password = "dojoftw"; - let private_key = "0x1"; - - let env_metadata = dojo_world::metadata::Environment { - keystore_password: Some(keystore_password.to_owned()), - ..Default::default() - }; - - let cmd = Command::parse_from(["sozo", "--keystore", keystore_path]); - let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); - let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - FieldElement::from_str(private_key).unwrap(), - )); - - let result_public_key = result_wallet.get_public_key().await.unwrap(); - let expected_public_key = expected_wallet.get_public_key().await.unwrap(); - assert!(result_public_key.scalar() == expected_public_key.scalar()); - } - - #[test] - fn dont_allow_both_private_key_and_keystore() { - let keystore_path = "./tests/test_data/keystore/test.json"; - let private_key = "0x1"; - let parse_result = Command::try_parse_from([ - "sozo", - "--keystore", - keystore_path, - "--private_key", - private_key, - ]); - assert!(parse_result.is_err()); - } - #[katana_runner::katana_test(2, true, "katana", "")] async fn legacy_flag_works_as_expected() { let cmd = Command::parse_from([ @@ -363,22 +185,4 @@ mod tests { // 0x2 is the Calldata len. 
assert!(*result.get(3).unwrap() == FieldElement::from_hex_be("0x2").unwrap()); } - - #[test] - fn keystore_path_without_keystore_password() { - let keystore_path = "./tests/test_data/keystore/test.json"; - - let cmd = Command::parse_from(["sozo", "--keystore", keystore_path]); - let result = cmd.account.signer(None); - - assert!(result.is_err()); - } - - #[test] - fn signer_without_pk_or_keystore() { - let cmd = Command::parse_from(["sozo"]); - let result = cmd.account.signer(None); - - assert!(result.is_err()); - } } diff --git a/bin/sozo/src/commands/options/fee.rs b/bin/sozo/src/commands/options/fee.rs new file mode 100644 index 0000000000..2cb8c42960 --- /dev/null +++ b/bin/sozo/src/commands/options/fee.rs @@ -0,0 +1,82 @@ +use anyhow::Result; +use bigdecimal::{BigDecimal, Zero}; +use clap::Args; +use num_integer::Integer; +use sozo_ops::account::FeeSetting; +use starknet::macros::felt; +use starknet_crypto::FieldElement; + +#[derive(Debug, Args, Clone)] +#[command(next_help_heading = "Fee options")] +pub struct FeeOptions { + #[clap(long, help = "Maximum transaction fee in Ether (18 decimals)")] + #[arg(global = true)] + max_fee: Option, + + #[clap(long, help = "Maximum transaction fee in Wei")] + #[arg(global = true)] + max_fee_raw: Option, + + #[clap(long, help = "Only estimate transaction fee without sending transaction")] + #[arg(global = true)] + estimate_only: bool, +} + +impl FeeOptions { + pub fn into_setting(self) -> Result { + match (self.max_fee, self.max_fee_raw, self.estimate_only) { + (Some(max_fee), None, false) => { + let max_fee_felt = bigdecimal_to_felt(&max_fee, 18)?; + + // The user is most likely making a mistake for using a max fee higher than 1 ETH + if max_fee_felt > felt!("1000000000000000000") { + anyhow::bail!( + "the --max-fee value is too large. --max-fee expects a value in Ether (18 \ + decimals). Use --max-fee-raw instead to use a raw max_fee amount in Wei." + ) + } + + Ok(FeeSetting::Manual(max_fee_felt)) + } + (None, Some(max_fee_raw), false) => Ok(FeeSetting::Manual(max_fee_raw)), + (None, None, true) => Ok(FeeSetting::EstimateOnly), + (None, None, false) => Ok(FeeSetting::None), + _ => Err(anyhow::anyhow!( + "invalid fee option. At most one of --max-fee, --max-fee-raw, and --estimate-only \ + can be used." + )), + } + } +} + +#[allow(clippy::comparison_chain)] +fn bigdecimal_to_felt(dec: &BigDecimal, decimals: D) -> Result +where + D: Into, +{ + let decimals: i64 = decimals.into(); + + // Scale the bigint part up or down + let (bigint, exponent) = dec.as_bigint_and_exponent(); + + let mut biguint = match bigint.to_biguint() { + Some(value) => value, + None => anyhow::bail!("too many decimal places"), + }; + + if exponent < decimals { + for _ in 0..(decimals - exponent) { + biguint *= 10u32; + } + } else if exponent > decimals { + for _ in 0..(exponent - decimals) { + let (quotient, remainder) = biguint.div_rem(&10u32.into()); + if !remainder.is_zero() { + anyhow::bail!("too many decimal places") + } + biguint = quotient; + } + } + + Ok(FieldElement::from_byte_slice_be(&biguint.to_bytes_be())?) 
+} diff --git a/bin/sozo/src/commands/options/mod.rs b/bin/sozo/src/commands/options/mod.rs index 0bd599bcc1..9f817439fd 100644 --- a/bin/sozo/src/commands/options/mod.rs +++ b/bin/sozo/src/commands/options/mod.rs @@ -1,4 +1,6 @@ pub mod account; +pub mod fee; +pub mod signer; pub mod starknet; pub mod transaction; pub mod world; diff --git a/bin/sozo/src/commands/options/signer.rs b/bin/sozo/src/commands/options/signer.rs new file mode 100644 index 0000000000..8c36c2bea5 --- /dev/null +++ b/bin/sozo/src/commands/options/signer.rs @@ -0,0 +1,244 @@ +use std::str::FromStr; + +use anyhow::{anyhow, Result}; +use clap::Args; +use dojo_world::metadata::Environment; +use starknet::core::types::FieldElement; +use starknet::signers::{LocalWallet, SigningKey}; + +use super::{DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_KEYSTORE_PATH_ENV_VAR, DOJO_PRIVATE_KEY_ENV_VAR}; + +#[derive(Debug, Args)] +#[command(next_help_heading = "Signer options")] +// INVARIANT: +// - For commandline: we can either specify `private_key` or `keystore_path` along with +// `keystore_password`. This is enforced by Clap. +// - For `Scarb.toml`: if both private_key and keystore are specified in `Scarb.toml` private_key +// will take priority +pub struct SignerOptions { + #[arg(long, env = DOJO_PRIVATE_KEY_ENV_VAR)] + #[arg(conflicts_with = "keystore_path")] + #[arg(help_heading = "Signer options - RAW")] + #[arg(help = "The raw private key associated with the account contract.")] + #[arg(global = true)] + pub private_key: Option, + + #[arg(long = "keystore", env = DOJO_KEYSTORE_PATH_ENV_VAR)] + #[arg(value_name = "PATH")] + #[arg(help_heading = "Signer options - KEYSTORE")] + #[arg(help = "Use the keystore in the given folder or file.")] + #[arg(global = true)] + pub keystore_path: Option, + + #[arg(long = "password", env = DOJO_KEYSTORE_PASSWORD_ENV_VAR)] + #[arg(value_name = "PASSWORD")] + #[arg(help_heading = "Signer options - KEYSTORE")] + #[arg(help = "The keystore password. Used with --keystore.")] + #[arg(global = true)] + pub keystore_password: Option, +} + +impl SignerOptions { + pub fn signer(&self, env_metadata: Option<&Environment>, no_wait: bool) -> Result { + if let Some(private_key) = + self.private_key.as_deref().or_else(|| env_metadata.and_then(|env| env.private_key())) + { + return Ok(LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key)?, + ))); + } + + if let Some(path) = &self + .keystore_path + .as_deref() + .or_else(|| env_metadata.and_then(|env| env.keystore_path())) + { + let password = { + if let Some(password) = self + .keystore_password + .as_deref() + .or_else(|| env_metadata.and_then(|env| env.keystore_password())) + { + password.to_owned() + } else if no_wait { + return Err(anyhow!("Could not find password. Please specify the password.")); + } else { + rpassword::prompt_password("Enter password: ")? + } + }; + let private_key = SigningKey::from_keystore(path, &password)?; + return Ok(LocalWallet::from_signing_key(private_key)); + } + + Err(anyhow!( + "Could not find private key. Please specify the private key or path to the keystore \ + file." 
+ )) + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use clap::Parser; + use starknet::signers::{LocalWallet, Signer, SigningKey}; + use starknet_crypto::FieldElement; + + use super::{SignerOptions, DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_PRIVATE_KEY_ENV_VAR}; + + #[derive(clap::Parser, Debug)] + struct Command { + #[clap(flatten)] + pub signer: SignerOptions, + } + + #[test] + fn private_key_read_from_env_variable() { + std::env::set_var(DOJO_PRIVATE_KEY_ENV_VAR, "private_key"); + + let cmd = Command::parse_from(["sozo"]); + assert_eq!(cmd.signer.private_key, Some("private_key".to_owned())); + } + + #[test] + fn keystore_path_read_from_env_variable() { + std::env::set_var(DOJO_KEYSTORE_PASSWORD_ENV_VAR, "keystore_password"); + + let cmd = Command::parse_from(["sozo", "--keystore", "./some/path"]); + assert_eq!(cmd.signer.keystore_password, Some("keystore_password".to_owned())); + } + + #[tokio::test] + async fn private_key_from_args() { + let private_key = "0x1"; + + let cmd = Command::parse_from(["sozo", "--private-key", private_key]); + let result_wallet = cmd.signer.signer(None, true).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn private_key_from_env_metadata() { + let private_key = "0x1"; + let env_metadata = dojo_world::metadata::Environment { + private_key: Some(private_key.to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from(["sozo"]); + let result_wallet = cmd.signer.signer(Some(&env_metadata), true).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn keystore_path_and_keystore_password_from_args() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let keystore_password = "dojoftw"; + let private_key = "0x1"; + + let cmd = Command::parse_from([ + "sozo", + "--keystore", + keystore_path, + "--password", + keystore_password, + ]); + let result_wallet = cmd.signer.signer(None, true).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn keystore_path_from_env_metadata() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let keystore_password = "dojoftw"; + + let private_key = "0x1"; + let env_metadata = dojo_world::metadata::Environment { + keystore_path: Some(keystore_path.to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from(["sozo", "--password", keystore_password]); + let result_wallet = cmd.signer.signer(Some(&env_metadata), true).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + 
FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn keystore_password_from_env_metadata() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let keystore_password = "dojoftw"; + let private_key = "0x1"; + + let env_metadata = dojo_world::metadata::Environment { + keystore_password: Some(keystore_password.to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from(["sozo", "--keystore", keystore_path]); + let result_wallet = cmd.signer.signer(Some(&env_metadata), true).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[test] + fn dont_allow_both_private_key_and_keystore() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let private_key = "0x1"; + let parse_result = Command::try_parse_from([ + "sozo", + "--keystore", + keystore_path, + "--private_key", + private_key, + ]); + assert!(parse_result.is_err()); + } + + #[test] + fn keystore_path_without_keystore_password() { + let keystore_path = "./tests/test_data/keystore/test.json"; + + let cmd = Command::parse_from(["sozo", "--keystore", keystore_path]); + let result = cmd.signer.signer(None, true); + + assert!(result.is_err()); + } + + #[test] + fn signer_without_pk_or_keystore() { + let cmd = Command::parse_from(["sozo"]); + let result = cmd.signer.signer(None, true); + + assert!(result.is_err()); + } +} diff --git a/bin/sozo/src/commands/options/starknet.rs b/bin/sozo/src/commands/options/starknet.rs index 011b04ae9e..91cce0b5dd 100644 --- a/bin/sozo/src/commands/options/starknet.rs +++ b/bin/sozo/src/commands/options/starknet.rs @@ -13,6 +13,7 @@ pub struct StarknetOptions { #[arg(long, env = STARKNET_RPC_URL_ENV_VAR)] #[arg(value_name = "URL")] #[arg(help = "The Starknet RPC endpoint.")] + #[arg(global = true)] pub rpc_url: Option, } @@ -24,8 +25,10 @@ impl StarknetOptions { Ok(JsonRpcClient::new(HttpTransport::new(self.url(env_metadata)?))) } - // we dont check the env var because that would be handled by `clap` - fn url(&self, env_metadata: Option<&Environment>) -> Result { + // We dont check the env var because that would be handled by `clap`. + // This function is made public because [`JsonRpcClient`] does not expose + // the raw rpc url. 
+ pub fn url(&self, env_metadata: Option<&Environment>) -> Result { if let Some(url) = self.rpc_url.as_ref() { Ok(url.clone()) } else if let Some(url) = env_metadata.and_then(|env| env.rpc_url()) { diff --git a/bin/sozo/src/commands/options/transaction.rs b/bin/sozo/src/commands/options/transaction.rs index 7901c6ca6a..6349fb3377 100644 --- a/bin/sozo/src/commands/options/transaction.rs +++ b/bin/sozo/src/commands/options/transaction.rs @@ -1,40 +1,49 @@ use clap::Args; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; +use starknet::core::types::FieldElement; -#[derive(Debug, Args, Clone)] +#[derive(Debug, Args)] #[command(next_help_heading = "Transaction options")] pub struct TransactionOptions { - #[arg(long)] - #[arg(value_name = "MULTIPLIER")] + #[arg(long, value_name = "MULTIPLIER")] #[arg(help = "The multiplier to use for the fee estimate.")] #[arg(long_help = "The multiplier to use for the fee estimate. This value will be used on \ the estimated fee which will be used as the max fee for the transaction. \ (max_fee = estimated_fee * multiplier)")] + #[arg(conflicts_with = "max_fee_raw")] + #[arg(global = true)] pub fee_estimate_multiplier: Option, - #[arg(short, long)] + #[arg(long)] + #[arg(help = "Maximum raw value to be used for fees, in Wei.")] + #[arg(conflicts_with = "fee_estimate_multiplier")] + #[arg(global = true)] + pub max_fee_raw: Option, + + #[arg(long)] #[arg(help = "Wait until the transaction is accepted by the sequencer, returning the status \ and hash.")] #[arg(long_help = "Wait until the transaction is accepted by the sequencer, returning the \ status and the hash. This will poll the transaction status until it gets \ accepted or rejected by the sequencer.")] + #[arg(global = true)] pub wait: bool, - #[arg(short, long)] + #[arg(long)] #[arg( help = "If --wait is set, returns the full transaction receipt. Otherwise, it is a no-op." )] - #[arg(long_help = "If --wait is set, returns the full transaction receipt. Otherwise, it is \ - a no-op.")] + #[arg(global = true)] pub receipt: bool, } -impl From for TxConfig { +impl From for TxnConfig { fn from(value: TransactionOptions) -> Self { Self { fee_estimate_multiplier: value.fee_estimate_multiplier, wait: value.wait, receipt: value.receipt, + max_fee_raw: value.max_fee_raw, } } } diff --git a/bin/sozo/src/commands/options/world.rs b/bin/sozo/src/commands/options/world.rs index 7a54617e7a..c8fe9d5be9 100644 --- a/bin/sozo/src/commands/options/world.rs +++ b/bin/sozo/src/commands/options/world.rs @@ -12,6 +12,7 @@ use super::DOJO_WORLD_ADDRESS_ENV_VAR; pub struct WorldOptions { #[arg(help = "The address of the World contract.")] #[arg(long = "world", env = DOJO_WORLD_ADDRESS_ENV_VAR)] + #[arg(global = true)] pub world_address: Option, } diff --git a/bin/sozo/src/commands/test.rs b/bin/sozo/src/commands/test.rs index 532f583387..9ca645c72c 100644 --- a/bin/sozo/src/commands/test.rs +++ b/bin/sozo/src/commands/test.rs @@ -18,8 +18,10 @@ use scarb::core::Config; use scarb::ops; use tracing::trace; +pub(crate) const LOG_TARGET: &str = "sozo::cli::commands::test"; + /// Execute all unit tests of a local package. -#[derive(Args, Clone)] +#[derive(Debug, Args)] pub struct TestArgs { /// The filter for the tests, running only tests containing the filter string. 
#[arg(short, long, default_value_t = String::default())] @@ -116,7 +118,7 @@ fn build_project_config(unit: &CompilationUnit) -> Result { let project_config = ProjectConfig { base_path: unit.main_component().package.root().into(), corelib, content }; - trace!(?project_config); + trace!(target: LOG_TARGET, ?project_config); Ok(project_config) } diff --git a/bin/sozo/src/main.rs b/bin/sozo/src/main.rs index 060a9d0cf9..18da01da73 100644 --- a/bin/sozo/src/main.rs +++ b/bin/sozo/src/main.rs @@ -1,17 +1,16 @@ use std::env; use std::process::exit; -use std::str::FromStr; use anyhow::Result; -use args::{Commands, SozoArgs}; -use camino::Utf8PathBuf; +use args::SozoArgs; use clap::Parser; use dojo_lang::compiler::DojoCompiler; use dojo_lang::plugin::CairoPluginRepository; use scarb::compiler::CompilerRepository; -use scarb::core::{Config, TomlManifest}; +use scarb::core::Config; use scarb_ui::{OutputFormat, Ui}; -use semver::Version; + +use crate::commands::Commands; mod args; mod commands; @@ -41,7 +40,7 @@ fn cli_main(args: SozoArgs) -> Result<()> { let manifest_path = scarb::ops::find_manifest_path(args.manifest_path.as_deref())?; - verify_cairo_version_compatibility(&manifest_path)?; + utils::verify_cairo_version_compatibility(&manifest_path)?; let config = Config::builder(manifest_path) .log_filter_directive(env::var_os("SCARB_LOG")) @@ -54,26 +53,3 @@ fn cli_main(args: SozoArgs) -> Result<()> { commands::run(args.command, &config) } - -fn verify_cairo_version_compatibility(manifest_path: &Utf8PathBuf) -> Result<()> { - let scarb_cairo_version = scarb::version::get().cairo; - // When manifest file doesn't exists ignore it. Would be the case during `sozo init` - let Ok(manifest) = TomlManifest::read_from_path(manifest_path) else { return Ok(()) }; - - // For any kind of error, like package not specified, cairo version not specified return - // without an error - let Some(package) = manifest.package else { return Ok(()) }; - - let Some(cairo_version) = package.cairo_version else { return Ok(()) }; - - // only when cairo version is found in manifest file confirm that it matches - let version_req = cairo_version.as_defined().unwrap(); - let version = Version::from_str(scarb_cairo_version.version).unwrap(); - if !version_req.matches(&version) { - anyhow::bail!( - "Specified cairo version not supported by dojo. Please verify and update dojo." - ); - }; - - Ok(()) -} diff --git a/bin/sozo/src/utils.rs b/bin/sozo/src/utils.rs index 76d6de797b..d277c8d910 100644 --- a/bin/sozo/src/utils.rs +++ b/bin/sozo/src/utils.rs @@ -1,7 +1,12 @@ -use anyhow::Error; +use std::str::FromStr; + +use anyhow::{Error, Result}; +use camino::Utf8PathBuf; use dojo_world::contracts::world::WorldContract; +use dojo_world::contracts::WorldContractReader; use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; -use scarb::core::Config; +use scarb::core::{Config, TomlManifest}; +use semver::Version; use starknet::accounts::SingleOwnerAccount; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; @@ -11,11 +16,20 @@ use crate::commands::options::account::AccountOptions; use crate::commands::options::starknet::StarknetOptions; use crate::commands::options::world::WorldOptions; +/// Load metadata from the Scarb configuration. +/// +/// # Arguments +/// +/// * `config` - Scarb project configuration. +/// +/// # Returns +/// +/// A [`Environment`] on success. 
pub fn load_metadata_from_config(config: &Config) -> Result, Error> { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + dojo_metadata_from_workspace(&ws).env().cloned() } else { None }; @@ -23,6 +37,18 @@ pub fn load_metadata_from_config(config: &Config) -> Result, Ok(env_metadata) } +/// Build a world contract from the provided environment. +/// +/// # Arguments +/// +/// * `world` - The world options such as the world address, +/// * `account` - The account options, +/// * `starknet` - The Starknet options such as the RPC url, +/// * `env_metadata` - Optional environment coming from Scarb configuration. +/// +/// # Returns +/// +/// A [`WorldContract`] on success. pub async fn world_from_env_metadata( world: WorldOptions, account: AccountOptions, @@ -35,3 +61,61 @@ pub async fn world_from_env_metadata( let account = account.account(provider, env_metadata.as_ref()).await?; Ok(WorldContract::new(world_address, account)) } + +/// Build a world contract reader from the provided environment. +/// +/// # Arguments +/// +/// * `world` - The world options such as the world address, +/// * `starknet` - The Starknet options such as the RPC url, +/// * `env_metadata` - Optional environment coming from Scarb configuration. +/// +/// # Returns +/// +/// A [`WorldContractReader`] on success. +pub async fn world_reader_from_env_metadata( + world: WorldOptions, + starknet: StarknetOptions, + env_metadata: &Option, +) -> Result>, Error> { + let world_address = world.address(env_metadata.as_ref())?; + let provider = starknet.provider(env_metadata.as_ref())?; + + Ok(WorldContractReader::new(world_address, provider)) +} + +pub fn verify_cairo_version_compatibility(manifest_path: &Utf8PathBuf) -> Result<()> { + let scarb_cairo_version = scarb::version::get().cairo; + // When manifest file doesn't exists ignore it. Would be the case during `sozo init` + let Ok(manifest) = TomlManifest::read_from_path(manifest_path) else { return Ok(()) }; + + // For any kind of error, like package not specified, cairo version not specified return + // without an error + let Some(package) = manifest.package else { return Ok(()) }; + + let Some(cairo_version) = package.cairo_version else { return Ok(()) }; + + // only when cairo version is found in manifest file confirm that it matches + let version_req = cairo_version.as_defined().unwrap(); + let version = Version::from_str(scarb_cairo_version.version).unwrap(); + if !version_req.matches(&version) { + anyhow::bail!( + "Specified cairo version not supported by dojo. Please verify and update dojo." 
+ ); + }; + + Ok(()) +} + +pub fn generate_version() -> String { + const DOJO_VERSION: &str = env!("CARGO_PKG_VERSION"); + let scarb_version = scarb::version::get().version; + let scarb_sierra_version = scarb::version::get().sierra.version; + let scarb_cairo_version = scarb::version::get().cairo.version; + + let version_string = format!( + "{}\nscarb: {}\ncairo: {}\nsierra: {}", + DOJO_VERSION, scarb_version, scarb_cairo_version, scarb_sierra_version, + ); + version_string +} diff --git a/bin/sozo/tests/register_test.rs b/bin/sozo/tests/register_test.rs index 176bd1c5ec..607b4634bd 100644 --- a/bin/sozo/tests/register_test.rs +++ b/bin/sozo/tests/register_test.rs @@ -5,6 +5,7 @@ use dojo_test_utils::migration::prepare_migration; use dojo_test_utils::sequencer::{ get_default_test_starknet_config, SequencerConfig, TestSequencer, }; +use dojo_world::migration::TxnConfig; use scarb::ops; use sozo_ops::migration::execute_strategy; use starknet::accounts::Account; @@ -27,7 +28,7 @@ async fn reregister_models() { let mut account = sequencer.account(); account.set_block_id(BlockId::Tag(BlockTag::Pending)); - execute_strategy(&ws, &migration, &account, None).await.unwrap(); + execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); let world_address = &format!("0x{:x}", &migration.world_address().unwrap()); let account_address = &format!("0x{:x}", account.address()); let private_key = &format!("0x{:x}", sequencer.raw_account().private_key); @@ -35,7 +36,7 @@ async fn reregister_models() { let moves_model = migration.models.iter().find(|m| m.diff.name == "dojo_examples::models::moves").unwrap(); - let moves_model_class_hash = &format!("0x{:x}", moves_model.diff.local); + let moves_model_class_hash = &format!("0x{:x}", moves_model.diff.local_class_hash); let args_vec = [ "register", "model", diff --git a/bin/sozo/tests/test_account.rs b/bin/sozo/tests/test_account.rs new file mode 100644 index 0000000000..0d42870286 --- /dev/null +++ b/bin/sozo/tests/test_account.rs @@ -0,0 +1,46 @@ +mod utils; + +use std::fs; + +use assert_fs::fixture::PathChild; +use sozo_ops::account; +use starknet::accounts::Account; +use utils::snapbox::get_snapbox; + +#[test] +fn test_account_new() { + let pt = assert_fs::TempDir::new().unwrap(); + let dst_path = pt.child("keystore.json"); + let src_path = fs::canonicalize("./tests/test_data/keystore/keystore.json").unwrap(); + fs::copy(src_path, dst_path).unwrap(); + + get_snapbox() + .arg("account") + .arg("new") + .arg("account.json") + .arg("--keystore") + .arg("keystore.json") + .arg("--password") + .arg("password") + .current_dir(&pt) + .assert() + .success(); + + assert!(pt.child("account.json").exists()); +} + +#[katana_runner::katana_test(1, true)] +async fn test_account_fetch() { + let pt = assert_fs::TempDir::new().unwrap(); + + account::fetch( + runner.owned_provider(), + false, + pt.child("account.json").to_path_buf(), + runner.account(1).address(), + ) + .await + .unwrap(); + + assert!(pt.child("account.json").exists()); +} diff --git a/bin/sozo/tests/test_data/account/account.json b/bin/sozo/tests/test_data/account/account.json new file mode 100644 index 0000000000..542b9b9c4c --- /dev/null +++ b/bin/sozo/tests/test_data/account/account.json @@ -0,0 +1,14 @@ +{ + "version": 1, + "variant": { + "type": "open_zeppelin", + "version": 1, + "public_key": "0x566d69d8c99f62bc71118399bab25c1f03719463eab8d6a444cd11ece131616", + "legacy": false + }, + "deployment": { + "status": "undeployed", + "class_hash": 
"0x5400e90f7e0ae78bd02c77cd75527280470e2fe19c54970dd79dc37a9d3645c", + "salt": "0x39ee9015d35fae61ebf7348b4630b849033bd223fdd96f1e8c8d54e4611ebf2" + } +} diff --git a/bin/sozo/tests/test_data/compiled_contracts/test_contract.json b/bin/sozo/tests/test_data/compiled_contracts/test_contract.json new file mode 120000 index 0000000000..c7a135aa79 --- /dev/null +++ b/bin/sozo/tests/test_data/compiled_contracts/test_contract.json @@ -0,0 +1 @@ +../../../../../crates/katana/contracts/compiled/cairo1_contract.json \ No newline at end of file diff --git a/bin/sozo/tests/test_data/keystore/keystore.json b/bin/sozo/tests/test_data/keystore/keystore.json new file mode 100644 index 0000000000..41585d606e --- /dev/null +++ b/bin/sozo/tests/test_data/keystore/keystore.json @@ -0,0 +1 @@ +{"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"58141a1f640b439118e841655847b745"},"ciphertext":"7a077a8dcfc4ec1182f8f08768a2efcc31a9740658d9e3560196748fcf8b27a2","kdf":"scrypt","kdfparams":{"dklen":32,"n":8192,"p":1,"r":8,"salt":"a0fff244c4dab8f989cbe2b8d261401847f6c49c2470ca8b5d88296ae0ddce7e"},"mac":"26e659a2acadab44a689b776c07da1bdd435253be884b4deac95ca510ae7d27e"},"id":"c062226f-cfdd-416a-9e5e-a5cf8a8b5d9a","version":3} \ No newline at end of file diff --git a/bin/sozo/tests/test_keystore.rs b/bin/sozo/tests/test_keystore.rs new file mode 100644 index 0000000000..3da687d9d4 --- /dev/null +++ b/bin/sozo/tests/test_keystore.rs @@ -0,0 +1,129 @@ +mod utils; + +use std::fs; + +use assert_fs::fixture::{FileTouch, PathChild}; +use utils::snapbox::get_snapbox; + +#[test] +fn test_keystore_new() { + let pt = assert_fs::TempDir::new().unwrap(); + + get_snapbox() + .arg("keystore") + .arg("new") + .arg("keystore.json") + .arg("--password") + .arg("password") + .current_dir(&pt) + .assert() + .success(); + + assert!(pt.child("keystore.json").exists()); +} + +#[test] +fn test_keystore_new_force() { + let pt = assert_fs::TempDir::new().unwrap(); + + pt.child("keystore.json").touch().unwrap(); + + get_snapbox() + .arg("keystore") + .arg("new") + .arg("--password") + .arg("password") + .arg("keystore.json") + .arg("--force") + .current_dir(&pt) + .assert() + .success(); + + assert!(pt.child("keystore.json").exists()); + + let contents = fs::read_to_string(pt.child("keystore.json")).unwrap(); + assert!(!contents.is_empty()); +} + +#[test] +fn test_keystore_from_key() { + let pt = assert_fs::TempDir::new().unwrap(); + + get_snapbox() + .arg("keystore") + .arg("from-key") + .arg("keystore.json") + .arg("--password") + .arg("password") + .arg("--private-key") + .arg("0x123") + .current_dir(&pt) + .assert() + .success(); + + assert!(pt.child("keystore.json").exists()); +} + +#[test] +fn test_keystore_inspect() { + let path = fs::canonicalize("./tests/test_data/keystore").unwrap(); + + let assert = get_snapbox() + .arg("keystore") + .arg("inspect") + .arg("keystore.json") + .arg("--password") + .arg("password") + .current_dir(path) + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + + assert_eq!( + output.trim(), + "Public key: 0x0566d69d8c99f62bc71118399bab25c1f03719463eab8d6a444cd11ece131616" + ) +} + +#[test] +fn test_keystore_inspect_raw() { + let path = fs::canonicalize("./tests/test_data/keystore").unwrap(); + + let assert = get_snapbox() + .arg("keystore") + .arg("inspect") + .arg("keystore.json") + .arg("--password") + .arg("password") + .arg("--raw") + .current_dir(path) + .assert() + .success(); + + let output = 
String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + + assert_eq!(output.trim(), "0x0566d69d8c99f62bc71118399bab25c1f03719463eab8d6a444cd11ece131616") +} + +#[test] +fn test_keystore_inspect_private() { + let path = fs::canonicalize("./tests/test_data/keystore").unwrap(); + + let assert = get_snapbox() + .arg("keystore") + .arg("inspect-private") + .arg("keystore.json") + .arg("--password") + .arg("password") + .current_dir(path) + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + + assert_eq!( + output.trim(), + "Private key: 0x0000000000000000000000000000000000000000000000000000000000000123" + ) +} diff --git a/bin/torii/Cargo.toml b/bin/torii/Cargo.toml index eb01a20f41..9c21d160b9 100644 --- a/bin/torii/Cargo.toml +++ b/bin/torii/Cargo.toml @@ -14,6 +14,7 @@ chrono.workspace = true clap.workspace = true common.workspace = true ctrlc = { version = "3.4", features = [ "termination" ] } +dojo-metrics.workspace = true dojo-types.workspace = true dojo-world.workspace = true either = "1.9.0" @@ -24,8 +25,6 @@ hyper-reverse-proxy = { git = "https://github.com/tarrencev/hyper-reverse-proxy" hyper.workspace = true indexmap = "1.9.3" lazy_static.workspace = true -metrics-process.workspace = true -metrics.workspace = true scarb.workspace = true serde.workspace = true serde_json.workspace = true @@ -38,13 +37,13 @@ tokio.workspace = true torii-core.workspace = true torii-graphql.workspace = true torii-grpc = { workspace = true, features = [ "server" ] } +torii-relay.workspace = true torii-server.workspace = true tower = "0.4.13" tower-http = "0.4.4" tracing-subscriber.workspace = true tracing.workspace = true url.workspace = true -torii-relay.workspace = true webbrowser = "0.8" [dev-dependencies] @@ -52,5 +51,5 @@ camino.workspace = true [features] default = [ "jemalloc", "sqlite" ] -jemalloc = [ "metrics/jemalloc" ] +jemalloc = [ "dojo-metrics/jemalloc" ] sqlite = [ "sqlx/sqlite" ] diff --git a/bin/torii/src/main.rs b/bin/torii/src/main.rs index e9eaef586c..bcaead3a19 100644 --- a/bin/torii/src/main.rs +++ b/bin/torii/src/main.rs @@ -16,8 +16,8 @@ use std::sync::Arc; use clap::Parser; use common::parse::{parse_socket_address, parse_url}; +use dojo_metrics::{metrics_process, prometheus_exporter}; use dojo_world::contracts::world::WorldContractReader; -use metrics::prometheus_exporter; use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; use sqlx::SqlitePool; use starknet::core::types::FieldElement; @@ -42,6 +42,8 @@ use tracing::{error, info}; use tracing_subscriber::{fmt, EnvFilter}; use url::{form_urlencoded, Url}; +pub(crate) const LOG_TARGET: &str = "torii::cli"; + /// Dojo World Indexer #[derive(Parser, Debug)] #[command(name = "torii", author, version, about, long_about = None)] @@ -92,7 +94,7 @@ struct Args { /// The external url of the server, used for configuring the GraphQL Playground in a hosted /// environment - #[arg(long)] + #[arg(long, value_parser = parse_url)] external_url: Option, /// Enable Prometheus metrics. 
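The torii logging changes in this file swap inline format strings for an explicit `target:` plus structured fields. A minimal sketch of that tracing pattern, using the same `LOG_TARGET` value as the patch and a plain fmt subscriber for output:

use tracing::info;

pub(crate) const LOG_TARGET: &str = "torii::cli";

fn main() {
    // Any subscriber works; the default fmt subscriber is enough to see the output.
    tracing_subscriber::fmt().init();

    let endpoint = "0.0.0.0:8080";
    // `target:` overrides the module path, and `key = %value` records the field
    // via its Display impl instead of interpolating it into the message.
    info!(target: LOG_TARGET, endpoint = %endpoint, "Starting torii endpoint.");
}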
@@ -198,6 +200,7 @@ async fn main() -> anyhow::Result<()> { let mut libp2p_relay_server = torii_relay::server::Relay::new( db, + provider.clone(), args.relay_port, args.relay_webrtc_port, args.relay_local_key_path, @@ -220,20 +223,20 @@ async fn main() -> anyhow::Result<()> { form_urlencoded::byte_serialize(gql_endpoint.replace("0.0.0.0", "localhost").as_bytes()) .collect(); let explorer_url = format!("https://worlds.dev/torii?url={}", encoded); - info!(target: "torii::cli", "Starting torii endpoint: {}", endpoint); - info!(target: "torii::cli", "Serving Graphql playground: {}", gql_endpoint); - info!(target: "torii::cli", "World Explorer is available on: {}\n", explorer_url); + info!(target: LOG_TARGET, endpoint = %endpoint, "Starting torii endpoint."); + info!(target: LOG_TARGET, endpoint = %gql_endpoint, "Serving Graphql playground."); + info!(target: LOG_TARGET, url = %explorer_url, "Serving World Explorer."); if args.explorer { if let Err(e) = webbrowser::open(&explorer_url) { - error!("Failed to open World Explorer in the browser: {e}"); + error!(target: LOG_TARGET, error = %e, "Opening World Explorer in the browser."); } } if let Some(listen_addr) = args.metrics { let prometheus_handle = prometheus_exporter::install_recorder("torii")?; - info!(target: "torii::cli", addr = %listen_addr, "Starting metrics endpoint"); + info!(target: LOG_TARGET, addr = %listen_addr, "Starting metrics endpoint."); prometheus_exporter::serve( listen_addr, prometheus_handle, diff --git a/crates/benches/src/deployer.rs b/crates/benches/src/deployer.rs index 6a02e4c905..abb0251c07 100644 --- a/crates/benches/src/deployer.rs +++ b/crates/benches/src/deployer.rs @@ -5,17 +5,14 @@ use anyhow::{anyhow, bail, Context, Ok, Result}; use clap::Parser; use dojo_lang::compiler::{DojoCompiler, DEPLOYMENTS_DIR, MANIFESTS_DIR}; use dojo_lang::plugin::CairoPluginRepository; -use dojo_lang::scarb_internal::compile_workspace; use dojo_world::manifest::DeploymentManifest; use futures::executor::block_on; use katana_runner::KatanaRunner; use scarb::compiler::CompilerRepository; -use scarb::core::{Config, TargetKind}; -use scarb::ops::CompileOpts; -use sozo::args::{Commands, SozoArgs}; +use scarb::core::Config; +use sozo::args::SozoArgs; +use sozo::commands::Commands; use starknet::core::types::FieldElement; -use starknet::core::utils::parse_cairo_short_string; -use starknet::providers::Provider; use tokio::process::Command; use crate::{CONTRACT, CONTRACT_RELATIVE_TO_TESTS, RUNTIME}; @@ -52,6 +49,7 @@ async fn deploy_contract( let args = SozoArgs::parse_from([ "sozo", "migrate", + "apply", "--rpc-url", &runner.endpoint(), "--manifest-path", @@ -81,7 +79,7 @@ async fn prepare_migration_args(args: SozoArgs) -> Result { compilers.add(Box::new(DojoCompiler)).unwrap(); let manifest_path = scarb::ops::find_manifest_path(args.manifest_path.as_deref())?; - let config = Config::builder(manifest_path) + let config = Config::builder(manifest_path.clone()) .log_filter_directive(env::var_os("SCARB_LOG")) .profile(args.profile_spec.determine()?) 
.offline(args.offline) @@ -92,36 +90,17 @@ async fn prepare_migration_args(args: SozoArgs) -> Result { .context("failed to build config")?; // Extractiong migration command, as here https://github.com/dojoengine/dojo/blob/25fbb7fc973cff4ce1273625c4664545d9b088e9/bin/sozo/src/commands/mod.rs#L24-L25 - let mut migrate = match args.command { + let migrate = match args.command { Commands::Migrate(migrate) => *migrate, _ => return Err(anyhow!("failed to parse migrate args")), }; - // Preparing workspace, as in https://github.com/dojoengine/dojo/blob/25fbb7fc973cff4ce1273625c4664545d9b088e9/bin/sozo/src/commands/migrate.rs#L40-L41 - let ws = scarb::ops::read_workspace(config.manifest_path(), &config)?; - if migrate.name.is_none() { - if let Some(root_package) = ws.root_package() { - migrate.name = Some(root_package.id.name.to_string()); - } - } - - compile_workspace( - &config, - CompileOpts { include_targets: vec![], exclude_targets: vec![TargetKind::TEST] }, - )?; - - let manifest_dir = ws.manifest_path().parent().unwrap(); - let chain_id = migrate.starknet.provider(None).unwrap().chain_id().await.unwrap(); - let chain_id = parse_cairo_short_string(&chain_id).unwrap(); - migrate.run(&config)?; + let manifest_dir = manifest_path.parent().unwrap(); + let manifest = DeploymentManifest::load_from_path( - &manifest_dir - .join(MANIFESTS_DIR) - .join(DEPLOYMENTS_DIR) - .join(chain_id) - .with_extension("toml"), + &manifest_dir.join(MANIFESTS_DIR).join("dev").join(DEPLOYMENTS_DIR).with_extension("toml"), ) .expect("failed to load manifest"); diff --git a/crates/benches/src/main.rs b/crates/benches/src/main.rs index 62330c14e4..8fe3434388 100644 --- a/crates/benches/src/main.rs +++ b/crates/benches/src/main.rs @@ -36,7 +36,7 @@ fn main() { } } - let mut pairs = map.into_iter().map(|(name, runs)| (name, runs)).collect::>(); + let mut pairs = map.into_iter().collect::>(); pairs.sort_by_key(|(key, _)| key.clone()); for (name, mut runs) in pairs { diff --git a/crates/dojo-bindgen/Cargo.toml b/crates/dojo-bindgen/Cargo.toml index 9d0d8800c5..43b271c123 100644 --- a/crates/dojo-bindgen/Cargo.toml +++ b/crates/dojo-bindgen/Cargo.toml @@ -20,4 +20,5 @@ chrono.workspace = true # https://github.com/dojoengine/dojo/actions/runs/7736050751/job/21092743552?pr=1501#step:6:249 # dojo-test-utils = { path = "../dojo-test-utils", features = [ "build-examples" ] } +dojo-world = { path = "../dojo-world", features = [ "manifest" ] } cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } diff --git a/crates/dojo-bindgen/src/error.rs b/crates/dojo-bindgen/src/error.rs index 95553874b8..71adcd3e99 100644 --- a/crates/dojo-bindgen/src/error.rs +++ b/crates/dojo-bindgen/src/error.rs @@ -1,4 +1,5 @@ use cainome::parser::Error as CainomeError; +use dojo_world::manifest::AbstractManifestError; use thiserror::Error; #[derive(Error, Debug)] @@ -11,6 +12,8 @@ pub enum Error { Cainome(#[from] CainomeError), #[error("Format error: {0}")] Format(String), + #[error(transparent)] + Manifest(#[from] AbstractManifestError), } pub type BindgenResult = Result; diff --git a/crates/dojo-bindgen/src/lib.rs b/crates/dojo-bindgen/src/lib.rs index ce9962698d..ca8e24cd79 100644 --- a/crates/dojo-bindgen/src/lib.rs +++ b/crates/dojo-bindgen/src/lib.rs @@ -6,12 +6,13 @@ use cainome::parser::tokens::Token; use cainome::parser::{AbiParser, TokenizedAbi}; use camino::Utf8PathBuf; use convert_case::{Case, Casing}; - +use dojo_world::manifest::BaseManifest; pub mod error; use error::{BindgenResult, 
Error}; mod plugins; use plugins::typescript::TypescriptPlugin; +use plugins::typescript_v2::TypeScriptV2Plugin; use plugins::unity::UnityPlugin; use plugins::BuiltinPlugin; pub use plugins::BuiltinPlugins; @@ -29,8 +30,8 @@ pub struct DojoModel { #[derive(Debug, PartialEq)] pub struct DojoContract { - /// Contract's name. - pub contract_file_name: String, + /// Contract's fully qualified name. + pub qualified_path: String, /// Full ABI of the contract in case the plugin wants to make extra checks, /// or generated other functions than the systems. pub tokens: TokenizedAbi, @@ -38,21 +39,32 @@ pub struct DojoContract { pub systems: Vec, } +#[derive(Debug, PartialEq)] +pub struct DojoWorld { + /// The world's name from the Scarb manifest. + pub name: String, +} + #[derive(Debug)] pub struct DojoData { + /// World data. + pub world: DojoWorld, /// All contracts found in the project. pub contracts: HashMap, /// All the models contracts found in the project. pub models: HashMap, } -// TODO: include the manifest to have more metadata when new manifest is available. #[derive(Debug)] pub struct PluginManager { + /// Profile name. + pub profile_name: String, + /// Root package name. + pub root_package_name: String, /// Path of generated files. pub output_path: PathBuf, - /// Path of contracts artifacts. - pub artifacts_path: Utf8PathBuf, + /// Path of Dojo manifest. + pub manifest_path: Utf8PathBuf, /// A list of builtin plugins to invoke. pub builtin_plugins: Vec, /// A list of custom plugins to invoke. @@ -66,13 +78,15 @@ impl PluginManager { return Ok(()); } - let data = gather_dojo_data(&self.artifacts_path)?; + let data = + gather_dojo_data(&self.manifest_path, &self.root_package_name, &self.profile_name)?; for plugin in &self.builtin_plugins { // Get the plugin builder from the plugin enum. let builder: Box = match plugin { BuiltinPlugins::Typescript => Box::new(TypescriptPlugin::new()), BuiltinPlugins::Unity => Box::new(UnityPlugin::new()), + BuiltinPlugins::TypeScriptV2 => Box::new(TypeScriptV2Plugin::new()), }; let files = builder.generate_code(&data).await?; @@ -88,101 +102,90 @@ impl PluginManager { } } -/// Gathers dojo data from artifacts. -/// TODO: this should be modified later to use the new manifest structure. -/// it's currently done from the artifacts to decouple from the manifest. +/// Gathers dojo data from the manifests files. /// /// # Arguments /// -/// * `artifacts_path` - Artifacts path where contracts were generated. -fn gather_dojo_data(artifacts_path: &Utf8PathBuf) -> BindgenResult { +/// * `manifest_path` - Dojo manifest path. +fn gather_dojo_data( + manifest_path: &Utf8PathBuf, + root_package_name: &str, + profile_name: &str, +) -> BindgenResult { + let root_dir: Utf8PathBuf = manifest_path.parent().unwrap().into(); + let base_manifest_dir: Utf8PathBuf = root_dir.join("manifests").join(profile_name).join("base"); + let base_manifest = BaseManifest::load_from_path(&base_manifest_dir)?; + let mut models = HashMap::new(); let mut contracts = HashMap::new(); - for entry in fs::read_dir(artifacts_path)? { - let entry = entry?; - let path = entry.path(); - - if path.is_file() { - if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) { - let file_content = fs::read_to_string(&path)?; - - // Models and Contracts must have a valid ABI. - if let Ok(tokens) = - AbiParser::tokens_from_abi_string(&file_content, &HashMap::new()) - { - // Contract. 
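gather_dojo_data now resolves manifests relative to the Scarb manifest instead of scanning compiled artifacts. A small sketch of the directory layout it assumes, with a hypothetical `base_manifest_dir` helper and illustrative paths:

use camino::Utf8PathBuf;

fn base_manifest_dir(manifest_path: &Utf8PathBuf, profile_name: &str) -> Utf8PathBuf {
    // Same resolution as above: the Scarb.toml parent is the project root, and
    // base manifests live under manifests/<profile>/base.
    let root_dir = manifest_path.parent().expect("manifest path has a parent");
    root_dir.join("manifests").join(profile_name).join("base")
}

fn main() {
    let dir = base_manifest_dir(
        &Utf8PathBuf::from("examples/spawn-and-move/Scarb.toml"),
        "dev",
    );
    assert!(dir.ends_with("manifests/dev/base"));
}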
- if is_systems_contract(file_name, &file_content) { - // Identify the systems -> for now only take the functions from the - // interfaces. - let mut systems = vec![]; - let interface_blacklist = [ - "dojo::world::IWorldProvider", - "dojo::components::upgradeable::IUpgradeable", - ]; - - for (interface, funcs) in &tokens.interfaces { - if !interface_blacklist.contains(&interface.as_str()) { - systems.extend(funcs.clone()); - } - } - - contracts.insert( - file_name.to_string(), - DojoContract { - contract_file_name: file_name.to_string(), - tokens: tokens.clone(), - systems, - }, - ); - } - - // Model. - if is_model_contract(&tokens) { - if let Some(model_name) = model_name_from_artifact_filename(file_name) { - let model_pascal_case = - model_name.from_case(Case::Snake).to_case(Case::Pascal); - - let model = DojoModel { - name: model_pascal_case.clone(), - qualified_path: file_name - .replace(&model_name, &model_pascal_case) - .trim_end_matches(".json") - .to_string(), - tokens: filter_model_tokens(&tokens), - }; - - models.insert(model_pascal_case, model); - } else { - return Err(Error::Format(format!( - "Could not extract model name from file name `{file_name}`" - ))); - } - } - } + for contract_manifest in &base_manifest.contracts { + // Base manifest always use path for ABI. + let abi = contract_manifest + .inner + .abi + .as_ref() + .expect("Valid ABI for contract") + .load_abi_string(&root_dir)?; + + let tokens = AbiParser::tokens_from_abi_string(&abi, &HashMap::new())?; + + // Identify the systems -> for now only take the functions from the + // interfaces. + let mut systems = vec![]; + let interface_blacklist = + ["dojo::world::IWorldProvider", "dojo::components::upgradeable::IUpgradeable"]; + + for (interface, funcs) in &tokens.interfaces { + if !interface_blacklist.contains(&interface.as_str()) { + systems.extend(funcs.clone()); } } + + let contract_name = contract_manifest.name.to_string(); + + contracts.insert( + contract_name.clone(), + DojoContract { qualified_path: contract_name, tokens, systems }, + ); } - Ok(DojoData { models, contracts }) -} + for model_manifest in &base_manifest.models { + // Base manifest always use path for ABI. + let abi = model_manifest + .inner + .abi + .as_ref() + .expect("Valid ABI for contract") + .load_abi_string(&root_dir)?; + + let tokens = AbiParser::tokens_from_abi_string(&abi, &HashMap::new())?; + + let name = model_manifest.name.to_string(); + + if let Some(model_name) = model_name_from_fully_qualified_path(&name) { + let model_pascal_case = model_name.from_case(Case::Snake).to_case(Case::Pascal); + + let model = DojoModel { + name: model_pascal_case.clone(), + qualified_path: name + .replace(&model_name, &model_pascal_case) + .trim_end_matches(".json") + .to_string(), + tokens: filter_model_tokens(&tokens), + }; -/// Identifies if the given contract contains systems. -/// -/// For now the identification is very naive and don't use the manifest -/// as the manifest format will change soon. -/// TODO: use the new manifest files once available. -/// -/// # Arguments -/// -/// * `file_name` - Name of the contract file. -/// * `file_content` - Content of the contract artifact. 
-fn is_systems_contract(file_name: &str, file_content: &str) -> bool { - if file_name.starts_with("dojo::") || file_name == "manifest.json" { - return false; + models.insert(model_pascal_case, model); + } else { + return Err(Error::Format(format!( + "Could not extract model name from file name `{name}`" + ))); + } } - file_content.contains("IWorldDispatcher") + let world = DojoWorld { name: root_package_name.to_string() }; + + Ok(DojoData { world, models, contracts }) } /// Filters the model ABI to keep relevant types @@ -222,138 +225,60 @@ fn filter_model_tokens(tokens: &TokenizedAbi) -> TokenizedAbi { TokenizedAbi { structs, enums, ..Default::default() } } -/// Extracts a model name from the artifact file name. +/// Extracts a model name from the fully qualified path of the model. /// /// # Example /// -/// The file name "dojo_examples::models::position.json" should return "position". +/// The fully qualified name "dojo_examples::models::position" should return "position". /// /// # Arguments /// -/// * `file_name` - Artifact file name. -fn model_name_from_artifact_filename(file_name: &str) -> Option { +/// * `file_name` - Fully qualified model name. +fn model_name_from_fully_qualified_path(file_name: &str) -> Option { let parts: Vec<&str> = file_name.split("::").collect(); - if let Some(last_part) = parts.last() { - // TODO: for now, we always reconstruct with PascalCase. - // Once manifest data are available, use the exact name instead. - // We may have errors here is the struct is named like myStruct and not MyStruct. - // Plugin dev should consider case insensitive comparison. - last_part.split_once(".json").map(|m_ext| m_ext.0.to_string()) - } else { - None - } + // TODO: we may want to have inside the manifest the name of the model struct + // instead of extracting it from the file's name. + parts.last().map(|last_part| last_part.to_string()) } -/// Identifies if the given contract contains a model. -/// -/// The identification is based on the methods name. This must -/// be adjusted if the model attribute expansion change in the future. -/// -/// -/// # Arguments -/// -/// * `file_name` - Name of the contract file. -/// * `file_content` - Content of the contract artifact. -fn is_model_contract(tokens: &TokenizedAbi) -> bool { - let expected_funcs = ["name", "layout", "packed_size", "unpacked_size", "schema"]; - - let mut funcs_counts = 0; +#[cfg(test)] +mod tests { + use super::*; - for functions in tokens.interfaces.values() { - for f in functions { - if expected_funcs.contains(&f.to_function().expect("Function expected").name.as_str()) { - funcs_counts += 1; - } - } + #[test] + fn model_name_from_fully_qualified_path_ok() { + let file_name = "dojo_examples::models::position"; + assert_eq!(model_name_from_fully_qualified_path(file_name), Some("position".to_string())); } - funcs_counts == expected_funcs.len() -} + #[test] + fn gather_data_ok() { + let data = gather_dojo_data( + &Utf8PathBuf::from("src/test_data/spawn-and-move/Scarb.toml"), + "dojo_example", + "dev", + ) + .unwrap(); + + assert_eq!(data.models.len(), 4); + + assert_eq!(data.world.name, "dojo_example"); + + let pos = data.models.get("Position").unwrap(); + assert_eq!(pos.name, "Position"); + assert_eq!(pos.qualified_path, "dojo_examples::models::Position"); -// Uncomment tests once windows issue is solved. 
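The model map is keyed by the PascalCase struct name while the manifest keeps the snake_case path segment, hence the convert_case call above. A tiny sketch of that conversion; the `emote_message` input is just an example matching one of the models in the test below:

use convert_case::{Case, Casing};

fn main() {
    let model_name = "emote_message";
    // snake_case manifest segment -> PascalCase model key.
    let pascal = model_name.from_case(Case::Snake).to_case(Case::Pascal);
    assert_eq!(pascal, "EmoteMessage");
}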
-// #[cfg(test)] -// mod tests { -// use super::*; - -// #[test] -// fn is_system_contract_ok() { -// let file_name = "dojo_examples::actions::actions.json"; -// let file_content = include_str!( -// "test_data/spawn-and-move/target/dev/dojo_examples::actions::actions.json" -// ); - -// assert!(is_systems_contract(file_name, file_content)); -// } - -// #[test] -// fn is_system_contract_ignore_dojo_files() { -// let file_name = "dojo::world::world.json"; -// let file_content = ""; -// assert!(!is_systems_contract(file_name, file_content)); - -// let file_name = "manifest.json"; -// assert!(!is_systems_contract(file_name, file_content)); -// } - -// #[test] -// fn test_is_system_contract_ignore_models() { -// let file_name = "dojo_examples::models::position.json"; -// let file_content = include_str!( -// "test_data/spawn-and-move/target/dev/dojo_examples::models::position.json" -// ); -// assert!(!is_systems_contract(file_name, file_content)); -// } - -// #[test] -// fn model_name_from_artifact_filename_ok() { -// let file_name = "dojo_examples::models::position.json"; -// assert_eq!(model_name_from_artifact_filename(file_name), Some("position".to_string())); -// } - -// #[test] -// fn is_model_contract_ok() { -// let file_content = -// -// include_str!("test_data/spawn-and-move/target/dev/dojo_examples::models::moves.json"); -// let tokens = AbiParser::tokens_from_abi_string(file_content, &HashMap::new()).unwrap(); - -// assert!(is_model_contract(&tokens)); -// } - -// #[test] -// fn is_model_contract_ignore_systems() { -// let file_content = include_str!( -// "test_data/spawn-and-move/target/dev/dojo_examples::actions::actions.json" -// ); -// let tokens = AbiParser::tokens_from_abi_string(file_content, &HashMap::new()).unwrap(); - -// assert!(!is_model_contract(&tokens)); -// } - -// #[test] -// fn is_model_contract_ignore_dojo_files() { -// let file_content = -// include_str!("test_data/spawn-and-move/target/dev/dojo::world::world.json"); -// let tokens = AbiParser::tokens_from_abi_string(file_content, &HashMap::new()).unwrap(); - -// assert!(!is_model_contract(&tokens)); -// } - -// #[test] -// fn gather_data_ok() { -// let data = -// gather_dojo_data(&Utf8PathBuf::from("src/test_data/spawn-and-move/target/dev")) -// .unwrap(); - -// assert_eq!(data.models.len(), 2); - -// let pos = data.models.get("Position").unwrap(); -// assert_eq!(pos.name, "Position"); -// assert_eq!(pos.qualified_path, "dojo_examples::models::Position"); - -// let moves = data.models.get("Moves").unwrap(); -// assert_eq!(moves.name, "Moves"); -// assert_eq!(moves.qualified_path, "dojo_examples::models::Moves"); -// } -// } + let moves = data.models.get("Moves").unwrap(); + assert_eq!(moves.name, "Moves"); + assert_eq!(moves.qualified_path, "dojo_examples::models::Moves"); + + let moved = data.models.get("Moved").unwrap(); + assert_eq!(moved.name, "Moved"); + assert_eq!(moved.qualified_path, "dojo_examples::actions::actions::Moved"); + + let moved = data.models.get("EmoteMessage").unwrap(); + assert_eq!(moved.name, "EmoteMessage"); + assert_eq!(moved.qualified_path, "dojo_examples::models::EmoteMessage"); + } +} diff --git a/crates/dojo-bindgen/src/plugins/mod.rs b/crates/dojo-bindgen/src/plugins/mod.rs index ab6abbcb8b..b603262e44 100644 --- a/crates/dojo-bindgen/src/plugins/mod.rs +++ b/crates/dojo-bindgen/src/plugins/mod.rs @@ -8,12 +8,14 @@ use crate::error::BindgenResult; use crate::DojoData; pub mod typescript; +pub mod typescript_v2; pub mod unity; #[derive(Debug)] pub enum BuiltinPlugins { Typescript, Unity, 
+ TypeScriptV2, } impl fmt::Display for BuiltinPlugins { @@ -21,6 +23,7 @@ impl fmt::Display for BuiltinPlugins { match self { BuiltinPlugins::Typescript => write!(f, "typescript"), BuiltinPlugins::Unity => write!(f, "unity"), + BuiltinPlugins::TypeScriptV2 => write!(f, "typescript_v2"), } } } diff --git a/crates/dojo-bindgen/src/plugins/typescript/mod.rs b/crates/dojo-bindgen/src/plugins/typescript/mod.rs index 91f50d6306..de2f11bc91 100644 --- a/crates/dojo-bindgen/src/plugins/typescript/mod.rs +++ b/crates/dojo-bindgen/src/plugins/typescript/mod.rs @@ -2,8 +2,7 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; use async_trait::async_trait; -use cainome::parser::tokens::{Composite, CompositeType, Function}; -use convert_case::Casing; +use cainome::parser::tokens::{Composite, CompositeType, Function, Token}; use crate::error::BindgenResult; use crate::plugins::BuiltinPlugin; @@ -28,6 +27,7 @@ impl TypescriptPlugin { "u256" => "RecsType.BigInt".to_string(), "usize" => "RecsType.Number".to_string(), "felt252" => "RecsType.BigInt".to_string(), + "bytes31" => "RecsType.String".to_string(), "ClassHash" => "RecsType.BigInt".to_string(), "ContractAddress" => "RecsType.BigInt".to_string(), @@ -211,6 +211,11 @@ export enum {} {{ }); for token in &structs { + if handled_tokens.iter().filter(|t| t.type_name() == token.type_name()).count() > 1 + { + continue; + } + // first index is our model struct if token.type_name() == model.name { models_structs.push(token.to_composite().unwrap().clone()); @@ -222,6 +227,10 @@ export enum {} {{ } for token in &tokens.enums { + if handled_tokens.iter().filter(|t| t.type_name() == token.type_name()).count() > 1 + { + continue; + } out += TypescriptPlugin::format_enum(token.to_composite().unwrap()).as_str(); } @@ -247,20 +256,22 @@ export function defineContractComponents(world: World) { // Handled tokens should be a list of all structs and enums used by the contract // Such as a set of referenced tokens from a model fn format_system(system: &Function, handled_tokens: &[Composite]) -> String { + fn map_type(token: &Token) -> String { + match token { + Token::CoreBasic(t) => TypescriptPlugin::map_type(&t.type_name()) + .replace("RecsType.", "") + // types should be lowercased + .to_lowercase(), + Token::Composite(t) => format!("models.{}", t.type_name()), + Token::Array(t) => format!("{}[]", map_type(&t.inner)), + _ => panic!("Unsupported token type: {:?}", token), + } + } + let args = system .inputs .iter() - .map(|arg| { - format!( - "{}: {}", - arg.0, - if TypescriptPlugin::map_type(&arg.1.type_name()) == arg.1.type_name() { - format!("models.{}", arg.1.type_name()) - } else { - TypescriptPlugin::map_type(&arg.1.type_name()).replace("RecsType.", "") - } - ) - }) + .map(|arg| format!("{}: {}", arg.0, map_type(&arg.1))) .collect::>() .join(", "); @@ -295,7 +306,7 @@ export function defineContractComponents(world: World) { format!( " // Call the `{system_name}` system with the specified Account and calldata - const {pretty_system_name} = async (props: {{ account: Account{arg_sep}{args} }}) => {{ + const {system_name} = async (props: {{ account: Account{arg_sep}{args} }}) => {{ try {{ return await provider.execute( props.account, @@ -311,10 +322,6 @@ export function defineContractComponents(world: World) { ", // selector for execute system_name = system.name, - // pretty system name - // snake case to camel case - // move_to -> moveTo - pretty_system_name = system.name.to_case(convert_case::Case::Camel), // add comma if we have args arg_sep = if 
!args.is_empty() { ", " } else { "" }, // formatted args to use our mapped types @@ -376,17 +383,15 @@ export function defineContractComponents(world: World) { }}; }} ", - contract.contract_file_name, + contract.qualified_path, // capitalize contract name - TypescriptPlugin::formatted_contract_name(&contract.contract_file_name), - TypescriptPlugin::formatted_contract_name(&contract.contract_file_name), + TypescriptPlugin::formatted_contract_name(&contract.qualified_path), + TypescriptPlugin::formatted_contract_name(&contract.qualified_path), systems, contract .systems .iter() - .map(|system| { - system.to_function().unwrap().name.to_case(convert_case::Case::Camel) - }) + .map(|system| { system.to_function().unwrap().name.to_string() }) .collect::>() .join(", ") ); @@ -401,8 +406,8 @@ export function defineContractComponents(world: World) { .map(|c| { format!( "{}: {}()", - TypescriptPlugin::formatted_contract_name(&c.contract_file_name), - TypescriptPlugin::formatted_contract_name(&c.contract_file_name) + TypescriptPlugin::formatted_contract_name(&c.qualified_path), + TypescriptPlugin::formatted_contract_name(&c.qualified_path) ) }) .collect::>() diff --git a/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs b/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs new file mode 100644 index 0000000000..94c046cf24 --- /dev/null +++ b/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs @@ -0,0 +1,627 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use async_trait::async_trait; +use cainome::parser::tokens::{Composite, CompositeType, Function}; +use convert_case::Casing; + +use crate::error::BindgenResult; +use crate::plugins::BuiltinPlugin; +use crate::{DojoContract, DojoData, DojoModel}; + +pub struct TypeScriptV2Plugin {} + +impl TypeScriptV2Plugin { + pub fn new() -> Self { + Self {} + } + + // Maps cairo types to TypeScript defined types + fn map_type(type_name: &str) -> String { + match type_name { + "bool" => "boolean".to_string(), + "u8" => "number".to_string(), + "u16" => "number".to_string(), + "u32" => "number".to_string(), + "u64" => "bigint".to_string(), + "u128" => "bigint".to_string(), + "u256" => "bigint".to_string(), + "usize" => "number".to_string(), + "felt252" => "string".to_string(), + "ClassHash" => "string".to_string(), + "ContractAddress" => "string".to_string(), + + _ => type_name.to_string(), + } + } + + fn generate_header() -> String { + format!( + "// Generated by dojo-bindgen on {}. Do not modify this file manually.\n", + chrono::Utc::now().to_rfc2822() + ) + } + + fn generate_imports() -> String { + "import { Account } from \"starknet\"; +import { + Clause, + Client, + ModelClause, + createClient, + valueToToriiValueAndOperator, +} from \"@dojoengine/torii-client\"; +import { + LOCAL_KATANA, + LOCAL_RELAY, + LOCAL_TORII, + createManifestFromJson, +} from \"@dojoengine/core\";" + .to_string() + } + + fn generate_query_types(models: &[&DojoModel]) -> String { + let mut query_fields = Vec::new(); + let mut result_mapping = Vec::new(); + + for model in models { + query_fields + .push(format!("{model_name}: ModelClause<{model_name}>;", model_name = model.name)); + + result_mapping.push(format!("{model_name}: {model_name};", model_name = model.name)); + } + + format!( + "type Query = Partial<{{ + {query_fields} +}}>; + +type ResultMapping = {{ + {result_mapping} +}}; + +type QueryResult = {{ + [K in keyof T]: K extends keyof ResultMapping ? 
ResultMapping[K] : never; +}}; + +// Only supports a single model for now, since torii doesn't support multiple models +// And inside that single model, there's only support for a single query. +function convertQueryToToriiClause(query: Query): Clause | undefined {{ + const [model, clause] = Object.entries(query)[0]; + + if (Object.keys(clause).length === 0) {{ + return undefined; + }} + + const clauses: Clause[] = Object.entries(clause).map(([key, value]) => {{ + return {{ + Member: {{ + model, + member: key, + ...valueToToriiValueAndOperator(value), + }}, + }} satisfies Clause; + }}); + + return clauses[0]; +}}", + query_fields = query_fields.join("\n "), + result_mapping = result_mapping.join("\n "), + ) + } + + fn generate_model_types(models: &[&DojoModel], handled_tokens: &mut Vec) -> String { + let mut out = String::new(); + + for model in models { + let tokens = &model.tokens; + + for token in &tokens.enums { + handled_tokens.push(token.to_composite().unwrap().to_owned()); + } + for token in &tokens.structs { + handled_tokens.push(token.to_composite().unwrap().to_owned()); + } + + let mut structs = tokens.structs.to_owned(); + structs.sort_by(|a, b| { + if a.to_composite() + .unwrap() + .inners + .iter() + .any(|field| field.token.type_name() == b.type_name()) + { + std::cmp::Ordering::Greater + } else { + std::cmp::Ordering::Less + } + }); + + for token in &structs { + out += TypeScriptV2Plugin::format_struct( + token.to_composite().unwrap(), + handled_tokens, + ) + .as_str(); + } + + for token in &tokens.enums { + out += TypeScriptV2Plugin::format_enum(token.to_composite().unwrap()).as_str(); + } + + out += "\n"; + } + + out + } + + fn generate_base_calls_class() -> String { + "class BaseCalls { + contractAddress: string; + account?: Account; + + constructor(contractAddress: string, account?: Account) { + this.account = account; + this.contractAddress = contractAddress; + } + + async execute(entrypoint: string, calldata: any[] = []): Promise { + if (!this.account) { + throw new Error(\"No account set to interact with dojo_starter\"); + } + + await this.account.execute( + { + contractAddress: this.contractAddress, + entrypoint, + calldata, + }, + undefined, + { + maxFee: 0, + } + ); + } +} +" + .to_string() + } + + fn generate_contracts(contracts: &[&DojoContract], handled_tokens: &[Composite]) -> String { + let mut out = String::new(); + + for contract in contracts { + let systems = contract + .systems + .iter() + .map(|system| { + TypeScriptV2Plugin::format_system(system.to_function().unwrap(), handled_tokens) + }) + .collect::>() + .join("\n\n "); + + out += &format!( + "class {}Calls extends BaseCalls {{ + constructor(contractAddress: string, account?: Account) {{ + super(contractAddress, account); + }} + + {} +}} +", + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal), + systems, + ); + } + + out + } + + fn generate_initial_params(contracts: &[&DojoContract]) -> String { + let system_addresses = contracts + .iter() + .map(|contract| { + format!( + "{}Address: string;", + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel) + ) + }) + .collect::>() + .join("\n "); + + format!( + "type InitialParams = GeneralParams & + ( + | {{ + rpcUrl?: string; + worldAddress: string; + {system_addresses} + }} + | {{ + manifest: any; + }} + );" + ) + } + + fn generate_world_class(world_name: &String, contracts: &[&DojoContract]) -> String { + let mut out = String::new(); + + out += 
"type GeneralParams = { + toriiUrl?: string; + relayUrl?: string; + account?: Account; +};"; + + out += "\n\n"; + + out += TypeScriptV2Plugin::generate_initial_params(contracts).as_str(); + + out += "\n\n"; + + let system_properties = contracts + .iter() + .map(|contract| { + format!( + "{camel_case_name}: {pascal_case_name}Calls; + {camel_case_name}Address: string;", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + pascal_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal) + ) + }) + .collect::>() + .join("\n "); + + let system_address_initializations = contracts + .iter() + .map(|contract| { + format!( + "const {contract_name}Address = config.contracts.find( + (contract) => + contract.name === \"dojo_starter::systems::{contract_name}::{contract_name}\" + )?.address; + + if (!{contract_name}Address) {{ + throw new Error(\"No {contract_name} contract found in the manifest\"); + }} + + this.{contract_name}Address = {contract_name}Address;", + contract_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel) + ) + }) + .collect::>() + .join("\n "); + + let system_address_initializations_from_params = contracts + .iter() + .map(|contract| { + format!( + "this.{camel_case_name}Address = params.{camel_case_name}Address;", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + ) + }) + .collect::>() + .join("\n "); + + let system_initializations = contracts + .iter() + .map(|contract| { + format!( + "this.{camel_case_name} = new \ + {pascal_case_name}Calls(this.{camel_case_name}Address, this._account);", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + pascal_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal) + ) + }) + .collect::>() + .join("\n "); + + let formatted_world_name = world_name.to_case(convert_case::Case::Pascal); + + out += &format!( + "export class {formatted_world_name} {{ + rpcUrl: string; + toriiUrl: string; + toriiPromise: Promise; + relayUrl: string; + worldAddress: string; + private _account?: Account; + {system_properties} + + constructor(params: InitialParams) {{ + if (\"manifest\" in params) {{ + const config = createManifestFromJson(params.manifest); + this.rpcUrl = config.world.metadata.rpc_url; + this.worldAddress = config.world.address; + + {system_address_initializations} + }} else {{ + this.rpcUrl = params.rpcUrl || LOCAL_KATANA; + this.worldAddress = params.worldAddress; + {system_address_initializations_from_params} + }} + this.toriiUrl = params.toriiUrl || LOCAL_TORII; + this.relayUrl = params.relayUrl || LOCAL_RELAY; + this._account = params.account; + {system_initializations} + + this.toriiPromise = createClient([], {{ + rpcUrl: this.rpcUrl, + toriiUrl: this.toriiUrl, + worldAddress: this.worldAddress, + relayUrl: this.relayUrl, + }}); + }} + + get account(): Account | undefined {{ + return this._account; + }} + + set account(account: Account) {{ + this._account = account; + {system_initializations} + }} + + async query(query: T, limit = 10, offset = 0) {{ + const torii = await this.toriiPromise; + + return {{ + torii, + findEntities: async () => this.findEntities(query, limit, offset), + }}; + }} + + async 
findEntities(query: T, limit = 10, offset = 0) {{ + const torii = await this.toriiPromise; + + const clause = convertQueryToToriiClause(query); + + const toriiResult = await torii.getEntities({{ + limit, + offset, + clause, + }}); + + return toriiResult as Record>; + }} + + async findEntity(query: T) {{ + const result = await this.findEntities(query, 1); + + if (Object.values(result).length === 0) {{ + return undefined; + }} + + return Object.values(result)[0] as QueryResult; + }} +}}" + ); + + out + } + + // Token should be a struct + // This will be formatted into a TypeScript interface + // using TypeScript defined types + fn format_struct(token: &Composite, handled_tokens: &[Composite]) -> String { + let mut native_fields: Vec = Vec::new(); + + for field in &token.inners { + let mapped = TypeScriptV2Plugin::map_type(field.token.type_name().as_str()); + if mapped == field.token.type_name() { + let token = handled_tokens + .iter() + .find(|t| t.type_name() == field.token.type_name()) + .unwrap_or_else(|| panic!("Token not found: {}", field.token.type_name())); + if token.r#type == CompositeType::Enum { + native_fields.push(format!("{}: {};", field.name, mapped)); + } else { + native_fields.push(format!("{}: {};", field.name, field.token.type_name())); + } + } else { + native_fields.push(format!("{}: {};", field.name, mapped)); + } + } + + format!( + " +// Type definition for `{path}` struct +export interface {name} {{ + {native_fields} +}} +", + path = token.type_path, + name = token.type_name(), + native_fields = native_fields.join("\n ") + ) + } + + // Token should be an enum + // This will be formatted into a C# enum + // Enum is mapped using index of cairo enum + fn format_enum(token: &Composite) -> String { + let fields = token + .inners + .iter() + .map(|field| format!("{},", field.name,)) + .collect::>() + .join("\n "); + + format!( + " +// Type definition for `{}` enum +export enum {} {{ + {} +}} +", + token.type_path, + token.type_name(), + fields + ) + } + + // Formats a system into a JS method used by the contract class + // Handled tokens should be a list of all structs and enums used by the contract + // Such as a set of referenced tokens from a model + fn format_system(system: &Function, handled_tokens: &[Composite]) -> String { + let args = system + .inputs + .iter() + .map(|arg| { + format!( + "{}: {}", + arg.0, + if TypeScriptV2Plugin::map_type(&arg.1.type_name()) == arg.1.type_name() { + arg.1.type_name() + } else { + TypeScriptV2Plugin::map_type(&arg.1.type_name()) + } + ) + }) + .collect::>() + .join(", "); + + let calldata = system + .inputs + .iter() + .map(|arg| { + let token = &arg.1; + let type_name = &arg.0; + + match handled_tokens.iter().find(|t| t.type_name() == token.type_name()) { + Some(t) => { + // Need to flatten the struct members. 
+ match t.r#type { + CompositeType::Struct => t + .inners + .iter() + .map(|field| format!("props.{}.{}", type_name, field.name)) + .collect::>() + .join(",\n "), + _ => type_name.to_string(), + } + } + None => type_name.to_string(), + } + }) + .collect::>() + .join(",\n "); + + format!( + "async {pretty_system_name}({args}): Promise {{ + try {{ + await this.execute(\"{system_name}\", [{calldata}]) + }} catch (error) {{ + console.error(\"Error executing {pretty_system_name}:\", error); + throw error; + }} + }}", + pretty_system_name = system.name.to_case(convert_case::Case::Camel), + // formatted args to use our mapped types + args = args, + system_name = system.name, + // calldata for execute + calldata = calldata + ) + } + + // Formats a contract file path into a pretty contract name + // eg. dojo_examples::actions::actions.json -> Actions + fn formatted_contract_name(contract_file_name: &str) -> String { + let contract_name = + contract_file_name.split("::").last().unwrap().trim_end_matches(".json"); + contract_name.to_string() + } + + fn generate_code_content(data: &DojoData) -> String { + let mut handled_tokens = Vec::::new(); + let models = data.models.values().collect::>(); + let contracts = data.contracts.values().collect::>(); + + let mut code = String::new(); + code += TypeScriptV2Plugin::generate_header().as_str(); + code += TypeScriptV2Plugin::generate_imports().as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_model_types(models.as_slice(), &mut handled_tokens) + .as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_base_calls_class().as_str(); + code += "\n"; + code += + TypeScriptV2Plugin::generate_contracts(contracts.as_slice(), &handled_tokens).as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_query_types(models.as_slice()).as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_world_class(&data.world.name, contracts.as_slice()) + .as_str(); + + code + } +} + +#[async_trait] +impl BuiltinPlugin for TypeScriptV2Plugin { + async fn generate_code(&self, data: &DojoData) -> BindgenResult>> { + let code: String = TypeScriptV2Plugin::generate_code_content(data); + + let mut out: HashMap> = HashMap::new(); + let output_path = Path::new(&format!("{}.ts", data.world.name)).to_owned(); + + out.insert(output_path, code.as_bytes().to_vec()); + + Ok(out) + } +} + +#[cfg(test)] +mod tests { + use std::fs; + use std::io::Read; + + use camino::Utf8PathBuf; + + use super::*; + use crate::gather_dojo_data; + + #[test] + fn test_output() { + let mut expected_output = String::new(); + let mut file = + fs::File::open("src/test_data/mocks/dojo_examples.ts").expect("file not found"); + file.read_to_string(&mut expected_output).expect("error reading file"); + + let expected_output_without_header = + expected_output.lines().skip(1).collect::>().join("\n"); + + let data = gather_dojo_data( + &Utf8PathBuf::from("src/test_data/spawn-and-move/Scarb.toml"), + "dojo_examples", + "dev", + ) + .unwrap(); + + let actual_output = TypeScriptV2Plugin::generate_code_content(&data); + let actual_output_without_header = + actual_output.lines().skip(1).collect::>().join("\n"); + + // This test currently is very naive, but DojoData is unsorted, so the output + // can change between tests. This is a temporary solution until we have a better + // way to test this. 
+ assert_eq!(actual_output_without_header.len(), 7592); + assert_eq!(expected_output_without_header.len(), 7592); + } +} diff --git a/crates/dojo-bindgen/src/plugins/unity/mod.rs b/crates/dojo-bindgen/src/plugins/unity/mod.rs index c98ce28da3..addee69930 100644 --- a/crates/dojo-bindgen/src/plugins/unity/mod.rs +++ b/crates/dojo-bindgen/src/plugins/unity/mod.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; use async_trait::async_trait; -use cainome::parser::tokens::{Composite, CompositeType, Function}; +use cainome::parser::tokens::{Composite, CompositeType, Function, Token}; use crate::error::BindgenResult; use crate::plugins::BuiltinPlugin; @@ -26,6 +26,7 @@ impl UnityPlugin { "u256" => "BigInteger".to_string(), "usize" => "uint".to_string(), "felt252" => "FieldElement".to_string(), + "bytes31" => "string".to_string(), "ClassHash" => "FieldElement".to_string(), "ContractAddress" => "FieldElement".to_string(), @@ -147,6 +148,10 @@ public class {} : ModelInstance {{ let mut model_struct: Option<&Composite> = None; let tokens = &model.tokens; for token in &tokens.structs { + if handled_tokens.iter().any(|t| t.type_name() == token.type_name()) { + continue; + } + handled_tokens.push(token.to_composite().unwrap().to_owned()); // first index is our model struct @@ -159,6 +164,10 @@ public class {} : ModelInstance {{ } for token in &tokens.enums { + if handled_tokens.iter().any(|t| t.type_name() == token.type_name()) { + continue; + } + handled_tokens.push(token.to_composite().unwrap().to_owned()); out += UnityPlugin::format_enum(token.to_composite().unwrap()).as_str(); } @@ -174,10 +183,19 @@ public class {} : ModelInstance {{ // Handled tokens should be a list of all structs and enums used by the contract // Such as a set of referenced tokens from a model fn format_system(system: &Function, handled_tokens: &[Composite]) -> String { + fn map_type(token: &Token) -> String { + match token { + Token::CoreBasic(t) => UnityPlugin::map_type(&t.type_name()), + Token::Composite(t) => t.type_name().to_string(), + Token::Array(t) => format!("{}[]", map_type(&t.inner)), + _ => panic!("Unsupported token type: {:?}", token), + } + } + let args = system .inputs .iter() - .map(|arg| format!("{} {}", UnityPlugin::map_type(&arg.1.type_name()), arg.0,)) + .map(|arg| format!("{} {}", map_type(&arg.1), &arg.0)) .collect::>() .join(", "); @@ -222,7 +240,7 @@ public class {} : ModelInstance {{ // Call the `{system_name}` system with the specified Account and calldata // Returns the transaction hash. Use `WaitForTransaction` to wait for the transaction to be \ confirmed. 
- public async Task {pretty_system_name}(Account account{arg_sep}{args}) {{ + public async Task {system_name}(Account account{arg_sep}{args}) {{ return await account.ExecuteRaw(new dojo.Call[] {{ new dojo.Call{{ to = contractAddress, @@ -236,21 +254,6 @@ public class {} : ModelInstance {{ ", // selector for execute system_name = system.name, - // pretty system name - // snake case to camel case - // move_to -> MoveTo - pretty_system_name = system - .name - .split('_') - .map(|s| { - let mut c = s.chars(); - match c.next() { - None => String::new(), - Some(f) => f.to_uppercase().collect::() + c.as_str(), - } - }) - .collect::>() - .join(""), // add comma if we have args arg_sep = if !args.is_empty() { ", " } else { "" }, // formatted args to use our mapped types @@ -300,9 +303,9 @@ public class {} : MonoBehaviour {{ {} }} ", - contract.contract_file_name, + contract.qualified_path, // capitalize contract name - UnityPlugin::formatted_contract_name(&contract.contract_file_name), + UnityPlugin::formatted_contract_name(&contract.qualified_path), systems ); diff --git a/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts b/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts new file mode 100644 index 0000000000..0e807cd262 --- /dev/null +++ b/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts @@ -0,0 +1,302 @@ +// Generated by dojo-bindgen on Wed, 17 Apr 2024 07:58:49 +0000. Do not modify this file manually. +import { Account } from "starknet"; +import { + Clause, + Client, + ModelClause, + createClient, + valueToToriiValueAndOperator, +} from "@dojoengine/torii-client"; +import { + LOCAL_KATANA, + LOCAL_RELAY, + LOCAL_TORII, + createManifestFromJson, +} from "@dojoengine/core"; + +// Type definition for `dojo_examples::models::EmoteMessage` struct +export interface EmoteMessage { + identity: string; + emote: Emote; +} + +// Type definition for `dojo_examples::models::Emote` enum +export enum Emote { + None, + Happy, + Sad, + Angry, + Love, +} + + +// Type definition for `dojo_examples::models::Vec2` struct +export interface Vec2 { + x: number; + y: number; +} + +// Type definition for `dojo_examples::models::Position` struct +export interface Position { + player: string; + vec: Vec2; +} + + +// Type definition for `dojo_examples::actions::actions::Moved` struct +export interface Moved { + player: string; + direction: Direction; +} + +// Type definition for `dojo_examples::models::Direction` enum +export enum Direction { + None, + Left, + Right, + Up, + Down, +} + + +// Type definition for `dojo_examples::models::Moves` struct +export interface Moves { + player: string; + remaining: number; + last_direction: Direction; +} + +// Type definition for `dojo_examples::models::Direction` enum +export enum Direction { + None, + Left, + Right, + Up, + Down, +} + + +class BaseCalls { + contractAddress: string; + account?: Account; + + constructor(contractAddress: string, account?: Account) { + this.account = account; + this.contractAddress = contractAddress; + } + + async execute(entrypoint: string, calldata: any[] = []): Promise { + if (!this.account) { + throw new Error("No account set to interact with dojo_starter"); + } + + await this.account.execute( + { + contractAddress: this.contractAddress, + entrypoint, + calldata, + }, + undefined, + { + maxFee: 0, + } + ); + } +} + +class ActionsCalls extends BaseCalls { + constructor(contractAddress: string, account?: Account) { + super(contractAddress, account); + } + + async tileTerrain(vec: Vec2): Promise { + try { + await 
this.execute("tile_terrain", [props.vec.x, + props.vec.y]) + } catch (error) { + console.error("Error executing tileTerrain:", error); + throw error; + } + } + + async quadrant(pos: Position): Promise { + try { + await this.execute("quadrant", [props.pos.player, + props.pos.vec]) + } catch (error) { + console.error("Error executing quadrant:", error); + throw error; + } + } + + async dojoResource(): Promise { + try { + await this.execute("dojo_resource", []) + } catch (error) { + console.error("Error executing dojoResource:", error); + throw error; + } + } + + async spawn(): Promise { + try { + await this.execute("spawn", []) + } catch (error) { + console.error("Error executing spawn:", error); + throw error; + } + } + + async move(direction: Direction): Promise { + try { + await this.execute("move", [direction]) + } catch (error) { + console.error("Error executing move:", error); + throw error; + } + } +} + +type Query = Partial<{ + EmoteMessage: ModelClause; + Position: ModelClause; + Moved: ModelClause; + Moves: ModelClause; +}>; + +type ResultMapping = { + EmoteMessage: EmoteMessage; + Position: Position; + Moved: Moved; + Moves: Moves; +}; + +type QueryResult = { + [K in keyof T]: K extends keyof ResultMapping ? ResultMapping[K] : never; +}; + +// Only supports a single model for now, since torii doesn't support multiple models +// And inside that single model, there's only support for a single query. +function convertQueryToToriiClause(query: Query): Clause | undefined { + const [model, clause] = Object.entries(query)[0]; + + if (Object.keys(clause).length === 0) { + return undefined; + } + + const clauses: Clause[] = Object.entries(clause).map(([key, value]) => { + return { + Member: { + model, + member: key, + ...valueToToriiValueAndOperator(value), + }, + } satisfies Clause; + }); + + return clauses[0]; +} +type GeneralParams = { + toriiUrl?: string; + relayUrl?: string; + account?: Account; +}; + +type InitialParams = GeneralParams & + ( + | { + rpcUrl?: string; + worldAddress: string; + actionsAddress: string; + } + | { + manifest: any; + } + ); + +export class DojoExamples { + rpcUrl: string; + toriiUrl: string; + toriiPromise: Promise; + relayUrl: string; + worldAddress: string; + private _account?: Account; + actions: ActionsCalls; + actionsAddress: string; + + constructor(params: InitialParams) { + if ("manifest" in params) { + const config = createManifestFromJson(params.manifest); + this.rpcUrl = config.world.metadata.rpc_url; + this.worldAddress = config.world.address; + + const actionsAddress = config.contracts.find( + (contract) => + contract.name === "dojo_starter::systems::actions::actions" + )?.address; + + if (!actionsAddress) { + throw new Error("No actions contract found in the manifest"); + } + + this.actionsAddress = actionsAddress; + } else { + this.rpcUrl = params.rpcUrl || LOCAL_KATANA; + this.worldAddress = params.worldAddress; + this.actionsAddress = params.actionsAddress; + } + this.toriiUrl = params.toriiUrl || LOCAL_TORII; + this.relayUrl = params.relayUrl || LOCAL_RELAY; + this._account = params.account; + this.actions = new ActionsCalls(this.actionsAddress, this._account); + + this.toriiPromise = createClient([], { + rpcUrl: this.rpcUrl, + toriiUrl: this.toriiUrl, + worldAddress: this.worldAddress, + relayUrl: this.relayUrl, + }); + } + + get account(): Account | undefined { + return this._account; + } + + set account(account: Account) { + this._account = account; + this.actions = new ActionsCalls(this.actionsAddress, this._account); + } + + async 
query(query: T, limit = 10, offset = 0) { + const torii = await this.toriiPromise; + + return { + torii, + findEntities: async () => this.findEntities(query, limit, offset), + }; + } + + async findEntities(query: T, limit = 10, offset = 0) { + const torii = await this.toriiPromise; + + const clause = convertQueryToToriiClause(query); + + const toriiResult = await torii.getEntities({ + limit, + offset, + clause, + }); + + return toriiResult as Record>; + } + + async findEntity(query: T) { + const result = await this.findEntities(query, 1); + + if (Object.values(result).length === 0) { + return undefined; + } + + return Object.values(result)[0] as QueryResult; + } +} \ No newline at end of file diff --git a/crates/dojo-core/Scarb.lock b/crates/dojo-core/Scarb.lock index fdc4138ee1..87f054ddd0 100644 --- a/crates/dojo-core/Scarb.lock +++ b/crates/dojo-core/Scarb.lock @@ -3,7 +3,7 @@ version = 1 [[package]] name = "dojo" -version = "0.5.1" +version = "0.6.0" dependencies = [ "dojo_plugin", ] diff --git a/crates/dojo-core/Scarb.toml b/crates/dojo-core/Scarb.toml index 8ffe4dfb8d..fcd41ee8a8 100644 --- a/crates/dojo-core/Scarb.toml +++ b/crates/dojo-core/Scarb.toml @@ -2,7 +2,7 @@ cairo-version = "2.4.0" description = "The Dojo Core library for autonomous worlds." name = "dojo" -version = "0.5.1" +version = "0.6.0" [dependencies] dojo_plugin = { git = "https://github.com/dojoengine/dojo", tag = "v0.3.11" } diff --git a/crates/dojo-core/src/base_test.cairo b/crates/dojo-core/src/base_test.cairo index 54edc99085..282bdf6a40 100644 --- a/crates/dojo-core/src/base_test.cairo +++ b/crates/dojo-core/src/base_test.cairo @@ -107,7 +107,9 @@ mod invalid_model { impl InvalidModelName of super::INameOnly { fn name(self: @ContractState) -> felt252 { // Pre-computed address of a contract deployed through the world. 
- 0x7b6cc67bb03efdf091487465df2037cad74111d8b616536b013e70da7491a30 + // To print this addres, run: + // sozo test --manifest-path crates/dojo-core/Scarb.toml -f test_deploy_from_world_invalid_model + 0x647d90f9663c37478a5fba689fc7166d957f782ea4a8316e0042929d48cf8be } } } diff --git a/crates/dojo-core/src/world.cairo b/crates/dojo-core/src/world.cairo index e6de33f0dd..b21e72eac0 100644 --- a/crates/dojo-core/src/world.cairo +++ b/crates/dojo-core/src/world.cairo @@ -452,6 +452,7 @@ mod world { fn emit(self: @ContractState, mut keys: Array, values: Span) { let system = get_caller_address(); system.serialize(ref keys); + emit_event_syscall(keys.span(), values).unwrap_syscall(); } diff --git a/crates/dojo-lang/Cargo.toml b/crates/dojo-lang/Cargo.toml index 277d7ea15c..2541730c92 100644 --- a/crates/dojo-lang/Cargo.toml +++ b/crates/dojo-lang/Cargo.toml @@ -16,6 +16,7 @@ cairo-lang-debug.workspace = true cairo-lang-defs.workspace = true cairo-lang-diagnostics.workspace = true cairo-lang-filesystem.workspace = true +cairo-lang-formatter.workspace = true cairo-lang-lowering.workspace = true cairo-lang-parser.workspace = true cairo-lang-plugins.workspace = true @@ -50,7 +51,6 @@ tracing.workspace = true url = "2.2.2" [dev-dependencies] -cairo-lang-formatter.workspace = true cairo-lang-semantic.workspace = true cairo-lang-test-utils.workspace = true dojo-test-utils = { path = "../dojo-test-utils" } diff --git a/crates/dojo-lang/src/compiler.rs b/crates/dojo-lang/src/compiler.rs index 6270a95755..010dab774a 100644 --- a/crates/dojo-lang/src/compiler.rs +++ b/crates/dojo-lang/src/compiler.rs @@ -1,4 +1,5 @@ use std::collections::{BTreeMap, BTreeSet, HashMap}; +use std::io::Write; use std::iter::zip; use std::ops::DerefMut; @@ -8,13 +9,14 @@ use cairo_lang_defs::db::DefsGroup; use cairo_lang_defs::ids::{ModuleId, ModuleItemId}; use cairo_lang_filesystem::db::FilesGroup; use cairo_lang_filesystem::ids::{CrateId, CrateLongId}; +use cairo_lang_formatter::format_string; use cairo_lang_semantic::db::SemanticGroup; use cairo_lang_starknet::abi; use cairo_lang_starknet::contract::{find_contracts, ContractDeclaration}; use cairo_lang_starknet::contract_class::{compile_prepared_db, ContractClass}; use cairo_lang_starknet::plugin::aux_data::StarkNetContractAuxData; use cairo_lang_utils::UpcastMut; -use camino::Utf8PathBuf; +use camino::{Utf8Path, Utf8PathBuf}; use convert_case::{Case, Casing}; use dojo_world::manifest::{ AbiFormat, Class, ComputedValueEntrypoint, DojoContract, DojoModel, Manifest, ManifestMethods, @@ -46,6 +48,10 @@ pub const ABIS_DIR: &str = "abis"; pub const CONTRACTS_DIR: &str = "contracts"; pub const MODELS_DIR: &str = "models"; +pub const SOURCES_DIR: &str = "src"; + +pub(crate) const LOG_TARGET: &str = "dojo_lang::compiler"; + #[cfg(test)] #[path = "compiler_test.rs"] mod test; @@ -85,6 +91,8 @@ impl Compiler for DojoCompiler { ) -> Result<()> { let props: Props = unit.target().props()?; let target_dir = unit.target_dir(ws); + let sources_dir = target_dir.child(Utf8Path::new(SOURCES_DIR)); + let compiler_config = build_compiler_config(&unit, ws); let mut main_crate_ids = collect_main_crate_ids(&unit, db); @@ -101,7 +109,7 @@ impl Compiler for DojoCompiler { .iter() .map(|decl| decl.module_id().full_path(db.upcast_mut())) .collect::>(); - trace!(contracts = ?contract_paths); + trace!(target: LOG_TARGET, contracts = ?contract_paths); let contracts = contracts.iter().collect::>(); @@ -116,11 +124,31 @@ impl Compiler for DojoCompiler { for (decl, class) in zip(contracts, classes) { let 
contract_full_path = decl.module_id().full_path(db.upcast_mut()); - let file_name = format!("{contract_full_path}.json"); - let mut file = target_dir.open_rw(file_name.clone(), "output file", ws.config())?; - serde_json::to_writer_pretty(file.deref_mut(), &class) - .with_context(|| format!("failed to serialize contract: {contract_full_path}"))?; + // save expanded contract source file + if let Ok(file_id) = db.module_main_file(decl.module_id()) { + if let Some(file_content) = db.file_content(file_id) { + let src_file_name = format!("{contract_full_path}.cairo").replace("::", "_"); + + let mut file = + sources_dir.open_rw(src_file_name.clone(), "source file", ws.config())?; + file.write(format_string(db, file_content.to_string()).as_bytes()) + .with_context(|| { + format!("failed to serialize contract source: {contract_full_path}") + })?; + } else { + return Err(anyhow!("failed to get source file content: {contract_full_path}")); + } + } else { + return Err(anyhow!("failed to get source file: {contract_full_path}")); + } + + // save JSON artifact file + let file_name = format!("{contract_full_path}.json"); + let mut file = target_dir.open_rw(file_name.clone(), "class file", ws.config())?; + serde_json::to_writer_pretty(file.deref_mut(), &class).with_context(|| { + format!("failed to serialize contract artifact: {contract_full_path}") + })?; let class_hash = compute_class_hash_of_contract_class(&class).with_context(|| { format!("problem computing class hash for contract `{contract_full_path}`") @@ -152,7 +180,7 @@ fn find_project_contracts( let external_contracts = if let Some(external_contracts) = external_contracts { let _ = trace_span!("find_external_contracts").enter(); - debug!("external contracts selectors: {:?}", external_contracts); + debug!(target: LOG_TARGET, external_contracts = ?external_contracts, "External contracts selectors."); let crate_ids = external_contracts .iter() @@ -172,7 +200,7 @@ fn find_project_contracts( }) .collect::>() } else { - debug!("no external contracts selected"); + debug!(target: LOG_TARGET, "No external contracts selected."); Vec::new() }; @@ -210,8 +238,12 @@ fn update_manifest( compiled_artifacts: HashMap)>, external_contracts: Option>, ) -> anyhow::Result<()> { - let relative_manifests_dir = Utf8PathBuf::new().join(MANIFESTS_DIR).join(BASE_DIR); - let relative_abis_dir = Utf8PathBuf::new().join(ABIS_DIR).join(BASE_DIR); + let profile_name = + ws.current_profile().expect("Scarb profile expected to be defined.").to_string(); + let profile_dir = Utf8PathBuf::new().join(MANIFESTS_DIR).join(profile_name); + + let relative_manifests_dir = Utf8PathBuf::new().join(&profile_dir).join(BASE_DIR); + let relative_abis_dir = Utf8PathBuf::new().join(&profile_dir).join(ABIS_DIR).join(BASE_DIR); let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); fn get_compiled_artifact_from_map<'a>( @@ -225,17 +257,17 @@ fn update_manifest( let mut crate_ids = crate_ids.to_vec(); - let (hash, _) = get_compiled_artifact_from_map(&compiled_artifacts, WORLD_CONTRACT_NAME)?; + let (hash, abi) = get_compiled_artifact_from_map(&compiled_artifacts, WORLD_CONTRACT_NAME)?; write_manifest_and_abi( &relative_manifests_dir, &relative_abis_dir, &manifest_dir, &mut Manifest::new( // abi path will be written by `write_manifest` - Class { class_hash: *hash, abi: None }, + Class { class_hash: *hash, abi: None, original_class_hash: *hash }, WORLD_CONTRACT_NAME.into(), ), - &None, + abi, )?; let (hash, _) = get_compiled_artifact_from_map(&compiled_artifacts, BASE_CONTRACT_NAME)?; 
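For reference, a minimal sketch (not part of the diff) of the per-profile layout that `update_manifest` now derives from the current Scarb profile, assuming `MANIFESTS_DIR`, `BASE_DIR` and `ABIS_DIR` keep the values `"manifests"`, `"base"` and `"abis"`:

```rust
use camino::Utf8PathBuf;

// Hypothetical helper mirroring the path construction in `update_manifest`.
fn profile_layout(profile_name: &str) -> (Utf8PathBuf, Utf8PathBuf) {
    let profile_dir = Utf8PathBuf::new().join("manifests").join(profile_name);
    // e.g. manifests/dev/base and manifests/dev/abis/base for the `dev` profile
    let relative_manifests_dir = profile_dir.join("base");
    let relative_abis_dir = profile_dir.join("abis").join("base");
    (relative_manifests_dir, relative_abis_dir)
}
```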
@@ -243,7 +275,10 @@ fn update_manifest( &relative_manifests_dir, &relative_abis_dir, &manifest_dir, - &mut Manifest::new(Class { class_hash: *hash, abi: None }, BASE_CONTRACT_NAME.into()), + &mut Manifest::new( + Class { class_hash: *hash, abi: None, original_class_hash: *hash }, + BASE_CONTRACT_NAME.into(), + ), &None, )?; @@ -309,13 +344,13 @@ fn update_manifest( )?; } - for (_, (manifest, _)) in models.iter_mut() { + for (_, (manifest, abi)) in models.iter_mut() { write_manifest_and_abi( &relative_manifests_dir.join(MODELS_DIR), &relative_abis_dir.join(MODELS_DIR), &manifest_dir, manifest, - &None, + abi, )?; } @@ -350,7 +385,12 @@ fn get_dojo_model_artifacts( model_full_name.clone(), ( Manifest::new( - DojoModel { class_hash, abi: None, members: model.members.clone() }, + DojoModel { + class_hash, + abi: None, + members: model.members.clone(), + original_class_hash: class_hash, + }, model_full_name.into(), ), abi, @@ -421,7 +461,7 @@ fn get_dojo_contract_artifacts( writes, reads, class_hash: *class_hash, - abi: None, + original_class_hash: *class_hash, ..Default::default() }, module_name.clone(), @@ -444,8 +484,7 @@ fn write_manifest_and_abi( where T: Serialize + DeserializeOwned + ManifestMethods, { - let parts: Vec<&str> = manifest.name.split("::").collect(); - let name: Utf8PathBuf = parts.last().unwrap().into(); + let name = manifest.name.to_string().replace("::", "_"); let relative_manifest_path = relative_manifest_dir.join(name.clone()).with_extension("toml"); let relative_abi_path = relative_abis_dir.join(name.clone()).with_extension("json"); diff --git a/crates/dojo-lang/src/contract.rs b/crates/dojo-lang/src/contract.rs index e96ad610f4..f8914801eb 100644 --- a/crates/dojo-lang/src/contract.rs +++ b/crates/dojo-lang/src/contract.rs @@ -41,10 +41,13 @@ impl DojoContract { has_storage = true; return system.merge_storage(db, struct_ast.clone()); } - } else if let ast::ModuleItem::FreeFunction(fn_ast) = el { - return system.rewrite_function(db, fn_ast.clone()); } else if let ast::ModuleItem::Impl(impl_ast) = el { - return system.rewrite_impl(db, impl_ast.clone()); + // If an implementation is not targetting the ContractState, + // the auto injection of self and world is not applied. 
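+            // For example, an `impl ActionsImpl of IActions<ContractState>` block still goes
+            // through the dojo::contract rewriting, while a `#[generate_trait]` impl that does
+            // not target the contract state is copied through unchanged.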
+ let trait_path = impl_ast.trait_path(db).node.get_text(db); + if trait_path.contains("") { + return system.rewrite_impl(db, impl_ast.clone()); + } } vec![RewriteNode::Copied(el.as_syntax_node())] @@ -110,6 +113,7 @@ impl DojoContract { name, dependencies: system.dependencies.values().cloned().collect(), }], + events: vec![], })), code_mappings: builder.code_mappings, }), @@ -235,7 +239,7 @@ impl DojoContract { if param_modifiers.contains(&"ref".to_string()) && param_type.eq(&"ContractState".to_string()) { - has_ref_self = false; + has_ref_self = true; add_self = false; } diff --git a/crates/dojo-lang/src/event.rs b/crates/dojo-lang/src/event.rs new file mode 100644 index 0000000000..38cdad880b --- /dev/null +++ b/crates/dojo-lang/src/event.rs @@ -0,0 +1,190 @@ +use cairo_lang_defs::patcher::{ModifiedNode, RewriteNode}; +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_starknet::plugin::aux_data::StarkNetEventAuxData; +use cairo_lang_starknet::plugin::consts::{ + EVENT_TRAIT, EVENT_TYPE_NAME, KEY_ATTR, NESTED_ATTR, SERDE_ATTR, +}; +use cairo_lang_starknet::plugin::events::{EventData, EventFieldKind}; +use cairo_lang_syntax::node::db::SyntaxGroup; +use cairo_lang_syntax::node::helpers::QueryAttrs; +use cairo_lang_syntax::node::{ast, Terminal, TypedSyntaxNode}; +use indoc::formatdoc; + +use crate::plugin::DojoAuxData; + +// A custom implementation of the starknet::Event derivation path. +// We append the event selector directly within the append_keys_and_data function. +// Without the need of the enum for all event variants. + +// https://github.com/starkware-libs/cairo/blob/main/crates/cairo-lang-starknet/src/plugin/derive/event.rs + +pub fn handle_event_struct( + db: &dyn SyntaxGroup, + aux_data: &mut DojoAuxData, + struct_ast: ast::ItemStruct, +) -> (RewriteNode, Vec) { + let mut diagnostics = vec![]; + + // TODO(spapini): Support generics. + let generic_params = struct_ast.generic_params(db); + match generic_params { + ast::OptionWrappedGenericParamList::Empty(_) => {} + _ => { + diagnostics.push(PluginDiagnostic::error( + generic_params.stable_ptr().untyped(), + format!("{EVENT_TYPE_NAME} structs with generic arguments are unsupported"), + )); + } + } + + // Generate append_keys_and_data() code. 
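+    // For each struct member we collect three fragments: an append expression interpolated
+    // into `append_keys_and_data`, a matching statement for `deserialize`, and a constructor
+    // field used to rebuild the struct once all members are deserialized.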
+ let mut append_members = vec![]; + let mut deserialize_members = vec![]; + let mut ctor = vec![]; + let mut members = vec![]; + for member in struct_ast.members(db).elements(db) { + let member_name = RewriteNode::new_trimmed(member.name(db).as_syntax_node()); + let member_kind = + get_field_kind_for_member(db, &mut diagnostics, &member, EventFieldKind::DataSerde); + members.push((member.name(db).text(db), member_kind)); + + let member_for_append = RewriteNode::interpolate_patched( + "self.$member_name$", + &[("member_name".to_string(), member_name.clone())].into(), + ); + let append_member = append_field(member_kind, member_for_append); + let deserialize_member = deserialize_field(member_kind, member_name.clone()); + append_members.push(append_member); + deserialize_members.push(deserialize_member); + ctor.push(RewriteNode::interpolate_patched( + "$member_name$, ", + &[("member_name".to_string(), member_name)].into(), + )); + } + let event_data = EventData::Struct { members }; + aux_data.events.push(StarkNetEventAuxData { event_data }); + + let append_members = RewriteNode::Modified(ModifiedNode { children: Some(append_members) }); + let deserialize_members = + RewriteNode::Modified(ModifiedNode { children: Some(deserialize_members) }); + let ctor = RewriteNode::Modified(ModifiedNode { children: Some(ctor) }); + + // Add an implementation for `Event`. + let struct_name = RewriteNode::new_trimmed(struct_ast.name(db).as_syntax_node()); + ( + // Append the event selector using the struct_name for the selector + // and then append the members. + RewriteNode::interpolate_patched( + &formatdoc!( + " + impl $struct_name$IsEvent of {EVENT_TRAIT}<$struct_name$> {{ + fn append_keys_and_data( + self: @$struct_name$, ref keys: Array, ref data: Array + ) {{ + core::array::ArrayTrait::append(ref keys, selector!(\"$struct_name$\")); + $append_members$ + }} + fn deserialize( + ref keys: Span, ref data: Span, + ) -> Option<$struct_name$> {{$deserialize_members$ + Option::Some($struct_name$ {{$ctor$}}) + }} + }} + " + ), + &[ + ("struct_name".to_string(), struct_name), + ("append_members".to_string(), append_members), + ("deserialize_members".to_string(), deserialize_members), + ("ctor".to_string(), ctor), + ] + .into(), + ), + diagnostics, + ) +} + +/// Generates code to emit an event for a field +fn append_field(member_kind: EventFieldKind, field: RewriteNode) -> RewriteNode { + match member_kind { + EventFieldKind::Nested | EventFieldKind::Flat => RewriteNode::interpolate_patched( + &format!( + " + {EVENT_TRAIT}::append_keys_and_data( + $field$, ref keys, ref data + );" + ), + &[("field".to_string(), field)].into(), + ), + EventFieldKind::KeySerde => RewriteNode::interpolate_patched( + " + core::serde::Serde::serialize($field$, ref keys);", + &[("field".to_string(), field)].into(), + ), + EventFieldKind::DataSerde => RewriteNode::interpolate_patched( + " + core::serde::Serde::serialize($field$, ref data);", + &[("field".to_string(), field)].into(), + ), + } +} + +fn deserialize_field(member_kind: EventFieldKind, member_name: RewriteNode) -> RewriteNode { + RewriteNode::interpolate_patched( + match member_kind { + EventFieldKind::Nested | EventFieldKind::Flat => { + " + let $member_name$ = starknet::Event::deserialize( + ref keys, ref data + )?;" + } + EventFieldKind::KeySerde => { + " + let $member_name$ = core::serde::Serde::deserialize( + ref keys + )?;" + } + EventFieldKind::DataSerde => { + " + let $member_name$ = core::serde::Serde::deserialize( + ref data + )?;" + } + }, + 
&[("member_name".to_string(), member_name)].into(), + ) +} + +/// Retrieves the field kind for a given enum variant, +/// indicating how the field should be serialized. +/// See [EventFieldKind]. +fn get_field_kind_for_member( + db: &dyn SyntaxGroup, + diagnostics: &mut Vec, + member: &ast::Member, + default: EventFieldKind, +) -> EventFieldKind { + let is_nested = member.has_attr(db, NESTED_ATTR); + let is_key = member.has_attr(db, KEY_ATTR); + let is_serde = member.has_attr(db, SERDE_ATTR); + + // Currently, nested fields are unsupported. + if is_nested { + diagnostics.push(PluginDiagnostic::error( + member.stable_ptr().untyped(), + "Nested event fields are currently unsupported".to_string(), + )); + } + // Currently, serde fields are unsupported. + if is_serde { + diagnostics.push(PluginDiagnostic::error( + member.stable_ptr().untyped(), + "Serde event fields are currently unsupported".to_string(), + )); + } + + if is_key { + return EventFieldKind::KeySerde; + } + default +} diff --git a/crates/dojo-lang/src/inline_macros/emit.rs b/crates/dojo-lang/src/inline_macros/emit.rs index 05b5b5533e..743a6cc9c2 100644 --- a/crates/dojo-lang/src/inline_macros/emit.rs +++ b/crates/dojo-lang/src/inline_macros/emit.rs @@ -4,9 +4,10 @@ use cairo_lang_defs::plugin::{ }; use cairo_lang_diagnostics::Severity; use cairo_lang_semantic::inline_macros::unsupported_bracket_diagnostic; +use cairo_lang_starknet::plugin::consts::EVENT_TRAIT; use cairo_lang_syntax::node::{ast, TypedSyntaxNode}; -use super::unsupported_arg_diagnostic; +use crate::inline_macros::unsupported_arg_diagnostic; #[derive(Debug, Default)] pub struct EmitMacro; @@ -29,12 +30,12 @@ impl InlineMacroExprPlugin for EmitMacro { let args = arg_list.arguments(db).elements(db); - if args.len() != 2 { + if args.len() < 2 || args.len() > 3 { return InlinePluginResult { code: None, diagnostics: vec![PluginDiagnostic { stable_ptr: arg_list.arguments(db).stable_ptr().untyped(), - message: "Invalid arguments. Expected \"emit!(world, models,)\"".to_string(), + message: "Invalid arguments. Expected \"emit!(world, (events,))\"".to_string(), severity: Severity::Error, }], }; @@ -67,7 +68,7 @@ impl InlineMacroExprPlugin for EmitMacro { return InlinePluginResult { code: None, diagnostics: vec![PluginDiagnostic { - message: "Invalid arguments. Expected \"(world, (models,))\"".to_string(), + message: "Invalid arguments. Expected \"(world, (events,))\"".to_string(), stable_ptr: arg_list.arguments(db).stable_ptr().untyped(), severity: Severity::Error, }], @@ -95,14 +96,9 @@ impl InlineMacroExprPlugin for EmitMacro { let mut data = Default::::default();", ); - builder.add_str(&format!( - "keys.append(selector!(\"{}\"));", - event.split_whitespace().next().unwrap() - )); - builder.add_str(&format!( " - starknet::Event::append_keys_and_data(@{event}, ref keys, ref data);", + {EVENT_TRAIT}::append_keys_and_data(@{event}, ref keys, ref data);", event = event )); diff --git a/crates/dojo-lang/src/lib.rs b/crates/dojo-lang/src/lib.rs index ee15cb7bfb..b76ea602c8 100644 --- a/crates/dojo-lang/src/lib.rs +++ b/crates/dojo-lang/src/lib.rs @@ -5,6 +5,7 @@ //! Learn more at [dojoengine.gg](http://dojoengine.gg). 
pub mod compiler; pub mod contract; +pub mod event; pub mod inline_macros; pub mod interface; pub mod introspect; diff --git a/crates/dojo-lang/src/model.rs b/crates/dojo-lang/src/model.rs index 608844a2fe..f5dc228b8e 100644 --- a/crates/dojo-lang/src/model.rs +++ b/crates/dojo-lang/src/model.rs @@ -53,6 +53,18 @@ pub fn handle_model_struct( }); } + for k in &keys { + if k.ty == "u256" { + diagnostics.push(PluginDiagnostic { + message: "Key is only supported for core types that are 1 felt long once \ + serialized. `u256` is a struct of 2 u128, hence not supported." + .into(), + stable_ptr: struct_ast.name(db).stable_ptr().untyped(), + severity: Severity::Error, + }); + } + } + let serialize_member = |m: &Member, include_key: bool| { if m.key && !include_key { return None; diff --git a/crates/dojo-lang/src/plugin.rs b/crates/dojo-lang/src/plugin.rs index bc5b3b3cd9..9341807f78 100644 --- a/crates/dojo-lang/src/plugin.rs +++ b/crates/dojo-lang/src/plugin.rs @@ -1,3 +1,5 @@ +use std::cmp::Ordering; + use anyhow::Result; use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_defs::plugin::{ @@ -6,6 +8,7 @@ use cairo_lang_defs::plugin::{ }; use cairo_lang_diagnostics::Severity; use cairo_lang_semantic::plugin::PluginSuite; +use cairo_lang_starknet::plugin::aux_data::StarkNetEventAuxData; use cairo_lang_syntax::attribute::structured::{ AttributeArg, AttributeArgVariant, AttributeStructurize, }; @@ -23,6 +26,7 @@ use smol_str::SmolStr; use url::Url; use crate::contract::DojoContract; +use crate::event::handle_event_struct; use crate::inline_macros::array_cap::ArrayCapMacro; use crate::inline_macros::delete::DeleteMacro; use crate::inline_macros::emit::EmitMacro; @@ -33,8 +37,9 @@ use crate::introspect::{handle_introspect_enum, handle_introspect_struct}; use crate::model::handle_model_struct; use crate::print::{handle_print_enum, handle_print_struct}; -const DOJO_CONTRACT_ATTR: &str = "dojo::contract"; -const DOJO_INTERFACE_ATTR: &str = "dojo::interface"; +pub const DOJO_CONTRACT_ATTR: &str = "dojo::contract"; +pub const DOJO_INTERFACE_ATTR: &str = "dojo::interface"; +pub const DOJO_EVENT_ATTR: &str = "dojo::event"; const DOJO_PLUGIN_EXPAND_VAR_ENV: &str = "DOJO_PLUGIN_EXPAND"; #[derive(Clone, Debug, PartialEq)] @@ -56,6 +61,8 @@ pub struct DojoAuxData { pub models: Vec, /// A list of systems that were processed by the plugin and their model dependencies. pub systems: Vec, + /// A list of events that were processed by the plugin. + pub events: Vec, } impl GeneratedFileAuxData for DojoAuxData { @@ -406,6 +413,26 @@ impl MacroPlugin for BuiltinDojoPlugin { } } + let attributes = struct_ast.attributes(db).query_attr(db, DOJO_EVENT_ATTR); + + match attributes.len().cmp(&1) { + Ordering::Equal => { + let (event_rewrite_nodes, event_diagnostics) = + handle_event_struct(db, &mut aux_data, struct_ast.clone()); + rewrite_nodes.push(event_rewrite_nodes); + diagnostics.extend(event_diagnostics); + } + Ordering::Greater => { + diagnostics.push(PluginDiagnostic { + message: "A Dojo event must have zero or one dojo::event attribute." 
+ .into(), + stable_ptr: struct_ast.stable_ptr().untyped(), + severity: Severity::Error, + }); + } + _ => {} + } + if rewrite_nodes.is_empty() { return PluginResult { diagnostics, ..PluginResult::default() }; } @@ -439,6 +466,7 @@ impl MacroPlugin for BuiltinDojoPlugin { fn declared_attributes(&self) -> Vec { vec![ "dojo::contract".to_string(), + "dojo::event".to_string(), "key".to_string(), "computed".to_string(), // Not adding capacity for now, this will automatically diff --git a/crates/dojo-lang/src/plugin_test_data/model b/crates/dojo-lang/src/plugin_test_data/model index c0d50164e6..14d91a6f21 100644 --- a/crates/dojo-lang/src/plugin_test_data/model +++ b/crates/dojo-lang/src/plugin_test_data/model @@ -49,6 +49,12 @@ struct OnlyKeyModel { id: felt252 } +#[derive(Model, Serde)] +struct U256KeyModel { + #[key] + id: u256 +} + use starknet::ContractAddress; #[derive(Model, Copy, Drop, Serde)] @@ -536,6 +542,16 @@ error: Model must define at least one member that is not a key struct OnlyKeyModel { ^**********^ +error: Model must define at least one member that is not a key + --> test_src/lib.cairo:47:8 +struct U256KeyModel { + ^**********^ + +error: Key is only supported for core types that are 1 felt long once serialized. `u256` is a struct of 2 u128, hence not supported. + --> test_src/lib.cairo:47:8 +struct U256KeyModel { + ^**********^ + error: Unsupported attribute. --> test_src/lib.cairo[Position]:73:13 #[starknet::contract] @@ -551,6 +567,11 @@ error: Unsupported attribute. #[starknet::contract] ^*******************^ +error: Unsupported attribute. + --> test_src/lib.cairo[U256KeyModel]:68:13 + #[starknet::contract] + ^*******************^ + error: Unsupported attribute. --> test_src/lib.cairo[Player]:77:13 #[starknet::contract] @@ -601,6 +622,21 @@ error: Unsupported attribute. #[abi(embed_v0)] ^**************^ +error: Unsupported attribute. + --> test_src/lib.cairo[U256KeyModel]:73:17 + #[storage] + ^********^ + +error: Unsupported attribute. + --> test_src/lib.cairo[U256KeyModel]:76:17 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[U256KeyModel]:104:17 + #[abi(embed_v0)] + ^**************^ + error: Unsupported attribute. 
--> test_src/lib.cairo[Player]:82:17 #[storage] @@ -662,6 +698,12 @@ struct OnlyKeyModel { id: felt252 } +#[derive(Model, Serde)] +struct U256KeyModel { + #[key] + id: u256 +} + use starknet::ContractAddress; #[derive(Model, Copy, Drop, Serde)] @@ -1092,6 +1134,125 @@ impl OnlyKeyModelIntrospect<> of dojo::database::introspect::Introspect { + fn serialize(self: @U256KeyModel, ref output: core::array::Array) { + core::serde::Serde::serialize(self.id, ref output) + } + fn deserialize(ref serialized: core::array::Span) -> core::option::Option { + core::option::Option::Some(U256KeyModel { + id: core::serde::Serde::deserialize(ref serialized)?, + }) + } +} + + impl U256KeyModelModel of dojo::model::Model { + #[inline(always)] + fn name(self: @U256KeyModel) -> felt252 { + 'U256KeyModel' + } + + #[inline(always)] + fn keys(self: @U256KeyModel) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.id, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @U256KeyModel) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @U256KeyModel) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @U256KeyModel) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } + } + + +impl U256KeyModelIntrospect<> of dojo::database::introspect::Introspect> { + #[inline(always)] + fn size() -> usize { + 0 + } + + #[inline(always)] + fn layout(ref layout: Array) { + + } + + #[inline(always)] + fn ty() -> dojo::database::introspect::Ty { + dojo::database::introspect::Ty::Struct(dojo::database::introspect::Struct { + name: 'U256KeyModel', + attrs: array![].span(), + children: array![dojo::database::introspect::serialize_member(@dojo::database::introspect::Member { + name: 'id', + ty: dojo::database::introspect::Ty::Primitive('u256'), + attrs: array!['key'].span() + })].span() + }) + } +} + + + #[starknet::interface] + trait Iu_256_key_model { + fn ensure_abi(self: @T, model: U256KeyModel); + } + + #[starknet::contract] + mod u_256_key_model { + use super::U256KeyModel; + use super::Iu_256_key_model; + + #[storage] + struct Storage {} + + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IDojoModel{ + fn name(self: @ContractState) -> felt252 { + 'U256KeyModel' + } + + fn unpacked_size(self: @ContractState) -> usize { + dojo::database::introspect::Introspect::::size() + } + + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } + + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl u_256_key_modelImpl of Iu_256_key_model{ + fn ensure_abi(self: @ContractState, model: U256KeyModel) { + } + } + } impl PlayerCopy of core::traits::Copy::; impl PlayerDrop of core::traits::Drop::; impl PlayerSerde of 
core::serde::Serde:: { diff --git a/crates/dojo-lang/src/plugin_test_data/system b/crates/dojo-lang/src/plugin_test_data/system index 9bad98a86b..e9ce18bde4 100644 --- a/crates/dojo-lang/src/plugin_test_data/system +++ b/crates/dojo-lang/src/plugin_test_data/system @@ -43,11 +43,6 @@ mod withevent { struct TestEvent { address: ContractAddress, } - - #[abi(embed_v0)] - fn test(value: felt252) -> value { - value - } } #[starknet::component] @@ -64,7 +59,6 @@ mod testcomponent2 { #[dojo::contract] mod withcomponent { - component!(path: testcomponent1, storage: testcomponent1_storage, event: testcomponent1_event); component!(path: testcomponent2, storage: testcomponent2_storage, event: testcomponent2_event); @@ -109,45 +103,25 @@ trait INominalTrait { fn do_return_value(p1: u8) -> u16; } +#[dojo::interface] +trait IWorldTrait { + fn do_with_ref_self(ref self: ContractState) -> felt252; + fn do_with_several_world_dispatchers( + world: IWorldDispatcher, + vec: Vec2, + another_world: IWorldDispatcher + ) -> felt252; + fn do_with_world_not_named_world(another_world: IWorldDispatcher) -> felt252; + fn do_with_self_and_world_not_named_world(self: @ContractState, another_world: IWorldDispatcher); + fn do_with_world_not_first(vec: Vec2, world: IWorldDispatcher) -> felt252; + fn do_with_self_and_world_not_first(self: @ContractState, vec: Vec2, world: IWorldDispatcher) -> felt252; +} + #[dojo::contract] mod MyFaultyContract { - #[external(v0)] - fn do_with_ref_self(ref self: ContractState) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_several_world_dispatchers( - world: IWorldDispatcher, - vec: Vec2, - another_world: IWorldDispatcher - ) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_world_not_named_world(another_world: IWorldDispatcher) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_self_and_world_not_named_world(self: @ContractState, another_world: IWorldDispatcher) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_world_not_first(vec: Vec2, world: IWorldDispatcher) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_self_and_world_not_first(self: @ContractState, vec: Vec2, world: IWorldDispatcher) -> felt252 { - 'land' - } - #[abi(embed_v0)] - impl ActionsImpl of IActions { + impl TestWorldImpl of IWorldTrait { fn do_with_ref_self(ref self: ContractState) -> felt252 { 'land' } @@ -187,33 +161,8 @@ mod MyNominalContract { damage: u8 } - #[external(v0)] - #[computed] - fn do(vec: Vec2) -> felt252 { - 'land' - } - - #[external(v0)] - #[computed] - fn do_with_self(self: @ContractState, vec: Vec2) -> felt252 { - 'land' - } - - #[external(v0)] - #[computed] - fn do_with_world_first(world: IWorldDispatcher, vec: Vec2) -> felt252 { - 'land' - } - - #[external(v0)] - #[computed] - fn do_with_self_and_world_first(self: @ContractState, world: IWorldDispatcher, vec: Vec2) -> felt252 { - 'land' - } - #[abi(embed_v0)] - impl ActionsImpl of IActions { - + impl TestWorldImpl of IWorldTrait { fn do(vec: Vec2) -> felt252 { 'land' } @@ -230,6 +179,14 @@ mod MyNominalContract { 'land' } } + + #[generate_trait] + impl ImplInternalNoContractState of InternalNoContractState { + fn func1(world: IWorldDispatcher) -> felt252 { + let _w = world; + 42 + } + } } //! > generated_cairo_code @@ -343,72 +300,52 @@ mod ctxnamed { //! > expected_diagnostics error: Unsupported attribute. - --> test_src/lib.cairo:47:1 + --> test_src/lib.cairo:42:1 #[starknet::component] ^********************^ error: Unsupported attribute. 
- --> test_src/lib.cairo:53:1 + --> test_src/lib.cairo:48:1 #[starknet::component] ^********************^ error: Anything other than functions is not supported in a dojo::interface - --> test_src/lib.cairo:89:5 + --> test_src/lib.cairo:83:5 const ONE: u8; ^************^ error: Functions of dojo::interface cannot have `ref self` parameter. - --> test_src/lib.cairo:91:5 + --> test_src/lib.cairo:85:5 fn do_ref_self(ref self: TContractState); ^***************************************^ -error: Only one parameter of type IWorldDispatcher is allowed. - --> test_src/lib.cairo:114:5 - #[external(v0)] - ^*************^ - -error: The IWorldDispatcher parameter must be named 'world'. - --> test_src/lib.cairo:124:38 - fn do_with_world_not_named_world(another_world: IWorldDispatcher) -> felt252 { - ^*****************************^ - -error: The IWorldDispatcher parameter must be named 'world'. - --> test_src/lib.cairo:129:69 - fn do_with_self_and_world_not_named_world(self: @ContractState, another_world: IWorldDispatcher) -> felt252 { - ^*****************************^ - -error: The IWorldDispatcher parameter must be the first parameter of the function (self excluded). - --> test_src/lib.cairo:134:43 - fn do_with_world_not_first(vec: Vec2, world: IWorldDispatcher) -> felt252 { - ^*********************^ - -error: The IWorldDispatcher parameter must be the first parameter of the function (self excluded). - --> test_src/lib.cairo:139:74 - fn do_with_self_and_world_not_first(self: @ContractState, vec: Vec2, world: IWorldDispatcher) -> felt252 { - ^*********************^ +error: Functions of dojo::contract cannot have 'ref self' parameter. + --> test_src/lib.cairo:119:9 + fn do_with_ref_self(ref self: ContractState) -> felt252 { + ^*******************************************************^ error: Only one parameter of type IWorldDispatcher is allowed. - --> test_src/lib.cairo:149:9 + --> test_src/lib.cairo:123:9 fn do_with_several_world_dispatchers( ^***********************************^ error: The IWorldDispatcher parameter must be named 'world'. - --> test_src/lib.cairo:157:42 + --> test_src/lib.cairo:131:42 fn do_with_world_not_named_world(another_world: IWorldDispatcher) -> felt252 { ^*****************************^ error: The IWorldDispatcher parameter must be named 'world'. - --> test_src/lib.cairo:161:73 + --> test_src/lib.cairo:135:73 fn do_with_self_and_world_not_named_world(self: @ContractState, another_world: IWorldDispatcher) -> felt252 { ^*****************************^ error: The IWorldDispatcher parameter must be the first parameter of the function (self excluded). - --> test_src/lib.cairo:165:47 + --> test_src/lib.cairo:139:47 fn do_with_world_not_first(vec: Vec2, world: IWorldDispatcher) -> felt252 { ^*********************^ error: The IWorldDispatcher parameter must be the first parameter of the function (self excluded). - --> test_src/lib.cairo:169:78 + --> test_src/lib.cairo:143:78 fn do_with_self_and_world_not_first(self: @ContractState, vec: Vec2, world: IWorldDispatcher) -> felt252 { ^*********************^ @@ -438,7 +375,7 @@ error: Unsupported attribute. ^*******************^ error: Unsupported attribute. - --> test_src/lib.cairo:93:5 + --> test_src/lib.cairo:87:5 #[my_attr] ^********^ @@ -453,12 +390,12 @@ error: Unsupported attribute. ^*******************^ error: Unsupported attribute. - --> test_src/lib.cairo:49:5 + --> test_src/lib.cairo:44:5 #[storage] ^********^ error: Unsupported attribute. 
- --> test_src/lib.cairo:55:5 + --> test_src/lib.cairo:50:5 #[storage] ^********^ @@ -593,17 +530,12 @@ error: Unsupported attribute. ^******^ error: Unsupported attribute. - --> test_src/lib.cairo:41:5 - #[abi(embed_v0)] - ^**************^ - -error: Unsupported attribute. - --> test_src/lib.cairo[withevent]:47:13 + --> test_src/lib.cairo[withevent]:42:13 #[storage] ^********^ error: Unsupported attribute. - --> test_src/lib.cairo[withevent]:50:17 + --> test_src/lib.cairo[withevent]:45:17 #[substorage(v0)] ^***************^ @@ -628,42 +560,42 @@ error: Unsupported attribute. ^**************^ error: Unknown inline item macro: 'component'. - --> test_src/lib.cairo:62:5 + --> test_src/lib.cairo:56:5 component!(path: testcomponent1, storage: testcomponent1_storage, event: testcomponent1_event); ^*********************************************************************************************^ error: Unknown inline item macro: 'component'. - --> test_src/lib.cairo:63:5 + --> test_src/lib.cairo:57:5 component!(path: testcomponent2, storage: testcomponent2_storage, event: testcomponent2_event); ^*********************************************************************************************^ error: Unsupported attribute. - --> test_src/lib.cairo[withcomponent]:33:13 + --> test_src/lib.cairo[withcomponent]:32:13 #[storage] ^********^ error: Unsupported attribute. - --> test_src/lib.cairo[withcomponent]:36:17 + --> test_src/lib.cairo[withcomponent]:35:17 #[substorage(v0)] ^***************^ error: Unsupported attribute. - --> test_src/lib.cairo[withcomponent]:38:25 + --> test_src/lib.cairo[withcomponent]:37:25 #[substorage(v0)] ^***************^ error: Unsupported attribute. - --> test_src/lib.cairo[withcomponent]:40:9 + --> test_src/lib.cairo[withcomponent]:39:9 #[substorage(v0)] ^***************^ error: Unsupported attribute. - --> test_src/lib.cairo[withcomponent]:44:13 + --> test_src/lib.cairo[withcomponent]:43:13 #[event] ^******^ error: Unsupported attribute. - --> test_src/lib.cairo[withcomponent]:48:25 + --> test_src/lib.cairo[withcomponent]:47:25 #[flat] ^*****^ @@ -688,52 +620,22 @@ error: Unsupported attribute. ^**************^ error: Unsupported attribute. - --> test_src/lib.cairo:109:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:114:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:123:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:128:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:133:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:138:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:143:5 + --> test_src/lib.cairo:117:5 #[abi(embed_v0)] ^**************^ error: Unsupported attribute. - --> test_src/lib.cairo[MyFaultyContract]:92:13 + --> test_src/lib.cairo[MyFaultyContract]:60:13 #[event] ^******^ error: Unsupported attribute. - --> test_src/lib.cairo[MyFaultyContract]:98:13 + --> test_src/lib.cairo[MyFaultyContract]:66:13 #[storage] ^********^ error: Unsupported attribute. - --> test_src/lib.cairo[MyFaultyContract]:101:17 + --> test_src/lib.cairo[MyFaultyContract]:69:17 #[substorage(v0)] ^***************^ @@ -758,42 +660,22 @@ error: Unsupported attribute. ^**************^ error: Unsupported attribute. - --> test_src/lib.cairo:184:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. 
- --> test_src/lib.cairo:190:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:196:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:202:5 - #[external(v0)] - ^*************^ - -error: Unsupported attribute. - --> test_src/lib.cairo:208:5 + --> test_src/lib.cairo:158:5 #[abi(embed_v0)] ^**************^ error: Unsupported attribute. - --> test_src/lib.cairo[MyNominalContract]:83:13 + --> test_src/lib.cairo[MyNominalContract]:64:13 #[event] ^******^ error: Unsupported attribute. - --> test_src/lib.cairo[MyNominalContract]:89:13 + --> test_src/lib.cairo[MyNominalContract]:70:13 #[storage] ^********^ error: Unsupported attribute. - --> test_src/lib.cairo[MyNominalContract]:92:17 + --> test_src/lib.cairo[MyNominalContract]:73:17 #[substorage(v0)] ^***************^ @@ -839,7 +721,7 @@ mod testcomponent2 { use traits::Into; use dojo::world::Context; - fn execute(self: @ContractState, ctx: Context, name: felt252) { + fn execute(ctx: Context, name: felt252) { return (); } @@ -884,7 +766,7 @@ impl EventDrop of core::traits::Drop::; #[abi(embed_v0)] impl UpgradableImpl = dojo::components::upgradeable::upgradeable::UpgradableImpl; - fn execute(self: @ContractState, value: felt252) -> felt252 { + fn execute(value: felt252) -> felt252 { value } @@ -932,7 +814,7 @@ impl EventDrop of core::traits::Drop::; use traits::Into; use dojo::world::Context; - fn execute(self: @ContractState, ctx2: Context, name: felt252) { + fn execute(ctx2: Context, name: felt252) { return (); } @@ -990,11 +872,6 @@ impl EventDrop of core::traits::Drop::; address: ContractAddress, } - #[abi(embed_v0)] - fn test(self: @ContractState, value: felt252) -> value { - value - } - #[storage] struct Storage { world_dispatcher: IWorldDispatcher, @@ -1077,6 +954,19 @@ impl EventDrop of core::traits::Drop::; } + #[starknet::interface] + trait IWorldTrait { + fn do_with_ref_self(self: @TContractState, ref self: ContractState) -> felt252; + fn do_with_several_world_dispatchers( +self: @TContractState, world: IWorldDispatcher, vec: Vec2, another_world: IWorldDispatcher + ) -> felt252; + fn do_with_world_not_named_world(self: @TContractState, another_world: IWorldDispatcher) -> felt252; + fn do_with_self_and_world_not_named_world(self: @TContractState, self: @ContractState, another_world: IWorldDispatcher); + fn do_with_world_not_first(self: @TContractState, vec: Vec2, world: IWorldDispatcher) -> felt252; + fn do_with_self_and_world_not_first(self: @TContractState, self: @ContractState, vec: Vec2, world: IWorldDispatcher) -> felt252; + + } + #[starknet::contract] mod MyFaultyContract { use dojo::world; @@ -1103,40 +993,8 @@ impl EventDrop of core::traits::Drop::; impl UpgradableImpl = dojo::components::upgradeable::upgradeable::UpgradableImpl; - #[external(v0)] - fn do_with_ref_self(ref self: ContractState) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_several_world_dispatchers( -self: @ContractState, world: IWorldDispatcher, vec: Vec2, another_world: IWorldDispatcher - ) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_world_not_named_world(self: @ContractState, another_world: IWorldDispatcher) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_self_and_world_not_named_world(self: @ContractState, another_world: IWorldDispatcher) -> felt252 { - 'land' - } - - #[external(v0)] - fn do_with_world_not_first(self: @ContractState, vec: Vec2, world: IWorldDispatcher) -> felt252 { - 'land' - } - - #[external(v0)] - fn 
do_with_self_and_world_not_first(self: @ContractState, vec: Vec2, world: IWorldDispatcher) -> felt252 { - 'land' - } - #[abi(embed_v0)] - impl ActionsImpl of IActions { + impl TestWorldImpl of IWorldTrait { fn do_with_ref_self(ref self: ContractState) -> felt252 { 'land' } @@ -1212,35 +1070,8 @@ impl EventDrop of core::traits::Drop::; damage: u8 } - #[external(v0)] - #[computed] - fn do(self: @ContractState, vec: Vec2) -> felt252 { - 'land' - } - - #[external(v0)] - #[computed] - fn do_with_self(self: @ContractState, vec: Vec2) -> felt252 { - 'land' - } - - #[external(v0)] - #[computed] - fn do_with_world_first(self: @ContractState, vec: Vec2) -> felt252 { -let world = self.world_dispatcher.read(); - 'land' - } - - #[external(v0)] - #[computed] - fn do_with_self_and_world_first(self: @ContractState, vec: Vec2) -> felt252 { -let world = self.world_dispatcher.read(); - 'land' - } - #[abi(embed_v0)] - impl ActionsImpl of IActions { - + impl TestWorldImpl of IWorldTrait { fn do(self: @ContractState, vec: Vec2) -> felt252 { 'land' } @@ -1260,6 +1091,14 @@ let world = self.world_dispatcher.read(); } } + #[generate_trait] + impl ImplInternalNoContractState of InternalNoContractState { + fn func1(world: IWorldDispatcher) -> felt252 { + let _w = world; + 42 + } + } + #[event] #[derive(Drop, starknet::Event)] enum Event { @@ -1273,6 +1112,9 @@ let world = self.world_dispatcher.read(); upgradeable: dojo::components::upgradeable::upgradeable::Storage, } impl ActionDrop of core::traits::Drop::; + trait InternalNoContractState { + fn func1(world: IWorldDispatcher) -> felt252; + } impl EventDrop of core::traits::Drop::; } diff --git a/crates/dojo-lang/src/scarb_internal/mod.rs b/crates/dojo-lang/src/scarb_internal/mod.rs index 94c50e0006..54616d36e1 100644 --- a/crates/dojo-lang/src/scarb_internal/mod.rs +++ b/crates/dojo-lang/src/scarb_internal/mod.rs @@ -22,8 +22,13 @@ use tracing::trace; use crate::plugin::dojo_plugin_suite; +pub(crate) const LOG_TARGET: &str = "dojo_lang::scarb_internal"; + pub struct CompileInfo { + pub profile_name: String, + pub manifest_path: Utf8PathBuf, pub target_dir: Utf8PathBuf, + pub root_package_name: Option, } pub fn crates_config_for_compilation_unit(unit: &CompilationUnit) -> AllCratesConfig { @@ -79,10 +84,22 @@ pub fn compile_workspace(config: &Config, opts: CompileOpts) -> Result Result { @@ -104,7 +121,7 @@ fn build_project_config(unit: &CompilationUnit) -> Result { let project_config = ProjectConfig { base_path: unit.main_component().package.root().into(), corelib, content }; - trace!(?project_config); + trace!(target: LOG_TARGET, ?project_config); Ok(project_config) } diff --git a/crates/dojo-test-utils/Cargo.toml b/crates/dojo-test-utils/Cargo.toml index 8cdbb727bf..af8e2a4313 100644 --- a/crates/dojo-test-utils/Cargo.toml +++ b/crates/dojo-test-utils/Cargo.toml @@ -16,7 +16,7 @@ cairo-lang-starknet.workspace = true camino.workspace = true dojo-lang = { path = "../dojo-lang" } dojo-world = { path = "../dojo-world", features = [ "manifest", "migration" ] } -jsonrpsee = { version = "0.16.2", features = [ "server" ] } +jsonrpsee = { workspace = true, features = [ "server" ] } katana-core = { path = "../katana/core" } katana-executor = { workspace = true, features = [ "blockifier" ] } katana-primitives = { path = "../katana/primitives" } diff --git a/crates/dojo-test-utils/src/compiler.rs b/crates/dojo-test-utils/src/compiler.rs index 2486ec1eac..06ab15362d 100644 --- a/crates/dojo-test-utils/src/compiler.rs +++ b/crates/dojo-test-utils/src/compiler.rs @@ -1,30 
+1,139 @@ -use std::env; +use std::fs::File; +use std::io::{Read, Write}; use std::path::PathBuf; +use std::{env, fs, io}; use assert_fs::TempDir; use camino::{Utf8Path, Utf8PathBuf}; use dojo_lang::compiler::DojoCompiler; use dojo_lang::plugin::CairoPluginRepository; +use dojo_lang::scarb_internal::{compile_workspace, CompileInfo}; use scarb::compiler::CompilerRepository; -use scarb::core::Config; +use scarb::core::{Config, TargetKind}; use scarb::ops; +use scarb::ops::CompileOpts; use scarb_ui::Verbosity; +use toml::{Table, Value}; -pub fn build_test_config(path: &str) -> anyhow::Result { +/// Copies a project to a new location, excluding the manifests +/// and target directories, build the temporary project and +/// return the temporary project directory. +/// +/// # Arguments +/// +/// * `source_project_path` - The path to the source project to copy and build at the temporary +/// location. +/// * `do_build` - Whether to build the temporary project. Only use this if you want to build the +/// project again to re-generate all the artifacts. This is a slow operation on the CI (~70s), use +/// it wisely. +pub fn copy_build_project_temp( + source_project_path: &str, + do_build: bool, +) -> (Utf8PathBuf, Config, Option) { + let source_project_dir = Utf8PathBuf::from(source_project_path).parent().unwrap().to_path_buf(); + + let temp_project_dir = Utf8PathBuf::from( + assert_fs::TempDir::new().unwrap().to_path_buf().to_string_lossy().to_string(), + ); + + let temp_project_path = temp_project_dir.join("Scarb").with_extension("toml").to_string(); + + copy_project_temp(&source_project_dir, &temp_project_dir).unwrap(); + + let config = build_test_config_default(&temp_project_path).unwrap(); + + let compile_info = if do_build { + Some( + compile_workspace( + &config, + CompileOpts { include_targets: vec![], exclude_targets: vec![TargetKind::TEST] }, + ) + .unwrap(), + ) + } else { + None + }; + + (temp_project_dir, config, compile_info) +} + +/// Copies a project to a new location, excluding the manifests and target directories. +/// +/// # Arguments +/// +/// * `source_dir` - The source directory to copy from. +pub fn copy_project_temp( + source_dir: &Utf8PathBuf, + destination_dir: &Utf8PathBuf, +) -> io::Result<()> { + let ignore_dirs = ["manifests", "target"]; + + if !destination_dir.exists() { + fs::create_dir_all(destination_dir)?; + } + + for entry in fs::read_dir(source_dir)? { + let entry = entry?; + let path = entry.path(); + if path.is_dir() { + let dir_name = match entry.file_name().into_string() { + Ok(name) => name, + Err(_) => continue, // Skip directories/files with non-UTF8 names + }; + + if ignore_dirs.contains(&dir_name.as_str()) { + continue; // Skip ignored directories + } + + copy_project_temp( + &Utf8PathBuf::from_path_buf(path).unwrap(), + &destination_dir.join(dir_name), + )?; + } else { + let file_name = entry.file_name().to_string_lossy().to_string(); + let dest_path = destination_dir.join(&file_name); + + // Replace in the Scarb.toml the path of dojo crate with the + // absolute path. 
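+            // A relative `path` dependency on the local dojo crate is rewritten to an
+            // absolute, canonicalized path so the copied project keeps resolving it from
+            // the temporary location.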
+ if file_name == "Scarb.toml" { + let mut contents = String::new(); + File::open(&path) + .and_then(|mut file| file.read_to_string(&mut contents)) + .unwrap_or_else(|_| panic!("Failed to read {file_name}")); + + let mut table = contents.parse::().expect("Failed to parse Scab.toml"); + + let dojo = table["dependencies"]["dojo"].as_table_mut().unwrap(); + + let absolute_path = Value::String( + fs::canonicalize(Utf8PathBuf::from(dojo["path"].as_str().unwrap())) + .unwrap() + .to_string_lossy() + .to_string(), + ); + + dojo["path"] = absolute_path; + + File::create(&dest_path) + .and_then(|mut file| file.write_all(table.to_string().as_bytes())) + .expect("Failed to write to Scab.toml"); + } else { + fs::copy(path, dest_path)?; + } + } + } + + Ok(()) +} + +pub fn build_test_config_default(path: &str) -> anyhow::Result { let mut compilers = CompilerRepository::empty(); compilers.add(Box::new(DojoCompiler)).unwrap(); let cairo_plugins = CairoPluginRepository::default(); - let cache_dir = TempDir::new().unwrap(); - let config_dir = TempDir::new().unwrap(); - let target_dir = TempDir::new().unwrap(); - let path = Utf8PathBuf::from_path_buf(path.into()).unwrap(); Config::builder(path.canonicalize_utf8().unwrap()) - .global_cache_dir_override(Some(Utf8Path::from_path(cache_dir.path()).unwrap())) - .global_config_dir_override(Some(Utf8Path::from_path(config_dir.path()).unwrap())) - .target_dir_override(Some(Utf8Path::from_path(target_dir.path()).unwrap().to_path_buf())) .ui_verbosity(Verbosity::Verbose) .log_filter_directive(env::var_os("SCARB_LOG")) .compilers(compilers) @@ -32,6 +141,43 @@ pub fn build_test_config(path: &str) -> anyhow::Result { .build() } +pub fn build_test_config(path: &str) -> anyhow::Result { + build_full_test_config(path, true) +} + +pub fn build_full_test_config(path: &str, override_dirs: bool) -> anyhow::Result { + let mut compilers = CompilerRepository::empty(); + compilers.add(Box::new(DojoCompiler)).unwrap(); + + let cairo_plugins = CairoPluginRepository::default(); + let path = Utf8PathBuf::from_path_buf(path.into()).unwrap(); + + if override_dirs { + let cache_dir = TempDir::new().unwrap(); + let config_dir = TempDir::new().unwrap(); + let target_dir = TempDir::new().unwrap(); + + Config::builder(path.canonicalize_utf8().unwrap()) + .global_cache_dir_override(Some(Utf8Path::from_path(cache_dir.path()).unwrap())) + .global_config_dir_override(Some(Utf8Path::from_path(config_dir.path()).unwrap())) + .target_dir_override(Some( + Utf8Path::from_path(target_dir.path()).unwrap().to_path_buf(), + )) + .ui_verbosity(Verbosity::Verbose) + .log_filter_directive(env::var_os("SCARB_LOG")) + .compilers(compilers) + .cairo_plugins(cairo_plugins.into()) + .build() + } else { + Config::builder(path.canonicalize_utf8().unwrap()) + .ui_verbosity(Verbosity::Verbose) + .log_filter_directive(env::var_os("SCARB_LOG")) + .compilers(compilers) + .cairo_plugins(cairo_plugins.into()) + .build() + } +} + pub fn corelib() -> PathBuf { let config = build_test_config("./src/manifest_test_data/spawn-and-move/Scarb.toml").unwrap(); let ws = ops::read_workspace(config.manifest_path(), &config).unwrap(); @@ -44,3 +190,64 @@ pub fn corelib() -> PathBuf { .source_root() .into() } + +#[cfg(test)] +mod tests { + use std::fs::{self, File}; + use std::io::Write; + + use assert_fs::TempDir; + + use super::*; + + #[test] + fn test_copy_project() { + let temp_dir = TempDir::new().unwrap(); + let project_dir = temp_dir.path().join("project"); + let dest_dir = temp_dir.path().join("dest"); + + 
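+        // Layout under test: project/{file.txt, subdir/subfile.txt, manifests/ignored_file.txt}
+        // is copied into dest/, and the `manifests` directory is expected to be skipped.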
fs::create_dir(&project_dir).unwrap(); + fs::create_dir(&dest_dir).unwrap(); + + // Create a file in the project directory + let file_path = project_dir.join("file.txt"); + let mut file = File::create(file_path).unwrap(); + writeln!(file, "Hello, world!").unwrap(); + + // Create a subdirectory with a file in the project directory + let sub_dir = project_dir.join("subdir"); + fs::create_dir(&sub_dir).unwrap(); + let sub_file_path = sub_dir.join("subfile.txt"); + let mut sub_file = File::create(sub_file_path).unwrap(); + writeln!(sub_file, "Hello, from subdir!").unwrap(); + + // Create a subdir that should be ignored + let ignored_sub_dir = project_dir.join("manifests"); + fs::create_dir(&ignored_sub_dir).unwrap(); + let ignored_sub_file_path = ignored_sub_dir.join("ignored_file.txt"); + let mut ignored_sub_file = File::create(ignored_sub_file_path).unwrap(); + writeln!(ignored_sub_file, "This should be ignored!").unwrap(); + + // Perform the copy + copy_project_temp( + &Utf8PathBuf::from(&project_dir.to_string_lossy()), + &Utf8PathBuf::from(&dest_dir.to_string_lossy()), + ) + .unwrap(); + + // Check that the file exists in the destination directory + let dest_file_path = dest_dir.join("file.txt"); + assert!(dest_file_path.exists()); + + // Check that the subdirectory and its file exist in the destination directory + let dest_sub_dir = dest_dir.join("subdir"); + let dest_sub_file_path = dest_sub_dir.join("subfile.txt"); + let dest_ignored_sub_dir = dest_sub_dir.join("manifests"); + assert!(dest_sub_dir.exists()); + assert!(dest_sub_file_path.exists()); + assert!(!dest_ignored_sub_dir.exists()); + + // Clean up + temp_dir.close().unwrap(); + } +} diff --git a/crates/dojo-test-utils/src/migration.rs b/crates/dojo-test-utils/src/migration.rs index e696fc5157..d2434ca761 100644 --- a/crates/dojo-test-utils/src/migration.rs +++ b/crates/dojo-test-utils/src/migration.rs @@ -4,14 +4,43 @@ use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; use dojo_world::manifest::BaseManifest; use dojo_world::migration::strategy::{prepare_for_migration, MigrationStrategy}; use dojo_world::migration::world::WorldDiff; +use katana_primitives::FieldElement; +use starknet::core::utils::cairo_short_string_to_felt; use starknet::macros::felt; pub fn prepare_migration( manifest_dir: Utf8PathBuf, target_dir: Utf8PathBuf, ) -> Result { - let manifest = - BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR)).unwrap(); + // In testing, profile name is always dev. + let profile_name = "dev"; + + let manifest = BaseManifest::load_from_path( + &manifest_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + ) + .unwrap(); + + let world = WorldDiff::compute(manifest, None); + + prepare_for_migration(None, felt!("0x12345"), &target_dir, world) +} + +pub fn prepare_migration_with_world_and_seed( + manifest_dir: Utf8PathBuf, + target_dir: Utf8PathBuf, + world_address: Option, + seed: &str, +) -> Result { + // In testing, profile name is always dev. 
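The new `prepare_migration_with_world_and_seed` helper (continued just below) accepts the world seed as a plain string and converts it to a `FieldElement` with `cairo_short_string_to_felt` instead of hard-coding a felt. A small sketch of that conversion, with an arbitrary seed value, is shown here.

```rust
use starknet::core::types::FieldElement;
use starknet::core::utils::cairo_short_string_to_felt;

fn main() {
    // Cairo short strings hold at most 31 ASCII characters; the bytes are
    // packed big-endian into a single field element, so "dojo" encodes to
    // 0x646f6a6f.
    let seed = cairo_short_string_to_felt("dojo").unwrap();
    assert_eq!(seed, FieldElement::from_hex_be("0x646f6a6f").unwrap());

    println!("seed felt: {seed:#x}");
}
```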
+ let profile_name = "dev"; + + let manifest = BaseManifest::load_from_path( + &manifest_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + ) + .unwrap(); + let world = WorldDiff::compute(manifest, None); - prepare_for_migration(None, Some(felt!("0x12345")), &target_dir, world) + + let seed = cairo_short_string_to_felt(seed).unwrap(); + prepare_for_migration(world_address, seed, &target_dir, world) } diff --git a/crates/dojo-test-utils/src/sequencer.rs b/crates/dojo-test-utils/src/sequencer.rs index 7017c6cc3c..7b93d7235b 100644 --- a/crates/dojo-test-utils/src/sequencer.rs +++ b/crates/dojo-test-utils/src/sequencer.rs @@ -15,7 +15,7 @@ use katana_rpc::{spawn, NodeHandle}; use katana_rpc_api::ApiKind; use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; use starknet::core::chain_id; -use starknet::core::types::FieldElement; +use starknet::core::types::{BlockId, BlockTag, FieldElement}; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; use starknet::signers::{LocalWallet, SigningKey}; @@ -92,13 +92,44 @@ impl TestSequencer { } pub fn account(&self) -> SingleOwnerAccount, LocalWallet> { - SingleOwnerAccount::new( + let mut account = SingleOwnerAccount::new( JsonRpcClient::new(HttpTransport::new(self.url.clone())), LocalWallet::from_signing_key(SigningKey::from_secret_scalar(self.account.private_key)), self.account.account_address, chain_id::TESTNET, ExecutionEncoding::New, - ) + ); + + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + account + } + + pub fn provider(&self) -> JsonRpcClient { + JsonRpcClient::new(HttpTransport::new(self.url.clone())) + } + + pub fn account_at_index( + &self, + index: usize, + ) -> SingleOwnerAccount, LocalWallet> { + let accounts: Vec<_> = self.sequencer.backend.config.genesis.accounts().collect::<_>(); + + let account = accounts[index]; + let private_key = account.1.private_key().unwrap(); + let address: FieldElement = (*(account.0)).into(); + + let mut account = SingleOwnerAccount::new( + JsonRpcClient::new(HttpTransport::new(self.url.clone())), + LocalWallet::from_signing_key(SigningKey::from_secret_scalar(private_key)), + address, + chain_id::TESTNET, + ExecutionEncoding::New, + ); + + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + account } pub fn raw_account(&self) -> &TestAccount { diff --git a/crates/dojo-types/src/packing.rs b/crates/dojo-types/src/packing.rs index c011a63511..acba62efd1 100644 --- a/crates/dojo-types/src/packing.rs +++ b/crates/dojo-types/src/packing.rs @@ -205,7 +205,7 @@ mod tests { #[test] fn parse_simple_with_invalid_value() { - let data = &vec![FieldElement::default()]; - assert!(matches!(super::parse_simple(data), Err(ParseError::InvalidSchema))); + let data = [FieldElement::default()]; + assert!(matches!(super::parse_simple(&data), Err(ParseError::InvalidSchema))); } } diff --git a/crates/dojo-world/Cargo.toml b/crates/dojo-world/Cargo.toml index 728721454c..3780dd8211 100644 --- a/crates/dojo-world/Cargo.toml +++ b/crates/dojo-world/Cargo.toml @@ -40,6 +40,7 @@ dojo-lang.workspace = true dojo-test-utils = { path = "../dojo-test-utils" } similar-asserts.workspace = true tokio.workspace = true +tempfile = "3.3.0" [features] contracts = [ "dep:dojo-types", "dep:http" ] diff --git a/crates/dojo-world/src/contracts/world_test.rs b/crates/dojo-world/src/contracts/world_test.rs index e9cfb18e16..2edf50cf6e 100644 --- a/crates/dojo-world/src/contracts/world_test.rs +++ b/crates/dojo-world/src/contracts/world_test.rs @@ -12,7 +12,7 @@ use 
super::{WorldContract, WorldContractReader}; use crate::manifest::BaseManifest; use crate::migration::strategy::prepare_for_migration; use crate::migration::world::WorldDiff; -use crate::migration::{Declarable, Deployable}; +use crate::migration::{Declarable, Deployable, TxnConfig}; #[tokio::test(flavor = "multi_thread")] async fn test_world_contract_reader() { @@ -35,21 +35,26 @@ pub async fn deploy_world( manifest_dir: &Utf8PathBuf, target_dir: &Utf8PathBuf, ) -> FieldElement { - let manifest = - BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR)).unwrap(); + // Dev profile is used by default for testing: + let profile_name = "dev"; + + let manifest = BaseManifest::load_from_path( + &manifest_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + ) + .unwrap(); let world = WorldDiff::compute(manifest.clone(), None); let account = sequencer.account(); let strategy = prepare_for_migration( None, - Some(FieldElement::from_hex_be("0x12345").unwrap()), + FieldElement::from_hex_be("0x12345").unwrap(), target_dir, world, ) .unwrap(); let base_class_hash = - strategy.base.unwrap().declare(&account, Default::default()).await.unwrap().class_hash; + strategy.base.unwrap().declare(&account, &TxnConfig::default()).await.unwrap().class_hash; // wait for the tx to be mined tokio::time::sleep(Duration::from_millis(250)).await; @@ -61,7 +66,7 @@ pub async fn deploy_world( manifest.clone().world.inner.class_hash, vec![base_class_hash], &account, - Default::default(), + &TxnConfig::default(), ) .await .unwrap() @@ -69,7 +74,7 @@ pub async fn deploy_world( let mut declare_output = vec![]; for model in strategy.models { - let res = model.declare(&account, Default::default()).await.unwrap(); + let res = model.declare(&account, &TxnConfig::default()).await.unwrap(); declare_output.push(res); } @@ -89,9 +94,15 @@ pub async fn deploy_world( tokio::time::sleep(Duration::from_millis(250)).await; for contract in strategy.contracts { - let declare_res = contract.declare(&account, Default::default()).await.unwrap(); + let declare_res = contract.declare(&account, &TxnConfig::default()).await.unwrap(); contract - .world_deploy(world_address, declare_res.class_hash, &account, Default::default()) + .deploy_dojo_contract( + world_address, + declare_res.class_hash, + base_class_hash, + &account, + &TxnConfig::default(), + ) .await .unwrap(); } diff --git a/crates/dojo-world/src/manifest_test.rs b/crates/dojo-world/src/manifest/manifest_test.rs similarity index 83% rename from crates/dojo-world/src/manifest_test.rs rename to crates/dojo-world/src/manifest/manifest_test.rs index f36c516e79..c39d7b4ad0 100644 --- a/crates/dojo-world/src/manifest_test.rs +++ b/crates/dojo-world/src/manifest/manifest_test.rs @@ -1,3 +1,5 @@ +use std::io::Write; + use camino::Utf8PathBuf; use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; use dojo_test_utils::rpc::MockJsonRpcTransport; @@ -6,11 +8,12 @@ use dojo_test_utils::sequencer::{ }; use serde_json::json; use starknet::accounts::ConnectedAccount; +use starknet::core::types::contract::AbiEntry; use starknet::core::types::{EmittedEvent, FieldElement}; use starknet::macros::{felt, selector, short_string}; use starknet::providers::jsonrpc::{JsonRpcClient, JsonRpcMethod}; -use super::{parse_contracts_events, BaseManifest, DojoContract, DojoModel}; +use super::{parse_contracts_events, AbiFormat, BaseManifest, DojoContract, DojoModel}; use crate::contracts::world::test::deploy_world; use crate::manifest::{parse_models_events, AbstractManifestError, 
DeploymentManifest, Manifest}; use crate::migration::world::WorldDiff; @@ -375,15 +378,19 @@ async fn fetch_remote_manifest() { let world_address = deploy_world(&sequencer, &manifest_path, &artifacts_path).await; - let local_manifest = - BaseManifest::load_from_path(&manifest_path.join(MANIFESTS_DIR).join(BASE_DIR)).unwrap(); + let profile_name = "dev"; + + let local_manifest = BaseManifest::load_from_path( + &manifest_path.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + ) + .unwrap(); let remote_manifest = DeploymentManifest::load_from_remote(provider, world_address).await.unwrap(); - assert_eq!(local_manifest.models.len(), 3); + assert_eq!(local_manifest.models.len(), 4); assert_eq!(local_manifest.contracts.len(), 1); - assert_eq!(remote_manifest.models.len(), 3); + assert_eq!(remote_manifest.models.len(), 4); assert_eq!(remote_manifest.contracts.len(), 1); // compute diff from local and remote manifest @@ -392,3 +399,64 @@ async fn fetch_remote_manifest() { assert_eq!(diff.count_diffs(), 0, "there should not be any diff"); } + +#[test] +fn test_abi_format_to_embed() -> Result<(), Box> { + let temp_dir = tempfile::tempdir()?; + let temp_path = temp_dir.path().join("abi.json"); + let mut temp_file = std::fs::File::create(&temp_path)?; + + let temp_dir_utf8 = Utf8PathBuf::from_path_buf(temp_dir.path().into()).unwrap(); + + writeln!( + temp_file, + "[{{\"type\":\"function\",\"name\":\"testFunction\",\"inputs\":[],\"outputs\":[],\"\ + state_mutability\":\"view\"}}]" + )?; + + let abi_format_path = AbiFormat::Path(Utf8PathBuf::from_path_buf(temp_path).unwrap()); + let embedded_abi = abi_format_path.to_embed(&temp_dir_utf8)?; + + let abi_format_not_changed = embedded_abi.clone(); + + match &embedded_abi { + AbiFormat::Embed(abi_entries) => { + assert_eq!(abi_entries.len(), 1); + let entry_0 = &abi_entries[0]; + if let AbiEntry::Function(function) = entry_0 { + assert_eq!(function.name, "testFunction"); + } + } + _ => panic!("Expected AbiFormat::Embed variant"), + } + + assert_eq!(embedded_abi, abi_format_not_changed.to_embed(&temp_dir_utf8).unwrap()); + + Ok(()) +} + +#[test] +fn test_abi_format_to_path() { + let embedded = AbiFormat::Embed(vec![]); + assert!(embedded.to_path().is_none()); + + let path = AbiFormat::Path(Utf8PathBuf::from("/tmp")); + assert!(path.to_path().is_some()); +} + +#[test] +fn test_abi_format_load_abi_string() -> Result<(), Box> { + let temp_dir = tempfile::tempdir()?; + let temp_path = temp_dir.path().join("abi.json"); + let mut temp_file = std::fs::File::create(&temp_path)?; + + write!(temp_file, "[]")?; + + let path = AbiFormat::Path(Utf8PathBuf::from_path_buf(temp_path.clone()).unwrap()); + assert_eq!(path.load_abi_string(&Utf8PathBuf::new()).unwrap(), "[]"); + + let embedded = AbiFormat::Embed(vec![]); + assert_eq!(embedded.load_abi_string(&Utf8PathBuf::new()).unwrap(), "[]"); + + Ok(()) +} diff --git a/crates/dojo-world/src/manifest.rs b/crates/dojo-world/src/manifest/mod.rs similarity index 70% rename from crates/dojo-world/src/manifest.rs rename to crates/dojo-world/src/manifest/mod.rs index b2a3916d5f..0c3a9c73d6 100644 --- a/crates/dojo-world/src/manifest.rs +++ b/crates/dojo-world/src/manifest/mod.rs @@ -5,11 +5,7 @@ use anyhow::Result; use cainome::cairo_serde::Error as CainomeError; use camino::Utf8PathBuf; use serde::de::DeserializeOwned; -use serde::{Deserialize, Serialize}; -use serde_with::serde_as; use smol_str::SmolStr; -use starknet::core::serde::unsigned_field_element::UfeHex; -use starknet::core::types::contract::AbiEntry; use 
starknet::core::types::{ BlockId, BlockTag, EmittedEvent, EventFilter, FieldElement, FunctionCall, StarknetError, }; @@ -30,6 +26,14 @@ use crate::contracts::WorldContractReader; #[path = "manifest_test.rs"] mod test; +mod types; + +pub use types::{ + AbiFormat, BaseManifest, Class, ComputedValueEntrypoint, DeploymentManifest, DojoContract, + DojoModel, Manifest, ManifestMethods, Member, OverlayClass, OverlayContract, + OverlayDojoContract, OverlayDojoModel, OverlayManifest, WorldContract, WorldMetadata, +}; + pub const WORLD_CONTRACT_NAME: &str = "dojo::world::world"; pub const BASE_CONTRACT_NAME: &str = "dojo::base::base"; pub const RESOURCE_METADATA_CONTRACT_NAME: &str = "dojo::resource_metadata::resource_metadata"; @@ -55,181 +59,19 @@ pub enum AbstractManifestError { TOML(#[from] toml::de::Error), #[error(transparent)] IO(#[from] io::Error), + #[error("Abi couldn't be loaded from path: {0}")] + AbiError(String), + #[error(transparent)] + Json(#[from] serde_json::Error), } -/// Represents a model member. -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -pub struct Member { - /// Name of the member. - pub name: String, - /// Type of the member. - #[serde(rename = "type")] - pub ty: String, - pub key: bool, -} - -impl From for Member { - fn from(m: dojo_types::schema::Member) -> Self { - Self { name: m.name, ty: m.ty.name(), key: m.key } - } -} - -/// Represents a declaration of a model. -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -#[serde(tag = "kind")] -pub struct DojoModel { - pub members: Vec, - #[serde_as(as = "UfeHex")] - pub class_hash: FieldElement, - pub abi: Option, -} - -/// System input ABI. -#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)] -pub struct Input { - pub name: String, - #[serde(rename = "type")] - pub ty: String, -} - -/// System Output ABI. -#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)] -pub struct Output { - #[serde(rename = "type")] - pub ty: String, -} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)] -pub struct ComputedValueEntrypoint { - // Name of the contract containing the entrypoint - pub contract: SmolStr, - // Name of entrypoint to get computed value - pub entrypoint: SmolStr, - // Component to compute for - pub model: Option, -} - -/// Format of the ABI into the manifest. -#[serde_as] -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(untagged)] -pub enum AbiFormat { - /// Only a relative path to the ABI file is stored. - Path(Utf8PathBuf), - /// The full ABI is embedded. - Embed(Vec), -} - -#[cfg(test)] -impl PartialEq for AbiFormat { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (AbiFormat::Path(p1), AbiFormat::Path(p2)) => p1 == p2, - (AbiFormat::Embed(e1), AbiFormat::Embed(e2)) => { - // Currently, [`AbiEntry`] does not implement [`PartialEq`] so we cannot compare - // them directly. 
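The two `AbstractManifestError` variants added above let ABI handling surface its own failures: `AbiError` carries a plain message, while the `#[from]` attribute on `Json` means any `serde_json::Error` raised while loading or embedding an ABI can be propagated with `?`. A simplified, self-contained sketch of that thiserror pattern (the enum name and helper are illustrative, not the crate's actual types):

```rust
use thiserror::Error;

#[derive(Debug, Error)]
enum ManifestErrorLike {
    #[error("Abi couldn't be loaded from path: {0}")]
    AbiError(String),
    #[error(transparent)]
    Json(#[from] serde_json::Error),
}

// Because of `#[from]`, the `?` operator converts a serde_json failure
// into the enum automatically.
fn parse_abi(raw: &str) -> Result<serde_json::Value, ManifestErrorLike> {
    Ok(serde_json::from_str(raw)?)
}

fn main() {
    assert!(parse_abi("[]").is_ok());
    assert!(matches!(parse_abi("not json"), Err(ManifestErrorLike::Json(_))));
}
```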
- let e1_json = serde_json::to_string(e1).expect("valid JSON from ABI"); - let e2_json = serde_json::to_string(e2).expect("valid JSON from ABI"); - e1_json == e2_json - } - _ => false, - } - } -} - -impl AbiFormat { - pub fn to_path(&self) -> Option<&Utf8PathBuf> { - match self { - AbiFormat::Path(p) => Some(p), - AbiFormat::Embed(_) => None, - } - } -} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -#[serde(tag = "kind")] -pub struct DojoContract { - #[serde_as(as = "Option")] - pub address: Option, - #[serde_as(as = "UfeHex")] - pub class_hash: FieldElement, - pub abi: Option, - pub reads: Vec, - pub writes: Vec, - pub computed: Vec, -} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -pub struct OverlayDojoContract { - pub name: SmolStr, - pub reads: Option>, - pub writes: Option>, -} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -pub struct OverlayDojoModel {} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -pub struct OverlayContract {} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -pub struct OverlayClass {} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -#[serde(tag = "kind")] -pub struct Class { - #[serde_as(as = "UfeHex")] - pub class_hash: FieldElement, - pub abi: Option, -} - -#[serde_as] -#[derive(Clone, Default, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -#[serde(tag = "kind")] -pub struct Contract { - #[serde_as(as = "UfeHex")] - pub class_hash: FieldElement, - pub abi: Option, - #[serde_as(as = "Option")] - pub address: Option, - #[serde_as(as = "Option")] - pub transaction_hash: Option, - pub block_number: Option, - // used by World contract - pub seed: Option, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -pub struct BaseManifest { - pub world: Manifest, - pub base: Manifest, - pub contracts: Vec>, - pub models: Vec>, -} - -impl From> for Manifest { +impl From> for Manifest { fn from(value: Manifest) -> Self { Manifest::new( - Contract { + WorldContract { class_hash: value.inner.class_hash, abi: value.inner.abi, + original_class_hash: value.inner.original_class_hash, ..Default::default() }, value.name, @@ -248,62 +90,20 @@ impl From for DeploymentManifest { } } -#[derive(Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -pub struct DeploymentManifest { - pub world: Manifest, - pub base: Manifest, - pub contracts: Vec>, - pub models: Vec>, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[cfg_attr(test, derive(PartialEq))] -pub struct OverlayManifest { - pub contracts: Vec, -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -#[cfg_attr(test, derive(PartialEq))] -pub struct Manifest -where - T: ManifestMethods, -{ - #[serde(flatten)] - pub inner: T, - pub name: SmolStr, -} - -impl Manifest -where - T: ManifestMethods, -{ - pub fn new(inner: T, name: SmolStr) -> Self { - Self { inner, name } - } -} - -pub trait ManifestMethods { - type OverlayType; - fn abi(&self) -> Option<&AbiFormat>; - fn set_abi(&mut self, abi: Option); - fn class_hash(&self) -> &FieldElement; - fn set_class_hash(&mut self, class_hash: FieldElement); - - /// This method is called when during compilation base manifest file 
already exists. - /// Manifest generated during compilation won't contains properties manually updated by users - /// (like calldata) so this method should override those fields - fn merge(&mut self, old: Self::OverlayType); -} - impl BaseManifest { /// Load the manifest from a file at the given path. pub fn load_from_path(path: &Utf8PathBuf) -> Result { let contract_dir = path.join("contracts"); let model_dir = path.join("models"); - let world: Manifest = toml::from_str(&fs::read_to_string(path.join("world.toml"))?)?; - let base: Manifest = toml::from_str(&fs::read_to_string(path.join("base.toml"))?)?; + let world: Manifest = toml::from_str(&fs::read_to_string( + path.join(WORLD_CONTRACT_NAME.replace("::", "_")).with_extension("toml"), + )?)?; + + let base: Manifest = toml::from_str(&fs::read_to_string( + path.join(BASE_CONTRACT_NAME.replace("::", "_")).with_extension("toml"), + )?)?; + let contracts = elements_from_path::(&contract_dir)?; let models = elements_from_path::(&model_dir)?; @@ -324,15 +124,42 @@ impl BaseManifest { .inner .merge(contract); } + + if let Some(overlay_world) = overlay.world { + self.world.inner.merge(overlay_world); + } + if let Some(overlay_base) = overlay.base { + self.base.inner.merge(overlay_base); + } } } impl OverlayManifest { pub fn load_from_path(path: &Utf8PathBuf) -> Result { + let mut world: Option = None; + + let world_path = path.join(WORLD_CONTRACT_NAME.replace("::", "_")).with_extension("toml"); + + if world_path.exists() { + world = Some(toml::from_str(&fs::read_to_string(world_path)?)?); + } + + let mut base: Option = None; + let base_path = path.join(BASE_CONTRACT_NAME.replace("::", "_")).with_extension("toml"); + + if base_path.exists() { + base = Some(toml::from_str(&fs::read_to_string(base_path)?)?); + } + let contract_dir = path.join("contracts"); - let contracts = overlay_elements_from_path::(&contract_dir)?; - Ok(Self { contracts }) + let contracts = if contract_dir.exists() { + overlay_elements_from_path::(&contract_dir)? + } else { + vec![] + }; + + Ok(Self { world, base, contracts }) } } @@ -347,6 +174,13 @@ impl DeploymentManifest { self.world.inner.transaction_hash = previous.world.inner.transaction_hash; self.world.inner.block_number = previous.world.inner.block_number; self.world.inner.seed = previous.world.inner.seed; + + self.contracts.iter_mut().for_each(|contract| { + let previous_contract = previous.contracts.iter().find(|c| c.name == contract.name); + if let Some(previous_contract) = previous_contract { + contract.inner.base_class_hash = previous_contract.inner.base_class_hash; + } + }); } pub fn write_to_path_toml(&self, path: &Utf8PathBuf) -> Result<()> { @@ -358,16 +192,25 @@ impl DeploymentManifest { Ok(()) } - pub fn write_to_path_json(&self, path: &Utf8PathBuf, manifest_dir: &Utf8PathBuf) -> Result<()> { + pub fn write_to_path_json(&self, path: &Utf8PathBuf, profile_dir: &Utf8PathBuf) -> Result<()> { fs::create_dir_all(path.parent().unwrap())?; // Embedding ABIs into the manifest. 
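The `load_from_path` changes above drop the fixed `world.toml`/`base.toml` file names: each manifest file is now named after the fully qualified contract name with `::` replaced by `_`. A quick sketch of the convention (the directory layout below is illustrative, matching the `manifests/<profile>/base` layout used elsewhere in this change):

```rust
use camino::Utf8PathBuf;

fn main() {
    // Mirrors the WORLD_CONTRACT_NAME / BASE_CONTRACT_NAME constants.
    let world_contract_name = "dojo::world::world";
    let base_contract_name = "dojo::base::base";

    // e.g. manifests/dev/base, as used by the tests in this change.
    let base_dir = Utf8PathBuf::from("manifests/dev/base");

    let world_path =
        base_dir.join(world_contract_name.replace("::", "_")).with_extension("toml");
    let base_path =
        base_dir.join(base_contract_name.replace("::", "_")).with_extension("toml");

    // -> manifests/dev/base/dojo_world_world.toml
    println!("{world_path}");
    // -> manifests/dev/base/dojo_base_base.toml
    println!("{base_path}");
}
```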
let mut manifest_with_abis = self.clone(); + + if let Some(abi_format) = &manifest_with_abis.world.inner.abi { + manifest_with_abis.world.inner.abi = Some(abi_format.to_embed(profile_dir)?); + } + for contract in &mut manifest_with_abis.contracts { - if let Some(AbiFormat::Path(abi_path)) = &contract.inner.abi { - let mut abi_file = std::fs::File::open(manifest_dir.join(abi_path))?; - let abi_entries: Vec = serde_json::from_reader(&mut abi_file)?; - contract.inner.abi = Some(AbiFormat::Embed(abi_entries)); + if let Some(abi_format) = &contract.inner.abi { + contract.inner.abi = Some(abi_format.to_embed(profile_dir)?); + } + } + + for model in &mut manifest_with_abis.models { + if let Some(abi_format) = &model.inner.abi { + model.inner.abi = Some(abi_format.to_embed(profile_dir)?); } } @@ -402,6 +245,7 @@ impl DeploymentManifest { let world = WorldContractReader::new(world_address, provider); let base_class_hash = world.base().block_id(BLOCK_ID).call().await?; + let base_class_hash = base_class_hash.into(); let (models, contracts) = get_remote_models_and_contracts(world_address, &world.provider()).await?; @@ -410,7 +254,7 @@ impl DeploymentManifest { models, contracts, world: Manifest::new( - Contract { + WorldContract { address: Some(world_address), class_hash: world_class_hash, ..Default::default() @@ -418,7 +262,11 @@ impl DeploymentManifest { WORLD_CONTRACT_NAME.into(), ), base: Manifest::new( - Class { class_hash: base_class_hash.into(), abi: None }, + Class { + class_hash: base_class_hash, + abi: None, + original_class_hash: base_class_hash, + }, BASE_CONTRACT_NAME.into(), ), }) @@ -712,7 +560,14 @@ impl ManifestMethods for DojoContract { self.class_hash = class_hash; } + fn original_class_hash(&self) -> &FieldElement { + self.original_class_hash.as_ref() + } + fn merge(&mut self, old: Self::OverlayType) { + if let Some(class_hash) = old.original_class_hash { + self.original_class_hash = class_hash; + } if let Some(reads) = old.reads { self.reads = reads; } @@ -741,10 +596,18 @@ impl ManifestMethods for DojoModel { self.class_hash = class_hash; } - fn merge(&mut self, _: Self::OverlayType) {} + fn original_class_hash(&self) -> &FieldElement { + self.original_class_hash.as_ref() + } + + fn merge(&mut self, old: Self::OverlayType) { + if let Some(class_hash) = old.original_class_hash { + self.original_class_hash = class_hash; + } + } } -impl ManifestMethods for Contract { +impl ManifestMethods for WorldContract { type OverlayType = OverlayContract; fn abi(&self) -> Option<&AbiFormat> { @@ -763,7 +626,15 @@ impl ManifestMethods for Contract { self.class_hash = class_hash; } - fn merge(&mut self, _: Self::OverlayType) {} + fn original_class_hash(&self) -> &FieldElement { + self.original_class_hash.as_ref() + } + + fn merge(&mut self, old: Self::OverlayType) { + if let Some(class_hash) = old.original_class_hash { + self.original_class_hash = class_hash; + } + } } impl ManifestMethods for Class { @@ -785,5 +656,13 @@ impl ManifestMethods for Class { self.class_hash = class_hash; } - fn merge(&mut self, _: Self::OverlayType) {} + fn original_class_hash(&self) -> &FieldElement { + self.original_class_hash.as_ref() + } + + fn merge(&mut self, old: Self::OverlayType) { + if let Some(class_hash) = old.original_class_hash { + self.original_class_hash = class_hash; + } + } } diff --git a/crates/dojo-world/src/manifest/types.rs b/crates/dojo-world/src/manifest/types.rs new file mode 100644 index 0000000000..afc6f13406 --- /dev/null +++ b/crates/dojo-world/src/manifest/types.rs @@ -0,0 +1,296 @@ 
+use std::fs;
+
+use camino::Utf8PathBuf;
+use serde::{Deserialize, Serialize};
+use serde_with::serde_as;
+use smol_str::SmolStr;
+use starknet::core::serde::unsigned_field_element::UfeHex;
+use starknet::core::types::contract::AbiEntry;
+use starknet_crypto::FieldElement;
+
+use crate::manifest::AbstractManifestError;
+
+// Collection of the different types of `Manifest` used by the Dojo compiler and sozo.
+// For example:
+// - `BaseManifest` is generated by the compiler and written to the `manifests/base` folder of the
+//   project.
+// - `DeploymentManifest` is generated by sozo and represents the future onchain state after a
+//   successful migration.
+// - `OverlayManifest` is used by sozo to override values of a specific manifest of the
+//   `BaseManifest` that is generated by the compiler.
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[cfg_attr(test, derive(PartialEq))]
+pub struct BaseManifest {
+    pub world: Manifest<Class>,
+    pub base: Manifest<Class>,
+    pub contracts: Vec<Manifest<DojoContract>>,
+    pub models: Vec<Manifest<DojoModel>>,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[cfg_attr(test, derive(PartialEq))]
+pub struct DeploymentManifest {
+    pub world: Manifest<WorldContract>,
+    pub base: Manifest<Class>,
+    pub contracts: Vec<Manifest<DojoContract>>,
+    pub models: Vec<Manifest<DojoModel>>,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[cfg_attr(test, derive(PartialEq))]
+pub struct OverlayManifest {
+    pub world: Option<OverlayClass>,
+    pub base: Option<OverlayClass>,
+    pub contracts: Vec<OverlayDojoContract>,
+}
+
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(test, derive(PartialEq))]
+pub struct Manifest<T>
+where
+    T: ManifestMethods,
+{
+    #[serde(flatten)]
+    pub inner: T,
+    pub name: SmolStr,
+}
+
+// Utility methods that need to be implemented by manifest types.
+pub trait ManifestMethods {
+    type OverlayType;
+    fn abi(&self) -> Option<&AbiFormat>;
+    fn set_abi(&mut self, abi: Option<AbiFormat>);
+    fn class_hash(&self) -> &FieldElement;
+    fn set_class_hash(&mut self, class_hash: FieldElement);
+    fn original_class_hash(&self) -> &FieldElement;
+
+    /// This method is called during compilation when a base manifest file already exists.
+    /// A manifest generated during compilation won't contain properties manually updated by users
+    /// (like calldata), so this method should override those fields.
+    fn merge(&mut self, old: Self::OverlayType);
+}
+
+impl<T> Manifest<T>
+where
+    T: ManifestMethods,
+{
+    pub fn new(inner: T, name: SmolStr) -> Self {
+        Self { inner, name }
+    }
+}
+
+#[serde_as]
+#[derive(Clone, Default, Debug, Serialize, Deserialize)]
+#[cfg_attr(test, derive(PartialEq))]
+#[serde(tag = "kind")]
+pub struct DojoContract {
+    #[serde_as(as = "Option<UfeHex>")]
+    pub address: Option<FieldElement>,
+    #[serde_as(as = "UfeHex")]
+    pub class_hash: FieldElement,
+    #[serde_as(as = "UfeHex")]
+    pub original_class_hash: FieldElement,
+    // Base class hash used to deploy the contract.
+    #[serde_as(as = "UfeHex")]
+    pub base_class_hash: FieldElement,
+    pub abi: Option<AbiFormat>,
+    pub reads: Vec<String>,
+    pub writes: Vec<String>,
+    pub computed: Vec<ComputedValueEntrypoint>,
+}
+
+/// Represents a declaration of a model.
+#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +#[serde(tag = "kind")] +pub struct DojoModel { + pub members: Vec, + #[serde_as(as = "UfeHex")] + pub class_hash: FieldElement, + #[serde_as(as = "UfeHex")] + pub original_class_hash: FieldElement, + pub abi: Option, +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +#[serde(tag = "kind")] +pub struct WorldContract { + #[serde_as(as = "UfeHex")] + pub class_hash: FieldElement, + #[serde_as(as = "UfeHex")] + pub original_class_hash: FieldElement, + pub abi: Option, + #[serde_as(as = "Option")] + pub address: Option, + #[serde_as(as = "Option")] + pub transaction_hash: Option, + pub block_number: Option, + pub seed: String, + pub metadata: Option, +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +#[serde(tag = "kind")] +pub struct Class { + #[serde_as(as = "UfeHex")] + pub class_hash: FieldElement, + #[serde_as(as = "UfeHex")] + pub original_class_hash: FieldElement, + pub abi: Option, +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +pub struct OverlayDojoContract { + pub name: SmolStr, + pub original_class_hash: Option, + pub reads: Option>, + pub writes: Option>, +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +pub struct OverlayDojoModel { + pub name: SmolStr, + pub original_class_hash: Option, +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +pub struct OverlayContract { + pub name: SmolStr, + pub original_class_hash: Option, +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +pub struct OverlayClass { + pub name: SmolStr, + pub original_class_hash: Option, +} + +// Types used by manifest + +/// Represents a model member. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct Member { + /// Name of the member. + pub name: String, + /// Type of the member. + #[serde(rename = "type")] + pub ty: String, + pub key: bool, +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)] +pub struct ComputedValueEntrypoint { + // Name of the contract containing the entrypoint + pub contract: SmolStr, + // Name of entrypoint to get computed value + pub entrypoint: SmolStr, + // Component to compute for + pub model: Option, +} + +impl From for Member { + fn from(m: dojo_types::schema::Member) -> Self { + Self { name: m.name, ty: m.ty.name(), key: m.key } + } +} + +/// System input ABI. +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)] +pub struct Input { + pub name: String, + #[serde(rename = "type")] + pub ty: String, +} + +/// System Output ABI. +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq)] +pub struct Output { + #[serde(rename = "type")] + pub ty: String, +} + +/// Format of the ABI into the manifest. +#[serde_as] +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum AbiFormat { + /// Only a relative path to the ABI file is stored. + Path(Utf8PathBuf), + /// The full ABI is embedded. + Embed(Vec), +} + +impl AbiFormat { + /// Get the [`Utf8PathBuf`] if the ABI is stored as a path. 
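Because `AbiFormat` above is `#[serde(untagged)]`, a manifest can hold either a bare string (the relative ABI path) or a full array of ABI entries under the same `abi` key, and serde picks the variant by shape. The sketch below is a simplified stand-in that uses `serde_json::Value` instead of `AbiEntry` to keep it self-contained; the enum name is hypothetical.

```rust
use serde::{Deserialize, Serialize};

// Simplified stand-in for the crate's AbiFormat; entries are opaque here.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(untagged)]
enum AbiFormatLike {
    Path(String),
    Embed(Vec<serde_json::Value>),
}

fn main() -> serde_json::Result<()> {
    // A plain string deserializes into the Path variant...
    let path: AbiFormatLike = serde_json::from_str(r#""abis/base/contracts/actions.json""#)?;
    assert_eq!(path, AbiFormatLike::Path("abis/base/contracts/actions.json".into()));

    // ...while an array of entries deserializes into the Embed variant.
    let embed: AbiFormatLike = serde_json::from_str(r#"[{"type":"function","name":"move"}]"#)?;
    assert!(matches!(embed, AbiFormatLike::Embed(ref entries) if entries.len() == 1));

    Ok(())
}
```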
+ pub fn to_path(&self) -> Option<&Utf8PathBuf> { + match self { + AbiFormat::Path(p) => Some(p), + AbiFormat::Embed(_) => None, + } + } + + /// Loads an ABI from the path or embedded entries. + /// + /// # Arguments + /// + /// * `root_dir` - The root directory of the ABI file. + pub fn load_abi_string(&self, root_dir: &Utf8PathBuf) -> Result { + match self { + AbiFormat::Path(abi_path) => Ok(fs::read_to_string(root_dir.join(abi_path))?), + AbiFormat::Embed(abi) => Ok(serde_json::to_string(&abi)?), + } + } + + /// Convert to embed variant. + /// + /// # Arguments + /// + /// * `root_dir` - The root directory for the abi file resolution. + pub fn to_embed(&self, root_dir: &Utf8PathBuf) -> Result { + if let AbiFormat::Path(abi_path) = self { + let mut abi_file = std::fs::File::open(root_dir.join(abi_path))?; + Ok(serde_json::from_reader(&mut abi_file)?) + } else { + Ok(self.clone()) + } + } +} + +#[cfg(test)] +impl PartialEq for AbiFormat { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (AbiFormat::Path(p1), AbiFormat::Path(p2)) => p1 == p2, + (AbiFormat::Embed(e1), AbiFormat::Embed(e2)) => { + // Currently, [`AbiEntry`] does not implement [`PartialEq`] so we cannot compare + // them directly. + let e1_json = serde_json::to_string(e1).expect("valid JSON from ABI"); + let e2_json = serde_json::to_string(e2).expect("valid JSON from ABI"); + e1_json == e2_json + } + _ => false, + } + } +} + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +pub struct WorldMetadata { + pub profile_name: String, + pub rpc_url: String, +} diff --git a/crates/dojo-world/src/metadata.rs b/crates/dojo-world/src/metadata.rs index a3c2c7d1ca..2d50f5fa70 100644 --- a/crates/dojo-world/src/metadata.rs +++ b/crates/dojo-world/src/metadata.rs @@ -3,24 +3,144 @@ use std::io::Cursor; use std::path::PathBuf; use anyhow::Result; +use camino::Utf8PathBuf; use ipfs_api_backend_hyper::{IpfsApi, IpfsClient, TryFromUri}; use scarb::core::{ManifestMetadata, Workspace}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_json::json; use url::Url; +use crate::manifest::{BaseManifest, WORLD_CONTRACT_NAME}; + #[cfg(test)] #[path = "metadata_test.rs"] mod test; -pub fn dojo_metadata_from_workspace(ws: &Workspace<'_>) -> Option { - Some(ws.current_package().ok()?.manifest.metadata.dojo()) +pub const IPFS_CLIENT_URL: &str = "https://ipfs.infura.io:5001"; +pub const IPFS_USERNAME: &str = "2EBrzr7ZASQZKH32sl2xWauXPSA"; +pub const IPFS_PASSWORD: &str = "12290b883db9138a8ae3363b6739d220"; + +// copy constants from dojo-lang to avoid circular dependency +pub const MANIFESTS_DIR: &str = "manifests"; +pub const ABIS_DIR: &str = "abis"; +pub const SOURCES_DIR: &str = "src"; +pub const BASE_DIR: &str = "base"; + +fn build_artifact_from_name( + source_dir: &Utf8PathBuf, + abi_dir: &Utf8PathBuf, + element_name: &str, +) -> ArtifactMetadata { + let sanitized_name = element_name.replace("::", "_"); + let abi_file = abi_dir.join(format!("{sanitized_name}.json")); + let src_file = source_dir.join(format!("{sanitized_name}.cairo")); + + ArtifactMetadata { + abi: if abi_file.exists() { Some(Uri::File(abi_file.into_std_path_buf())) } else { None }, + source: if src_file.exists() { + Some(Uri::File(src_file.into_std_path_buf())) + } else { + None + }, + } } +/// Build world metadata with data read from the project configuration. +/// +/// # Arguments +/// +/// * `project_metadata` - The project metadata. 
+/// +/// # Returns +/// +/// A [`WorldMetadata`] object initialized with project metadata. +pub fn project_to_world_metadata(project_metadata: Option) -> WorldMetadata { + if let Some(m) = project_metadata { + WorldMetadata { + name: m.name, + description: m.description, + cover_uri: m.cover_uri, + icon_uri: m.icon_uri, + website: m.website, + socials: m.socials, + ..Default::default() + } + } else { + WorldMetadata { + name: None, + description: None, + cover_uri: None, + icon_uri: None, + website: None, + socials: None, + ..Default::default() + } + } +} + +/// Collect metadata from the project configuration and from the workspace. +/// +/// # Arguments +/// `ws`: the workspace. +/// +/// # Returns +/// A [`DojoMetadata`] object containing all Dojo metadata. +pub fn dojo_metadata_from_workspace(ws: &Workspace<'_>) -> DojoMetadata { + let profile = ws.config().profile(); + + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let manifest_dir = manifest_dir.join(MANIFESTS_DIR).join(profile.as_str()); + let target_dir = ws.target_dir().path_existent().unwrap(); + let sources_dir = target_dir.join(profile.as_str()).join(SOURCES_DIR); + let abis_dir = manifest_dir.join(ABIS_DIR).join(BASE_DIR); + + let project_metadata = ws.current_package().unwrap().manifest.metadata.dojo(); + let mut dojo_metadata = + DojoMetadata { env: project_metadata.env.clone(), ..Default::default() }; + + let world_artifact = build_artifact_from_name(&sources_dir, &abis_dir, WORLD_CONTRACT_NAME); + + // inialize Dojo world metadata with world metadata coming from project configuration + dojo_metadata.world = project_to_world_metadata(project_metadata.world); + dojo_metadata.world.artifacts = world_artifact; + + // load models and contracts metadata + if manifest_dir.join(BASE_DIR).exists() { + if let Ok(manifest) = BaseManifest::load_from_path(&manifest_dir.join(BASE_DIR)) { + for model in manifest.models { + let name = model.name.to_string(); + dojo_metadata.artifacts.insert( + name.clone(), + build_artifact_from_name(&sources_dir, &abis_dir.join("models"), &name), + ); + } + + for contract in manifest.contracts { + let name = contract.name.to_string(); + dojo_metadata.artifacts.insert( + name.clone(), + build_artifact_from_name(&sources_dir, &abis_dir.join("contracts"), &name), + ); + } + } + } + + dojo_metadata +} + +/// Metadata coming from project configuration (Scarb.toml) +#[derive(Default, Deserialize, Debug, Clone)] +pub struct ProjectMetadata { + pub world: Option, + pub env: Option, +} + +/// Metadata collected from the project configuration and the Dojo workspace #[derive(Default, Deserialize, Debug, Clone)] -pub struct Metadata { - pub world: Option, +pub struct DojoMetadata { + pub world: WorldMetadata, pub env: Option, + pub artifacts: HashMap, } #[derive(Debug)] @@ -76,6 +196,18 @@ impl Uri { } } +/// World metadata coming from the project configuration (Scarb.toml) +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct ProjectWorldMetadata { + pub name: Option, + pub description: Option, + pub cover_uri: Option, + pub icon_uri: Option, + pub website: Option, + pub socials: Option>, +} + +/// World metadata collected from the project configuration and the Dojo workspace #[derive(Default, Serialize, Deserialize, Debug, Clone)] pub struct WorldMetadata { pub name: Option, @@ -84,6 +216,14 @@ pub struct WorldMetadata { pub icon_uri: Option, pub website: Option, pub socials: Option>, + pub artifacts: ArtifactMetadata, +} + +/// Metadata Artifacts collected for one Dojo 
element (world, model, contract...) +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct ArtifactMetadata { + pub abi: Option, + pub source: Option, } #[derive(Default, Deserialize, Clone, Debug)] @@ -122,7 +262,7 @@ impl Environment { } } -impl WorldMetadata { +impl ProjectWorldMetadata { pub fn name(&self) -> Option<&str> { self.name.as_deref() } @@ -135,8 +275,8 @@ impl WorldMetadata { impl WorldMetadata { pub async fn upload(&self) -> Result { let mut meta = self.clone(); - let client = IpfsClient::from_str("https://ipfs.infura.io:5001")? - .with_credentials("2EBrzr7ZASQZKH32sl2xWauXPSA", "12290b883db9138a8ae3363b6739d220"); + let client = + IpfsClient::from_str(IPFS_CLIENT_URL)?.with_credentials(IPFS_USERNAME, IPFS_PASSWORD); if let Some(Uri::File(icon)) = &self.icon_uri { let icon_data = std::fs::read(icon)?; @@ -152,6 +292,20 @@ impl WorldMetadata { meta.cover_uri = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) }; + if let Some(Uri::File(abi)) = &self.artifacts.abi { + let abi_data = std::fs::read(abi)?; + let reader = Cursor::new(abi_data); + let response = client.add(reader).await?; + meta.artifacts.abi = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + if let Some(Uri::File(source)) = &self.artifacts.source { + let source_data = std::fs::read(source)?; + let reader = Cursor::new(source_data); + let response = client.add(reader).await?; + meta.artifacts.source = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + let serialized = json!(meta).to_string(); let reader = Cursor::new(serialized); let response = client.add(reader).await?; @@ -160,26 +314,51 @@ impl WorldMetadata { } } -impl Metadata { - pub fn env(&self) -> Option<&Environment> { - self.env.as_ref() +impl ArtifactMetadata { + pub async fn upload(&self) -> Result { + let mut meta = self.clone(); + let client = + IpfsClient::from_str(IPFS_CLIENT_URL)?.with_credentials(IPFS_USERNAME, IPFS_PASSWORD); + + if let Some(Uri::File(abi)) = &self.abi { + let abi_data = std::fs::read(abi)?; + let reader = Cursor::new(abi_data); + let response = client.add(reader).await?; + meta.abi = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + if let Some(Uri::File(source)) = &self.source { + let source_data = std::fs::read(source)?; + let reader = Cursor::new(source_data); + let response = client.add(reader).await?; + meta.source = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + let serialized = json!(meta).to_string(); + let reader = Cursor::new(serialized); + let response = client.add(reader).await?; + + Ok(response.hash) } +} - pub fn world(&self) -> Option<&WorldMetadata> { - self.world.as_ref() +impl DojoMetadata { + pub fn env(&self) -> Option<&Environment> { + self.env.as_ref() } } + trait MetadataExt { - fn dojo(&self) -> Metadata; + fn dojo(&self) -> ProjectMetadata; } impl MetadataExt for ManifestMetadata { - fn dojo(&self) -> Metadata { + fn dojo(&self) -> ProjectMetadata { self.tool_metadata .as_ref() .and_then(|e| e.get("dojo")) .cloned() - .map(|v| v.try_into::().unwrap_or_default()) + .map(|v| v.try_into::().unwrap_or_default()) .unwrap_or_default() } } diff --git a/crates/dojo-world/src/metadata_test.rs b/crates/dojo-world/src/metadata_test.rs index a6c950fa6c..b30624320f 100644 --- a/crates/dojo-world/src/metadata_test.rs +++ b/crates/dojo-world/src/metadata_test.rs @@ -1,13 +1,18 @@ use std::collections::HashMap; +use camino::Utf8PathBuf; +use dojo_test_utils::compiler::build_full_test_config; +use scarb::ops; use url::Url; -use super::WorldMetadata; 
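With the split above, `ProjectMetadata` only mirrors what the user writes under `[tool.dojo]` in Scarb.toml, while `DojoMetadata` is the enriched, workspace-derived view. The sketch below shows the kind of TOML that feeds the project side; the struct names and the field subset are illustrative stand-ins chosen to match the deserialization test further down, not the crate's full definitions.

```rust
use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
struct EnvLike {
    rpc_url: Option<String>,
    account_address: Option<String>,
    private_key: Option<String>,
    world_address: Option<String>,
}

#[derive(Debug, Default, Deserialize)]
struct ProjectMetadataLike {
    env: Option<EnvLike>,
}

fn main() {
    // Parses the same shape of `[env]` section exercised by the test below.
    let metadata: ProjectMetadataLike = toml::from_str(
        r#"
        [env]
        rpc_url = "http://localhost:5050/"
        account_address = "0x1234"
        "#,
    )
    .unwrap();

    let env = metadata.env.expect("env section should be present");
    assert_eq!(env.rpc_url.as_deref(), Some("http://localhost:5050/"));
    assert!(env.world_address.is_none());
}
```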
-use crate::metadata::{Metadata, Uri}; +use crate::metadata::{ + dojo_metadata_from_workspace, ArtifactMetadata, ProjectMetadata, Uri, WorldMetadata, ABIS_DIR, + BASE_DIR, MANIFESTS_DIR, SOURCES_DIR, +}; #[test] fn check_metadata_deserialization() { - let metadata: Metadata = toml::from_str( + let metadata: ProjectMetadata = toml::from_str( r#" [env] rpc_url = "http://localhost:5050/" @@ -64,9 +69,13 @@ async fn world_metadata_hash_and_upload() { name: Some("Test World".to_string()), description: Some("A world used for testing".to_string()), cover_uri: Some(Uri::File("src/metadata_test_data/cover.png".into())), - icon_uri: None, + icon_uri: Some(Uri::File("src/metadata_test_data/cover.png".into())), website: Some(Url::parse("https://dojoengine.org").unwrap()), socials: Some(HashMap::from([("x".to_string(), "https://x.com/dojostarknet".to_string())])), + artifacts: ArtifactMetadata { + abi: Some(Uri::File("src/metadata_test_data/abi.json".into())), + source: Some(Uri::File("src/metadata_test_data/source.cairo".into())), + }, }; let _ = meta.upload().await.unwrap(); @@ -74,7 +83,7 @@ async fn world_metadata_hash_and_upload() { #[tokio::test] async fn parse_world_metadata_without_socials() { - let metadata: Metadata = toml::from_str( + let metadata: ProjectMetadata = toml::from_str( r#" [env] rpc_url = "http://localhost:5050/" @@ -97,3 +106,101 @@ website = "https://dojoengine.org" assert!(metadata.world.is_some()); } + +#[tokio::test] +async fn get_full_dojo_metadata_from_workspace() { + let config = build_full_test_config("../../examples/spawn-and-move/Scarb.toml", false).unwrap(); + let ws = ops::read_workspace(config.manifest_path(), &config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); + + let profile = ws.config().profile(); + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let manifest_dir = manifest_dir.join(MANIFESTS_DIR).join(profile.as_str()); + let target_dir = ws.target_dir().path_existent().unwrap(); + let sources_dir = target_dir.join(profile.as_str()).join(SOURCES_DIR); + let abis_dir = manifest_dir.join(ABIS_DIR).join(BASE_DIR); + + let dojo_metadata = dojo_metadata_from_workspace(&ws); + + // env + assert!(dojo_metadata.env.is_some()); + let env = dojo_metadata.env.unwrap(); + + assert!(env.rpc_url.is_some()); + assert!(env.rpc_url.unwrap().eq("http://localhost:5050/")); + + assert!(env.account_address.is_some()); + assert!( + env.account_address + .unwrap() + .eq("0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03") + ); + + assert!(env.private_key.is_some()); + assert!( + env.private_key.unwrap().eq("0x1800000000300000180000000000030000000000003006001800006600") + ); + + assert!(env.world_address.is_some()); + assert!( + env.world_address + .unwrap() + .eq("0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295") + ); + + assert!(env.keystore_path.is_none()); + assert!(env.keystore_password.is_none()); + + // world + assert!(dojo_metadata.world.name.is_some()); + assert!(dojo_metadata.world.name.unwrap().eq("example")); + + assert!(dojo_metadata.world.description.is_some()); + assert!(dojo_metadata.world.description.unwrap().eq("example world")); + + assert!(dojo_metadata.world.cover_uri.is_none()); + assert!(dojo_metadata.world.icon_uri.is_none()); + assert!(dojo_metadata.world.website.is_none()); + assert!(dojo_metadata.world.socials.is_none()); + + check_artifact( + dojo_metadata.world.artifacts, + "dojo_world_world".to_string(), + &abis_dir, + &sources_dir, + ); + + // artifacts + let artifacts = 
vec![ + ("models", "dojo_examples::actions::actions::moved"), + ("models", "dojo_examples::models::emote_message"), + ("models", "dojo_examples::models::moves"), + ("models", "dojo_examples::models::position"), + ("contracts", "dojo_examples::actions::actions"), + ]; + + for (abi_subdir, name) in artifacts { + let artifact = dojo_metadata.artifacts.get(name); + assert!(artifact.is_some()); + let artifact = artifact.unwrap(); + + let sanitized_name = name.replace("::", "_"); + + check_artifact(artifact.clone(), sanitized_name, &abis_dir.join(abi_subdir), &sources_dir); + } +} + +fn check_artifact( + artifact: ArtifactMetadata, + name: String, + abis_dir: &Utf8PathBuf, + sources_dir: &Utf8PathBuf, +) { + assert!(artifact.abi.is_some()); + let abi = artifact.abi.unwrap(); + assert_eq!(abi, Uri::File(abis_dir.join(format!("{name}.json")).into())); + + assert!(artifact.source.is_some()); + let source = artifact.source.unwrap(); + assert_eq!(source, Uri::File(sources_dir.join(format!("{name}.cairo")).into())); +} diff --git a/crates/dojo-world/src/metadata_test_data/abi.json b/crates/dojo-world/src/metadata_test_data/abi.json new file mode 100644 index 0000000000..78efed0140 --- /dev/null +++ b/crates/dojo-world/src/metadata_test_data/abi.json @@ -0,0 +1,17 @@ +[ + { + "type": "impl", + "name": "WorldProviderImpl", + "interface_name": "dojo::world::IWorldProvider" + }, + { + "type": "struct", + "name": "dojo::world::IWorldDispatcher", + "members": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + } + ] + } +] diff --git a/crates/dojo-world/src/metadata_test_data/source.cairo b/crates/dojo-world/src/metadata_test_data/source.cairo new file mode 100644 index 0000000000..c917342ece --- /dev/null +++ b/crates/dojo-world/src/metadata_test_data/source.cairo @@ -0,0 +1,79 @@ +use starknet::ContractAddress; + +#[derive(Serde, Copy, Drop, Introspect)] +enum Direction { + None, + Left, + Right, + Up, + Down, +} + +impl DirectionIntoFelt252 of Into { + fn into(self: Direction) -> felt252 { + match self { + Direction::None => 0, + Direction::Left => 1, + Direction::Right => 2, + Direction::Up => 3, + Direction::Down => 4, + } + } +} + +#[derive(Model, Copy, Drop, Serde)] +struct Moves { + #[key] + player: ContractAddress, + remaining: u8, + last_direction: Direction +} + +#[derive(Copy, Drop, Serde, Introspect)] +struct Vec2 { + x: u32, + y: u32 +} + +#[derive(Model, Copy, Drop, Serde)] +struct Position { + #[key] + player: ContractAddress, + vec: Vec2, +} + +trait Vec2Trait { + fn is_zero(self: Vec2) -> bool; + fn is_equal(self: Vec2, b: Vec2) -> bool; +} + +impl Vec2Impl of Vec2Trait { + fn is_zero(self: Vec2) -> bool { + if self.x - self.y == 0 { + return true; + } + false + } + + fn is_equal(self: Vec2, b: Vec2) -> bool { + self.x == b.x && self.y == b.y + } +} + +#[cfg(test)] +mod tests { + use super::{Position, Vec2, Vec2Trait}; + + #[test] + #[available_gas(100000)] + fn test_vec_is_zero() { + assert(Vec2Trait::is_zero(Vec2 { x: 0, y: 0 }), 'not zero'); + } + + #[test] + #[available_gas(100000)] + fn test_vec_is_equal() { + let position = Vec2 { x: 420, y: 0 }; + assert(position.is_equal(Vec2 { x: 420, y: 0 }), 'not equal'); + } +} diff --git a/crates/dojo-world/src/migration/class.rs b/crates/dojo-world/src/migration/class.rs index dfcb98acb5..7f2376694f 100644 --- a/crates/dojo-world/src/migration/class.rs +++ b/crates/dojo-world/src/migration/class.rs @@ -10,22 +10,27 @@ use super::{Declarable, MigrationType, StateDiff}; #[derive(Debug, Default, 
Clone)] pub struct ClassDiff { pub name: String, - pub local: FieldElement, - pub remote: Option, + pub local_class_hash: FieldElement, + pub original_class_hash: FieldElement, + pub remote_class_hash: Option, } impl StateDiff for ClassDiff { fn is_same(&self) -> bool { - if let Some(remote) = self.remote { self.local == remote } else { false } + if let Some(remote) = self.remote_class_hash { + self.local_class_hash == remote + } else { + false + } } } impl Display for ClassDiff { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "{}:", self.name)?; - writeln!(f, " Local: {:#x}", self.local)?; + writeln!(f, " Local: {:#x}", self.local_class_hash)?; - if let Some(remote) = self.remote { + if let Some(remote) = self.remote_class_hash { writeln!(f, " Remote: {remote:#x}")?; } @@ -41,11 +46,11 @@ pub struct ClassMigration { impl ClassMigration { pub fn migration_type(&self) -> MigrationType { - let Some(remote) = self.diff.remote else { + let Some(remote) = self.diff.remote_class_hash else { return MigrationType::New; }; - match self.diff.local == remote { + match self.diff.local_class_hash == remote { true => MigrationType::New, false => MigrationType::Update, } diff --git a/crates/dojo-world/src/migration/contract.rs b/crates/dojo-world/src/migration/contract.rs index ee8fc2dae7..166fbfbed8 100644 --- a/crates/dojo-world/src/migration/contract.rs +++ b/crates/dojo-world/src/migration/contract.rs @@ -4,7 +4,7 @@ use std::path::PathBuf; use async_trait::async_trait; use starknet::core::types::{DeclareTransactionResult, FieldElement}; -use super::{Declarable, Deployable, MigrationType, StateDiff}; +use super::{Declarable, Deployable, MigrationType, StateDiff, Upgradable}; pub type DeclareOutput = DeclareTransactionResult; @@ -12,23 +12,31 @@ pub type DeclareOutput = DeclareTransactionResult; #[derive(Debug, Default, Clone)] pub struct ContractDiff { pub name: String, - pub local: FieldElement, - pub remote: Option, + pub local_class_hash: FieldElement, + pub original_class_hash: FieldElement, + pub base_class_hash: FieldElement, + pub remote_class_hash: Option, } impl StateDiff for ContractDiff { fn is_same(&self) -> bool { - if let Some(remote) = self.remote { self.local == remote } else { false } + if let Some(remote) = self.remote_class_hash { + self.local_class_hash == remote + } else { + false + } } } impl Display for ContractDiff { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "{}:", self.name)?; - writeln!(f, " Local: {:#x}", self.local)?; + writeln!(f, " Local Class Hash: {:#x}", self.local_class_hash)?; + writeln!(f, " Original Class Hash: {:#x}", self.original_class_hash)?; + writeln!(f, " Base Class Hash: {:#x}", self.base_class_hash)?; - if let Some(remote) = self.remote { - writeln!(f, " Remote: {remote:#x}")?; + if let Some(remote) = self.remote_class_hash { + writeln!(f, " Remote Class Hash: {remote:#x}")?; } Ok(()) @@ -46,11 +54,11 @@ pub struct ContractMigration { impl ContractMigration { pub fn migration_type(&self) -> MigrationType { - let Some(remote) = self.diff.remote else { + let Some(remote) = self.diff.remote_class_hash else { return MigrationType::New; }; - match self.diff.local == remote { + match self.diff.local_class_hash == remote { true => MigrationType::New, false => MigrationType::Update, } @@ -70,3 +78,6 @@ impl Deployable for ContractMigration { self.salt } } + +#[async_trait] +impl Upgradable for ContractMigration {} diff --git a/crates/dojo-world/src/migration/mod.rs 
b/crates/dojo-world/src/migration/mod.rs index bafb4d25dc..2eb5df13d0 100644 --- a/crates/dojo-world/src/migration/mod.rs +++ b/crates/dojo-world/src/migration/mod.rs @@ -9,18 +9,16 @@ use cairo_lang_starknet::contract_class::ContractClass; use starknet::accounts::{Account, AccountError, Call, ConnectedAccount, SingleOwnerAccount}; use starknet::core::types::contract::{CompiledClass, SierraClass}; use starknet::core::types::{ - BlockId, BlockTag, DeclareTransactionResult, FieldElement, FlattenedSierraClass, FunctionCall, + BlockId, BlockTag, DeclareTransactionResult, FieldElement, FlattenedSierraClass, InvokeTransactionResult, MaybePendingTransactionReceipt, StarknetError, TransactionReceipt, }; -use starknet::core::utils::{ - get_contract_address, get_selector_from_name, CairoShortStringToFeltError, -}; +use starknet::core::utils::{get_contract_address, CairoShortStringToFeltError}; use starknet::macros::{felt, selector}; use starknet::providers::{Provider, ProviderError}; use starknet::signers::Signer; use thiserror::Error; -use crate::utils::{TransactionWaiter, TransactionWaitingError}; +use crate::utils::{TransactionExt, TransactionWaiter, TransactionWaitingError}; pub mod class; pub mod contract; @@ -35,12 +33,25 @@ pub struct DeployOutput { pub block_number: Option, pub contract_address: FieldElement, pub declare: Option, + // base class hash at time of deployment + pub base_class_hash: FieldElement, + pub was_upgraded: bool, + pub name: Option, +} + +#[derive(Clone, Debug)] +pub struct UpgradeOutput { + pub transaction_hash: FieldElement, + pub block_number: Option, + pub contract_address: FieldElement, + pub declare: Option, } #[derive(Debug)] pub struct RegisterOutput { pub transaction_hash: FieldElement, pub declare_output: Vec, + pub registered_model_names: Vec, } #[derive(Debug, Error)] @@ -81,12 +92,13 @@ pub trait StateDiff { /// The transaction configuration to use when sending a transaction. #[derive(Debug, Copy, Clone, Default)] -pub struct TxConfig { +pub struct TxnConfig { /// The multiplier for how much the actual transaction max fee should be relative to the /// estimated fee. If `None` is provided, the multiplier is set to `1.1`. pub fee_estimate_multiplier: Option, pub wait: bool, pub receipt: bool, + pub max_fee_raw: Option, } #[cfg_attr(not(target_arch = "wasm32"), async_trait)] @@ -95,7 +107,7 @@ pub trait Declarable { async fn declare( &self, account: &SingleOwnerAccount, - txn_config: TxConfig, + txn_config: &TxnConfig, ) -> Result as Account>::SignError>> where P: Provider + Sync + Send, @@ -114,14 +126,11 @@ pub trait Declarable { Err(e) => return Err(MigrationError::Provider(e)), } - let mut txn = account.declare(Arc::new(flattened_class), casm_class_hash); - - if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. 
} = txn_config { - txn = txn.fee_estimate_multiplier(multiplier); - } - - let DeclareTransactionResult { transaction_hash, class_hash } = - txn.send().await.map_err(MigrationError::Migrator)?; + let DeclareTransactionResult { transaction_hash, class_hash } = account + .declare(Arc::new(flattened_class), casm_class_hash) + .send_with_cfg(txn_config) + .await + .map_err(MigrationError::Migrator)?; TransactionWaiter::new(transaction_hash, account.provider()) .await @@ -136,12 +145,13 @@ pub trait Declarable { #[cfg_attr(not(target_arch = "wasm32"), async_trait)] #[cfg_attr(target_arch = "wasm32", async_trait(?Send))] pub trait Deployable: Declarable + Sync { - async fn world_deploy( + async fn deploy_dojo_contract( &self, world_address: FieldElement, class_hash: FieldElement, + base_class_hash: FieldElement, account: &SingleOwnerAccount, - txn_config: TxConfig, + txn_config: &TxnConfig, ) -> Result as Account>::SignError>> where P: Provider + Sync + Send, @@ -153,32 +163,25 @@ pub trait Deployable: Declarable + Sync { Err(e) => return Err(e), }; - let base_class_hash = account - .provider() - .call( - FunctionCall { - contract_address: world_address, - calldata: vec![], - entry_point_selector: get_selector_from_name("base").unwrap(), - }, - BlockId::Tag(BlockTag::Pending), - ) - .await - .map_err(MigrationError::Provider)?; - let contract_address = - get_contract_address(self.salt(), base_class_hash[0], &[], world_address); + get_contract_address(self.salt(), base_class_hash, &[], world_address); + + let mut was_upgraded = false; let call = match account .provider() .get_class_hash_at(BlockId::Tag(BlockTag::Pending), contract_address) .await { - Ok(current_class_hash) if current_class_hash != class_hash => Call { - calldata: vec![contract_address, class_hash], - selector: selector!("upgrade_contract"), - to: world_address, - }, + Ok(current_class_hash) if current_class_hash != class_hash => { + was_upgraded = true; + + Call { + calldata: vec![contract_address, class_hash], + selector: selector!("upgrade_contract"), + to: world_address, + } + } Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => Call { calldata: vec![self.salt(), class_hash], @@ -193,19 +196,24 @@ pub trait Deployable: Declarable + Sync { Err(e) => return Err(MigrationError::Provider(e)), }; - let mut txn = account.execute(vec![call]); - - if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. 
} = txn_config { - txn = txn.fee_estimate_multiplier(multiplier); - } - - let InvokeTransactionResult { transaction_hash } = - txn.send().await.map_err(MigrationError::Migrator)?; + let InvokeTransactionResult { transaction_hash } = account + .execute(vec![call]) + .send_with_cfg(txn_config) + .await + .map_err(MigrationError::Migrator)?; let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; let block_number = get_block_number_from_receipt(receipt); - Ok(DeployOutput { transaction_hash, block_number, contract_address, declare }) + Ok(DeployOutput { + transaction_hash, + block_number, + contract_address, + declare, + base_class_hash, + was_upgraded, + name: None, + }) } async fn deploy( @@ -213,7 +221,7 @@ pub trait Deployable: Declarable + Sync { class_hash: FieldElement, constructor_calldata: Vec, account: &SingleOwnerAccount, - txn_config: TxConfig, + txn_config: &TxnConfig, ) -> Result as Account>::SignError>> where P: Provider + Sync + Send, @@ -253,29 +261,86 @@ pub trait Deployable: Declarable + Sync { Err(e) => return Err(MigrationError::Provider(e)), } - let mut txn = account.execute(vec![Call { + let txn = account.execute(vec![Call { calldata, // devnet UDC address selector: selector!("deployContract"), to: felt!("0x41a78e741e5af2fec34b695679bc6891742439f7afb8484ecd7766661ad02bf"), }]); - if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. } = txn_config { - txn = txn.fee_estimate_multiplier(multiplier); - } - let InvokeTransactionResult { transaction_hash } = - txn.send().await.map_err(MigrationError::Migrator)?; + txn.send_with_cfg(txn_config).await.map_err(MigrationError::Migrator)?; let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; let block_number = get_block_number_from_receipt(receipt); - Ok(DeployOutput { transaction_hash, block_number, contract_address, declare }) + Ok(DeployOutput { + transaction_hash, + block_number, + contract_address, + declare, + base_class_hash: FieldElement::default(), + was_upgraded: false, + name: None, + }) } fn salt(&self) -> FieldElement; } +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +pub trait Upgradable: Deployable + Declarable + Sync { + async fn upgrade_world( + &self, + class_hash: FieldElement, + original_class_hash: FieldElement, + original_base_class_hash: FieldElement, + account: &SingleOwnerAccount, + txn_config: &TxnConfig, + ) -> Result as Account>::SignError>> + where + P: Provider + Sync + Send, + S: Signer + Sync + Send, + { + let declare = match self.declare(account, txn_config).await { + Ok(res) => Some(res), + Err(MigrationError::ClassAlreadyDeclared) => None, + Err(e) => return Err(e), + }; + + let original_constructor_calldata = vec![original_base_class_hash]; + let contract_address = get_contract_address( + self.salt(), + original_class_hash, + &original_constructor_calldata, + FieldElement::ZERO, + ); + + match account + .provider() + .get_class_hash_at(BlockId::Tag(BlockTag::Pending), contract_address) + .await + { + Ok(_) => {} + Err(e) => return Err(MigrationError::Provider(e)), + } + + let calldata = vec![class_hash]; + + let InvokeTransactionResult { transaction_hash } = account + .execute(vec![Call { calldata, selector: selector!("upgrade"), to: contract_address }]) + .send_with_cfg(txn_config) + .await + .map_err(MigrationError::Migrator)?; + + let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; + let block_number = 
get_block_number_from_receipt(receipt); + + Ok(UpgradeOutput { transaction_hash, block_number, contract_address, declare }) + } +} + fn prepare_contract_declaration_params( artifact_path: &PathBuf, ) -> Result<(FlattenedSierraClass, FieldElement)> { diff --git a/crates/dojo-world/src/migration/strategy.rs b/crates/dojo-world/src/migration/strategy.rs index 0873e962de..fae462a248 100644 --- a/crates/dojo-world/src/migration/strategy.rs +++ b/crates/dojo-world/src/migration/strategy.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use std::fs; use std::path::PathBuf; -use anyhow::{anyhow, Context, Result}; +use anyhow::{anyhow, bail, Context, Result}; use camino::Utf8PathBuf; use starknet::core::types::FieldElement; use starknet::core::utils::{cairo_short_string_to_felt, get_contract_address}; @@ -65,7 +65,7 @@ impl MigrationStrategy { /// evaluate which contracts/classes need to be declared/deployed pub fn prepare_for_migration( world_address: Option<FieldElement>, - seed: Option<FieldElement>, + seed: FieldElement, target_dir: &Utf8PathBuf, diff: WorldDiff, ) -> Result<MigrationStrategy> { @@ -98,16 +98,26 @@ pub fn prepare_for_migration( // If world needs to be migrated, then we expect the `seed` to be provided. if let Some(world) = &mut world { - let salt = - seed.map(poseidon_hash_single).ok_or(anyhow!("Missing seed for World deployment."))?; + let salt = poseidon_hash_single(seed); world.salt = salt; - world.contract_address = get_contract_address( + let generated_world_address = get_contract_address( salt, - diff.world.local, - &[base.as_ref().unwrap().diff.local], + diff.world.original_class_hash, + &[base.as_ref().unwrap().diff.original_class_hash], FieldElement::ZERO, ); + + if let Some(world_address) = world_address { + if world_address != generated_world_address { + bail!( + "Calculated world address doesn't match provided world address.\nIf you are \ + deploying with a custom seed, make sure `world_address` is correctly configured \ + (or not set) in `Scarb.toml`" + ) + } + } + world.contract_address = generated_world_address; } Ok(MigrationStrategy { world_address, world, base, contracts, models }) @@ -136,8 +146,10 @@ fn evaluate_class_to_migrate( artifact_paths: &HashMap<String, PathBuf>, world_contract_will_migrate: bool, ) -> Result<Option<ClassMigration>> { - match class.remote { - Some(remote) if remote == class.local && !world_contract_will_migrate => Ok(None), + match class.remote_class_hash { + Some(remote) if remote == class.local_class_hash && !world_contract_will_migrate => { + Ok(None) + } _ => { let path = find_artifact_path(class.name.as_str(), artifact_paths)?; Ok(Some(ClassMigration { diff: class.clone(), artifact_path: path.clone() })) @@ -153,8 +165,10 @@ fn evaluate_contracts_to_migrate( let mut comps_to_migrate = vec![]; for c in contracts { - match c.remote { - Some(remote) if remote == c.local && !world_contract_will_migrate => continue, + match c.remote_class_hash { + Some(remote) if remote == c.local_class_hash && !world_contract_will_migrate => { + continue; + } _ => { let path = find_artifact_path(c.name.as_str(), artifact_paths)?; comps_to_migrate.push(ContractMigration { @@ -176,8 +190,8 @@ fn evaluate_contract_to_migrate( world_contract_will_migrate: bool, ) -> Result<Option<ContractMigration>> { if world_contract_will_migrate - || contract.remote.is_none() - || matches!(contract.remote, Some(remote_hash) if remote_hash != contract.local) + || contract.remote_class_hash.is_none() + || matches!(contract.remote_class_hash, Some(remote_hash) if remote_hash != contract.local_class_hash) { let path = find_artifact_path(&contract.name, artifact_paths)?; diff --git
a/crates/dojo-world/src/migration/world.rs b/crates/dojo-world/src/migration/world.rs index 417258fb50..7fdad6e43d 100644 --- a/crates/dojo-world/src/migration/world.rs +++ b/crates/dojo-world/src/migration/world.rs @@ -1,6 +1,7 @@ use std::fmt::Display; use convert_case::{Case, Casing}; +use starknet_crypto::FieldElement; use super::class::ClassDiff; use super::contract::ContractDiff; @@ -29,8 +30,9 @@ impl WorldDiff { .iter() .map(|model| ClassDiff { name: model.name.to_string(), - local: *model.inner.class_hash(), - remote: remote.as_ref().and_then(|m| { + local_class_hash: *model.inner.class_hash(), + original_class_hash: *model.inner.original_class_hash(), + remote_class_hash: remote.as_ref().and_then(|m| { // Remote models are detected from events, where only the struct // name (pascal case) is emitted. // Local models uses the fully qualified name of the model, @@ -51,28 +53,44 @@ impl WorldDiff { let contracts = local .contracts .iter() - .map(|contract| ContractDiff { - name: contract.name.to_string(), - local: *contract.inner.class_hash(), - remote: remote.as_ref().and_then(|m| { - m.contracts - .iter() - .find(|r| r.inner.class_hash() == contract.inner.class_hash()) - .map(|r| *r.inner.class_hash()) - }), + .map(|contract| { + let base_class_hash = { + let class_hash = contract.inner.base_class_hash; + if class_hash != FieldElement::ZERO { + class_hash + } else { + *local.base.inner.class_hash() + } + }; + + ContractDiff { + name: contract.name.to_string(), + local_class_hash: *contract.inner.class_hash(), + original_class_hash: *contract.inner.original_class_hash(), + base_class_hash, + remote_class_hash: remote.as_ref().and_then(|m| { + m.contracts + .iter() + .find(|r| r.inner.class_hash() == contract.inner.class_hash()) + .map(|r| *r.inner.class_hash()) + }), + } }) .collect::>(); let base = ClassDiff { name: BASE_CONTRACT_NAME.into(), - local: *local.base.inner.class_hash(), - remote: remote.as_ref().map(|m| *m.base.inner.class_hash()), + local_class_hash: *local.base.inner.class_hash(), + original_class_hash: *local.base.inner.original_class_hash(), + remote_class_hash: remote.as_ref().map(|m| *m.base.inner.class_hash()), }; let world = ContractDiff { name: WORLD_CONTRACT_NAME.into(), - local: *local.world.inner.class_hash(), - remote: remote.map(|m| *m.world.inner.class_hash()), + local_class_hash: *local.world.inner.class_hash(), + original_class_hash: *local.world.inner.original_class_hash(), + base_class_hash: *local.base.inner.class_hash(), + remote_class_hash: remote.map(|m| *m.world.inner.class_hash()), }; WorldDiff { world, base, contracts, models } diff --git a/crates/dojo-world/src/utils.rs b/crates/dojo-world/src/utils.rs index 83d3101daf..755cd0506f 100644 --- a/crates/dojo-world/src/utils.rs +++ b/crates/dojo-world/src/utils.rs @@ -4,13 +4,17 @@ use std::task::{Context, Poll}; use std::time::Duration; use futures::FutureExt; +use starknet::accounts::{AccountError, ConnectedAccount, Declaration, Execution}; use starknet::core::types::{ - ExecutionResult, FieldElement, MaybePendingTransactionReceipt, PendingTransactionReceipt, - StarknetError, TransactionFinalityStatus, TransactionReceipt, TransactionStatus, + DeclareTransactionResult, ExecutionResult, FieldElement, InvokeTransactionResult, + MaybePendingTransactionReceipt, PendingTransactionReceipt, StarknetError, + TransactionFinalityStatus, TransactionReceipt, TransactionStatus, }; use starknet::providers::{Provider, ProviderError}; use tokio::time::{Instant, Interval}; +use crate::migration::TxnConfig; + 
type GetTxStatusResult = Result<TransactionStatus, ProviderError>; type GetTxReceiptResult = Result<MaybePendingTransactionReceipt, ProviderError>; @@ -48,7 +52,7 @@ pub enum TransactionWaitingError { /// let provider = JsonRpcClient::new(HttpTransport::new(Url::parse("http://localhost:5000").unwrap())); /// /// let tx_hash = FieldElement::from(0xbadbeefu64); -/// let receipt = TransactionWaiter::new(tx_hash, &provider).with_finality(TransactionFinalityStatus::ACCEPTED_ON_L2).await.unwrap(); +/// let receipt = TransactionWaiter::new(tx_hash, &provider).with_tx_status(TransactionFinalityStatus::AcceptedOnL2).await.unwrap(); /// ``` #[must_use = "TransactionWaiter does nothing unless polled"] pub struct TransactionWaiter<'a, P: Provider> { @@ -328,6 +332,69 @@ pub fn block_number_from_receipt(receipt: &TransactionReceipt) -> u64 { } } +/// Helper trait to abstract away setting `TxnConfig` configurations before sending a transaction +/// Implemented by types from `starknet-accounts` like `Execution`, `Declaration`, etc... +#[allow(async_fn_in_trait)] +pub trait TransactionExt<T> +where + T: ConnectedAccount + Sync, +{ + type R; + + /// Sets `fee_estimate_multiplier` and `max_fee_raw` from `TxnConfig` if it's present before + /// calling the `send` method on the respective type. + /// NOTE: If both are specified, `max_fee_raw` will take precedence and `fee_estimate_multiplier` + /// will be ignored by `starknet-rs` + async fn send_with_cfg( + self, + txn_config: &TxnConfig, + ) -> Result<Self::R, AccountError<T::SignError>>; +} + +impl<T> TransactionExt<T> for Execution<'_, T> +where + T: ConnectedAccount + Sync, +{ + type R = InvokeTransactionResult; + + async fn send_with_cfg( + mut self, + txn_config: &TxnConfig, + ) -> Result<Self::R, AccountError<T::SignError>> { + if let TxnConfig { fee_estimate_multiplier: Some(fee_est_mul), .. } = txn_config { + self = self.fee_estimate_multiplier(*fee_est_mul); + } + + if let TxnConfig { max_fee_raw: Some(max_fee_r), .. } = txn_config { + self = self.max_fee(*max_fee_r); + } + + self.send().await + } +} + +impl<T> TransactionExt<T> for Declaration<'_, T> +where + T: ConnectedAccount + Sync, +{ + type R = DeclareTransactionResult; + + async fn send_with_cfg( + mut self, + txn_config: &TxnConfig, + ) -> Result<Self::R, AccountError<T::SignError>> { + if let TxnConfig { fee_estimate_multiplier: Some(fee_est_mul), .. } = txn_config { + self = self.fee_estimate_multiplier(*fee_est_mul); + } + + if let TxnConfig { max_fee_raw: Some(max_raw_f), ..
} = txn_config { + self = self.max_fee(*max_raw_f); + } + + self.send().await + } +} + #[cfg(test)] mod tests { use assert_matches::assert_matches; diff --git a/crates/katana/contracts/account_with_dummy_validate.cairo b/crates/katana/contracts/account_with_dummy_validate.cairo new file mode 100644 index 0000000000..15a3ed3095 --- /dev/null +++ b/crates/katana/contracts/account_with_dummy_validate.cairo @@ -0,0 +1,53 @@ +#[starknet::contract] +mod Account { + use array::{ArrayTrait, SpanTrait}; + use starknet::{ContractAddress, call_contract_syscall}; + use starknet::info::SyscallResultTrait; + use zeroable::Zeroable; + + #[storage] + struct Storage { + } + + #[external(v0)] + fn __validate_deploy__( + self: @ContractState, + class_hash: felt252, + contract_address_salt: felt252 + ) -> felt252 { + starknet::VALIDATED + } + + #[external(v0)] + fn __validate_declare__(self: @ContractState, class_hash: felt252) -> felt252 { + starknet::VALIDATED + } + + #[external(v0)] + fn __validate__( + self: @ContractState, + contract_address: ContractAddress, + selector: felt252, + calldata: Array + ) -> felt252 { + starknet::VALIDATED + } + + #[external(v0)] + #[raw_output] + fn __execute__( + self: @ContractState, + contract_address: ContractAddress, + selector: felt252, + calldata: Array + ) -> Span { + // Validate caller. + assert(starknet::get_caller_address().is_zero(), 'INVALID_CALLER'); + + call_contract_syscall( + address: contract_address, + entry_point_selector: selector, + calldata: calldata.span() + ).unwrap_syscall() + } +} diff --git a/crates/katana/primitives/contracts/compiled/account.json b/crates/katana/contracts/compiled/account.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/account.json rename to crates/katana/contracts/compiled/account.json diff --git a/crates/katana/contracts/compiled/account_with_dummy_validate.sierra.json b/crates/katana/contracts/compiled/account_with_dummy_validate.sierra.json new file mode 100644 index 0000000000..62a0da050a --- /dev/null +++ b/crates/katana/contracts/compiled/account_with_dummy_validate.sierra.json @@ -0,0 +1,709 @@ +{ + "sierra_program": [ + "0x1", + "0x4", + "0x0", + "0x2", + "0x4", + "0x0", + "0x114", + "0xec", + "0x2c", + "0x52616e6765436865636b", + "0x800000000000000100000000000000000000000000000000", + "0x426f78", + "0x800000000000000700000000000000000000000000000001", + "0x1", + "0x13", + "0x4172726179", + "0x800000000000000300000000000000000000000000000001", + "0x6", + "0x456e756d", + "0x800000000000000300000000000000000000000000000003", + "0x0", + "0xfeece2ea7edbbbebeeb5f270b77f64c680a68a089b794478dd9eca75e0196a", + "0x2", + "0xf", + "0x11", + "0x66656c74323532", + "0x800000000000000700000000000000000000000000000000", + "0x436f6e747261637441646472657373", + "0x75313238", + "0x536e617073686f74", + "0x537472756374", + "0x800000000000000700000000000000000000000000000002", + "0x1baeba72e79e9db2587cf44fedb2f3700b2075a5e8e39a562584862c4b71f62", + "0x9", + "0x12", + "0xb", + "0x1597b831feeb60c71f259624b79cf66995ea4f7e383403583674ab9c33b9cec", + "0xc", + "0x753332", + "0x80000000000000070000000000000000000000000000000e", + "0x348a62b7a38c0673e61e888d83a3ac1bf334ee7361a8514593d3d9532ed8b39", + "0x7", + "0x8", + "0xa", + "0xd", + "0xe", + "0x753634", + "0x800000000000000700000000000000000000000000000004", + "0x3808c701a5d13e100ab11b6c02f91f752ecae7e420d21b56c90ec0a475cc7e5", + "0x10", + "0x3342418ef16b3e2799b906b1e4e89dbb9b111332dd44f72458ce44f9895b508", + 
"0x800000000000000700000000000000000000000000000006", + "0x7d4d99e9ed8d285b5c61b493cedb63976bc3d9da867933d829f49ce838b5e7", + "0x5", + "0x4", + "0x2ee1e2b1b89f8c495f200e4956278a4d47395fe262f27b52e5865c9524c08c3", + "0x800000000000000f00000000000000000000000000000001", + "0x16a4c8d7c05909052238a862d8cc3e7975bf05a07b3a69c6b28951083a6d672", + "0x15", + "0x2ca39cde64b91db1514d78c135ee79d71b3b57fffee52f1a3ef96618a34d8c8", + "0x14", + "0x16", + "0x161ee0e6962e56453b5d68e09d1cabe5633858c1ba3a7e73fee8c70867eced0", + "0x4e6f6e5a65726f", + "0x3e1934b18d91949ab9afdbdd1866a30ccca06c2b1e6581582c6b27f8b4f6555", + "0x1a", + "0x800000000000000700000000000000000000000000000003", + "0x29d7d57c04a880978e7b3689f6218e507f3be17588744b58dc17762447ad0e7", + "0x1c", + "0x1d", + "0x2490fcc229f827552c25240be8547f63b1bc196e0e5c5bf72432ff0bf34f9e0", + "0x556e696e697469616c697a6564", + "0x800000000000000200000000000000000000000000000001", + "0x17b6ecc31946835b0d9d92c2dd7a9c14f29af0371571ae74a1b228828b2242", + "0x22", + "0x34f9bd7c6cb2dd4263175964ad75f1ff1461ddc332fbfb274e0fb2a5d7ab968", + "0x23", + "0x3d37ad6eafb32512d2dd95a2917f6bf14858de22c27a1114392429f2e5c15d7", + "0x4275696c74696e436f737473", + "0x53797374656d", + "0x9931c641b913035ae674b400b61a51476d506bbe8bba2ff8a6272790aba9e6", + "0x26", + "0x11c6d8087e00642489f92d2821ad6ebd6532ad1a3b6d12833da6d6810391511", + "0x4761734275696c74696e", + "0x82", + "0x7265766f6b655f61705f747261636b696e67", + "0x77697468647261775f676173", + "0x6272616e63685f616c69676e", + "0x73746f72655f74656d70", + "0x66756e6374696f6e5f63616c6c", + "0x3", + "0x656e756d5f6d61746368", + "0x2a", + "0x64726f70", + "0x7374727563745f6465636f6e737472756374", + "0x61727261795f6c656e", + "0x736e617073686f745f74616b65", + "0x7533325f636f6e7374", + "0x72656e616d65", + "0x7533325f6571", + "0x61727261795f6e6577", + "0x66656c743235325f636f6e7374", + "0x496e70757420746f6f206c6f6e6720666f7220617267756d656e7473", + "0x61727261795f617070656e64", + "0x7374727563745f636f6e737472756374", + "0x656e756d5f696e6974", + "0x29", + "0x2b", + "0x28", + "0x6765745f6275696c74696e5f636f737473", + "0x27", + "0x77697468647261775f6761735f616c6c", + "0x56414c4944", + "0x4f7574206f6620676173", + "0x4661696c656420746f20646573657269616c697a6520706172616d202332", + "0x4661696c656420746f20646573657269616c697a6520706172616d202331", + "0x25", + "0x24", + "0x4661696c656420746f20646573657269616c697a6520706172616d202333", + "0x616c6c6f635f6c6f63616c", + "0x66696e616c697a655f6c6f63616c73", + "0x73746f72655f6c6f63616c", + "0x1f", + "0x20", + "0x21", + "0x61727261795f736e617073686f745f706f705f66726f6e74", + "0x1e", + "0x6a756d70", + "0x756e626f78", + "0x21adb5788e32c84f69a1863d85ef9394b7bf761a0ce1190f826984e5075c371", + "0x64697361626c655f61705f747261636b696e67", + "0x1b", + "0x636f6e74726163745f616464726573735f746f5f66656c74323532", + "0x66656c743235325f737562", + "0x66656c743235325f69735f7a65726f", + "0x63616c6c5f636f6e74726163745f73797363616c6c", + "0x18", + "0x19", + "0x494e56414c49445f43414c4c4552", + "0x647570", + "0x17", + "0x6765745f657865637574696f6e5f696e666f5f76325f73797363616c6c", + "0x372", + "0xffffffffffffffff", + "0x68", + "0x59", + "0x4a", + "0x3d", + "0x2d", + "0x2e", + "0x2f", + "0x30", + "0x31", + "0x32", + "0x33", + "0x34", + "0x35", + "0x36", + "0x37", + "0x38", + "0x39", + "0x3a", + "0x3b", + "0x3c", + "0x3e", + "0x3f", + "0x40", + "0x41", + "0x42", + "0x43", + "0x44", + "0x45", + "0x46", + "0xca", + "0xbb", + "0x97", + "0xae", + "0x160", + "0x151", + "0x142", + "0x13b", + "0x12c", + "0x108", + "0x11f", + "0x47", + 
"0x48", + "0x49", + "0x4b", + "0x4c", + "0x4d", + "0x4e", + "0x4f", + "0x50", + "0x51", + "0x52", + "0x53", + "0x54", + "0x55", + "0x56", + "0x57", + "0x20f", + "0x1fe", + "0x1ed", + "0x1e4", + "0x1d3", + "0x1a3", + "0x1c3", + "0x1bc", + "0x58", + "0x5a", + "0x226", + "0x22b", + "0x235", + "0x253", + "0x24c", + "0x261", + "0x266", + "0x282", + "0x27c", + "0x2d5", + "0x5b", + "0x5c", + "0x5d", + "0x5e", + "0x5f", + "0x60", + "0x61", + "0x2c5", + "0x62", + "0x2b0", + "0x63", + "0x64", + "0x2b5", + "0x65", + "0x66", + "0x2bf", + "0x67", + "0x317", + "0x69", + "0x2ef", + "0x6a", + "0x30d", + "0x6b", + "0x307", + "0x6c", + "0x6d", + "0x338", + "0x6e", + "0x6f", + "0x70", + "0x71", + "0x72", + "0x73", + "0x74", + "0x75", + "0x76", + "0x77", + "0x344", + "0x78", + "0x351", + "0x79", + "0x7a", + "0x356", + "0x7b", + "0x7c", + "0x360", + "0x7d", + "0x7e", + "0x7f", + "0x80", + "0x81", + "0x36c", + "0xd8", + "0x16e", + "0x21f", + "0x23c", + "0x241", + "0x259", + "0x28c", + "0x2de", + "0x326", + "0x33e", + "0x34a", + "0x366", + "0x1e7c", + "0x400a040183c0a04018380a0502834180b050240a08038180a04018080200", + "0x302e160d81408150d01410070c814300c0b8581c050205424140904c2411", + "0x880a24028240a09028240a23028880a21028240a200607c2c120f0740a1c", + "0x141205150304e1610814520514814500c138582426118144a05128144605", + "0xc418300b0140a2f0605c2c09028840a21028b80a2d028b0182b0b0880a29", + "0x1408370701446051b030160a1a814680519830160a070146405178301616", + "0xec183a050bc18300b0240a04018d40a39028e0180b050840a2f0605c2c09", + "0x3016161e8141c0520830160a10814803f04814803f1f03060161e8147805", + "0x8c0a2f0605c2c3d028840a45060e81435029100a430602c14420288c0a2f", + "0x309c0c26930044b1e814120525030740a1a814920524030160a0111c2446", + "0x15c460502958120502954a8050294c1805029405c5202944460502940184f", + "0x14b80c2d8940a05280940a052d0301c052c8940a052a8940a052c0240a05", + "0x38c405071846a050298064050298012050297c120502940bc0e029741205", + "0x14b06807014ba0c339980a0528030ca6202814a06402814a06302814a005", + "0x1804605029801c05029541c05029607a05029545a52029441c05029401205", + "0x14a26b07014ba6a07014ba2302814aa6907014ba0c071880a0e309240a05", + "0x17484050294c880502958da050294c425202944420502954d8050294c1252", + "0x14b07202814c00902814e22102814e20c380240a05378840a05379b81c05", + "0x154e60502954920502958c4050294c445202944420502940e40502954e405", + "0xf40a0530030ee7602814a01902814a00c071d80a0e308240a053a9d00a05", + "0x140a5006038a805071841205029681205029e0ec050294c0a0e3b0141c61", + "0x30f405071b00a0e309b00a05280301c6c02838c20c3c8141c5402838c254", + "0x141c6102838da0507184da0502940180e368141c61220140a600c9480a51", + "0x14ba0c3e0840a052d0840a052c0e40a052b1ec0a052988ca405288141c42", + "0x1480a5102839000507185000502940180e400141c61061fc187e061f4180e", + "0x74a405288141c052e8301c4202838c20902815068207014ba8102814aa1b", + "0x140a60170140a55168140a55030140a56030140a781a0140a56420140a53", + "0x1480a0e308310a8002814a605071ec0a0e309ec0a05280301c7b02838c239", + "0x140180e420141c611a0140a60121480a5102838a40507184a40502940180e", + "0x21c1c05060380a0c0621c0a0c060310c5202814a605072100a0e30a100a05", + "0x21c0a52028b8185202a1c0a5202948180c43814180e06084120e440b45c0e", + "0x21c0a0c0703036054488c0a87070640a09060b80a87028b80a2d06064440e", + "0x2418240e8390e05110145c0c110150e0511014a40c0621c0a2302884180c", + "0x143a05110301887028940a21060310e0506038180f02a284a05438384805", + "0x310e050d014360c030681c87028400a23060400a87028a40a19060a40a87", + "0x180a24060310e0519014360c1a8c81c87028d00a23060d00a87028303a0c", + 
"0x21c1c80420381e0c420150e05420144a0c400150e051a814480c420150e05", + "0xe40a1a060e40a8702830200c408150e05060a4180c43814180e06030920c", + "0x21c0a7b1e038640c1e0150e05060d0187b02a1c0a39408380c0c1c8150e05", + "0x305a05438145a05420305c05438145c0516830ec05438147a051a8307a05", + "0x310e05060381876070b45c2e029d80a87029d80a81060380a87028380a80", + "0x22ce8730721c1c72168b8a43c061c80a87029c80a7b061c80a8702830720c", + "0x1b00a76061b00a87028307a0c368150e05060a4180c43814180e06110840e", + "0x150e0536814e40c330150e0533014340c0621c0a49028841866248390e05", + "0x18ca80e43814c805210301887029880a7406188c80e43814da66071cc186d", + "0x2300a49062300a87028000a6c060000a870298c0a6d060310e052a014880c", + "0x150e0507015000c3a0150e053a015080c398150e05398145a0c468150e05", + "0x150e05060a4180c43814180e062341c74398b80a8d02a1c0a8d02a04180e", + "0xd0189002a1c0a8f470380c0c478150e0547814340c478150e0506198188e", + "0x1484051683126054381524051a8312405438152091070c8189102a1c0a0c", + "0x24c0a8702a4c0a81060380a87028380a80061100a87029100a84061080a87", + "0x301887028740a64060310e0507814e80c0621c0a0c07031260e221085c05", + "0x152a9407018189502a1c0a9502868189502a1c0a0c310312805438141829", + "0x2640a8702a600a35062600a8702a592e0e190312e05438141834062580a87", + "0x15020c070150e0507015000c168150e0516815080c170150e05170145a0c", + "0x14c80c0621c0a1b029d0180c43814180e062641c2d170b80a9902a1c0a99", + "0x3136054381536050d0313605438141854062680a8702830520c0621c0a22", + "0x146a0c4e8150e0545a701c32062700a8702830680c458150e054da681c06", + "0x21c0a0e02a00182d02a1c0a2d02a10182e02a1c0a2e028b4189e02a1c0a9d", + "0x14a405320301887028301c0c4f0385a2e170153c05438153c05408301c05", + "0x380c0c500150e0550014340c500150e0506198189f02a1c0a0c148301887", + "0x1546051a83146054381542a2070c818a202a1c0a0c1a031420543815409f", + "0x380a87028380a80060840a87028840a84060240a87028240a2d062900a87", + "0x21c1c05060380a0c0621c0a0c06031480e108245c05520150e0552015020c", + "0x21c0a52028b8185202a1c0a5202948180c43814180e06084120e528b45c0e", + "0x21c0a0c0703036055308c0a87070640a09060b80a87028b80a2d06064440e", + "0x14460c120150e050e814320c0e8150e0511014440c0621c0a2302884180c", + "0x390e0514814460c148150e0506074180c438144a050d8301e250721c0a24", + "0x94183402a1c0a1a02890180602a1c0a0f02890180c4381420050d8303410", + "0x21c0a0c148301887028301c0c0629c1887070d00c0e078300c05438140c05", + "0x310805438146a3207018183502a1c0a3502868183502a1c0a0c080306405", + "0xb80a2d060e40a8702a040a35062040a8702a11000e190310005438141834", + "0x150e051c815020c070150e0507015000c168150e0516815080c170150e05", + "0x150e053d814f60c3d8150e05060e4180c43814180e060e41c2d170b80a39", + "0x21c0a0c148301887028301c0c391d81ca81e8f01c87071ec5a2e290f0187b", + "0x68180c4381484051083088420721c0a74029d8187402a1c0a0c1e830e605", + "0x14e80c361b41c87029cc880e39830e60543814e605390308805438148805", + "0x150e0533014da0c0621c0a49029101866248390e0536814840c0621c0a6c", + "0x210183c02a1c0a3c028b4185402a1c0a6202924186202a1c0a64029b01864", + "0x387a3c17014a80543814a805408301c05438141c05400307a05438147a05", + "0x21c0a0002868180002a1c0a0c33030c605438141829060310e05060381854", + "0x2380a8702a311a0e190311a05438141834062300a8702800c60e030300005", + "0x15000c390150e0539015080c3b0150e053b0145a0c478150e05470146a0c", + "0x1d0180c43814180e0623c1c723b0b80a8f02a1c0a8f02a04180e02a1c0a0e", + "0x312205438141854062400a8702830520c0621c0a2202990180c438143605", + "0x24c1c320624c0a8702830680c490150e0548a401c06062440a8702a440a1a", + "0x21c0a2d02a10182e02a1c0a2e028b4189502a1c0a94028d4189402a1c0a92", + "0x301c0c4a8385a2e170152a05438152a05408301c05438141c05400305a05", 
+ "0x14340c4b8150e0506198189602a1c0a0c148301887029480a64060310e05", + "0x153099070c8189902a1c0a0c1a0313005438152e9607018189702a1c0a97", + "0x840a87028840a84060240a87028240a2d0626c0a8702a680a35062680a87", + "0x21c0a0c06031360e108245c054d8150e054d815020c070150e0507015000c", + "0x21c0a2e028b4180c43814180e06084120e548b45c0e438380a0c07014180c", + "0x21c1c230280018230c888a487029485c0e31830a40543814a405290305c05", + "0x3032054381432052903018870286c0a8c060310e0506038181d02aa83605", + "0x84180c43814180e060a40aab078150e0e12814120c128901c87028640a2e", + "0x21c0a2402948182d02a1c0a2d02a10182202a1c0a22028b4180c438141e05", + "0x38183202ab06805438380c05470300c1a081490e05120b44452468304805", + "0x301c0c408155a8002a1c1c8402a4018841a8390e051a0151e0c0621c0a0c", + "0x30f6054381472050c8307205438146a0511030188702a000a44060310e05", + "0x14ec0511830ec0543814181d060310e051e014360c1e8f01c87029ec0a23", + "0x1080a87029cc0a24061d00a87028f40a24060310e0539014360c399c81c87", + "0x30520c0621c0a0c0703018ae0621c1c423a0381e0c3a0150e053a0144a0c", + "0x150e05369101c06061b40a87029b40a1a061b40a8702830200c220150e05", + "0xb4186402a1c0a66028d4186602a1c0a6c24838640c248150e05060d0186c", + "0x14c805408301c05438141c05400303405438143405420302005438142005", + "0x14c4053d830c405438141839060310e0506038186407068202e029900a87", + "0x30520c0621c0a0c070311800072bcc6540721c1c620d040a43c061880a87", + "0x310e0547814420c4823c1c8702a380a76062380a87028307a0c468150e05", + "0x3124910721c0a8d48038e60c468150e0546814e40c480150e0548014340c", + "0x15280536830188702a4c0a4406251260e43815220521030188702a480a74", + "0x1500a87029500a2d0625c0a8702a580a49062580a8702a540a6c062540a87", + "0x1505c054b8150e054b815020c070150e0507015000c318150e0531815080c", + "0x2640a1a062640a8702830cc0c4c0150e05060a4180c43814180e0625c1c63", + "0x21c0a9a4d838640c4d8150e05060d0189a02a1c0a994c0380c0c4c8150e05", + "0x3118054381518054203000054381400051683138054381516051a8311605", + "0x310e0506038189c07230002e02a700a8702a700a81060380a87028380a80", + "0x150e0506244189d02a1c0a0c148301887028d40a64060310e0540814e80c", + "0xc818a002a1c0a0c1a0313e05438153c9d07018189e02a1c0a9e02868189e", + "0x680a84060400a87028400a2d062880a8702a840a35062840a8702a7d400e", + "0x31440e0d0405c05510150e0551015020c070150e0507015000c0d0150e05", + "0x21c0a1a02a10181002a1c0a10028b418a302a1c0a32028d4180c43814180e", + "0x301c0c518383410170154605438154605408301c05438141c05400303405", + "0x30c40c520150e05060a4180c438144805320301887028a40a74060310e05", + "0x150e05060d018b102a1c0ab0520380c0c580150e0558014340c580150e05", + "0x3044054381444051683166054381514051a83114054381562b2070c818b2", + "0xb4442e02acc0a8702acc0a81060380a87028380a80060b40a87028b40a84", + "0x21c0a0c148301887028640a64060310e050e814e80c0621c0a0c07031660e", + "0x316c05438156ab40701818b502a1c0ab50286818b502a1c0a0c2a0316805", + "0x880a2d062e40a8702ae00a35062e00a8702ad96e0e190316e05438141834", + "0x150e055c815020c070150e0507015000c168150e0516815080c110150e05", + "0x2e80a8702830520c0621c0a5202990180c43814180e062e41c2d110b80ab9", + "0x30680c5e0150e055dae81c06062ec0a8702aec0a1a062ec0a8702830cc0c", + "0x21c0a09028b418bf02a1c0abe028d418be02a1c0abc5e838640c5e8150e05", + "0x157e05438157e05408301c05438141c05400304205438144205420301205", + "0x30180c438141894060840a8702831260c168150e050624818bf07084122e", + "0x145a0c0621c0a0c0703036230730032220721c1c05060380a0c0621c0a0c", + "0x14000c128903a5243814a4220718c185202a1c0a5202948182202a1c0a22", + "0x144805170304805438144805290301887028301c0c07815822e02a1c1c25", + "0x301c0c0d015840902a1c1c1002824182e02a1c0a2e168392a0c080a41c87", + 
"0xa40a87028a40a52060640a87028640a84060740a87028740a2d060310e05", + "0x21c1c3202a3818321a018a487028a4321d29234180902a1c0a09108392c0c", + "0x150e0e40815200c40a001c87028d40a8f060310e0506038188402b0c6a05", + "0x307a054381478050c8307805438150005110301887028301c0c3d8158839", + "0x14e60511830e60543814181d060310e053b014360c391d81c87028f40a23", + "0x1b40a87029080a24061100a87029c80a24060310e053a014360c211d01c87", + "0x14880c0621c0a0c0703018c50621c1c6d220381e0c220150e05220144a0c", + "0x40186c02a1c0a0c148301887028b80a8c060310e0504814420c0621c0a39", + "0x21c0a0c1a030cc0543814926c07018184902a1c0a4902868184902a1c0a0c", + "0x180a87028180a2d061500a87029880a35061880a8702998c80e19030c805", + "0x185c052a0150e052a015020c070150e0507015000c1a0150e051a015080c", + "0x148780c318150e0531814f60c318150e05060e4180c43814180e061501c34", + "0x260188f02a1c0a0c4b8301887028301c0c472341cc6460001c870718c6806", + "0x141c0540031180543815180542030188702a400a9906245200e438151e05", + "0xe40a87028e40a72060240a87028240a1a060b80a87028b80a9a060380a87", + "0x2500a8b060000a87028000a2d0625126922921c0a39048b9220e46025360c", + "0x21c0a97029b0189702a1c0a9502a70180c43814180e062580ac74a8150e0e", + "0x312405438152405420300005438140005168313205438153005248313005", + "0x310e0506038189949a48002e02a640a8702a640a810624c0a8702a4c0a80", + "0x15000c490150e0549015080c000150e05000145a0c4d0150e054b0146a0c", + "0x110180c43814180e062692692000b80a9a02a1c0a9a02a04189302a1c0a93", + "0x313605438141829060310e0517015180c0621c0a0902884180c438147205", + "0x141834062700a8702a2d360e0303116054381516050d0311605438141866", + "0x150e05468145a0c4f8150e054f0146a0c4f0150e054e2741c32062740a87", + "0xb80a9f02a1c0a9f02a04180e02a1c0a0e02a00188e02a1c0a8e02a10188d", + "0x14420c0621c0a8002990180c43814f6053a0301887028301c0c4f8391c8d", + "0x6818a102a1c0a0c488314005438141829060310e0517015180c0621c0a09", + "0x289460e190314605438141834062880a8702a85400e030314205438154205", + "0x150e051a015080c030150e05030145a0c580150e05520146a0c520150e05", + "0x14180e062c01c34030b80ab002a1c0ab002a04180e02a1c0a0e02a001834", + "0x145a0c588150e05420146a0c0621c0a0902884180c438145c05460301887", + "0x21c0ab102a04180e02a1c0a0e02a00183402a1c0a3402a10180602a1c0a06", + "0x21c0a2e02a30180c4381434053a0301887028301c0c588386806170156205", + "0x21c0a0c310316405438141829060310e05108153a0c0621c0a2902990180c", + "0x316805438141834062cc0a8702a29640e0303114054381514050d0311405", + "0x15080c0e8150e050e8145a0c5b0150e055a8146a0c5a8150e0559ad01c32", + "0x2d81c190e8b80ab602a1c0ab602a04180e02a1c0a0e02a00181902a1c0a19", + "0x310e05108153a0c0621c0a2402990180c438141e053a0301887028301c0c", + "0x21c0ab80286818b802a1c0a0c2a0316e05438141829060310e05168153c0c", + "0x2ec0a8702ae5740e190317405438141834062e40a8702ae16e0e030317005", + "0x15000c0c8150e050c815080c0e8150e050e8145a0c5e0150e055d8146a0c", + "0x278180c43814180e062f01c190e8b80abc02a1c0abc02a04180e02a1c0a0e", + "0x317a05438141829060310e05108153a0c0621c0a5202990180c438145a05", + "0x141834062fc0a8702af97a0e030317c05438157c050d0317c05438141866", + "0x150e05118145a0c648150e05448146a0c448150e055fb201c32063200a87", + "0xb80ac902a1c0ac902a04180e02a1c0a0e02a00181b02a1c0a1b02a101823", + "0x301c0c170159452070390e0e028153e0c028150e0506014440c648383623", + "0x840a87028b40aa2060240a87028380aa1060b40a87029480aa0060310e05", + "0x640a87028880ab0060880a8702831480c0621c0a0c0703018cb02831460c", + "0x14a40c118150e0504814da0c108150e050c815440c048150e0517015420c", + "0x143605590301887028301c0c0e815981b02a1c1c2102ac4182302a1c0a23", + "0x8c0a870288c0a520603c0a87028940ab3060940a87028900a8a060900a87", + 
"0x290180c438143a053a0301887028301c0c0788c1c05078150e0507815680c", + "0x21c0a1002ad0182302a1c0a2302948181002a1c0a2902ad4182902a1c0a0c", + "0x1418a4061480a87028380a0e030301c05438141805450302023070142005", + "0x140a2e060140a87028140a52060b8a40e029480a87029480a72060b80a87", + "0xb8180e5b0301887028301c0c168159a2e02a1c1c52028241852070390e05", + "0x1412051683032054381442055b8301887028301c0c110159c21048390e0e", + "0x301c0c0c8381252028640a87028640ab8060380a87028380a52060240a87", + "0x3044054381444051683036054381446055c83046054381418a4060310e05", + "0x301887028301c0c0d83844520286c0a870286c0ab8060380a87028380a52", + "0x740ab8060380a87028380a52060300a87028300a2d060740a87028b40ab9", + "0x1480a9f061480a87028380a22060310e05062e8181d07030a4050e8150e05", + "0x145c05508304205438145a05500301887028301c0c048159e2d170390e0e", + "0x1418a4060310e0506038180c6801418a3060640a87028840aa2060880a87", + "0x640a870286c0aa2060880a87028240aa10606c0a870288c0ab00608c0a87", + "0x940ad1120150e0e0c815620c0e8150e050e814a40c0e8150e0511014da0c", + "0x21c0a0f02a28182902a1c0a0c148301e05438144805590301887028301c0c", + "0x303a05438143a05290300a05438140a05420301805438141805168302005", + "0x68a48702840521d028305abb060400a87028400a1a060a40a87028a40a72", + "0x2101c87028c80a8f060310e0506038183502b486405438386805470306806", + "0x3034054381434051683072054381502055e8310205438150084072f01880", + "0x301887028301c0c1c8183452028e40a87028e40abe060180a87028180a84", + "0x1ec0abe060180a87028180a84060680a87028680a2d061ec0a87028d40abf", + "0x21c0a0c520301887028940a74060310e0506038187b03068a4053d8150e05", + "0x1c80a87029d80abd061d80a87028f43a0e5e0307a05438147805640307805", + "0x30a405390150e05390157c0c028150e0502815080c060150e05060145a0c", + "0x224180502a1c0a0502a00180c02a1c0a0c02a10180c438141c054c830e405", + "0x301887028301c0c11815a61902a1c1c2202b24182210824a48702814180e", + "0x900ad6060310e050e815180c120741c870286c0ad50606c0a87028640ad4", + "0x21c0a29028841810148390e0507814ec0c078150e0512815ae0c128150e05", + "0x360180c438140c051083068060721c0a1a029d8181a02a1c0a1002a28180c", + "0x21c0a3402a28180c438146a051083108350721c0a32029d8183202a1c0a0c", + "0xe40a87028e40a1a060e40a8702a05000e6c8310205438150805450310005", + "0x110183d1e0390e0516814840c0621c0a0c07030f6056d8310e0e1c815b40c", + "0x148420916b70187602a1c0a7602948187602a1c0a3d029b4180c438147805", + "0x1b00a87029d00ade060310e0506038186d22108a4dd3a1cce45243838ec2e", + "0x31460c320150e0536015be0c330150e0539815000c248150e0539015080c", + "0x200184902a1c0a4202a10186202a1c0a6d02b84180c43814180e06031c005", + "0x38a80545830a80543814c80571030c80543814c4056f830cc05438148805", + "0x150e0546014d80c460150e0531815380c0621c0a0c0703000057198c0a87", + "0x204186602a1c0a6602a00184902a1c0a4902a10188e02a1c0a8d02924188d", + "0x210188f02a1c0a00028d4180c43814180e06238cc49290151c05438151c05", + "0x23ccc49290151e05438151e0540830cc0543814cc05400309205438149205", + "0x310e0529015180c0621c0a2e02884180c43814f605720301887028301c0c", + "0x21c0a9102868189102a1c0a0c440312005438141829060310e0516814880c", + "0x2500a8702a49260e190312605438141834062480a8702a45200e030312205", + "0x15020c108150e0510815000c048150e0504815080c4a8150e054a0146a0c", + "0x1480a8c060310e0517014420c0621c0a0c070312a21049480a9502a1c0a95", + "0x301205438141205420312c054381446051a8301887028b40a44060310e05", + "0x30188702831740c4b084125202a580a8702a580a81060840a87028840a80", + "0x39ca21048390e0e1681418521e0305a05438145a053d8305a05438141839", + "0x368180902a1c0a09028b41823170390e0517015cc0c0621c0a0c070303222", + 
"0x21c0a5202ba0180c438145c05108301887028301c0c0d815ce0c438384605", + "0x240a87028240a2d060940a87028900abd060900a87028741c0e5e0303a05", + "0x310e0506038182510824a405128150e05128157c0c108150e0510815080c", + "0x14120c1483c1c87028380a2e060380a87028380a52060310e050d815c80c", + "0x1418ea060180a8702840a40e030301887028301c0c0d015d21002a1c1c29", + "0x150e0510815080c048150e05048145a0c190150e051a0b81cd9060d00a87", + "0x2ec183202a1c0a3202868180602a1c0a06029c8180f02a1c0a0f029481821", + "0x14180e060e40aeb408150e0e400151c0c402106a5243814640607884122d", + "0x150e051e8157a0c1e8150e051e1ec1cbc060f0f60e438150205478301887", + "0x1480a7602a1c0a7602af8188402a1c0a8402a10183502a1c0a35028b41876", + "0x210183502a1c0a35028b4187202a1c0a3902afc180c43814180e061d90835", + "0x84180c43814180e061c9083529014e40543814e4055f0310805438150805", + "0x21c0a7307839780c398150e050d015900c0621c0a5202910180c438145c05", + "0x30420543814420542030120543814120516830840543814e8055e830e805", + "0x110180c438145c05108301887028301c0c210841252029080a87029080abe", + "0x30da05438141866061100a8702830520c0621c0a0e02990180c43814a405", + "0x1241c32061240a8702830680c360150e05369101c06061b40a87029b40a1a", + "0x21c0a1902a10182202a1c0a22028b4186402a1c0a6602afc186602a1c0a6c", + "0x140a80060300a87028300a8406190322229014c80543814c8055f0303205", + "0x301205770b40a87070b80aed060b8a40e2921c0a0506039d80c028150e05", + "0x21c0a2202bc4182202a1c0a2102bc0182102a1c0a2d02bbc180c43814180e", + "0x310e050e815180c0621c0a2302bcc180c43814320579030481d0d88c322d", + "0x380a840603c0a87028940af5060940a870286c0af4060310e0512014420c", + "0x38180f29038a405078150e0507815ec0c290150e0529015000c070150e05", + "0x150e0529015000c070150e0507015080c148150e0504815ee0c0621c0a0c", + "0x38180e02be40a054383818057c0305252071480a2902a1c0a2902bd81852", + "0x150e0517015020c170150e0529014920c290150e0502814d80c0621c0a0c", + "0x301205438141c2d070c8182d02a1c0a0c1a0301887028301c0c170140a2e", + "0x38a48707014180e7d0304205028840a87028840a81060840a87028240a35", + "0x21c0a0e02a10182202a1c0a2e02bf0180c43814180e06084122d293ec5c52", + "0x301c0c063f80a0c5183036054381444057e830460543814a405400303205", + "0x8c0a87028240a80060640a87028b40a84060740a87028840aff060310e05", + "0x3c0b01128150e0e12015da0c120150e050d816000c0d8150e050e815fa0c", + "0x142005818302005438145205810305205438144a05778301887028301c0c", + "0x680a87028680b040608c0a870288c0a80060640a87028640a84060680a87", + "0x640a87028640a84060180a870283c0b05060310e0506038181a11864a405", + "0x21c1c0c02c18180611864a405030150e0503016080c118150e0511815000c", + "0xb80a87029480b03061480a87028140b02060310e0506038180e02c1c0a05", + "0x38640c168150e05060d0180c43814180e060b80a05170150e0517016080c", + "0xb84a21028144205438144205820304205438141205828301205438141c2d", + "0x380a0c31190c60c1708cc863060b818520701418623218c182e11990c60c", + "0x188c863060b84664318305d08290380a0c31190c60c1708cc863060b9ee52", + "0x8c1852118301d0b028307a0e07038120e85030a8230708c0b09290380a0c", + "0x188c86329038122139190c609868380a0c3698c18521198c185286014186c", + "0x148c8630743c5c5207014186d31830a4090708cc60c16c385a2e290380a0c", + "0x31080529016240506210c86329190c60e88830c405400162005061ecc863", + "0x113" + ], + "sierra_program_debug_info": { + "type_names": [], + "libfunc_names": [], + "user_func_names": [] + }, + "contract_class_version": "0.1.0", + "entry_points_by_type": { + "EXTERNAL": [ + { + "selector": "0x15d40a3d6ca2ac30f4031e42be28da9b056fef9bb7357ac5e85627ee876e5ad", + "function_idx": 3 + }, + { + "selector": "0x162da33a4585851fe8d3af3c2a9c60b557814e221e0d4f30ff0b2189d9c7775", + 
"function_idx": 2 + }, + { + "selector": "0x289da278a8dc833409cabfdad1581e8e7d40e42dcaed693fa4008dcdb4963b3", + "function_idx": 1 + }, + { + "selector": "0x36fcbf06cd96843058359e1a75928beacfac10727dab22a3972f0af8aa92895", + "function_idx": 0 + } + ], + "L1_HANDLER": [], + "CONSTRUCTOR": [] + }, + "abi": [ + { + "type": "function", + "name": "__validate_deploy__", + "inputs": [ + { + "name": "class_hash", + "type": "core::felt252" + }, + { + "name": "contract_address_salt", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "__validate_declare__", + "inputs": [ + { + "name": "class_hash", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "__validate__", + "inputs": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "selector", + "type": "core::felt252" + }, + { + "name": "calldata", + "type": "core::array::Array::" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "function", + "name": "__execute__", + "inputs": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "selector", + "type": "core::felt252" + }, + { + "name": "calldata", + "type": "core::array::Array::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "event", + "name": "account_with_dummy_validate::account_with_dummy_validate::Account::Event", + "kind": "enum", + "variants": [] + } + ] +} \ No newline at end of file diff --git a/crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.0.json b/crates/katana/contracts/compiled/argent_ArgentAccount_0.3.0.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.0.json rename to crates/katana/contracts/compiled/argent_ArgentAccount_0.3.0.json diff --git a/crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.1.json b/crates/katana/contracts/compiled/argent_ArgentAccount_0.3.1.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.1.json rename to crates/katana/contracts/compiled/argent_ArgentAccount_0.3.1.json diff --git a/crates/katana/primitives/contracts/compiled/cairo1_contract.json b/crates/katana/contracts/compiled/cairo1_contract.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/cairo1_contract.json rename to crates/katana/contracts/compiled/cairo1_contract.json diff --git a/crates/katana/primitives/contracts/compiled/erc20.json b/crates/katana/contracts/compiled/erc20.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/erc20.json rename to crates/katana/contracts/compiled/erc20.json diff --git a/crates/katana/primitives/contracts/compiled/oz_account_080.json b/crates/katana/contracts/compiled/oz_account_080.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/oz_account_080.json rename to crates/katana/contracts/compiled/oz_account_080.json diff --git a/crates/katana/primitives/contracts/compiled/test.json 
b/crates/katana/contracts/compiled/test.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/test.json rename to crates/katana/contracts/compiled/test.json diff --git a/crates/katana/primitives/contracts/compiled/universal_deployer.json b/crates/katana/contracts/compiled/universal_deployer.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/universal_deployer.json rename to crates/katana/contracts/compiled/universal_deployer.json diff --git a/crates/katana/primitives/contracts/messaging/README.md b/crates/katana/contracts/messaging/README.md similarity index 84% rename from crates/katana/primitives/contracts/messaging/README.md rename to crates/katana/contracts/messaging/README.md index 7ae51a1390..a86193d384 100644 --- a/crates/katana/primitives/contracts/messaging/README.md +++ b/crates/katana/contracts/messaging/README.md @@ -40,7 +40,13 @@ How to run the scripts: - Start Anvil in a terminal. - Start Katana in another terminal on default port 5050 with the messaging configuration that is inside the: - `katana --messaging ~/dojo/crates/katana/core/contracts/messaging/anvil.messaging.json` +```bash + # From installed katana. + katana --messaging ~/dojo/crates/katana/contracts/messaging/anvil.messaging.json + + # Dev mode + cargo run --bin katana -- --messaging ~/dojo/crates/katana/contracts/messaging/anvil.messaging.json +``` - Open another terminal and `cd ~/dojo/crates/katana/core/contracts/messaging`. Then you can use pre-defined commands to interact with the contracts. @@ -66,11 +72,7 @@ make -sC cairo/ send_msg_value_l1 value=2 ``` Then you have to wait for the message to be sent to L1; Katana will display it: ``` -2023-12-15T15:16:18.435370Z INFO messaging: Message sent to settlement layer: -| hash | 0x62c7475daef517f6858a6f539bb4d2aa7eb1e23a7e8b1bc6a0834256d995e49d -| from_address | 0x4231f608ea4a233136f6cdfcd10eaad2e46362bbc4e5d5aa88d0d574ea120d8 -| to_address | 0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 -| payload | [0x2] +2024-04-22T23:42:52.478200Z INFO messaging: Message sent to settlement layer. hash=0xb5c9a1d3b8eb1c9d37ee5ffdacf09560a68d0c9e53fa4b1cc91d967095bc4ac7 from_address=0x609f8e7a76b6cc36f3ff86f09f6e5fdd0e6320f117d817e4344c1bf9fac7d67 to_address=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 payload=0x2 ``` ``` # Consume the message previously sent. You can try to call it once and see the second one reverting. @@ -109,7 +111,15 @@ You can also use the Makefile to setup the chains, but the flow is the following How to run the scripts: - Starts Katana (1) to simulate starknet on a new terminal with default port 5050. -- Starts Katana (2) for your appchain on a new terminal with port 6060 and the configuration for messaging: `katana --messaging crates/katana/core/contracts/messaging/l3.messaging.json -p 6060` +- Starts Katana (2) for your appchain on a new terminal with port 6060 and the configuration for messaging: +```bash + # From installed Katana. + katana --messaging crates/katana/contracts/messaging/l3.messaging.json -p 6060 + + # Dev mode + cargo run --bin katana --features "starknet-messaging" -- --messaging crates/katana/contracts/messaging/l3.messaging.json -p 6060 +``` + - Open another terminal and `cd ~/dojo/crates/katana/core/contracts/messaging`. Then you can use pre-defined commands to interact with the contracts. @@ -139,3 +149,5 @@ make -sC ./cairo/ get_value_l2 # Try to change the value to see the transaction error.
make -sC cairo/ send_msg_l3 selector_str=msg_handler_value value=888 ``` + +It's important to note that Dojo will support settlement. Hence, messaging will be done during the state update of the appchain on the base layer, and not with this custom solution that was developed for the demo. diff --git a/crates/katana/primitives/contracts/messaging/anvil.messaging.json b/crates/katana/contracts/messaging/anvil.messaging.json similarity index 100% rename from crates/katana/primitives/contracts/messaging/anvil.messaging.json rename to crates/katana/contracts/messaging/anvil.messaging.json diff --git a/crates/katana/primitives/contracts/messaging/cairo/.gitignore b/crates/katana/contracts/messaging/cairo/.gitignore similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/.gitignore rename to crates/katana/contracts/messaging/cairo/.gitignore diff --git a/crates/katana/contracts/messaging/cairo/.tool-versions b/crates/katana/contracts/messaging/cairo/.tool-versions new file mode 100644 index 0000000000..49edbb5364 --- /dev/null +++ b/crates/katana/contracts/messaging/cairo/.tool-versions @@ -0,0 +1 @@ +scarb 2.5.4 diff --git a/crates/katana/primitives/contracts/messaging/cairo/Makefile b/crates/katana/contracts/messaging/cairo/Makefile similarity index 75% rename from crates/katana/primitives/contracts/messaging/cairo/Makefile rename to crates/katana/contracts/messaging/cairo/Makefile index aac32186a6..afdff6a560 100644 --- a/crates/katana/primitives/contracts/messaging/cairo/Makefile +++ b/crates/katana/contracts/messaging/cairo/Makefile @@ -10,10 +10,12 @@ sierra = .contract_class.json # ** L1 <-> L2 ** # L1_CONTRACT_ADDR=0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512 -C_MSG_L1_ADDR=0x04231f608ea4a233136f6cdfcd10eaad2e46362bbc4e5d5aa88d0d574ea120d8 +C_MSG_L1_ADDR=0x0609f8e7a76b6cc36f3ff86f09f6e5fdd0e6320f117d817e4344c1bf9fac7d67 OPTS_L2 := --account katana-0 \ - --rpc http://0.0.0.0:5050 + --rpc http://0.0.0.0:5050 \ + --poll-interval 1000 \ + -w setup_for_l1_messaging: scarb build; \ @@ -38,29 +40,31 @@ send_msg_struct_l1: # ** L2 <-> L3 ** # ACCOUNT_L3=./account_l3.json -ACCOUNT_L3_ADDR=0x5686a647a9cdd63ade617e0baf3b364856b813b508f03903eb58a7e622d5855 -L3_PRIVATE_KEY=0x33003003001800009900180300d206308b0070db00121318d17b5e6262150b +ACCOUNT_L3_ADDR=0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03 +L3_PRIVATE_KEY=0x1800000000300000180000000000030000000000003006001800006600 -# L2_APPCHAIN_MSG_ADDR=0x046c0ea3fb2ad27053e8af3c8cfab38a51afb9fe90fcab1f75446bd41f7d3796 -# L2_APPCHAIN_MSG_CLASS_HASH=$(shell starkli class-hash target/dev/katana_messaging_appchain_messaging.contract_class.json) +L2_APPCHAIN_MSG_ADDR=0x0368407c4a0e075c693cf372dd735750df7289c6b5d06234615018456e658591 +L2_APPCHAIN_MSG_CLASS_HASH=$(shell starkli class-hash target/dev/katana_messaging_appchain_messaging.contract_class.json) -L2_CONTRACT1_ADDR=0x054f66c104745e27ad5194815a6c4755cf2076c4809212101dfe31563f312a34 +L2_CONTRACT1_ADDR=0x0450ae47f57d4a2165b015a4bf143cd53f60d61a74a0be998bf0a42c477f26ce L2_CONTRACT1_CLASS_HASH=$(shell starkli class-hash target/dev/katana_messaging_contract_1.contract_class.json) -L3_C_MSG_ADDR=0x071278839029ab1f9fa0ce1ee01e38599736dd4e8fed2417158bec4ef5dc6d0f +L3_C_MSG_ADDR=0x039bb4ce38513597cf75eeacd7f3ed6ef058a61818c252612d134d95ed2e9051 L3_C_MSG_CLASS_HASH=$(shell starkli class-hash target/dev/katana_messaging_contract_msg_starknet.contract_class.json) OPTS_L3 := --account ${ACCOUNT_L3} \ --rpc http://0.0.0.0:6060 \ - --private-key ${L3_PRIVATE_KEY} +
--private-key ${L3_PRIVATE_KEY} \ + --poll-interval 1000 \ + -w setup_l2_messaging: @set -x; \ scarb build; \ - appchain_ch=$$(starkli class-hash ${build}ERC20${sierra}); \ - starkli declare target/dev/katana_messaging_appchain_messaging.contract_class.json ${OPTS_L2} - starkli declare target/dev/katana_messaging_contract_1.contract_class.json ${OPTS_L2} - starkli deploy --salt 0x1234 ${L2_APPCHAIN_MSG_CLASS_HASH} ${ACCOUNT_L2_ADDR} ${ACCOUNT_L3_ADDR} ${OPTS_L2} + appchain_ch=$$(starkli class-hash ${build}appchain_messaging${sierra}); \ + starkli declare target/dev/katana_messaging_appchain_messaging.contract_class.json ${OPTS_L2}; \ + starkli declare target/dev/katana_messaging_contract_1.contract_class.json ${OPTS_L2}; \ + starkli deploy --salt 0x1234 ${L2_APPCHAIN_MSG_CLASS_HASH} ${ACCOUNT_L2_ADDR} ${ACCOUNT_L3_ADDR} ${OPTS_L2}; \ starkli deploy --salt 0x1234 ${L2_CONTRACT1_CLASS_HASH} ${L2_APPCHAIN_MSG_ADDR} ${OPTS_L2} setup_l3_messaging: @@ -82,7 +86,7 @@ consume_msg_from_l3_usage: consume_msg_from_l3: starkli invoke ${L2_CONTRACT1_ADDR} consume_message \ - ${ACCOUNT_L3_ADDR} \ + ${L3_C_MSG_ADDR} \ $(payload) \ ${OPTS_L2} diff --git a/crates/katana/primitives/contracts/messaging/cairo/Scarb.lock b/crates/katana/contracts/messaging/cairo/Scarb.lock similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/Scarb.lock rename to crates/katana/contracts/messaging/cairo/Scarb.lock diff --git a/crates/katana/primitives/contracts/messaging/cairo/Scarb.toml b/crates/katana/contracts/messaging/cairo/Scarb.toml similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/Scarb.toml rename to crates/katana/contracts/messaging/cairo/Scarb.toml diff --git a/crates/katana/primitives/contracts/messaging/cairo/account_l2.json b/crates/katana/contracts/messaging/cairo/account_l2.json similarity index 68% rename from crates/katana/primitives/contracts/messaging/cairo/account_l2.json rename to crates/katana/contracts/messaging/cairo/account_l2.json index 788b31022a..db9480cf66 100644 --- a/crates/katana/primitives/contracts/messaging/cairo/account_l2.json +++ b/crates/katana/contracts/messaging/cairo/account_l2.json @@ -3,11 +3,12 @@ "variant": { "type": "open_zeppelin", "version": 1, - "public_key": "0x2b191c2f3ecf685a91af7cf72a43e7b90e2e41220175de5c4f7498981b10053" + "public_key": "0x2b191c2f3ecf685a91af7cf72a43e7b90e2e41220175de5c4f7498981b10053", + "legacy": false }, "deployment": { "status": "deployed", - "class_hash": "0x4d07e40e93398ed3c76981e72dd1fd22557a78ce36c0515f679e27f0bb5bc5f", + "class_hash": "0x05400e90f7e0ae78bd02c77cd75527280470e2fe19c54970dd79dc37a9d3645c", "address": "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" } } diff --git a/crates/katana/contracts/messaging/cairo/account_l3.json b/crates/katana/contracts/messaging/cairo/account_l3.json new file mode 100644 index 0000000000..db9480cf66 --- /dev/null +++ b/crates/katana/contracts/messaging/cairo/account_l3.json @@ -0,0 +1,14 @@ +{ + "version": 1, + "variant": { + "type": "open_zeppelin", + "version": 1, + "public_key": "0x2b191c2f3ecf685a91af7cf72a43e7b90e2e41220175de5c4f7498981b10053", + "legacy": false + }, + "deployment": { + "status": "deployed", + "class_hash": "0x05400e90f7e0ae78bd02c77cd75527280470e2fe19c54970dd79dc37a9d3645c", + "address": "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" + } +} diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/appchain_messaging.cairo 
b/crates/katana/contracts/messaging/cairo/src/appchain_messaging.cairo similarity index 99% rename from crates/katana/primitives/contracts/messaging/cairo/src/appchain_messaging.cairo rename to crates/katana/contracts/messaging/cairo/src/appchain_messaging.cairo index f865fa44f2..0d8897f5a5 100644 --- a/crates/katana/primitives/contracts/messaging/cairo/src/appchain_messaging.cairo +++ b/crates/katana/contracts/messaging/cairo/src/appchain_messaging.cairo @@ -322,7 +322,7 @@ mod appchain_messaging { ); match starknet::call_contract_syscall(to_address, selector, payload) { - Result::Ok(span) => self + Result::Ok(_) => self .emit(MessageExecuted { from_address, to_address, selector, payload, }), Result::Err(e) => { panic(e) diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/contract_1.cairo b/crates/katana/contracts/messaging/cairo/src/contract_1.cairo similarity index 91% rename from crates/katana/primitives/contracts/messaging/cairo/src/contract_1.cairo rename to crates/katana/contracts/messaging/cairo/src/contract_1.cairo index 322e65a52b..ef0d4ccb10 100644 --- a/crates/katana/primitives/contracts/messaging/cairo/src/contract_1.cairo +++ b/crates/katana/contracts/messaging/cairo/src/contract_1.cairo @@ -52,9 +52,10 @@ mod contract_1 { // Will revert in case of failure if the message is not registered // as consumable. - let msg_hash = messaging.consume_message_from_appchain(from_address, payload,); - // msg successfully consumed, we can proceed and process the data - // in the payload. + let _msg_hash = messaging.consume_message_from_appchain(from_address, payload,); + + // msg successfully consumed, we can proceed and process the data + // in the payload. } /// An example function to test how appchain contract can trigger diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_l1.cairo b/crates/katana/contracts/messaging/cairo/src/contract_msg_l1.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_l1.cairo rename to crates/katana/contracts/messaging/cairo/src/contract_msg_l1.cairo diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_starknet.cairo b/crates/katana/contracts/messaging/cairo/src/contract_msg_starknet.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_starknet.cairo rename to crates/katana/contracts/messaging/cairo/src/contract_msg_starknet.cairo diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/lib.cairo b/crates/katana/contracts/messaging/cairo/src/lib.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/lib.cairo rename to crates/katana/contracts/messaging/cairo/src/lib.cairo diff --git a/crates/katana/contracts/messaging/l3.messaging.json b/crates/katana/contracts/messaging/l3.messaging.json new file mode 100644 index 0000000000..9c64d2018f --- /dev/null +++ b/crates/katana/contracts/messaging/l3.messaging.json @@ -0,0 +1,9 @@ +{ + "chain": "starknet", + "rpc_url": "http://127.0.0.1:5050", + "contract_address": "0x0368407c4a0e075c693cf372dd735750df7289c6b5d06234615018456e658591", + "sender_address": "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03", + "private_key": "0x1800000000300000180000000000030000000000003006001800006600", + "interval": 2, + "from_block": 0 +} diff --git a/crates/katana/primitives/contracts/messaging/run_e2e.sh b/crates/katana/contracts/messaging/run_e2e.sh similarity index 100% rename from 
crates/katana/primitives/contracts/messaging/run_e2e.sh rename to crates/katana/contracts/messaging/run_e2e.sh diff --git a/crates/katana/primitives/contracts/messaging/solidity/.anvil.env b/crates/katana/contracts/messaging/solidity/.anvil.env similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/.anvil.env rename to crates/katana/contracts/messaging/solidity/.anvil.env diff --git a/crates/katana/primitives/contracts/messaging/solidity/.gitignore b/crates/katana/contracts/messaging/solidity/.gitignore similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/.gitignore rename to crates/katana/contracts/messaging/solidity/.gitignore diff --git a/crates/katana/primitives/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json b/crates/katana/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json rename to crates/katana/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json diff --git a/crates/katana/primitives/contracts/messaging/solidity/Makefile b/crates/katana/contracts/messaging/solidity/Makefile similarity index 92% rename from crates/katana/primitives/contracts/messaging/solidity/Makefile rename to crates/katana/contracts/messaging/solidity/Makefile index 1291370c05..538d484913 100644 --- a/crates/katana/primitives/contracts/messaging/solidity/Makefile +++ b/crates/katana/contracts/messaging/solidity/Makefile @@ -10,7 +10,7 @@ export $(shell sed 's/=.*//' .env) # Addresses fixed here for easy testing. C_MSG_L2_ADDR=0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512 L2_ACCOUNT=0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03 -L2_CONTRACT_ADDR=0x0429a64d97c1422a37a09fc7406f35c264be59b744aaff5a79d59393eb1bc7e1 +L2_CONTRACT_ADDR=0x609f8e7a76b6cc36f3ff86f09f6e5fdd0e6320f117d817e4344c1bf9fac7d67 deploy_messaging_contracts: forge script --broadcast --rpc-url ${ETH_RPC_URL} script/LocalTesting.s.sol:LocalSetup diff --git a/crates/katana/primitives/contracts/messaging/solidity/README.md b/crates/katana/contracts/messaging/solidity/README.md similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/README.md rename to crates/katana/contracts/messaging/solidity/README.md diff --git a/crates/katana/primitives/contracts/messaging/solidity/foundry.toml b/crates/katana/contracts/messaging/solidity/foundry.toml similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/foundry.toml rename to crates/katana/contracts/messaging/solidity/foundry.toml diff --git a/crates/katana/contracts/messaging/solidity/lib/forge-std b/crates/katana/contracts/messaging/solidity/lib/forge-std new file mode 160000 index 0000000000..5dd1c68131 --- /dev/null +++ b/crates/katana/contracts/messaging/solidity/lib/forge-std @@ -0,0 +1 @@ +Subproject commit 5dd1c68131ddd3c89ef169666eb262b92e90507c diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol b/crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol 
b/crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/NamedStorage.sol b/crates/katana/contracts/messaging/solidity/lib/starknet/NamedStorage.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/NamedStorage.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/NamedStorage.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol b/crates/katana/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/script/LocalTesting.s.sol b/crates/katana/contracts/messaging/solidity/script/LocalTesting.s.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/script/LocalTesting.s.sol rename to crates/katana/contracts/messaging/solidity/script/LocalTesting.s.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/src/Contract1.sol b/crates/katana/contracts/messaging/solidity/src/Contract1.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/src/Contract1.sol rename to crates/katana/contracts/messaging/solidity/src/Contract1.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/src/StarknetMessagingLocal.sol b/crates/katana/contracts/messaging/solidity/src/StarknetMessagingLocal.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/src/StarknetMessagingLocal.sol rename to crates/katana/contracts/messaging/solidity/src/StarknetMessagingLocal.sol diff --git a/crates/katana/primitives/contracts/test_contract.cairo b/crates/katana/contracts/test_contract.cairo similarity index 100% rename from crates/katana/primitives/contracts/test_contract.cairo rename to crates/katana/contracts/test_contract.cairo diff --git a/crates/katana/primitives/contracts/universal_deployer.cairo b/crates/katana/contracts/universal_deployer.cairo similarity index 100% rename from crates/katana/primitives/contracts/universal_deployer.cairo rename to crates/katana/contracts/universal_deployer.cairo diff --git a/crates/katana/core/Cargo.toml b/crates/katana/core/Cargo.toml index 2669286bdc..e533696360 100644 --- a/crates/katana/core/Cargo.toml +++ b/crates/katana/core/Cargo.toml @@ -7,25 +7,27 @@ repository.workspace = true version.workspace = true [dependencies] -katana-db = { path = "../storage/db" } +katana-db.workspace = true katana-executor.workspace = true -katana-primitives = { path = "../primitives" } -katana-provider = { path = "../storage/provider" } +katana-primitives.workspace = true +katana-provider.workspace = true katana-tasks.workspace = true anyhow.workspace = true async-trait.workspace = true +dojo-metrics.workspace = true +metrics.workspace = true cairo-lang-casm = "2.3.1" cairo-lang-starknet = "2.3.1" cairo-vm.workspace = true convert_case.workspace = true +k256 = { version = "0.13", default-features = false, features = ["ecdsa", "std"] } 
+reqwest = { version = "0.11.22", features = [ "blocking", "rustls-tls" ], default-features = false } derive_more.workspace = true -ethers = { version = "2.0.11", optional = true } flate2.workspace = true futures.workspace = true lazy_static = "1.4.0" parking_lot.workspace = true -primitive-types = "0.12.2" rand = { version = "0.8.5", features = [ "small_rng" ] } serde.workspace = true serde_json.workspace = true @@ -37,11 +39,31 @@ tokio.workspace = true tracing.workspace = true url.workspace = true +alloy-primitives = { workspace = true, features = ["serde"] } +alloy-sol-types = { workspace = true, default-features = false, features = ["json"], optional = true } + +alloy-transport = { version = "0.1.0", default-features = false, optional = true } +alloy-provider = { version = "0.1.0", default-features = false, optional = true, features = ["reqwest"]} +alloy-signer = { version = "0.1.0", default-features = false, optional = true } +alloy-network = { version = "0.1.0", default-features = false, optional = true } +alloy-rpc-types = { version = "0.1.0", default-features = false, optional = true } +alloy-signer-wallet = { version = "0.1.0", default-features = false, optional = true } +alloy-contract = { version = "0.1.0", default-features = false, optional = true } + [dev-dependencies] assert_matches.workspace = true hex = "0.4.3" tempfile = "3.8.1" [features] -messaging = [ "ethers" ] -starknet-messaging = [ ] +messaging = [ + "alloy-sol-types", + "alloy-transport", + "alloy-provider", + "alloy-signer", + "alloy-network", + "alloy-rpc-types", + "alloy-signer-wallet", + "alloy-contract" +] +starknet-messaging = [ ] diff --git a/crates/katana/core/src/backend/config.rs b/crates/katana/core/src/backend/config.rs index 622e550cf4..e1c4773c1f 100644 --- a/crates/katana/core/src/backend/config.rs +++ b/crates/katana/core/src/backend/config.rs @@ -1,18 +1,13 @@ use std::path::PathBuf; -use ::primitive_types::U256; -use katana_primitives::block::GasPrices; +use alloy_primitives::U256; use katana_primitives::chain::ChainId; -use katana_primitives::env::BlockEnv; use katana_primitives::genesis::allocation::DevAllocationsGenerator; use katana_primitives::genesis::constant::DEFAULT_PREFUNDED_ACCOUNT_BALANCE; use katana_primitives::genesis::Genesis; use url::Url; -use crate::constants::{ - DEFAULT_ETH_L1_GAS_PRICE, DEFAULT_INVOKE_MAX_STEPS, DEFAULT_STRK_L1_GAS_PRICE, - DEFAULT_VALIDATE_MAX_STEPS, -}; +use crate::constants::{DEFAULT_INVOKE_MAX_STEPS, DEFAULT_VALIDATE_MAX_STEPS}; use crate::env::BlockContextGenerator; #[derive(Debug, Clone)] @@ -27,10 +22,6 @@ pub struct StarknetConfig { } impl StarknetConfig { - pub fn block_env(&self) -> BlockEnv { - BlockEnv { l1_gas_prices: self.env.gas_price.clone(), ..Default::default() } - } - pub fn block_context_generator(&self) -> BlockContextGenerator { BlockContextGenerator::default() } @@ -60,7 +51,6 @@ impl Default for StarknetConfig { #[derive(Debug, Clone)] pub struct Environment { pub chain_id: ChainId, - pub gas_price: GasPrices, pub invoke_max_steps: u32, pub validate_max_steps: u32, } @@ -71,7 +61,6 @@ impl Default for Environment { chain_id: ChainId::parse("KATANA").unwrap(), invoke_max_steps: DEFAULT_INVOKE_MAX_STEPS, validate_max_steps: DEFAULT_VALIDATE_MAX_STEPS, - gas_price: GasPrices { eth: DEFAULT_ETH_L1_GAS_PRICE, strk: DEFAULT_STRK_L1_GAS_PRICE }, } } } diff --git a/crates/katana/core/src/backend/mod.rs b/crates/katana/core/src/backend/mod.rs index 979c0903d2..209b58a2dc 100644 --- a/crates/katana/core/src/backend/mod.rs +++ 
b/crates/katana/core/src/backend/mod.rs @@ -1,12 +1,11 @@ use std::sync::Arc; -use katana_executor::ExecutorFactory; +use katana_executor::{ExecutionOutput, ExecutionResult, ExecutorFactory}; use katana_primitives::block::{ Block, FinalityStatus, GasPrices, Header, PartialHeader, SealedBlockWithStatus, }; use katana_primitives::chain::ChainId; use katana_primitives::env::BlockEnv; -use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_primitives::version::CURRENT_STARKNET_VERSION; use katana_primitives::FieldElement; use katana_provider::providers::fork::ForkedProvider; @@ -26,9 +25,11 @@ pub mod storage; use self::config::StarknetConfig; use self::storage::Blockchain; use crate::env::BlockContextGenerator; -use crate::service::block_producer::{BlockProductionError, MinedBlockOutcome, TxWithOutcome}; +use crate::service::block_producer::{BlockProductionError, MinedBlockOutcome}; use crate::utils::get_current_timestamp; +pub(crate) const LOG_TARGET: &str = "katana::core::backend"; + pub struct Backend { /// The config used to generate the backend. pub config: StarknetConfig, @@ -77,11 +78,11 @@ impl Backend { block.l1_gas_price.price_in_fri.try_into().expect("should fit in u128"); trace!( - target: "backend", - "forking chain `{}` at block {} from {}", - parse_cairo_short_string(&forked_chain_id).unwrap(), - block.block_number, - forked_url + target: LOG_TARGET, + chain = %parse_cairo_short_string(&forked_chain_id).unwrap(), + block_number = %block.block_number, + forked_url = %forked_url, + "Forking chain.", ); let blockchain = Blockchain::new_from_forked( @@ -118,17 +119,20 @@ impl Backend { pub fn do_mine_block( &self, block_env: &BlockEnv, - txs_outcomes: Vec, - state_updates: StateUpdatesWithDeclaredClasses, + execution_output: ExecutionOutput, ) -> Result { - let mut txs = vec![]; - let mut receipts = vec![]; - let mut execs = vec![]; - - for t in txs_outcomes { - txs.push(t.tx); - receipts.push(t.receipt); - execs.push(t.exec_info); + // we optimistically allocate the maximum amount possible + let mut txs = Vec::with_capacity(execution_output.transactions.len()); + let mut traces = Vec::with_capacity(execution_output.transactions.len()); + let mut receipts = Vec::with_capacity(execution_output.transactions.len()); + + // only include successful transactions in the block + for (tx, res) in execution_output.transactions { + if let ExecutionResult::Success { receipt, trace, .. 
} = res { + txs.push(tx); + traces.push(trace); + receipts.push(receipt); + } } let prev_hash = BlockHashProvider::latest_hash(self.blockchain.provider())?; @@ -154,14 +158,19 @@ impl Backend { BlockWriter::insert_block_with_states_and_receipts( self.blockchain.provider(), block, - state_updates, + execution_output.states, receipts, - execs, + traces, )?; - info!(target: "backend", "⛏️ Block {block_number} mined with {tx_count} transactions"); + info!( + target: LOG_TARGET, + block_number = %block_number, + tx_count = %tx_count, + "Block mined.", + ); - Ok(MinedBlockOutcome { block_number }) + Ok(MinedBlockOutcome { block_number, stats: execution_output.stats }) } pub fn update_block_env(&self, block_env: &mut BlockEnv) { @@ -179,14 +188,13 @@ impl Backend { block_env.number += 1; block_env.timestamp = timestamp; - block_env.l1_gas_prices = self.config.env.gas_price.clone(); } pub fn mine_empty_block( &self, block_env: &BlockEnv, ) -> Result { - self.do_mine_block(block_env, Default::default(), Default::default()) + self.do_mine_block(block_env, Default::default()) } } @@ -204,8 +212,12 @@ mod tests { use crate::backend::config::{Environment, StarknetConfig}; fn create_test_starknet_config() -> StarknetConfig { + let mut genesis = Genesis::default(); + genesis.gas_prices.eth = 2100; + genesis.gas_prices.strk = 3100; + StarknetConfig { - genesis: Genesis::default(), + genesis, disable_fee: true, env: Environment::default(), ..Default::default() @@ -219,17 +231,26 @@ mod tests { #[tokio::test] async fn test_creating_blocks() { let backend = create_test_backend().await; - let provider = backend.blockchain.provider(); - assert_eq!(BlockNumberProvider::latest_number(provider).unwrap(), 0); - let block_num = provider.latest_number().unwrap(); + let block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + + assert_eq!(block_num, 0); + assert_eq!(block_env.number, 0); + assert_eq!(block_env.l1_gas_prices.eth, 2100); + assert_eq!(block_env.l1_gas_prices.strk, 3100); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); backend.update_block_env(&mut block_env); backend.mine_empty_block(&block_env).unwrap(); let block_num = provider.latest_number().unwrap(); + assert_eq!(block_num, 1); + assert_eq!(block_env.number, 1); + assert_eq!(block_env.l1_gas_prices.eth, 2100); + assert_eq!(block_env.l1_gas_prices.strk, 3100); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); backend.update_block_env(&mut block_env); backend.mine_empty_block(&block_env).unwrap(); @@ -237,8 +258,11 @@ mod tests { let block_num = provider.latest_number().unwrap(); let block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); - assert_eq!(BlockNumberProvider::latest_number(provider).unwrap(), 2); + let block_num = provider.latest_number().unwrap(); + assert_eq!(block_num, 2); assert_eq!(block_env.number, 2); + assert_eq!(block_env.l1_gas_prices.eth, 2100); + assert_eq!(block_env.l1_gas_prices.strk, 3100); let block0 = BlockProvider::block_by_number(provider, 0).unwrap().unwrap(); let block1 = BlockProvider::block_by_number(provider, 1).unwrap().unwrap(); diff --git a/crates/katana/core/src/backend/storage.rs b/crates/katana/core/src/backend/storage.rs index 06cbb71358..2cc86259bd 100644 --- a/crates/katana/core/src/backend/storage.rs +++ b/crates/katana/core/src/backend/storage.rs @@ -146,6 +146,7 @@ mod tests { use katana_primitives::genesis::Genesis; use katana_primitives::receipt::{InvokeTxReceipt, Receipt}; use 
katana_primitives::state::StateUpdatesWithDeclaredClasses; + use katana_primitives::trace::TxExecInfo; use katana_primitives::transaction::{InvokeTx, Tx, TxWithHash}; use katana_primitives::FieldElement; use katana_provider::providers::in_memory::InMemoryProvider; @@ -154,7 +155,7 @@ mod tests { HeaderProvider, }; use katana_provider::traits::state::StateFactoryProvider; - use katana_provider::traits::transaction::TransactionProvider; + use katana_provider::traits::transaction::{TransactionProvider, TransactionTraceProvider}; use starknet::macros::felt; use super::Blockchain; @@ -254,7 +255,7 @@ mod tests { dummy_block.clone(), StateUpdatesWithDeclaredClasses::default(), vec![Receipt::Invoke(InvokeTxReceipt::default())], - vec![], + vec![TxExecInfo::default()], ) .unwrap(); @@ -310,11 +311,14 @@ mod tests { .unwrap(); let tx = blockchain.provider().transaction_by_hash(dummy_tx.hash).unwrap().unwrap(); + let tx_exec = + blockchain.provider().transaction_execution(dummy_tx.hash).unwrap().unwrap(); assert_eq!(block_hash, dummy_block.block.header.hash); assert_eq!(block_number, dummy_block.block.header.header.number); assert_eq!(block, dummy_block.block.unseal()); assert_eq!(tx, dummy_tx); + assert_eq!(tx_exec, TxExecInfo::default()); } } } diff --git a/crates/katana/core/src/pool.rs b/crates/katana/core/src/pool.rs index ce0419ad12..8214e91657 100644 --- a/crates/katana/core/src/pool.rs +++ b/crates/katana/core/src/pool.rs @@ -6,6 +6,8 @@ use parking_lot::RwLock; use starknet::core::types::FieldElement; use tracing::{info, warn}; +pub(crate) const LOG_TARGET: &str = "txpool"; + #[derive(Debug, Default)] pub struct TransactionPool { transactions: RwLock>, @@ -23,7 +25,7 @@ impl TransactionPool { let hash = transaction.hash; self.transactions.write().push(transaction); - info!(target: "txpool", "Transaction received | Hash: {hash:#x}"); + info!(target: LOG_TARGET, hash = %format!("\"{hash:#x}\""), "Transaction received."); // notify listeners of new tx added to the pool self.notify_listener(hash) @@ -55,9 +57,9 @@ impl TransactionPool { Err(e) => { if e.is_full() { warn!( - target: "txpool", - "[{:?}] Failed to send tx notification because channel is full", - hash, + target: LOG_TARGET, + hash = ?format!("\"{hash:#x}\""), + "Unable to send tx notification because channel is full." 
); true } else { diff --git a/crates/katana/core/src/sequencer.rs b/crates/katana/core/src/sequencer.rs index 790be10c69..0a0db8678d 100644 --- a/crates/katana/core/src/sequencer.rs +++ b/crates/katana/core/src/sequencer.rs @@ -85,13 +85,13 @@ impl KatanaSequencer { let block_producer = Arc::new(block_producer); - tokio::spawn(NodeService { + tokio::spawn(NodeService::new( + Arc::clone(&pool), miner, - pool: Arc::clone(&pool), - block_producer: block_producer.clone(), + block_producer.clone(), #[cfg(feature = "messaging")] messaging, - }); + )); Ok(Self { pool, config, backend, block_producer }) } diff --git a/crates/katana/core/src/service/block_producer.rs b/crates/katana/core/src/service/block_producer.rs index 8d8251e74f..4205290389 100644 --- a/crates/katana/core/src/service/block_producer.rs +++ b/crates/katana/core/src/service/block_producer.rs @@ -8,7 +8,7 @@ use std::time::Duration; use futures::channel::mpsc::{channel, Receiver, Sender}; use futures::stream::{Stream, StreamExt}; use futures::FutureExt; -use katana_executor::{BlockExecutor, ExecutionOutput, ExecutionResult, ExecutorFactory}; +use katana_executor::{BlockExecutor, ExecutionResult, ExecutionStats, ExecutorFactory}; use katana_primitives::block::{BlockHashOrNumber, ExecutableBlock, PartialHeader}; use katana_primitives::receipt::Receipt; use katana_primitives::trace::TxExecInfo; @@ -25,6 +25,8 @@ use tracing::{error, info, trace, warn}; use crate::backend::Backend; +pub(crate) const LOG_TARGET: &str = "miner"; + #[derive(Debug, thiserror::Error)] pub enum BlockProductionError { #[error(transparent)] @@ -40,8 +42,10 @@ pub enum BlockProductionError { TransactionExecutionError(#[from] katana_executor::ExecutorError), } +#[derive(Debug, Clone)] pub struct MinedBlockOutcome { pub block_number: u64, + pub stats: ExecutionStats, } #[derive(Debug, Clone)] @@ -115,7 +119,7 @@ impl BlockProducer { // Handler for the `katana_generateBlock` RPC method. pub fn force_mine(&self) { - trace!(target: "miner", "scheduling force block mining"); + trace!(target: LOG_TARGET, "Scheduling force block mining."); let mut mode = self.inner.write(); match &mut *mode { BlockProducerMode::Instant(producer) => producer.force_mine(), @@ -243,12 +247,12 @@ impl IntervalBlockProducer { pub fn force_mine(&mut self) { match Self::do_mine(self.executor.clone(), self.backend.clone()) { Ok(outcome) => { - info!(target: "miner", "force mined block {}", outcome.block_number); + info!(target: LOG_TARGET, block_number = %outcome.block_number, "Force mined block."); self.executor = self.create_new_executor_for_next_block().expect("fail to create executor"); } Err(e) => { - error!(target: "miner", "failed to force mine: {e}"); + error!(target: LOG_TARGET, error = %e, "On force mine."); } } } @@ -259,24 +263,13 @@ impl IntervalBlockProducer { ) -> Result { let executor = &mut executor.write(); - trace!(target: "miner", "creating new block"); + trace!(target: LOG_TARGET, "Creating new block."); let block_env = executor.block_env(); - let ExecutionOutput { states, transactions } = executor.take_execution_output()?; - - let transactions = transactions - .into_iter() - .filter_map(|(tx, res)| match res { - ExecutionResult::Failed { .. } => None, - ExecutionResult::Success { receipt, trace, .. 
} => { - Some(TxWithOutcome { tx, receipt, exec_info: trace }) - } - }) - .collect::>(); - - let outcome = backend.do_mine_block(&block_env, transactions, states)?; + let execution_output = executor.take_execution_output()?; + let outcome = backend.do_mine_block(&block_env, execution_output)?; - trace!(target: "miner", "created new block: {}", outcome.block_number); + trace!(target: LOG_TARGET, block_number = %outcome.block_number, "Created new block."); Ok(outcome) } @@ -342,8 +335,8 @@ impl IntervalBlockProducer { Err(e) => { if e.is_full() { warn!( - target: "miner", - "failed to send new txs notification because channel is full", + target: LOG_TARGET, + "Unable to send new txs notification because channel is full.", ); true } else { @@ -478,7 +471,7 @@ impl InstantBlockProducer { let txs = self.queued.pop_front().unwrap_or_default(); let _ = Self::do_mine(self.backend.clone(), txs); } else { - trace!(target: "miner", "unable to force mine while a mining process is running") + trace!(target: LOG_TARGET, "Unable to force mine while a mining process is running.") } } @@ -486,7 +479,7 @@ impl InstantBlockProducer { backend: Arc>, transactions: Vec, ) -> Result<(MinedBlockOutcome, Vec), BlockProductionError> { - trace!(target: "miner", "creating new block"); + trace!(target: LOG_TARGET, "Creating new block."); let provider = backend.blockchain.provider(); @@ -513,8 +506,10 @@ impl InstantBlockProducer { executor.execute_block(block)?; - let ExecutionOutput { states, transactions } = executor.take_execution_output()?; - let txs_outcomes = transactions + let execution_output = executor.take_execution_output()?; + let txs_outcomes = execution_output + .transactions + .clone() .into_iter() .filter_map(|(tx, res)| match res { ExecutionResult::Success { receipt, trace, .. 
} => { @@ -524,9 +519,9 @@ impl InstantBlockProducer { }) .collect::>(); - let outcome = backend.do_mine_block(&block_env, txs_outcomes.clone(), states)?; + let outcome = backend.do_mine_block(&block_env, execution_output)?; - trace!(target: "miner", "created new block: {}", outcome.block_number); + trace!(target: LOG_TARGET, block_number = %outcome.block_number, "Created new block."); Ok((outcome, txs_outcomes)) } @@ -549,8 +544,8 @@ impl InstantBlockProducer { Err(e) => { if e.is_full() { warn!( - target: "miner", - "failed to send new txs notification because channel is full", + target: LOG_TARGET, + "Unable to send new txs notification because channel is full.", ); true } else { diff --git a/crates/katana/core/src/service/messaging/ethereum.rs b/crates/katana/core/src/service/messaging/ethereum.rs index 1de2866e58..efea52b550 100644 --- a/crates/katana/core/src/service/messaging/ethereum.rs +++ b/crates/katana/core/src/service/messaging/ethereum.rs @@ -2,62 +2,56 @@ use std::collections::HashMap; use std::str::FromStr; use std::sync::Arc; +use alloy_network::Ethereum; +use alloy_primitives::{Address, LogData, U256}; +use alloy_provider::{Provider, ReqwestProvider}; +use alloy_rpc_types::{BlockNumberOrTag, Filter, FilterBlockOption, FilterSet, Log, Topic}; +use alloy_sol_types::{sol, SolEvent}; use anyhow::Result; use async_trait::async_trait; -use ethers::prelude::*; -use ethers::providers::{Http, Provider}; -use ethers::types::{Address, BlockNumber, Log}; -use k256::ecdsa::SigningKey; use katana_primitives::chain::ChainId; use katana_primitives::receipt::MessageToL1; use katana_primitives::transaction::L1HandlerTx; use katana_primitives::utils::transaction::compute_l1_message_hash; use katana_primitives::FieldElement; -use tracing::{debug, error, trace, warn}; +use tracing::{debug, trace, warn}; use super::{Error, MessagingConfig, Messenger, MessengerResult, LOG_TARGET}; -abigen!( +sol! { + #[sol(rpc, rename_all = "snakecase")] + #[derive(serde::Serialize, serde::Deserialize)] StarknetMessagingLocal, - "../primitives/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json", - event_derives(serde::Serialize, serde::Deserialize) -); - -#[derive(Debug, PartialEq, Eq, EthEvent)] -pub struct LogMessageToL2 { - #[ethevent(indexed)] - from_address: Address, - #[ethevent(indexed)] - to_address: U256, - #[ethevent(indexed)] - selector: U256, - payload: Vec, - nonce: U256, - fee: U256, + "../contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json" +} + +sol! 
{ + #[sol(rpc)] + contract LogMessageToL2 { + #[derive(Debug, PartialEq)] + event LogMessageToL2Event( + address indexed from_address, + uint256 indexed to_address, + uint256 indexed selector, + uint256[] payload, + uint256 nonce, + uint256 fee + ); + } } pub struct EthereumMessaging { - provider: Arc>, - provider_signer: Arc, Wallet>>, + provider: Arc>, messaging_contract_address: Address, } impl EthereumMessaging { pub async fn new(config: MessagingConfig) -> Result { - let provider = Provider::::try_from(&config.rpc_url)?; - - let chain_id = provider.get_chainid().await?; - - let wallet: LocalWallet = - config.private_key.parse::()?.with_chain_id(chain_id.as_u32()); - - let provider_signer = SignerMiddleware::new(provider.clone(), wallet); - let messaging_contract_address = Address::from_str(&config.contract_address)?; - Ok(EthereumMessaging { - provider: Arc::new(provider), - provider_signer: Arc::new(provider_signer), - messaging_contract_address, + provider: Arc::new(ReqwestProvider::::new_http(reqwest::Url::parse( + &config.rpc_url, + )?)), + messaging_contract_address: config.contract_address.parse::
()?, }) } @@ -77,21 +71,26 @@ impl EthereumMessaging { from_block: u64, to_block: u64, ) -> MessengerResult>> { - trace!(target: LOG_TARGET, "Fetching logs for blocks {} - {}.", from_block, to_block); + trace!(target: LOG_TARGET, from_block = ?from_block, to_block = ?to_block, "Fetching logs."); let mut block_to_logs: HashMap> = HashMap::new(); - let log_msg_to_l2_topic = - H256::from_str("0xdb80dd488acf86d17c747445b0eabb5d57c541d3bd7b6b87af987858e5066b2b") - .unwrap(); - let filters = Filter { block_option: FilterBlockOption::Range { - from_block: Some(BlockNumber::Number(from_block.into())), - to_block: Some(BlockNumber::Number(to_block.into())), + from_block: Some(BlockNumberOrTag::Number(from_block)), + to_block: Some(BlockNumberOrTag::Number(to_block)), }, - address: Some(ValueOrArray::Value(self.messaging_contract_address)), - topics: [Some(ValueOrArray::Value(Some(log_msg_to_l2_topic))), None, None, None], + address: FilterSet::
::from(self.messaging_contract_address), + topics: [ + Topic::from( + "0xdb80dd488acf86d17c747445b0eabb5d57c541d3bd7b6b87af987858e5066b2b" + .parse::() + .unwrap(), + ), + Default::default(), + Default::default(), + Default::default(), + ], }; self.provider @@ -99,15 +98,7 @@ impl EthereumMessaging { .await? .into_iter() .filter(|log| log.block_number.is_some()) - .map(|log| { - ( - log.block_number - .unwrap() - .try_into() - .expect("Block number couldn't be converted to u64."), - log, - ) - }) + .map(|log| (log.block_number.unwrap(), log)) .for_each(|(block_num, log)| { block_to_logs .entry(block_num) @@ -130,12 +121,7 @@ impl Messenger for EthereumMessaging { max_blocks: u64, chain_id: ChainId, ) -> MessengerResult<(u64, Vec)> { - let chain_latest_block: u64 = self - .provider - .get_block_number() - .await? - .try_into() - .expect("Can't convert latest block number into u64."); + let chain_latest_block: u64 = self.provider.get_block_number().await?; // +1 as the from_block counts as 1 block fetched. let to_block = if from_block + max_blocks + 1 < chain_latest_block { @@ -150,8 +136,9 @@ impl Messenger for EthereumMessaging { |(block_number, block_logs)| { debug!( target: LOG_TARGET, - "Converting logs of block {block_number} into L1HandlerTx ({} logs)", - block_logs.len(), + block_number = %block_number, + logs_found = %block_logs.len(), + "Converting logs into L1HandlerTx.", ); block_logs.into_iter().for_each(|log| { @@ -173,45 +160,47 @@ impl Messenger for EthereumMessaging { return Ok(vec![]); } - let starknet_messaging = StarknetMessagingLocal::new( - self.messaging_contract_address, - self.provider_signer.clone(), - ); + let starknet_messaging = + StarknetMessagingLocal::new(self.messaging_contract_address, self.provider.clone()); let hashes = parse_messages(messages); debug!("Sending transaction on L1 to register messages..."); - match starknet_messaging - .add_message_hashes_from_l2(hashes.clone()) + + let receipt = starknet_messaging + .addMessageHashesFromL2(hashes.clone()) .send() .await .map_err(|_| Error::SendError)? - // wait for the tx to be mined - .await? 
- { - Some(receipt) => { - trace!( - target: LOG_TARGET, - "Transaction sent on L1 to register {} messages: {:#x}", - hashes.len(), - receipt.transaction_hash, - ); - - Ok(hashes) - } - None => { + .get_receipt() + .await + .map_err(|_| { warn!(target: LOG_TARGET, "No receipt for L1 transaction."); - Err(Error::SendError) - } - } + Error::SendError + })?; + + trace!( + target: LOG_TARGET, + "Transaction sent on L1 to register {} messages: {:#x}", + hashes.len(), + receipt.transaction_hash, + ); + + Ok(hashes) } } fn l1_handler_tx_from_log(log: Log, chain_id: ChainId) -> MessengerResult { - let parsed_log = ::decode_log(&log.into()).map_err(|e| { - error!(target: LOG_TARGET, "Log parsing failed {e}"); - Error::GatherError - })?; + let parsed_log = LogMessageToL2::LogMessageToL2Event::decode_log( + &alloy_primitives::Log::::new( + log.address(), + log.topics().into(), + log.data().clone().data, + ) + .unwrap(), + false, + ) + .unwrap(); let from_address = felt_from_address(parsed_log.from_address); let contract_address = felt_from_u256(parsed_log.to_address); @@ -220,7 +209,7 @@ fn l1_handler_tx_from_log(log: Log, chain_id: ChainId) -> MessengerResult Vec { messages .iter() .map(|msg| { - let hash = - compute_l1_message_hash(msg.from_address.into(), msg.to_address, &msg.payload); - - U256::from_big_endian(hash.as_bytes()) + U256::from_be_bytes( + compute_l1_message_hash(msg.from_address.into(), msg.to_address, &msg.payload) + .into(), + ) }) .collect() } @@ -260,6 +249,7 @@ fn felt_from_address(v: Address) -> FieldElement { #[cfg(test)] mod tests { + use alloy_primitives::{Address, B256, U256}; use katana_primitives::chain::{ChainId, NamedChainId}; use starknet::macros::{felt, selector}; @@ -286,17 +276,22 @@ mod tests { felt!("0x6182c63599a9638272f1ce5b5cadabece9c81c2d2b8f88ab7a294472b8fce8b"); let log = Log { - address: H160::from_str("0xde29d060D45901Fb19ED6C6e959EB22d8626708e").unwrap(), - topics: vec![ - H256::from_str( - "0xdb80dd488acf86d17c747445b0eabb5d57c541d3bd7b6b87af987858e5066b2b", + inner: alloy_primitives::Log:: { + address: Address::from_str("0xde29d060D45901Fb19ED6C6e959EB22d8626708e").unwrap(), + data: LogData::new( + vec![ + B256::from_str( + "0xdb80dd488acf86d17c747445b0eabb5d57c541d3bd7b6b87af987858e5066b2b", + ) + .unwrap(), + B256::from_str(from_address).unwrap(), + B256::from_str(to_address).unwrap(), + B256::from_str(selector).unwrap(), + ], + payload_buf.into(), ) - .unwrap(), - H256::from_str(from_address).unwrap(), - H256::from_str(to_address).unwrap(), - H256::from_str(selector).unwrap(), - ], - data: payload_buf.into(), + .expect("Failed to load log data"), + }, ..Default::default() }; @@ -338,7 +333,7 @@ mod tests { assert_eq!( hashes[0], U256::from_str_radix( - "0x5ba1d2e131360f15e26dd4f6ff10550685611cc25f75e7950b704adb04b36162", + "5ba1d2e131360f15e26dd4f6ff10550685611cc25f75e7950b704adb04b36162", 16 ) .unwrap() diff --git a/crates/katana/core/src/service/messaging/mod.rs b/crates/katana/core/src/service/messaging/mod.rs index 6b2de596c2..f07b353e8c 100644 --- a/crates/katana/core/src/service/messaging/mod.rs +++ b/crates/katana/core/src/service/messaging/mod.rs @@ -40,10 +40,10 @@ mod starknet; use std::path::Path; use ::starknet::providers::ProviderError as StarknetProviderError; +use alloy_transport::TransportError; use anyhow::Result; use async_trait::async_trait; use ethereum::EthereumMessaging; -use ethers::providers::ProviderError as EthereumProviderError; use katana_primitives::chain::ChainId; use katana_primitives::receipt::MessageToL1; use 
serde::Deserialize; @@ -77,13 +77,13 @@ pub enum Error { #[derive(Debug, thiserror::Error)] pub enum ProviderError { #[error("Ethereum provider error: {0}")] - Ethereum(EthereumProviderError), + Ethereum(TransportError), #[error("Starknet provider error: {0}")] Starknet(StarknetProviderError), } -impl From for Error { - fn from(e: EthereumProviderError) -> Self { +impl From for Error { + fn from(e: TransportError) -> Self { Self::Provider(ProviderError::Ethereum(e)) } } @@ -173,11 +173,11 @@ impl MessengerMode { match config.chain.as_str() { CONFIG_CHAIN_ETHEREUM => match EthereumMessaging::new(config).await { Ok(m_eth) => { - info!(target: LOG_TARGET, "Messaging enabled [Ethereum]"); + info!(target: LOG_TARGET, "Messaging enabled [Ethereum]."); Ok(MessengerMode::Ethereum(m_eth)) } Err(e) => { - error!(target: LOG_TARGET, "Ethereum messenger init failed: {e}"); + error!(target: LOG_TARGET, error = %e, "Ethereum messenger init."); Err(Error::InitError) } }, @@ -185,17 +185,17 @@ impl MessengerMode { #[cfg(feature = "starknet-messaging")] CONFIG_CHAIN_STARKNET => match StarknetMessaging::new(config).await { Ok(m_sn) => { - info!(target: LOG_TARGET, "Messaging enabled [Starknet]"); + info!(target: LOG_TARGET, "Messaging enabled [Starknet]."); Ok(MessengerMode::Starknet(m_sn)) } Err(e) => { - error!(target: LOG_TARGET, "Starknet messenger init failed: {e}"); + error!(target: LOG_TARGET, error = %e, "Starknet messenger init."); Err(Error::InitError) } }, chain => { - error!(target: LOG_TARGET, "Unsupported settlement chain: {}", chain); + error!(target: LOG_TARGET, chain = %chain, "Unsupported settlement chain."); Err(Error::UnsupportedChain) } } diff --git a/crates/katana/core/src/service/messaging/service.rs b/crates/katana/core/src/service/messaging/service.rs index ea0d124904..a4a6810c29 100644 --- a/crates/katana/core/src/service/messaging/service.rs +++ b/crates/katana/core/src/service/messaging/service.rs @@ -131,20 +131,18 @@ impl MessagingService { } else { match messenger.as_ref() { MessengerMode::Ethereum(inner) => { - let hashes = inner - .send_messages(&messages) - .await - .map(|hashes| hashes.iter().map(|h| format!("{h:#x}")).collect())?; + let hashes = inner.send_messages(&messages).await.map(|hashes| { + hashes.iter().map(|h| format!("{h:#x}")).collect::>() + })?; trace_msg_to_l1_sent(&messages, &hashes); Ok(Some((block_num, hashes.len()))) } #[cfg(feature = "starknet-messaging")] MessengerMode::Starknet(inner) => { - let hashes = inner - .send_messages(&messages) - .await - .map(|hashes| hashes.iter().map(|h| format!("{h:#x}")).collect())?; + let hashes = inner.send_messages(&messages).await.map(|hashes| { + hashes.iter().map(|h| format!("{h:#x}")).collect::>() + })?; trace_msg_to_l1_sent(&messages, &hashes); Ok(Some((block_num, hashes.len()))) } @@ -210,7 +208,9 @@ impl Stream for MessagingService { Poll::Ready(Err(e)) => { error!( target: LOG_TARGET, - "error gathering messages for block {}: {e}", pin.gather_from_block + block = %pin.gather_from_block, + error = %e, + "Gathering messages for block." ); return Poll::Pending; } @@ -230,7 +230,9 @@ impl Stream for MessagingService { Poll::Ready(Err(e)) => { error!( target: LOG_TARGET, - "error settling messages for block {}: {e}", pin.send_from_block + block = %pin.send_from_block, + error = %e, + "Settling messages for block." 
); return Poll::Pending; } @@ -251,7 +253,7 @@ fn interval_from_seconds(secs: u64) -> Interval { interval } -fn trace_msg_to_l1_sent(messages: &Vec, hashes: &Vec) { +fn trace_msg_to_l1_sent(messages: &[MessageToL1], hashes: &[String]) { assert_eq!(messages.len(), hashes.len()); #[cfg(feature = "starknet-messaging")] @@ -271,17 +273,11 @@ fn trace_msg_to_l1_sent(messages: &Vec, hashes: &Vec) { #[rustfmt::skip] info!( target: LOG_TARGET, - r"Message executed on settlement layer: -| from_address | {} -| to_address | {} -| selector | {} -| payload | [{}] - -", - m.from_address, - to_address, - selector, - payload_str.join(", ") + from_address = %m.from_address, + to_address = %to_address, + selector = %selector, + payload = %payload_str.join(", "), + "Message executed on settlement layer.", ); continue; @@ -297,17 +293,11 @@ fn trace_msg_to_l1_sent(messages: &Vec, hashes: &Vec) { #[rustfmt::skip] info!( target: LOG_TARGET, - r#"Message sent to settlement layer: -| hash | {} -| from_address | {} -| to_address | {} -| payload | [{}] - -"#, - hash.as_str(), - m.from_address, - to_address, - payload_str.join(", ") + hash = %hash.as_str(), + from_address = %m.from_address, + to_address = %to_address, + payload = %payload_str.join(", "), + "Message sent to settlement layer.", ); } } @@ -318,16 +308,10 @@ fn trace_l1_handler_tx_exec(hash: TxHash, tx: &L1HandlerTx) { #[rustfmt::skip] info!( target: LOG_TARGET, - r"L1Handler transaction added to the pool: -| tx_hash | {:#x} -| contract_address | {} -| selector | {:#x} -| calldata | [{}] - -", -hash, - tx.contract_address, - tx.entry_point_selector, - calldata_str.join(", ") + tx_hash = %format!("{:#x}", hash), + contract_address = %tx.contract_address, + selector = %format!("{:#x}", tx.entry_point_selector), + calldata = %calldata_str.join(", "), + "L1Handler transaction added to the pool.", ); } diff --git a/crates/katana/core/src/service/messaging/starknet.rs b/crates/katana/core/src/service/messaging/starknet.rs index da911803c9..0c1b242721 100644 --- a/crates/katana/core/src/service/messaging/starknet.rs +++ b/crates/katana/core/src/service/messaging/starknet.rs @@ -66,7 +66,7 @@ impl StarknetMessaging { from_block: BlockId, to_block: BlockId, ) -> Result>> { - trace!(target: LOG_TARGET, "Fetching blocks {:?} - {:?}.", from_block, to_block); + trace!(target: LOG_TARGET, from_block = ?from_block, to_block = ?to_block, "Fetching logs."); let mut block_to_events: HashMap> = HashMap::new(); @@ -118,7 +118,7 @@ impl StarknetMessaging { ExecutionEncoding::New, ); - account.set_block_id(BlockId::Tag(BlockTag::Latest)); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); // TODO: we need to have maximum fee configurable. let execution = account.execute(calls).fee_estimate_multiplier(10f64); @@ -147,11 +147,11 @@ impl StarknetMessaging { match self.send_invoke_tx(vec![call]).await { Ok(tx_hash) => { - trace!(target: LOG_TARGET, "Hashes sending transaction {:#064x}", tx_hash); + trace!(target: LOG_TARGET, tx_hash = %format!("{:#064x}", tx_hash), "Hashes sending transaction."); Ok(tx_hash) } Err(e) => { - error!("Error settling hashes on Starknet: {:?}", e); + error!(target: LOG_TARGET, error = %e, "Settling hashes on Starknet."); Err(Error::SendError) } } @@ -173,6 +173,7 @@ impl Messenger for StarknetMessaging { Ok(n) => n, Err(_) => { warn!( + target: LOG_TARGET, "Couldn't fetch settlement chain last block number. \nSkipped, retry at the \ next tick." 
); @@ -202,9 +203,9 @@ impl Messenger for StarknetMessaging { .for_each(|(block_number, block_events)| { debug!( target: LOG_TARGET, - "Converting events of block {} into L1HandlerTx ({} events)", - block_number, - block_events.len(), + block_number = %block_number, + events_count = %block_events.len(), + "Converting events of block into L1HandlerTx." ); block_events.iter().for_each(|e| { @@ -230,10 +231,10 @@ impl Messenger for StarknetMessaging { if !calls.is_empty() { match self.send_invoke_tx(calls).await { Ok(tx_hash) => { - trace!(target: LOG_TARGET, "Invoke transaction hash {:#064x}", tx_hash); + trace!(target: LOG_TARGET, tx_hash = %format!("{:#064x}", tx_hash), "Invoke transaction hash."); } Err(e) => { - error!("Error sending invoke tx on Starknet: {:?}", e); + error!(target: LOG_TARGET, error = %e, "Sending invoke tx on Starknet."); return Err(Error::SendError); } }; @@ -302,7 +303,7 @@ fn parse_messages(messages: &[MessageToL1]) -> MessengerResult<(Vec Result { /// The messaging service #[cfg(feature = "messaging")] pub(crate) messaging: Option>, + /// Metrics for recording the service operations + metrics: ServiceMetrics, +} + +impl NodeService { + pub fn new( + pool: Arc, + miner: TransactionMiner, + block_producer: Arc>, + #[cfg(feature = "messaging")] messaging: Option>, + ) -> Self { + let metrics = ServiceMetrics { block_producer: BlockProducerMetrics::default() }; + + Self { + pool, + miner, + block_producer, + metrics, + #[cfg(feature = "messaging")] + messaging, + } + } } impl Future for NodeService { @@ -50,10 +76,10 @@ impl Future for NodeService { while let Poll::Ready(Some(outcome)) = messaging.poll_next_unpin(cx) { match outcome { MessagingOutcome::Gather { msg_count, .. } => { - info!(target: "node", "collected {msg_count} messages from settlement chain"); + info!(target: LOG_TARGET, msg_count = %msg_count, "Collected messages from settlement chain."); } MessagingOutcome::Send { msg_count, .. 
} => { - info!(target: "node", "sent {msg_count} messages to the settlement chain"); + info!(target: LOG_TARGET, msg_count = %msg_count, "Sent messages to the settlement chain."); } } } @@ -65,11 +91,17 @@ impl Future for NodeService { while let Poll::Ready(Some(res)) = pin.block_producer.poll_next(cx) { match res { Ok(outcome) => { - info!(target: "node", "mined block {}", outcome.block_number) + info!(target: LOG_TARGET, block_number = %outcome.block_number, "Mined block."); + + let metrics = &pin.metrics.block_producer; + let gas_used = outcome.stats.l1_gas_used; + let steps_used = outcome.stats.cairo_steps_used; + metrics.l1_gas_processed_total.increment(gas_used as u64); + metrics.cairo_steps_processed_total.increment(steps_used as u64); } Err(err) => { - error!(target: "node", "failed to mine block: {err}"); + error!(target: LOG_TARGET, error = %err, "Mining block."); } } } diff --git a/crates/katana/core/tests/sequencer.rs b/crates/katana/core/tests/sequencer.rs index 8fa521afb2..4d3ffb1494 100644 --- a/crates/katana/core/tests/sequencer.rs +++ b/crates/katana/core/tests/sequencer.rs @@ -1,5 +1,5 @@ -use ethers::types::U256; -use katana_core::backend::config::{Environment, StarknetConfig}; +use alloy_primitives::U256; +use katana_core::backend::config::StarknetConfig; use katana_core::sequencer::{KatanaSequencer, SequencerConfig}; use katana_executor::implementation::noop::NoopExecutorFactory; use katana_primitives::genesis::allocation::DevAllocationsGenerator; @@ -17,13 +17,8 @@ fn create_test_sequencer_config() -> (SequencerConfig, StarknetConfig) { genesis.extend_allocations(accounts.into_iter().map(|(k, v)| (k, v.into()))); ( - SequencerConfig { block_time: None, ..Default::default() }, - StarknetConfig { - genesis, - disable_fee: true, - env: Environment::default(), - ..Default::default() - }, + SequencerConfig::default(), + StarknetConfig { genesis, disable_fee: true, ..Default::default() }, ) } diff --git a/crates/katana/docs/database.md b/crates/katana/docs/database.md index fde4c692c1..96404850c7 100644 --- a/crates/katana/docs/database.md +++ b/crates/katana/docs/database.md @@ -40,6 +40,11 @@ TxHashes { VALUE TxHash } +TxTraces { + KEY TxNumber + VALUE TxExecInfo +} + Transactions { KEY TxNumber VALUE Tx @@ -97,25 +102,24 @@ ContractInfoChangeSet { VALUE ContractInfoChangeList } -NonceChanges { +NonceChangeHistory { KEY BlockNumber DUP_KEY ContractAddress VALUE ContractNonceChange } -ContractClassChanges { +ClassChangeHistory { KEY BlockNumber DUP_KEY ContractAddress VALUE ContractClassChange } StorageChangeSet { - KEY ContractAddress - DUP_KEY StorageKey - VALUE StorageEntryChangeList + KEY ContractStorageKey + VALUE BlockList } -StorageChanges { +StorageChangeHistory { KEY BlockNumber DUP_KEY ContractStorageKey VALUE ContractStorageEntry @@ -133,6 +137,7 @@ TxHashes ||--|| TxNumbers : "tx id" TxNumbers ||--|| Transactions : "has" TxBlocks ||--|{ Transactions : "tx block" Transactions ||--|| Receipts : "each tx must have a receipt" +Transactions ||--|| TxTraces : "each tx must have a trace" CompiledClassHashes ||--|| CompiledContractClasses : "has" CompiledClassHashes ||--|| SierraClasses : "has" @@ -143,11 +148,10 @@ ContractInfo ||--|| CompiledClassHashes : "has" ContractInfo }|--|{ ContractInfoChangeSet : "has" ContractStorage }|--|{ StorageChangeSet : "has" -ContractInfoChangeSet }|--|{ NonceChanges : "has" -ContractInfoChangeSet }|--|{ ContractClassChanges : "has" +ContractInfoChangeSet }|--|{ NonceChangeHistory : "has" +ContractInfoChangeSet }|--|{ 
ClassChangeHistory : "has" CompiledClassHashes ||--|| ClassDeclarationBlock : "has" ClassDeclarationBlock ||--|| ClassDeclarations : "has" BlockNumbers ||--|| ClassDeclarations : "" -StorageChangeSet }|--|{ StorageChanges : "has" - -``` \ No newline at end of file +StorageChangeSet }|--|{ StorageChangeHistory : "has" +``` diff --git a/crates/katana/executor/Cargo.toml b/crates/katana/executor/Cargo.toml index 22830c4933..dca77a1056 100644 --- a/crates/katana/executor/Cargo.toml +++ b/crates/katana/executor/Cargo.toml @@ -19,6 +19,8 @@ starknet_api.workspace = true thiserror.workspace = true tracing.workspace = true +alloy-primitives.workspace = true + # blockifier deps blockifier = { git = "https://github.com/dojoengine/blockifier", rev = "d38b979", optional = true } cairo-vm = { workspace = true, optional = true } diff --git a/crates/katana/executor/src/abstraction/mod.rs b/crates/katana/executor/src/abstraction/mod.rs index 313ce8e045..5e231cc823 100644 --- a/crates/katana/executor/src/abstraction/mod.rs +++ b/crates/katana/executor/src/abstraction/mod.rs @@ -72,9 +72,20 @@ impl SimulationFlag { } } +/// Stats about the transactions execution. +#[derive(Debug, Clone, Default)] +pub struct ExecutionStats { + /// The total gas used. + pub l1_gas_used: u128, + /// The total cairo steps used. + pub cairo_steps_used: u128, +} + /// The output of a executor after a series of executions. #[derive(Debug, Default)] pub struct ExecutionOutput { + /// Statistics throughout the executions process. + pub stats: ExecutionStats, /// The state updates produced by the executions. pub states: StateUpdatesWithDeclaredClasses, /// The transactions that have been executed. diff --git a/crates/katana/executor/src/implementation/blockifier/error.rs b/crates/katana/executor/src/implementation/blockifier/error.rs index d920d3bded..c161d3be7a 100644 --- a/crates/katana/executor/src/implementation/blockifier/error.rs +++ b/crates/katana/executor/src/implementation/blockifier/error.rs @@ -69,8 +69,8 @@ impl From for ExecutionError { incoming_tx_nonce, .. 
} => Self::InvalidNonce { - actual: account_nonce.0.into(), - expected: incoming_tx_nonce.0.into(), + actual: incoming_tx_nonce.0.into(), + expected: account_nonce.0.into(), }, TransactionPreValidationError::TransactionFeeError(e) => Self::from(e), TransactionPreValidationError::StateError(e) => Self::from(e), diff --git a/crates/katana/executor/src/implementation/blockifier/mod.rs b/crates/katana/executor/src/implementation/blockifier/mod.rs index 9d9a690b22..cfa44c2f3c 100644 --- a/crates/katana/executor/src/implementation/blockifier/mod.rs +++ b/crates/katana/executor/src/implementation/blockifier/mod.rs @@ -1,5 +1,4 @@ mod error; -mod output; mod state; mod utils; @@ -16,13 +15,16 @@ use katana_provider::traits::state::StateProvider; use starknet_api::block::{BlockNumber, BlockTimestamp}; use tracing::info; -use self::output::receipt_from_exec_info; use self::state::CachedState; +use crate::utils::receipt_from_exec_info; use crate::{ - BlockExecutor, EntryPointCall, ExecutionError, ExecutionOutput, ExecutionResult, ExecutorExt, - ExecutorFactory, ExecutorResult, ResultAndStates, SimulationFlag, StateProviderDb, + BlockExecutor, EntryPointCall, ExecutionError, ExecutionOutput, ExecutionResult, + ExecutionStats, ExecutorExt, ExecutorFactory, ExecutorResult, ResultAndStates, SimulationFlag, + StateProviderDb, }; +pub(crate) const LOG_TARGET: &str = "katana::executor::blockifier"; + #[derive(Debug)] pub struct BlockifierFactory { cfg: CfgEnv, @@ -67,6 +69,7 @@ pub struct StarknetVMProcessor<'a> { state: CachedState>, transactions: Vec<(TxWithHash, ExecutionResult)>, simulation_flags: SimulationFlag, + stats: ExecutionStats, } impl<'a> StarknetVMProcessor<'a> { @@ -79,7 +82,7 @@ impl<'a> StarknetVMProcessor<'a> { let transactions = Vec::new(); let block_context = utils::block_context_from_envs(&block_env, &cfg_env); let state = state::CachedState::new(StateProviderDb(state)); - Self { block_context, state, transactions, simulation_flags } + Self { block_context, state, transactions, simulation_flags, stats: Default::default() } } fn fill_block_env_from_header(&mut self, header: &PartialHeader) { @@ -157,14 +160,17 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { crate::utils::log_resources(&trace.actual_resources); crate::utils::log_events(receipt.events()); + self.stats.l1_gas_used += fee.gas_consumed; + self.stats.cairo_steps_used += receipt.resources_used().steps as u128; + if let Some(reason) = receipt.revert_reason() { - info!(target: "executor", "transaction reverted: {reason}"); + info!(target: LOG_TARGET, reason = %reason, "Transaction reverted."); } ExecutionResult::new_success(receipt, trace, fee) } Err(e) => { - info!(target: "executor", "transaction execution failed: {e}"); + info!(target: LOG_TARGET, error = %e, "Executing transaction."); ExecutionResult::new_failed(e) } }; @@ -185,7 +191,8 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { fn take_execution_output(&mut self) -> ExecutorResult { let states = utils::state_update_from_cached_state(&self.state); let transactions = std::mem::take(&mut self.transactions); - Ok(ExecutionOutput { states, transactions }) + let stats = std::mem::take(&mut self.stats); + Ok(ExecutionOutput { stats, states, transactions }) } fn state(&self) -> Box { @@ -238,14 +245,14 @@ impl ExecutorExt for StarknetVMProcessor<'_> { Ok((info, fee)) => { // if the transaction was reverted, return as error if let Some(reason) = info.revert_error { - info!(target: "executor", "fee estimation failed: {reason}"); + info!(target: 
LOG_TARGET, reason = %reason, "Estimating fee."); Err(ExecutionError::TransactionReverted { revert_error: reason }) } else { Ok(fee) } } Err(e) => { - info!(target: "executor", "fee estimation failed: {e}"); + info!(target: LOG_TARGET, error = %e, "Estimating fee."); Err(e) } }) @@ -255,7 +262,7 @@ impl ExecutorExt for StarknetVMProcessor<'_> { let block_context = &self.block_context; let mut state = self.state.0.write(); let state = MutRefState::new(&mut state.inner); - let retdata = utils::call(call, state, block_context, 100_000_000)?; + let retdata = utils::call(call, state, block_context, 1_000_000_000)?; Ok(retdata) } } diff --git a/crates/katana/executor/src/implementation/blockifier/output.rs b/crates/katana/executor/src/implementation/blockifier/output.rs deleted file mode 100644 index 0de6d6055e..0000000000 --- a/crates/katana/executor/src/implementation/blockifier/output.rs +++ /dev/null @@ -1,153 +0,0 @@ -use std::collections::HashMap; - -use katana_primitives::receipt::{ - DeclareTxReceipt, DeployAccountTxReceipt, Event, InvokeTxReceipt, L1HandlerTxReceipt, - MessageToL1, Receipt, TxExecutionResources, -}; -use katana_primitives::trace::{CallInfo, TxExecInfo}; -use katana_primitives::transaction::Tx; - -pub(super) fn receipt_from_exec_info(tx: &Tx, info: &TxExecInfo) -> Receipt { - let actual_fee = info.actual_fee; - let events = events_from_exec_info(info); - let revert_error = info.revert_error.clone(); - let messages_sent = l2_to_l1_messages_from_exec_info(info); - let actual_resources = parse_actual_resources(&info.actual_resources); - - match tx { - Tx::Invoke(_) => Receipt::Invoke(InvokeTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - execution_resources: actual_resources, - }), - - Tx::Declare(_) => Receipt::Declare(DeclareTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - execution_resources: actual_resources, - }), - - Tx::L1Handler(tx) => Receipt::L1Handler(L1HandlerTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - message_hash: tx.message_hash, - execution_resources: actual_resources, - }), - - Tx::DeployAccount(tx) => Receipt::DeployAccount(DeployAccountTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - execution_resources: actual_resources, - contract_address: tx.contract_address(), - }), - } -} - -fn events_from_exec_info(info: &TxExecInfo) -> Vec { - let mut events: Vec = vec![]; - - fn get_events_recursively(call_info: &CallInfo) -> Vec { - let mut events: Vec = vec![]; - - // By default, `from_address` must correspond to the contract address that - // is sending the message. In the case of library calls, `code_address` is `None`, - // we then use the `caller_address` instead (which can also be an account). 
- let from_address = if let Some(code_address) = call_info.code_address { - code_address - } else { - call_info.caller_address - }; - - events.extend(call_info.events.iter().map(|e| Event { - from_address, - data: e.data.clone(), - keys: e.keys.clone(), - })); - - call_info.inner_calls.iter().for_each(|call| { - events.extend(get_events_recursively(call)); - }); - - events - } - - if let Some(ref call) = info.validate_call_info { - events.extend(get_events_recursively(call)); - } - - if let Some(ref call) = info.execute_call_info { - events.extend(get_events_recursively(call)); - } - - if let Some(ref call) = info.fee_transfer_call_info { - events.extend(get_events_recursively(call)); - } - - events -} - -fn l2_to_l1_messages_from_exec_info(info: &TxExecInfo) -> Vec { - let mut messages = vec![]; - - fn get_messages_recursively(info: &CallInfo) -> Vec { - let mut messages = vec![]; - - // By default, `from_address` must correspond to the contract address that - // is sending the message. In the case of library calls, `code_address` is `None`, - // we then use the `caller_address` instead (which can also be an account). - let from_address = if let Some(code_address) = info.code_address { - code_address - } else { - info.caller_address - }; - - messages.extend(info.l2_to_l1_messages.iter().map(|m| MessageToL1 { - from_address, - payload: m.payload.clone(), - to_address: m.to_address, - })); - - info.inner_calls.iter().for_each(|call| { - messages.extend(get_messages_recursively(call)); - }); - - messages - } - - if let Some(ref info) = info.validate_call_info { - messages.extend(get_messages_recursively(info)); - } - - if let Some(ref info) = info.execute_call_info { - messages.extend(get_messages_recursively(info)); - } - - if let Some(ref info) = info.fee_transfer_call_info { - messages.extend(get_messages_recursively(info)); - } - - messages -} - -fn parse_actual_resources(resources: &HashMap) -> TxExecutionResources { - TxExecutionResources { - steps: resources.get("n_steps").copied().unwrap_or_default(), - memory_holes: resources.get("memory_holes").copied(), - ec_op_builtin: resources.get("ec_op_builtin").copied(), - ecdsa_builtin: resources.get("ecdsa_builtin").copied(), - keccak_builtin: resources.get("keccak_builtin").copied(), - bitwise_builtin: resources.get("bitwise_builtin").copied(), - pedersen_builtin: resources.get("pedersen_builtin").copied(), - poseidon_builtin: resources.get("poseidon_builtin").copied(), - range_check_builtin: resources.get("range_check_builtin").copied(), - segment_arena_builtin: resources.get("segment_arena_builtin").copied(), - } -} diff --git a/crates/katana/executor/src/implementation/blockifier/state.rs b/crates/katana/executor/src/implementation/blockifier/state.rs index 074e52dad2..79305b12e6 100644 --- a/crates/katana/executor/src/implementation/blockifier/state.rs +++ b/crates/katana/executor/src/implementation/blockifier/state.rs @@ -263,7 +263,7 @@ mod tests { use crate::StateProviderDb; fn new_sierra_class() -> (FlattenedSierraClass, CompiledClass) { - let json = include_str!("../../../../primitives/contracts/compiled/cairo1_contract.json"); + let json = include_str!("../../../../contracts/compiled/cairo1_contract.json"); let artifact = serde_json::from_str(json).unwrap(); let compiled_class = parse_compiled_class(artifact).unwrap(); let sierra_class = parse_sierra_class(json).unwrap().flatten().unwrap(); diff --git a/crates/katana/executor/src/implementation/blockifier/utils.rs b/crates/katana/executor/src/implementation/blockifier/utils.rs 
index 7016583079..21bae0ac89 100644 --- a/crates/katana/executor/src/implementation/blockifier/utils.rs +++ b/crates/katana/executor/src/implementation/blockifier/utils.rs @@ -2,7 +2,9 @@ use std::collections::{BTreeMap, HashMap}; use std::sync::Arc; use blockifier::block_context::{BlockContext, BlockInfo, ChainInfo, FeeTokenAddresses, GasPrices}; -use blockifier::execution::call_info::CallInfo; +use blockifier::execution::call_info::{ + CallExecution, CallInfo, OrderedEvent, OrderedL2ToL1Message, +}; use blockifier::execution::common_hints::ExecutionMode; use blockifier::execution::contract_class::{ContractClass, ContractClassV0, ContractClassV1}; use blockifier::execution::entry_point::{ @@ -29,7 +31,7 @@ use katana_primitives::trace::TxExecInfo; use katana_primitives::transaction::{ DeclareTx, DeployAccountTx, ExecutableTx, ExecutableTxWithHash, InvokeTx, }; -use katana_primitives::FieldElement; +use katana_primitives::{event, message, trace, FieldElement}; use katana_provider::traits::contract::ContractClassProvider; use starknet::core::types::PriceUnit; use starknet::core::utils::parse_cairo_short_string; @@ -525,82 +527,95 @@ pub fn to_exec_info(exec_info: TransactionExecutionInfo) -> TxExecInfo { } } -fn to_call_info(call_info: CallInfo) -> katana_primitives::trace::CallInfo { - let message_to_l1_from_address = if let Some(a) = call_info.call.code_address { - to_address(a) - } else { - to_address(call_info.call.caller_address) +fn to_call_info(call: CallInfo) -> trace::CallInfo { + let contract_address = to_address(call.call.storage_address); + let caller_address = to_address(call.call.caller_address); + let code_address = call.call.code_address.map(to_address); + let class_hash = call.call.class_hash.map(|a| a.0.into()); + let entry_point_selector = call.call.entry_point_selector.0.into(); + let calldata = call.call.calldata.0.iter().map(|f| (*f).into()).collect(); + let retdata = call.execution.retdata.0.into_iter().map(|f| f.into()).collect(); + + let builtin_counter = call.vm_resources.builtin_instance_counter; + let execution_resources = trace::ExecutionResources { + n_steps: call.vm_resources.n_steps as u64, + n_memory_holes: call.vm_resources.n_memory_holes as u64, + builtin_instance_counter: builtin_counter.into_iter().map(|(k, v)| (k, v as u64)).collect(), }; - katana_primitives::trace::CallInfo { - contract_address: to_address(call_info.call.storage_address), - caller_address: to_address(call_info.call.caller_address), - call_type: match call_info.call.call_type { - CallType::Call => katana_primitives::trace::CallType::Call, - CallType::Delegate => katana_primitives::trace::CallType::Delegate, - }, - code_address: call_info.call.code_address.map(to_address), - class_hash: call_info.call.class_hash.map(|a| a.0.into()), - entry_point_selector: call_info.call.entry_point_selector.0.into(), - entry_point_type: match call_info.call.entry_point_type { - EntryPointType::External => katana_primitives::trace::EntryPointType::External, - EntryPointType::L1Handler => katana_primitives::trace::EntryPointType::L1Handler, - EntryPointType::Constructor => katana_primitives::trace::EntryPointType::Constructor, - }, - calldata: call_info.call.calldata.0.iter().map(|f| (*f).into()).collect(), - retdata: call_info.execution.retdata.0.iter().map(|f| (*f).into()).collect(), - execution_resources: katana_primitives::trace::ExecutionResources { - n_steps: call_info.vm_resources.n_steps as u64, - n_memory_holes: call_info.vm_resources.n_memory_holes as u64, - builtin_instance_counter: call_info - 
.vm_resources - .builtin_instance_counter - .into_iter() - .map(|(k, v)| (k, v as u64)) - .collect(), - }, - events: call_info - .execution - .events - .iter() - .map(|e| katana_primitives::event::OrderedEvent { - order: e.order as u64, - keys: e.event.keys.iter().map(|f| f.0.into()).collect(), - data: e.event.data.0.iter().map(|f| (*f).into()).collect(), - }) - .collect(), - l2_to_l1_messages: call_info - .execution - .l2_to_l1_messages - .iter() - .map(|m| { - let to_address = starknet_api_ethaddr_to_felt(m.message.to_address); - katana_primitives::message::OrderedL2ToL1Message { - order: m.order as u64, - from_address: message_to_l1_from_address, - to_address, - payload: m.message.payload.0.iter().map(|f| (*f).into()).collect(), - } - }) - .collect(), - storage_read_values: call_info.storage_read_values.into_iter().map(|f| f.into()).collect(), - accessed_storage_keys: call_info - .accessed_storage_keys - .into_iter() - .map(|sk| (*sk.0.key()).into()) - .collect(), - inner_calls: call_info.inner_calls.iter().map(|c| to_call_info(c.clone())).collect(), - gas_consumed: call_info.execution.gas_consumed as u128, - failed: call_info.execution.failed, + let CallExecution { events, l2_to_l1_messages, .. } = call.execution; + + let events = events.into_iter().map(to_ordered_event).collect(); + let l1_msg = + l2_to_l1_messages.into_iter().map(|m| to_l2_l1_messages(m, contract_address)).collect(); + + let call_type = match call.call.call_type { + CallType::Call => trace::CallType::Call, + CallType::Delegate => trace::CallType::Delegate, + }; + + let entry_point_type = match call.call.entry_point_type { + EntryPointType::External => trace::EntryPointType::External, + EntryPointType::L1Handler => trace::EntryPointType::L1Handler, + EntryPointType::Constructor => trace::EntryPointType::Constructor, + }; + + let storage_read_values = call.storage_read_values.into_iter().map(|f| f.into()).collect(); + let storg_keys = call.accessed_storage_keys.into_iter().map(|k| (*k.0.key()).into()).collect(); + let inner_calls = call.inner_calls.into_iter().map(to_call_info).collect(); + + trace::CallInfo { + contract_address, + caller_address, + call_type, + code_address, + class_hash, + entry_point_selector, + entry_point_type, + calldata, + retdata, + execution_resources, + events, + l2_to_l1_messages: l1_msg, + storage_read_values, + accessed_storage_keys: storg_keys, + inner_calls, + gas_consumed: call.execution.gas_consumed as u128, + failed: call.execution.failed, } } +fn to_ordered_event(e: OrderedEvent) -> event::OrderedEvent { + event::OrderedEvent { + order: e.order as u64, + keys: e.event.keys.into_iter().map(|f| f.0.into()).collect(), + data: e.event.data.0.into_iter().map(FieldElement::from).collect(), + } +} + +fn to_l2_l1_messages( + m: OrderedL2ToL1Message, + from_address: katana_primitives::contract::ContractAddress, +) -> message::OrderedL2ToL1Message { + let order = m.order as u64; + let to_address = starknet_api_ethaddr_to_felt(m.message.to_address); + let payload = m.message.payload.0.into_iter().map(FieldElement::from).collect(); + message::OrderedL2ToL1Message { order, from_address, to_address, payload } +} + #[cfg(test)] mod tests { + + use std::collections::HashSet; + + use cairo_vm::vm::runners::cairo_runner::ExecutionResources; use katana_primitives::chain::{ChainId, NamedChainId}; - use starknet::core::utils::parse_cairo_short_string; + use starknet_api::core::EntryPointSelector; + use starknet_api::hash::StarkFelt; + use starknet_api::stark_felt; + use 
starknet_api::transaction::{EventContent, EventData, EventKey}; - use crate::implementation::blockifier::utils::to_blk_chain_id; + use super::*; #[test] fn convert_chain_id() { @@ -612,4 +627,142 @@ mod tests { assert_eq!(goerli.0, parse_cairo_short_string(&NamedChainId::Goerli.id()).unwrap()); assert_eq!(sepolia.0, parse_cairo_short_string(&NamedChainId::Sepolia.id()).unwrap()); } + + fn create_blockifier_call_info() -> CallInfo { + let top_events = vec![OrderedEvent { + order: 0, + event: EventContent { + data: EventData(vec![888u128.into()]), + keys: vec![EventKey(999u128.into())], + }, + }]; + let nested_events = vec![ + OrderedEvent { + order: 1, + event: EventContent { + data: EventData(vec![889u128.into()]), + keys: vec![EventKey(990u128.into())], + }, + }, + OrderedEvent { + order: 2, + event: EventContent { + data: EventData(vec![0u128.into()]), + keys: vec![EventKey(9u128.into())], + }, + }, + ]; + + let nested_call = CallInfo { + execution: CallExecution { events: nested_events, ..Default::default() }, + ..Default::default() + }; + + CallInfo { + call: CallEntryPoint { + class_hash: None, + initial_gas: 77, + call_type: CallType::Call, + caller_address: 200u128.into(), + storage_address: 100u128.into(), + code_address: Some(100u128.into()), + entry_point_type: EntryPointType::External, + calldata: Calldata(Arc::new(vec![stark_felt!(1_u8)])), + entry_point_selector: EntryPointSelector(stark_felt!(999_u32)), + }, + execution: CallExecution { + failed: true, + gas_consumed: 12345, + events: top_events, + ..Default::default() + }, + storage_read_values: vec![stark_felt!(1_u8), stark_felt!(2_u8)], + accessed_storage_keys: HashSet::from([3u128.into(), 4u128.into(), 5u128.into()]), + vm_resources: ExecutionResources { + n_steps: 1_000_000, + n_memory_holes: 9_000, + builtin_instance_counter: HashMap::from([ + ("ecdsa_builtin".into(), 50), + ("pedersen_builtin".into(), 9), + ]), + }, + inner_calls: vec![nested_call], + } + } + + #[test] + fn convert_call_info() { + // setup expected values + let call = create_blockifier_call_info(); + + let expected_contract_address = to_address(call.call.storage_address); + let expected_caller_address = to_address(call.call.caller_address); + let expected_code_address = call.call.code_address.map(to_address); + let expected_class_hash = call.call.class_hash.map(|a| a.0.into()); + let expected_entry_point_selector = call.call.entry_point_selector.0.into(); + let expected_calldata: Vec = + call.call.calldata.0.iter().map(|f| (*f).into()).collect(); + let expected_retdata: Vec = + call.execution.retdata.0.iter().map(|f| (*f).into()).collect(); + + let builtin_counter = call.vm_resources.builtin_instance_counter.clone(); + let expected_execution_resources = trace::ExecutionResources { + n_steps: call.vm_resources.n_steps as u64, + n_memory_holes: call.vm_resources.n_memory_holes as u64, + builtin_instance_counter: builtin_counter + .into_iter() + .map(|(k, v)| (k, v as u64)) + .collect(), + }; + + let CallExecution { events, l2_to_l1_messages, .. 
} = call.execution.clone(); + let expected_events: Vec<_> = events.into_iter().map(to_ordered_event).collect(); + let expected_l2_to_l1_msg: Vec<_> = l2_to_l1_messages + .into_iter() + .map(|m| to_l2_l1_messages(m, expected_contract_address)) + .collect(); + + let expected_call_type = match call.call.call_type { + CallType::Call => trace::CallType::Call, + CallType::Delegate => trace::CallType::Delegate, + }; + + let expected_entry_point_type = match call.call.entry_point_type { + EntryPointType::External => trace::EntryPointType::External, + EntryPointType::L1Handler => trace::EntryPointType::L1Handler, + EntryPointType::Constructor => trace::EntryPointType::Constructor, + }; + + let expected_storage_read_values: Vec = + call.storage_read_values.iter().map(|f| (*f).into()).collect(); + let expected_storage_keys: HashSet = + call.accessed_storage_keys.iter().map(|k| (*k.0.key()).into()).collect(); + let expected_inner_calls: Vec<_> = + call.inner_calls.clone().into_iter().map(to_call_info).collect(); + + let expected_gas_consumed = call.execution.gas_consumed as u128; + let expected_failed = call.execution.failed; + + // convert to call info + let call = to_call_info(call.clone()); + + // assert actual values + assert_eq!(call.contract_address, expected_contract_address); + assert_eq!(call.caller_address, expected_caller_address); + assert_eq!(call.code_address, expected_code_address); + assert_eq!(call.class_hash, expected_class_hash); + assert_eq!(call.entry_point_selector, expected_entry_point_selector); + assert_eq!(call.calldata, expected_calldata); + assert_eq!(call.retdata, expected_retdata); + assert_eq!(call.execution_resources, expected_execution_resources); + assert_eq!(call.events, expected_events); + assert_eq!(call.l2_to_l1_messages, expected_l2_to_l1_msg); + assert_eq!(call.call_type, expected_call_type); + assert_eq!(call.entry_point_type, expected_entry_point_type); + assert_eq!(call.storage_read_values, expected_storage_read_values); + assert_eq!(call.accessed_storage_keys, expected_storage_keys); + assert_eq!(call.inner_calls, expected_inner_calls); + assert_eq!(call.gas_consumed, expected_gas_consumed); + assert_eq!(call.failed, expected_failed); + } } diff --git a/crates/katana/executor/src/implementation/sir/mod.rs b/crates/katana/executor/src/implementation/sir/mod.rs index 603219e89c..e72084e538 100644 --- a/crates/katana/executor/src/implementation/sir/mod.rs +++ b/crates/katana/executor/src/implementation/sir/mod.rs @@ -1,5 +1,4 @@ mod error; -mod output; mod state; pub mod utils; @@ -17,13 +16,15 @@ use sir::state::cached_state; use sir::state::contract_class_cache::PermanentContractClassCache; use tracing::info; -use self::output::receipt_from_exec_info; use self::state::CachedState; use crate::abstraction::{ BlockExecutor, ExecutionOutput, ExecutorExt, ExecutorFactory, ExecutorResult, SimulationFlag, StateProviderDb, }; -use crate::{EntryPointCall, ExecutionError, ExecutionResult, ResultAndStates}; +use crate::utils::receipt_from_exec_info; +use crate::{EntryPointCall, ExecutionError, ExecutionResult, ExecutionStats, ResultAndStates}; + +pub(crate) const LOG_TARGET: &str = "katana::executor::sir"; /// A factory for creating [StarknetVMProcessor] instances. 
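Both executor backends now carry an `ExecutionStats` value on the processor, bump it per executed transaction, and hand it out through `take_execution_output` via `std::mem::take`. The struct's full definition is not part of this hunk; the sketch below only mirrors the two counters visible here (`l1_gas_used`, `cairo_steps_used`) and the take-and-reset pattern, so treat it as illustrative rather than the crate's actual type:

// Illustrative stand-in; the real ExecutionStats is defined in the executor crate.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct ExecutionStats {
    l1_gas_used: u128,
    cairo_steps_used: u128,
}

struct Processor {
    stats: ExecutionStats,
}

impl Processor {
    // Accumulate per-transaction usage, as the block execution loop does.
    fn record(&mut self, gas_consumed: u128, steps: u128) {
        self.stats.l1_gas_used += gas_consumed;
        self.stats.cairo_steps_used += steps;
    }

    // Hand the accumulated stats out and reset them, mirroring
    // `std::mem::take(&mut self.stats)` in `take_execution_output`.
    fn take_stats(&mut self) -> ExecutionStats {
        std::mem::take(&mut self.stats)
    }
}

fn main() {
    let mut p = Processor { stats: ExecutionStats::default() };
    p.record(21_000, 1_500);
    p.record(10_000, 500);

    let stats = p.take_stats();
    assert_eq!(stats.l1_gas_used, 31_000);
    assert_eq!(stats.cairo_steps_used, 2_000);

    // After taking, the processor's counters are back at their defaults.
    assert_eq!(p.stats, ExecutionStats::default());
}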
#[derive(Debug)] @@ -70,6 +71,7 @@ pub struct StarknetVMProcessor<'a> { state: CachedState, PermanentContractClassCache>, transactions: Vec<(TxWithHash, ExecutionResult)>, simulation_flags: SimulationFlag, + stats: ExecutionStats, } impl<'a> StarknetVMProcessor<'a> { @@ -83,7 +85,7 @@ impl<'a> StarknetVMProcessor<'a> { let block_context = utils::block_context_from_envs(&block_env, &cfg_env); let state = CachedState::new(StateProviderDb(state), PermanentContractClassCache::default()); - Self { block_context, state, transactions, simulation_flags } + Self { block_context, state, transactions, simulation_flags, stats: Default::default() } } fn fill_block_env_from_header(&mut self, header: &PartialHeader) { @@ -158,14 +160,17 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { crate::utils::log_resources(&trace.actual_resources); crate::utils::log_events(receipt.events()); + self.stats.l1_gas_used += fee.gas_consumed; + self.stats.cairo_steps_used += receipt.resources_used().steps as u128; + if let Some(reason) = receipt.revert_reason() { - info!(target: "executor", "transaction reverted: {reason}"); + info!(target: LOG_TARGET, reason = %reason, "Transaction reverted."); } ExecutionResult::new_success(receipt, trace, fee) } Err(e) => { - info!(target: "executor", "transaction execution failed: {e}"); + info!(target: LOG_TARGET, error = %e, "Executing transaction."); ExecutionResult::new_failed(e) } }; @@ -192,7 +197,8 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { fn take_execution_output(&mut self) -> ExecutorResult { let states = utils::state_update_from_cached_state(&self.state); let transactions = std::mem::take(&mut self.transactions); - Ok(ExecutionOutput { states, transactions }) + let stats = std::mem::take(&mut self.stats); + Ok(ExecutionOutput { stats, states, transactions }) } fn state(&self) -> Box { @@ -248,14 +254,14 @@ impl<'a> ExecutorExt for StarknetVMProcessor<'a> { Ok((info, fee)) => { // if the transaction was reverted, return as error if let Some(reason) = info.revert_error { - info!(target: "executor", "fee estimation failed: {reason}"); + info!(target: LOG_TARGET, reason = %reason, "Fee estimation failed."); Err(ExecutionError::TransactionReverted { revert_error: reason }) } else { Ok(fee) } } Err(e) => { - info!(target: "executor", "fee estimation failed: {e}"); + info!(target: LOG_TARGET, error = %e, "Estimating fee."); Err(e) } }) @@ -263,7 +269,7 @@ impl<'a> ExecutorExt for StarknetVMProcessor<'a> { fn call(&self, call: EntryPointCall) -> Result, ExecutionError> { let block_context = &self.block_context; - let retdata = utils::call(call, &self.state, block_context, 100_000_000)?; + let retdata = utils::call(call, &self.state, block_context, 1_000_000_000)?; Ok(retdata) } } diff --git a/crates/katana/executor/src/implementation/sir/output.rs b/crates/katana/executor/src/implementation/sir/output.rs deleted file mode 100644 index 3e435c1b4f..0000000000 --- a/crates/katana/executor/src/implementation/sir/output.rs +++ /dev/null @@ -1,153 +0,0 @@ -use std::collections::HashMap; - -use katana_primitives::receipt::{ - DeclareTxReceipt, DeployAccountTxReceipt, Event, InvokeTxReceipt, L1HandlerTxReceipt, - MessageToL1, Receipt, TxExecutionResources, -}; -use katana_primitives::trace::{CallInfo, TxExecInfo}; -use katana_primitives::transaction::Tx; - -pub(super) fn receipt_from_exec_info(tx: &Tx, info: &TxExecInfo) -> Receipt { - let actual_fee = info.actual_fee; - let events = events_from_exec_info(info); - let revert_error = info.revert_error.clone(); - 
let messages_sent = l2_to_l1_messages_from_exec_info(info); - let actual_resources = parse_actual_resources(&info.actual_resources); - - match tx { - Tx::Invoke(_) => Receipt::Invoke(InvokeTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - execution_resources: actual_resources, - }), - - Tx::Declare(_) => Receipt::Declare(DeclareTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - execution_resources: actual_resources, - }), - - Tx::L1Handler(tx) => Receipt::L1Handler(L1HandlerTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - message_hash: tx.message_hash, - execution_resources: actual_resources, - }), - - Tx::DeployAccount(tx) => Receipt::DeployAccount(DeployAccountTxReceipt { - events, - actual_fee, - revert_error, - messages_sent, - execution_resources: actual_resources, - contract_address: tx.contract_address(), - }), - } -} - -fn events_from_exec_info(info: &TxExecInfo) -> Vec { - let mut events: Vec = vec![]; - - fn get_events_recursively(call_info: &CallInfo) -> Vec { - let mut events: Vec = vec![]; - - // By default, `from_address` must correspond to the contract address that - // is sending the message. In the case of library calls, `code_address` is `None`, - // we then use the `caller_address` instead (which can also be an account). - let from_address = if let Some(code_address) = call_info.code_address { - code_address - } else { - call_info.caller_address - }; - - events.extend(call_info.events.iter().map(|e| Event { - from_address, - data: e.data.clone(), - keys: e.keys.clone(), - })); - - call_info.inner_calls.iter().for_each(|call| { - events.extend(get_events_recursively(call)); - }); - - events - } - - if let Some(ref call) = info.validate_call_info { - events.extend(get_events_recursively(call)); - } - - if let Some(ref call) = info.execute_call_info { - events.extend(get_events_recursively(call)); - } - - if let Some(ref call) = info.fee_transfer_call_info { - events.extend(get_events_recursively(call)); - } - - events -} - -fn l2_to_l1_messages_from_exec_info(info: &TxExecInfo) -> Vec { - let mut messages = vec![]; - - fn get_messages_recursively(info: &CallInfo) -> Vec { - let mut messages = vec![]; - - // By default, `from_address` must correspond to the contract address that - // is sending the message. In the case of library calls, `code_address` is `None`, - // we then use the `caller_address` instead (which can also be an account). 
- let from_address = if let Some(code_address) = info.code_address { - code_address - } else { - info.caller_address - }; - - messages.extend(info.l2_to_l1_messages.iter().map(|m| MessageToL1 { - from_address, - to_address: m.to_address, - payload: m.payload.clone(), - })); - - info.inner_calls.iter().for_each(|call| { - messages.extend(get_messages_recursively(call)); - }); - - messages - } - - if let Some(ref info) = info.validate_call_info { - messages.extend(get_messages_recursively(info)); - } - - if let Some(ref info) = info.execute_call_info { - messages.extend(get_messages_recursively(info)); - } - - if let Some(ref info) = info.fee_transfer_call_info { - messages.extend(get_messages_recursively(info)); - } - - messages -} - -fn parse_actual_resources(resources: &HashMap) -> TxExecutionResources { - TxExecutionResources { - steps: resources.get("n_steps").copied().unwrap_or_default(), - memory_holes: resources.get("memory_holes").copied(), - ec_op_builtin: resources.get("ec_op_builtin").copied(), - ecdsa_builtin: resources.get("ecdsa_builtin").copied(), - keccak_builtin: resources.get("keccak_builtin").copied(), - bitwise_builtin: resources.get("bitwise_builtin").copied(), - pedersen_builtin: resources.get("pedersen_builtin").copied(), - poseidon_builtin: resources.get("poseidon_builtin").copied(), - range_check_builtin: resources.get("range_check_builtin").copied(), - segment_arena_builtin: resources.get("segment_arena_builtin").copied(), - } -} diff --git a/crates/katana/executor/src/implementation/sir/state.rs b/crates/katana/executor/src/implementation/sir/state.rs index 74bd309692..a7334c219e 100644 --- a/crates/katana/executor/src/implementation/sir/state.rs +++ b/crates/katana/executor/src/implementation/sir/state.rs @@ -299,7 +299,7 @@ mod tests { use crate::StateProviderDb; fn new_sierra_class() -> (FlattenedSierraClass, CompiledClass) { - let json = include_str!("../../../../primitives/contracts/compiled/cairo1_contract.json"); + let json = include_str!("../../../../contracts/compiled/cairo1_contract.json"); let artifact = serde_json::from_str(json).unwrap(); let compiled_class = parse_compiled_class(artifact).unwrap(); let sierra_class = parse_sierra_class(json).unwrap().flatten().unwrap(); diff --git a/crates/katana/executor/src/implementation/sir/utils.rs b/crates/katana/executor/src/implementation/sir/utils.rs index 9b296ac68f..584ffa5ead 100644 --- a/crates/katana/executor/src/implementation/sir/utils.rs +++ b/crates/katana/executor/src/implementation/sir/utils.rs @@ -560,12 +560,6 @@ pub fn to_exec_info(exec_info: &TransactionExecutionInfo) -> TxExecInfo { } fn from_sir_call_info(call_info: CallInfo) -> katana_primitives::trace::CallInfo { - let message_to_l1_from_address = if let Some(ref a) = call_info.code_address { - to_address(a) - } else { - to_address(&call_info.caller_address) - }; - katana_primitives::trace::CallInfo { contract_address: to_address(&call_info.contract_address), caller_address: to_address(&call_info.caller_address), @@ -616,7 +610,7 @@ fn from_sir_call_info(call_info: CallInfo) -> katana_primitives::trace::CallInfo .iter() .map(|m| katana_primitives::message::OrderedL2ToL1Message { order: m.order as u64, - from_address: message_to_l1_from_address, + from_address: to_address(&call_info.contract_address), to_address: *to_address(&m.to_address), payload: m.payload.iter().map(to_felt).collect(), }) diff --git a/crates/katana/executor/src/utils.rs b/crates/katana/executor/src/utils.rs index 6de7b60b33..45d90fb6f8 100644 --- 
a/crates/katana/executor/src/utils.rs +++ b/crates/katana/executor/src/utils.rs @@ -1,9 +1,16 @@ use std::collections::HashMap; use convert_case::{Case, Casing}; -use katana_primitives::receipt::Event; +use katana_primitives::receipt::{ + DeclareTxReceipt, DeployAccountTxReceipt, Event, InvokeTxReceipt, L1HandlerTxReceipt, + MessageToL1, Receipt, TxExecutionResources, +}; +use katana_primitives::trace::{CallInfo, TxExecInfo}; +use katana_primitives::transaction::Tx; use tracing::trace; +pub(crate) const LOG_TARGET: &str = "executor"; + pub fn log_resources(resources: &HashMap) { let mut mapped_strings = resources .iter() @@ -27,15 +34,247 @@ pub fn log_resources(resources: &HashMap) { mapped_strings.insert(0, format!("Steps: {}", steps)); } - trace!(target: "executor", "transaction resource usage: {}", mapped_strings.join(" | ")); + trace!(target: LOG_TARGET, usage = mapped_strings.join(" | "), "Transaction resource usage."); } pub fn log_events(events: &[Event]) { for e in events { trace!( - target: "executor", - "event emitted keys=[{}]", - e.keys.iter().map(|key| format!("{key:#x}")).collect::>().join(", ") + target: LOG_TARGET, + keys = e.keys.iter().map(|key| format!("{key:#x}")).collect::>().join(", "), + "Event emitted.", ); } } + +pub fn receipt_from_exec_info(tx: &Tx, info: &TxExecInfo) -> Receipt { + let actual_fee = info.actual_fee; + let events = events_from_exec_info(info); + let revert_error = info.revert_error.clone(); + let messages_sent = l2_to_l1_messages_from_exec_info(info); + let actual_resources = parse_actual_resources(&info.actual_resources); + + match tx { + Tx::Invoke(_) => Receipt::Invoke(InvokeTxReceipt { + events, + actual_fee, + revert_error, + messages_sent, + execution_resources: actual_resources, + }), + + Tx::Declare(_) => Receipt::Declare(DeclareTxReceipt { + events, + actual_fee, + revert_error, + messages_sent, + execution_resources: actual_resources, + }), + + Tx::L1Handler(tx) => Receipt::L1Handler(L1HandlerTxReceipt { + events, + actual_fee, + revert_error, + messages_sent, + message_hash: tx.message_hash, + execution_resources: actual_resources, + }), + + Tx::DeployAccount(tx) => Receipt::DeployAccount(DeployAccountTxReceipt { + events, + actual_fee, + revert_error, + messages_sent, + execution_resources: actual_resources, + contract_address: tx.contract_address(), + }), + } +} + +pub fn events_from_exec_info(info: &TxExecInfo) -> Vec { + let mut events: Vec = vec![]; + + if let Some(ref call) = info.validate_call_info { + events.extend(get_events_recur(call)); + } + + if let Some(ref call) = info.execute_call_info { + events.extend(get_events_recur(call)); + } + + if let Some(ref call) = info.fee_transfer_call_info { + events.extend(get_events_recur(call)); + } + + events +} + +pub fn l2_to_l1_messages_from_exec_info(info: &TxExecInfo) -> Vec { + let mut messages = vec![]; + + if let Some(ref info) = info.validate_call_info { + messages.extend(get_l2_to_l1_messages_recur(info)); + } + + if let Some(ref info) = info.execute_call_info { + messages.extend(get_l2_to_l1_messages_recur(info)); + } + + if let Some(ref info) = info.fee_transfer_call_info { + messages.extend(get_l2_to_l1_messages_recur(info)); + } + + messages +} + +pub fn parse_actual_resources(resources: &HashMap) -> TxExecutionResources { + TxExecutionResources { + steps: resources.get("n_steps").copied().unwrap_or_default(), + memory_holes: resources.get("memory_holes").copied(), + ec_op_builtin: resources.get("ec_op_builtin").copied(), + ecdsa_builtin: 
resources.get("ecdsa_builtin").copied(), + keccak_builtin: resources.get("keccak_builtin").copied(), + bitwise_builtin: resources.get("bitwise_builtin").copied(), + pedersen_builtin: resources.get("pedersen_builtin").copied(), + poseidon_builtin: resources.get("poseidon_builtin").copied(), + range_check_builtin: resources.get("range_check_builtin").copied(), + segment_arena_builtin: resources.get("segment_arena_builtin").copied(), + } +} + +fn get_events_recur(info: &CallInfo) -> Vec { + let mut events: Vec = vec![]; + + events.extend(info.events.iter().map(|e| Event { + from_address: info.contract_address, + data: e.data.clone(), + keys: e.keys.clone(), + })); + + info.inner_calls.iter().for_each(|call| { + events.extend(get_events_recur(call)); + }); + + events +} + +fn get_l2_to_l1_messages_recur(info: &CallInfo) -> Vec { + let mut messages = vec![]; + + messages.extend(info.l2_to_l1_messages.iter().map(|m| MessageToL1 { + from_address: m.from_address, + to_address: m.to_address, + payload: m.payload.clone(), + })); + + info.inner_calls.iter().for_each(|call| { + messages.extend(get_l2_to_l1_messages_recur(call)); + }); + + messages +} + +#[cfg(test)] +mod tests { + use katana_primitives::event::OrderedEvent; + use katana_primitives::message::OrderedL2ToL1Message; + use katana_primitives::receipt::{Event, MessageToL1}; + use katana_primitives::trace::CallInfo; + use starknet::macros::felt; + + fn call_info() -> CallInfo { + let inner_calls = vec![CallInfo { + contract_address: felt!("0x111").into(), + events: vec![ + OrderedEvent { order: 1, data: vec![1u8.into()], keys: vec![10u8.into()] }, + OrderedEvent { order: 4, data: vec![2u8.into()], keys: vec![20u8.into()] }, + ], + l2_to_l1_messages: vec![OrderedL2ToL1Message { + order: 0, + from_address: felt!("0x111").into(), + to_address: felt!("0x200"), + payload: vec![1u8.into()], + }], + ..Default::default() + }]; + + CallInfo { + contract_address: felt!("0x100").into(), + events: vec![OrderedEvent { order: 0, data: vec![1u8.into()], keys: vec![2u8.into()] }], + l2_to_l1_messages: vec![ + OrderedL2ToL1Message { + order: 0, + from_address: felt!("0x100").into(), + to_address: felt!("0x200"), + payload: vec![1u8.into()], + }, + OrderedL2ToL1Message { + order: 1, + from_address: felt!("0x100").into(), + to_address: felt!("0x201"), + payload: vec![2u8.into()], + }, + ], + inner_calls, + ..Default::default() + } + } + + #[test] + fn get_events_from_exec_info() { + let info = call_info(); + let events = super::get_events_recur(&info); + + let expected_events = vec![ + Event { + from_address: info.contract_address, + data: vec![1u8.into()], + keys: vec![2u8.into()], + }, + Event { + from_address: info.inner_calls[0].contract_address, + data: vec![1u8.into()], + keys: vec![10u8.into()], + }, + Event { + from_address: info.inner_calls[0].contract_address, + data: vec![2u8.into()], + keys: vec![20u8.into()], + }, + ]; + + similar_asserts::assert_eq!(events, expected_events) + } + + #[test] + fn get_l2_to_l1_messages_from_exec_info() { + let info = call_info(); + let events = super::get_l2_to_l1_messages_recur(&info); + + // TODO: Maybe remove `from_address` from `MessageToL1`? + // + // The from address is not constrained to be the same as the contract address + // of the call info beca use we already set it when converting TxExecInfo from its executor + // specific counterparts. Which is different compare to the events where it doesn't have + // from address field in `OrderedEvent`. 
+ let expected_messages = vec![ + MessageToL1 { + from_address: info.contract_address, + to_address: info.l2_to_l1_messages[0].to_address, + payload: info.l2_to_l1_messages[0].payload.clone(), + }, + MessageToL1 { + from_address: info.contract_address, + to_address: info.l2_to_l1_messages[1].to_address, + payload: info.l2_to_l1_messages[1].payload.clone(), + }, + MessageToL1 { + from_address: info.inner_calls[0].contract_address, + to_address: info.inner_calls[0].l2_to_l1_messages[0].to_address, + payload: info.inner_calls[0].l2_to_l1_messages[0].payload.clone(), + }, + ]; + + similar_asserts::assert_eq!(events, expected_messages) + } +} diff --git a/crates/katana/executor/tests/executor.rs b/crates/katana/executor/tests/executor.rs index 70a85ee6fb..3d64cf2393 100644 --- a/crates/katana/executor/tests/executor.rs +++ b/crates/katana/executor/tests/executor.rs @@ -249,13 +249,28 @@ fn test_executor_with_valid_blocks_impl( ); // assert the state updates after all the blocks are executed - // + let mut actual_total_gas: u128 = 0; + let mut actual_total_steps: u128 = 0; // assert the state updates - let ExecutionOutput { states, transactions } = executor.take_execution_output().unwrap(); - // asserts that the executed transactions are stored - let actual_txs: Vec = transactions.iter().map(|(tx, _)| tx.clone()).collect(); + let ExecutionOutput { states, transactions, stats } = executor.take_execution_output().unwrap(); + // asserts that the executed transactions are stored + let actual_txs: Vec = transactions + .iter() + .map(|(tx, res)| { + if let Some(fee) = res.fee() { + actual_total_gas += fee.gas_consumed; + } + if let Some(rec) = res.receipt() { + actual_total_steps += rec.resources_used().steps as u128; + } + tx.clone() + }) + .collect(); + + assert_eq!(actual_total_gas, stats.l1_gas_used); + assert_eq!(actual_total_steps, stats.cairo_steps_used); assert_eq!(actual_txs, expected_txs); let actual_nonce_updates = states.state_updates.nonce_updates; diff --git a/crates/katana/executor/tests/fixtures/contract.json b/crates/katana/executor/tests/fixtures/contract.json index ce1957cd94..359796f532 120000 --- a/crates/katana/executor/tests/fixtures/contract.json +++ b/crates/katana/executor/tests/fixtures/contract.json @@ -1 +1 @@ -../../../primitives/contracts/compiled/oz_account_080.json \ No newline at end of file +../../../contracts/compiled/oz_account_080.json \ No newline at end of file diff --git a/crates/katana/executor/tests/fixtures/mod.rs b/crates/katana/executor/tests/fixtures/mod.rs index 9a7f04ca8d..d11bf0e1ff 100644 --- a/crates/katana/executor/tests/fixtures/mod.rs +++ b/crates/katana/executor/tests/fixtures/mod.rs @@ -2,6 +2,7 @@ pub mod transaction; use std::collections::HashMap; +use alloy_primitives::U256; use cairo_vm::vm::runners::builtin_runner::{ BITWISE_BUILTIN_NAME, EC_OP_BUILTIN_NAME, HASH_BUILTIN_NAME, KECCAK_BUILTIN_NAME, OUTPUT_BUILTIN_NAME, POSEIDON_BUILTIN_NAME, RANGE_CHECK_BUILTIN_NAME, @@ -59,7 +60,7 @@ pub fn genesis() -> Genesis { let accounts = DevAllocationsGenerator::new(10) .with_seed(seed) - .with_balance(DEFAULT_PREFUNDED_ACCOUNT_BALANCE) + .with_balance(U256::from(DEFAULT_PREFUNDED_ACCOUNT_BALANCE)) .generate(); let mut genesis = Genesis::default(); diff --git a/crates/katana/executor/tests/fixtures/transaction.rs b/crates/katana/executor/tests/fixtures/transaction.rs index b8819691b9..450df62e16 100644 --- a/crates/katana/executor/tests/fixtures/transaction.rs +++ b/crates/katana/executor/tests/fixtures/transaction.rs @@ -7,7 +7,7 @@ use 
katana_primitives::genesis::Genesis; use katana_primitives::transaction::ExecutableTxWithHash; use katana_primitives::FieldElement; use starknet::accounts::{Account, Call, ExecutionEncoding, SingleOwnerAccount}; -use starknet::core::types::BroadcastedInvokeTransaction; +use starknet::core::types::{BlockId, BlockTag, BroadcastedInvokeTransaction}; use starknet::macros::{felt, selector}; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::{JsonRpcClient, Url}; @@ -28,7 +28,7 @@ pub fn invoke_executable_tx( let provider = JsonRpcClient::new(HttpTransport::new(Url::try_from(url).unwrap())); let signer = LocalWallet::from_signing_key(SigningKey::from_secret_scalar(private_key)); - let account = SingleOwnerAccount::new( + let mut account = SingleOwnerAccount::new( provider, signer, address.into(), @@ -36,6 +36,8 @@ pub fn invoke_executable_tx( ExecutionEncoding::New, ); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + let calls = vec![Call { to: DEFAULT_FEE_TOKEN_ADDRESS.into(), selector: selector!("transfer"), diff --git a/crates/katana/executor/tests/simulate.rs b/crates/katana/executor/tests/simulate.rs index 7a8905fa43..076f2ba013 100644 --- a/crates/katana/executor/tests/simulate.rs +++ b/crates/katana/executor/tests/simulate.rs @@ -62,9 +62,11 @@ fn test_simulate_tx_impl( }),); // check that the underlying state is not modified - let ExecutionOutput { states, transactions } = + let ExecutionOutput { states, transactions, stats } = executor.take_execution_output().expect("must take output"); + assert_eq!(stats.l1_gas_used, 0, "no gas usage should be recorded"); + assert_eq!(stats.cairo_steps_used, 0, "no steps usage should be recorded"); assert!(transactions.is_empty(), "simulated tx should not be stored"); assert!(states.state_updates.nonce_updates.is_empty(), "no state updates"); diff --git a/crates/katana/primitives/Cargo.toml b/crates/katana/primitives/Cargo.toml index a2ec69ca12..d09ce22a75 100644 --- a/crates/katana/primitives/Cargo.toml +++ b/crates/katana/primitives/Cargo.toml @@ -23,14 +23,13 @@ strum.workspace = true strum_macros.workspace = true thiserror.workspace = true +alloy-primitives.workspace = true cairo-lang-sierra.workspace = true cairo-lang-starknet.workspace = true flate2.workspace = true starknet_api.workspace = true -ethers = "2.0.11" - [features] default = [ "serde" ] rpc = [ ] -serde = [ ] +serde = [ "alloy-primitives/serde" ] diff --git a/crates/katana/primitives/contracts/messaging/cairo/.tool-versions b/crates/katana/primitives/contracts/messaging/cairo/.tool-versions deleted file mode 100644 index 697917e577..0000000000 --- a/crates/katana/primitives/contracts/messaging/cairo/.tool-versions +++ /dev/null @@ -1 +0,0 @@ -scarb 2.3.1 diff --git a/crates/katana/primitives/contracts/messaging/cairo/account_l3.json b/crates/katana/primitives/contracts/messaging/cairo/account_l3.json deleted file mode 100644 index 3b333914a4..0000000000 --- a/crates/katana/primitives/contracts/messaging/cairo/account_l3.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": 1, - "variant": { - "type": "open_zeppelin", - "version": 1, - "public_key": "0x4c0f884b8e5b4f00d97a3aad26b2e5de0c0c76a555060c837da2e287403c01d" - }, - "deployment": { - "status": "deployed", - "class_hash": "0x4d07e40e93398ed3c76981e72dd1fd22557a78ce36c0515f679e27f0bb5bc5f", - "address": "0x5686a647a9cdd63ade617e0baf3b364856b813b508f03903eb58a7e622d5855" - } -} diff --git a/crates/katana/primitives/contracts/messaging/l3.messaging.json 
b/crates/katana/primitives/contracts/messaging/l3.messaging.json deleted file mode 100644 index 58a5264fc0..0000000000 --- a/crates/katana/primitives/contracts/messaging/l3.messaging.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "chain": "starknet", - "rpc_url": "http://127.0.0.1:5050", - "contract_address": "0x046c0ea3fb2ad27053e8af3c8cfab38a51afb9fe90fcab1f75446bd41f7d3796", - "sender_address": "0x5686a647a9cdd63ade617e0baf3b364856b813b508f03903eb58a7e622d5855", - "private_key": "0x33003003001800009900180300d206308b0070db00121318d17b5e6262150b", - "interval": 2, - "from_block": 0 -} diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std b/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std deleted file mode 160000 index ae570fec08..0000000000 --- a/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std +++ /dev/null @@ -1 +0,0 @@ -Subproject commit ae570fec082bfe1c1f45b0acca4a2b4f84d345ce diff --git a/crates/katana/primitives/src/conversion/rpc.rs b/crates/katana/primitives/src/conversion/rpc.rs index 41d9836743..2b27cd2b7e 100644 --- a/crates/katana/primitives/src/conversion/rpc.rs +++ b/crates/katana/primitives/src/conversion/rpc.rs @@ -280,7 +280,7 @@ mod tests { #[test] fn legacy_rpc_to_inner_and_back() { - let json = include_str!("../../contracts/compiled/account.json"); + let json = include_str!("../../../contracts/compiled/account.json"); let json = serde_json::from_str(json).unwrap(); let class: DeprecatedCompiledClass = parse_deprecated_compiled_class(json).unwrap(); diff --git a/crates/katana/primitives/src/genesis/allocation.rs b/crates/katana/primitives/src/genesis/allocation.rs index fa5fd2a07c..5ae6f11cba 100644 --- a/crates/katana/primitives/src/genesis/allocation.rs +++ b/crates/katana/primitives/src/genesis/allocation.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use std::fmt::Debug; +use alloy_primitives::U256; use derive_more::{Deref, DerefMut}; -use ethers::types::U256; use rand::rngs::SmallRng; use rand::{RngCore, SeedableRng}; use serde::{Deserialize, Serialize}; @@ -239,7 +239,7 @@ impl DevAllocationsGenerator { Self { total, seed: [0u8; 32], - balance: U256::zero(), + balance: U256::ZERO, class_hash: DEFAULT_OZ_ACCOUNT_CONTRACT_CLASS_HASH, } } diff --git a/crates/katana/primitives/src/genesis/constant.rs b/crates/katana/primitives/src/genesis/constant.rs index 16d8019b85..b5e8c555a3 100644 --- a/crates/katana/primitives/src/genesis/constant.rs +++ b/crates/katana/primitives/src/genesis/constant.rs @@ -126,14 +126,14 @@ lazy_static! 
{ // Default fee token contract // pub static ref DEFAULT_LEGACY_ERC20_CONTRACT_CASM: CompiledContractClass = parse_compiled_class(include_str!("../../contracts/compiled/erc20.json")).unwrap(); - pub static ref DEFAULT_LEGACY_ERC20_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../contracts/compiled/erc20.json")); + pub static ref DEFAULT_LEGACY_ERC20_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../../contracts/compiled/erc20.json")); // Default universal deployer - pub static ref DEFAULT_LEGACY_UDC_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../contracts/compiled/universal_deployer.json")); + pub static ref DEFAULT_LEGACY_UDC_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../../contracts/compiled/universal_deployer.json")); // Default account contract - pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT: SierraClass = parse_sierra_class(include_str!("../../contracts/compiled/oz_account_080.json")).unwrap(); - pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../contracts/compiled/oz_account_080.json")); + pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT: SierraClass = parse_sierra_class(include_str!("../../../contracts/compiled/oz_account_080.json")).unwrap(); + pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../../contracts/compiled/oz_account_080.json")); } diff --git a/crates/katana/primitives/src/genesis/json.rs b/crates/katana/primitives/src/genesis/json.rs index 3efc140fa2..84a97e101e 100644 --- a/crates/katana/primitives/src/genesis/json.rs +++ b/crates/katana/primitives/src/genesis/json.rs @@ -10,10 +10,10 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Arc; +use alloy_primitives::U256; use base64::prelude::*; use cairo_lang_starknet::casm_contract_class::StarknetSierraCompilationError; use cairo_vm::types::errors::program_errors::ProgramError; -use ethers::types::U256; use rayon::prelude::*; use serde::de::value::MapAccessDeserializer; use serde::de::Visitor; @@ -99,7 +99,7 @@ pub struct GenesisClassJson { pub class_hash: Option, } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] #[serde(rename_all = "camelCase")] pub struct FeeTokenConfigJson { pub name: String, @@ -199,7 +199,7 @@ pub enum GenesisJsonError { /// (eg, using `serde_json`). /// /// The path of the class artifact are computed **relative** to the JSON file. 
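Per the doc comment, a `GenesisJson` is deserialized first (e.g. with `serde_json`) and then converted through the `TryFrom<GenesisJson>` impl in this hunk. A rough usage sketch under those assumptions; the module paths are inferred from the crate layout, the `serde_json` dependency and `load_genesis` helper are illustrative, and resolving class artifact paths relative to the JSON file may be handled by a dedicated loader rather than by `from_str` alone:

use std::fs;
use std::path::Path;

use katana_primitives::genesis::json::GenesisJson; // module path assumed from file layout
use katana_primitives::genesis::Genesis;

// Illustrative helper, not an API provided by the crate.
fn load_genesis(path: &Path) -> Result<Genesis, Box<dyn std::error::Error>> {
    // 1. Parse the raw JSON into the intermediate GenesisJson representation.
    let raw = fs::read_to_string(path)?;
    let json: GenesisJson = serde_json::from_str(&raw)?;

    // 2. Convert into the in-memory Genesis. Class artifact paths in the file are
    //    documented as relative to the JSON file itself, so they need to be resolved
    //    against `path.parent()` before (or as part of) this conversion.
    let genesis = Genesis::try_from(json)?;
    Ok(genesis)
}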
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] #[serde(rename_all = "camelCase")] pub struct GenesisJson { pub parent_hash: BlockHash, @@ -315,10 +315,9 @@ impl TryFrom for Genesis { }) .collect::>()?; - let mut fee_token = FeeTokenConfig { + let fee_token = FeeTokenConfig { name: value.fee_token.name, symbol: value.fee_token.symbol, - total_supply: U256::zero(), decimals: value.fee_token.decimals, address: value.fee_token.address.unwrap_or(DEFAULT_FEE_TOKEN_ADDRESS), class_hash: value.fee_token.class.unwrap_or(DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH), @@ -414,11 +413,6 @@ impl TryFrom for Genesis { } }; - // increase the total supply of the fee token if balance is given - if let Some(balance) = account.balance { - fee_token.total_supply += balance; - } - match account.private_key { Some(private_key) => allocations.insert( address, @@ -456,11 +450,6 @@ impl TryFrom for Genesis { } } - // increase the total supply of the fee token if balance is given - if let Some(balance) = contract.balance { - fee_token.total_supply += balance; - } - allocations.insert( address, GenesisAllocation::Contract(GenesisContractAlloc { @@ -552,7 +541,7 @@ mod tests { use std::path::PathBuf; use std::str::FromStr; - use ethers::types::U256; + use alloy_primitives::U256; use starknet::macros::felt; use super::{from_base64, GenesisClassJson, GenesisJson}; @@ -705,15 +694,16 @@ mod tests { vec![ GenesisClassJson { class_hash: Some(felt!("0x8")), - class: PathBuf::from("../../contracts/compiled/erc20.json").into(), + class: PathBuf::from("../../../contracts/compiled/erc20.json").into(), }, GenesisClassJson { class_hash: Some(felt!("0x80085")), - class: PathBuf::from("../../contracts/compiled/universal_deployer.json").into(), + class: PathBuf::from("../../../contracts/compiled/universal_deployer.json") + .into(), }, GenesisClassJson { class_hash: Some(felt!("0xa55")), - class: PathBuf::from("../../contracts/compiled/oz_account_080.json").into(), + class: PathBuf::from("../../../contracts/compiled/oz_account_080.json").into(), }, ] ); @@ -729,11 +719,12 @@ mod tests { vec![ GenesisClassJson { class_hash: Some(felt!("0x8")), - class: PathBuf::from("../../contracts/compiled/erc20.json").into(), + class: PathBuf::from("../../../contracts/compiled/erc20.json").into(), }, GenesisClassJson { class_hash: Some(felt!("0x80085")), - class: PathBuf::from("../../contracts/compiled/universal_deployer.json").into(), + class: PathBuf::from("../../../contracts/compiled/universal_deployer.json") + .into(), }, GenesisClassJson { class_hash: Some(felt!("0xa55")), @@ -801,7 +792,6 @@ mod tests { address: ContractAddress::from(felt!("0x55")), name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0xD3C21BCECCEDA1000000").unwrap() * 5, decimals: 18, class_hash: felt!("0x8"), storage: Some(HashMap::from([ @@ -935,16 +925,12 @@ mod tests { assert_eq!(actual_genesis.timestamp, expected_genesis.timestamp); assert_eq!(actual_genesis.state_root, expected_genesis.state_root); assert_eq!(actual_genesis.gas_prices, expected_genesis.gas_prices); - assert_eq!(actual_genesis.fee_token.address, expected_genesis.fee_token.address); assert_eq!(actual_genesis.fee_token.name, expected_genesis.fee_token.name); assert_eq!(actual_genesis.fee_token.symbol, expected_genesis.fee_token.symbol); assert_eq!(actual_genesis.fee_token.decimals, expected_genesis.fee_token.decimals); - assert_eq!(actual_genesis.fee_token.total_supply, 
expected_genesis.fee_token.total_supply); assert_eq!(actual_genesis.fee_token.class_hash, expected_genesis.fee_token.class_hash); - assert_eq!(actual_genesis.universal_deployer, expected_genesis.universal_deployer); - assert_eq!(actual_genesis.allocations.len(), expected_genesis.allocations.len()); for alloc in actual_genesis.allocations { @@ -1026,7 +1012,6 @@ mod tests { address: DEFAULT_FEE_TOKEN_ADDRESS, name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0xD3C21BCECCEDA1000000").unwrap(), decimals: 18, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: None, diff --git a/crates/katana/primitives/src/genesis/mod.rs b/crates/katana/primitives/src/genesis/mod.rs index 5b072d196e..5774e3407c 100644 --- a/crates/katana/primitives/src/genesis/mod.rs +++ b/crates/katana/primitives/src/genesis/mod.rs @@ -6,7 +6,7 @@ use std::collections::{BTreeMap, HashMap}; use std::fmt::Debug; use std::sync::Arc; -use ethers::types::U256; +use alloy_primitives::U256; use serde::{Deserialize, Serialize}; use starknet::core::serde::unsigned_field_element::UfeHex; use starknet::core::utils::cairo_short_string_to_felt; @@ -42,8 +42,6 @@ pub struct FeeTokenConfig { pub address: ContractAddress, /// The decimals of the fee token. pub decimals: u8, - /// The total supply of the fee token. - pub total_supply: U256, /// The class hash of the fee token contract. #[serde_as(as = "UfeHex")] pub class_hash: ClassHash, @@ -186,24 +184,13 @@ impl Genesis { states.state_updates.storage_updates.insert(address, storage); } - // TODO: put this in a separate function - // insert fee token related data let mut fee_token_storage = self.fee_token.storage.clone().unwrap_or_default(); - - let name: FieldElement = cairo_short_string_to_felt(&self.fee_token.name).unwrap(); - let symbol: FieldElement = cairo_short_string_to_felt(&self.fee_token.symbol).unwrap(); - let decimals: FieldElement = self.fee_token.decimals.into(); - let (total_supply_low, total_supply_high) = split_u256(self.fee_token.total_supply); - - fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); - fee_token_storage.insert(ERC20_SYMBOL_STORAGE_SLOT, symbol); - fee_token_storage.insert(ERC20_DECIMAL_STORAGE_SLOT, decimals); - fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT, total_supply_low); - fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into(), total_supply_high); + let mut fee_token_total_supply = U256::ZERO; for (address, alloc) in &self.allocations { if let Some(balance) = alloc.balance() { + fee_token_total_supply += balance; let (low, high) = split_u256(balance); // the base storage address for a standard ERC20 contract balance @@ -219,6 +206,19 @@ impl Genesis { } } + // TODO: put this in a separate function + + let name: FieldElement = cairo_short_string_to_felt(&self.fee_token.name).unwrap(); + let symbol: FieldElement = cairo_short_string_to_felt(&self.fee_token.symbol).unwrap(); + let decimals: FieldElement = self.fee_token.decimals.into(); + let (total_supply_low, total_supply_high) = split_u256(fee_token_total_supply); + + fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); + fee_token_storage.insert(ERC20_SYMBOL_STORAGE_SLOT, symbol); + fee_token_storage.insert(ERC20_DECIMAL_STORAGE_SLOT, decimals); + fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT, total_supply_low); + fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into(), total_supply_high); + states .state_updates .contract_updates @@ -246,7 +246,6 @@ impl Default for Genesis { decimals: 
18, name: "Ether".into(), symbol: "ETH".into(), - total_supply: 0.into(), address: DEFAULT_FEE_TOKEN_ADDRESS, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: None, @@ -352,7 +351,6 @@ mod tests { address: DEFAULT_FEE_TOKEN_ADDRESS, name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0x1a784379d99db42000000").unwrap(), decimals: 18, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: Some(HashMap::from([ @@ -393,7 +391,7 @@ mod tests { ContractAddress::from(felt!("0x2")), GenesisAllocation::Account(GenesisAccountAlloc::Account(GenesisAccount { public_key: felt!("0x2"), - balance: Some(U256::zero()), + balance: Some(U256::ZERO), class_hash: DEFAULT_OZ_ACCOUNT_CONTRACT_CLASS_HASH, nonce: None, storage: None, @@ -420,12 +418,16 @@ mod tests { universal_deployer: Some(ud.clone()), }; - // setup expected values + // setup expected storage values let name: FieldElement = cairo_short_string_to_felt(&fee_token.name).unwrap(); let symbol: FieldElement = cairo_short_string_to_felt(&fee_token.symbol).unwrap(); let decimals: FieldElement = fee_token.decimals.into(); - let (total_supply_low, total_supply_high) = split_u256(fee_token.total_supply); + + // there are only two allocations so the total token supply is + // 0xD3C21BCECCEDA1000000 * 2 = 0x1a784379d99db42000000 + let (total_supply_low, total_supply_high) = + split_u256(U256::from_str("0x1a784379d99db42000000").unwrap()); let mut fee_token_storage = HashMap::new(); fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); @@ -649,7 +651,7 @@ mod tests { assert_eq!(fee_token_storage.get(&felt!("0x111")), Some(&felt!("0x1"))); assert_eq!(fee_token_storage.get(&felt!("0x222")), Some(&felt!("0x2"))); - let mut actual_total_supply = U256::zero(); + let mut allocs_total_supply = U256::ZERO; // check for balance for (address, alloc) in &allocations { @@ -667,13 +669,24 @@ mod tests { assert_eq!(fee_token_storage.get(&low_bal_storage_var), Some(&low)); assert_eq!(fee_token_storage.get(&high_bal_storage_var), Some(&high)); - actual_total_supply += balance; + allocs_total_supply += balance; } } + // Check that the total supply is the sum of all balances in the allocations. + // Technically this is not necessary bcs we already checked the total supply in + // the fee token storage but it's a good sanity check. 
+ + let (actual_total_supply_low, actual_total_supply_high) = split_u256(allocs_total_supply); assert_eq!( - actual_total_supply, fee_token.total_supply, - "total supply should match the total balances of all allocations" + fee_token_storage.get(&ERC20_TOTAL_SUPPLY_STORAGE_SLOT), + Some(&actual_total_supply_low), + "total supply must be calculated from allocations balances correctly" + ); + assert_eq!( + fee_token_storage.get(&(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into())), + Some(&actual_total_supply_high), + "total supply must be calculated from allocations balances correctly" ); let udc_storage = diff --git a/crates/katana/primitives/src/genesis/test-genesis-with-class.json b/crates/katana/primitives/src/genesis/test-genesis-with-class.json index 5a6f151d6e..177ff094f0 100644 --- a/crates/katana/primitives/src/genesis/test-genesis-with-class.json +++ b/crates/katana/primitives/src/genesis/test-genesis-with-class.json @@ -57,11 +57,11 @@ }, "classes": [ { - "class": "../../contracts/compiled/erc20.json", + "class": "../../../contracts/compiled/erc20.json", "classHash": "0x8" }, { - "class": "../../contracts/compiled/universal_deployer.json", + "class": "../../../contracts/compiled/universal_deployer.json", "classHash": "0x80085" }, { diff --git a/crates/katana/primitives/src/genesis/test-genesis.json b/crates/katana/primitives/src/genesis/test-genesis.json index 822a664ca1..b122261e2f 100644 --- a/crates/katana/primitives/src/genesis/test-genesis.json +++ b/crates/katana/primitives/src/genesis/test-genesis.json @@ -69,15 +69,15 @@ }, "classes": [ { - "class": "../../contracts/compiled/erc20.json", + "class": "../../../contracts/compiled/erc20.json", "classHash": "0x8" }, { - "class": "../../contracts/compiled/universal_deployer.json", + "class": "../../../contracts/compiled/universal_deployer.json", "classHash": "0x80085" }, { - "class": "../../contracts/compiled/oz_account_080.json", + "class": "../../../contracts/compiled/oz_account_080.json", "classHash": "0xa55" } ] diff --git a/crates/katana/primitives/src/message.rs b/crates/katana/primitives/src/message.rs index 4772c0e048..1558fb4640 100644 --- a/crates/katana/primitives/src/message.rs +++ b/crates/katana/primitives/src/message.rs @@ -1,7 +1,7 @@ use crate::contract::ContractAddress; use crate::FieldElement; -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Default)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct OrderedL2ToL1Message { pub order: u64, diff --git a/crates/katana/primitives/src/receipt.rs b/crates/katana/primitives/src/receipt.rs index 2223115e61..f17e6d9cc9 100644 --- a/crates/katana/primitives/src/receipt.rs +++ b/crates/katana/primitives/src/receipt.rs @@ -1,4 +1,4 @@ -use ethers::types::H256; +use alloy_primitives::B256; use crate::contract::ContractAddress; use crate::FieldElement; @@ -67,7 +67,7 @@ pub struct L1HandlerTxReceipt { /// Events emitted by contracts. pub events: Vec, /// The hash of the L1 message - pub message_hash: H256, + pub message_hash: B256, /// Messages sent to L1. pub messages_sent: Vec, /// Revert error message if the transaction execution failed. @@ -122,6 +122,7 @@ impl Receipt { } } + /// Returns the L1 messages sent. pub fn messages_sent(&self) -> &[MessageToL1] { match self { Receipt::Invoke(rct) => &rct.messages_sent, @@ -131,6 +132,7 @@ impl Receipt { } } + /// Returns the events emitted. 
pub fn events(&self) -> &[Event] { match self { Receipt::Invoke(rct) => &rct.events, @@ -139,6 +141,16 @@ impl Receipt { Receipt::DeployAccount(rct) => &rct.events, } } + + /// Returns the execution resources used. + pub fn resources_used(&self) -> &TxExecutionResources { + match self { + Receipt::Invoke(rct) => &rct.execution_resources, + Receipt::Declare(rct) => &rct.execution_resources, + Receipt::L1Handler(rct) => &rct.execution_resources, + Receipt::DeployAccount(rct) => &rct.execution_resources, + } + } } /// Transaction execution resources. diff --git a/crates/katana/primitives/src/trace.rs b/crates/katana/primitives/src/trace.rs index 1349e10358..b7502680e0 100644 --- a/crates/katana/primitives/src/trace.rs +++ b/crates/katana/primitives/src/trace.rs @@ -32,22 +32,27 @@ pub struct ExecutionResources { pub builtin_instance_counter: HashMap, } -#[derive(Debug, Clone, PartialEq, Eq)] +/// The call type. +#[derive(Debug, Clone, PartialEq, Eq, Default)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum CallType { + #[default] + /// Normal contract call. Call, + /// Library call. Delegate, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Default)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum EntryPointType { + #[default] External, L1Handler, Constructor, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Default)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct CallInfo { /// The contract address which the call is initiated from. @@ -55,6 +60,10 @@ pub struct CallInfo { /// The call type. pub call_type: CallType, /// The contract address. + /// + /// The contract address of the current call execution context. This would be the address of + /// the contract whose code is currently being executed, or in the case of library call, the + /// address of the contract where the library call is being initiated from. pub contract_address: ContractAddress, /// The address where the code is being executed. /// Optional, since there is no address to the code implementation in a delegate call. diff --git a/crates/katana/primitives/src/transaction.rs b/crates/katana/primitives/src/transaction.rs index 90a8d7c9be..f2f7cb54e3 100644 --- a/crates/katana/primitives/src/transaction.rs +++ b/crates/katana/primitives/src/transaction.rs @@ -1,5 +1,5 @@ +use alloy_primitives::B256; use derive_more::{AsRef, Deref, From}; -use ethers::types::H256; use starknet::core::types::{DataAvailabilityMode, ResourceBoundsMapping}; use crate::chain::ChainId; @@ -355,7 +355,7 @@ pub struct L1HandlerTx { pub chain_id: ChainId, pub paid_fee_on_l1: u128, pub version: FieldElement, - pub message_hash: H256, + pub message_hash: B256, pub calldata: Vec, pub contract_address: ContractAddress, pub entry_point_selector: FieldElement, diff --git a/crates/katana/primitives/src/utils/mod.rs b/crates/katana/primitives/src/utils/mod.rs index 1a16289a6e..9c1591434b 100644 --- a/crates/katana/primitives/src/utils/mod.rs +++ b/crates/katana/primitives/src/utils/mod.rs @@ -1,4 +1,4 @@ -use ethers::types::U256; +use alloy_primitives::U256; use crate::FieldElement; @@ -9,7 +9,25 @@ pub mod transaction; /// The first element in the returned tuple is the low part, and the second element is the high /// part. 
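///
/// For example (a minimal sketch), a value of exactly 2^128 splits into a zero low part and a
/// high part of one:
///
///     let (low, high) = split_u256(U256::from(1u8) << 128);
///     assert_eq!(low, FieldElement::ZERO);
///     assert_eq!(high, FieldElement::ONE);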
pub fn split_u256(value: U256) -> (FieldElement, FieldElement) { - let low_u128: u128 = value.low_u128(); - let high_u128: u128 = (value >> 128).low_u128(); + let low_u128: u128 = (value & U256::from(u128::MAX)).to(); + let high_u128: u128 = U256::from(value >> 128).to(); (FieldElement::from(low_u128), FieldElement::from(high_u128)) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_split_u256() { + // Given + let value = U256::MAX; + + // When + let (low, high) = split_u256(value); + + // Then + assert_eq!(low, FieldElement::from(u128::MAX)); + assert_eq!(high, FieldElement::from(u128::MAX)); + } +} diff --git a/crates/katana/primitives/src/utils/transaction.rs b/crates/katana/primitives/src/utils/transaction.rs index 7bada6be65..d2dc99d26a 100644 --- a/crates/katana/primitives/src/utils/transaction.rs +++ b/crates/katana/primitives/src/utils/transaction.rs @@ -1,4 +1,4 @@ -use ethers::types::H256; +use alloy_primitives::B256; use starknet::core::crypto::compute_hash_on_elements; use starknet::core::types::{DataAvailabilityMode, MsgToL1, ResourceBounds}; use starknet_crypto::poseidon_hash_many; @@ -268,10 +268,44 @@ pub fn compute_l1_message_hash( from_address: FieldElement, to_address: FieldElement, payload: &[FieldElement], -) -> H256 { +) -> B256 { let msg = MsgToL1 { from_address, to_address, payload: payload.to_vec() }; - H256::from_slice(msg.hash().as_bytes()) + B256::from_slice(msg.hash().as_bytes()) +} + +fn encode_gas_bound(name: &[u8], bound: &ResourceBounds) -> FieldElement { + let mut buffer = [0u8; 32]; + let (remainder, max_price) = buffer.split_at_mut(128 / 8); + let (gas_kind, max_amount) = remainder.split_at_mut(64 / 8); + + let padding = gas_kind.len() - name.len(); + gas_kind[padding..].copy_from_slice(name); + max_amount.copy_from_slice(&bound.max_amount.to_be_bytes()); + max_price.copy_from_slice(&bound.max_price_per_unit.to_be_bytes()); + + FieldElement::from_bytes_be(&buffer).expect("Packed resource should fit into felt") +} + +fn hash_fee_fields( + tip: u64, + l1_gas_bounds: &ResourceBounds, + l2_gas_bounds: &ResourceBounds, +) -> FieldElement { + poseidon_hash_many(&[ + tip.into(), + encode_gas_bound(b"L1_GAS", l1_gas_bounds), + encode_gas_bound(b"L2_GAS", l2_gas_bounds), + ]) +} + +fn encode_da_mode( + nonce_da_mode: &DataAvailabilityMode, + fee_da_mode: &DataAvailabilityMode, +) -> FieldElement { + let nonce = (*nonce_da_mode as u64) << 32; + let fee = *fee_da_mode as u64; + FieldElement::from(nonce + fee) } #[cfg(test)] @@ -334,37 +368,3 @@ mod tests { ); } } - -fn encode_gas_bound(name: &[u8], bound: &ResourceBounds) -> FieldElement { - let mut buffer = [0u8; 32]; - let (remainder, max_price) = buffer.split_at_mut(128 / 8); - let (gas_kind, max_amount) = remainder.split_at_mut(64 / 8); - - let padding = gas_kind.len() - name.len(); - gas_kind[padding..].copy_from_slice(name); - max_amount.copy_from_slice(&bound.max_amount.to_be_bytes()); - max_price.copy_from_slice(&bound.max_price_per_unit.to_be_bytes()); - - FieldElement::from_bytes_be(&buffer).expect("Packed resource should fit into felt") -} - -fn hash_fee_fields( - tip: u64, - l1_gas_bounds: &ResourceBounds, - l2_gas_bounds: &ResourceBounds, -) -> FieldElement { - poseidon_hash_many(&[ - tip.into(), - encode_gas_bound(b"L1_GAS", l1_gas_bounds), - encode_gas_bound(b"L2_GAS", l2_gas_bounds), - ]) -} - -fn encode_da_mode( - nonce_da_mode: &DataAvailabilityMode, - fee_da_mode: &DataAvailabilityMode, -) -> FieldElement { - let nonce = (*nonce_da_mode as u64) << 32; - let fee = *fee_da_mode as 
u64; - FieldElement::from(nonce + fee) -} diff --git a/crates/katana/rpc/rpc-api/Cargo.toml b/crates/katana/rpc/rpc-api/Cargo.toml index f9fac4650f..f385189a56 100644 --- a/crates/katana/rpc/rpc-api/Cargo.toml +++ b/crates/katana/rpc/rpc-api/Cargo.toml @@ -7,9 +7,9 @@ version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -katana-core = { path = "../../core" } -katana-primitives = { path = "../../primitives" } -katana-rpc-types = { path = "../rpc-types" } +katana-core.workspace = true +katana-primitives.workspace = true +katana-rpc-types.workspace = true jsonrpsee = { workspace = true, features = [ "macros", "server" ] } starknet.workspace = true diff --git a/crates/katana/rpc/rpc-types-builder/Cargo.toml b/crates/katana/rpc/rpc-types-builder/Cargo.toml index a29f34ea29..d53783e828 100644 --- a/crates/katana/rpc/rpc-types-builder/Cargo.toml +++ b/crates/katana/rpc/rpc-types-builder/Cargo.toml @@ -7,10 +7,10 @@ version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -katana-executor = { path = "../../executor" } -katana-primitives = { path = "../../primitives" } -katana-provider = { path = "../../storage/provider" } -katana-rpc-types = { path = "../rpc-types" } +katana-executor.workspace = true +katana-primitives.workspace = true +katana-provider.workspace = true +katana-rpc-types.workspace = true anyhow.workspace = true starknet.workspace = true diff --git a/crates/katana/rpc/rpc-types/Cargo.toml b/crates/katana/rpc/rpc-types/Cargo.toml index 42d70a4c43..bc351aa0fc 100644 --- a/crates/katana/rpc/rpc-types/Cargo.toml +++ b/crates/katana/rpc/rpc-types/Cargo.toml @@ -7,20 +7,22 @@ version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -katana-core = { path = "../../core" } +katana-core.workspace = true katana-executor.workspace = true -katana-primitives = { path = "../../primitives" } -katana-provider = { path = "../../storage/provider" } +katana-primitives.workspace = true +katana-provider.workspace = true anyhow.workspace = true derive_more.workspace = true -ethers = "2.0.11" futures.workspace = true -jsonrpsee = { workspace = true, features = [ "macros", "server" ] } +jsonrpsee = { workspace = true, features = [ "server" ] } serde.workspace = true +serde_json.workspace = true serde_with.workspace = true starknet.workspace = true thiserror.workspace = true +alloy-primitives.workspace = true + [dev-dependencies] -serde_json.workspace = true +rstest.workspace = true diff --git a/crates/katana/rpc/rpc-types/src/account.rs b/crates/katana/rpc/rpc-types/src/account.rs index d73eb48f3c..47fc8b225f 100644 --- a/crates/katana/rpc/rpc-types/src/account.rs +++ b/crates/katana/rpc/rpc-types/src/account.rs @@ -1,4 +1,4 @@ -use ethers::types::U256; +use alloy_primitives::U256; use katana_primitives::class::ClassHash; use katana_primitives::contract::ContractAddress; use katana_primitives::genesis::allocation::GenesisAccountAlloc; diff --git a/crates/katana/rpc/rpc-types/src/error/starknet.rs b/crates/katana/rpc/rpc-types/src/error/starknet.rs index c369bcdc86..a4aacae381 100644 --- a/crates/katana/rpc/rpc-types/src/error/starknet.rs +++ b/crates/katana/rpc/rpc-types/src/error/starknet.rs @@ -3,10 +3,11 @@ use jsonrpsee::types::error::CallError; use jsonrpsee::types::ErrorObject; use katana_core::sequencer_error::SequencerError; use 
katana_provider::error::ProviderError; -use starknet::core::types::ContractErrorData; +use serde::Serialize; /// Possible list of errors that can be returned by the Starknet API according to the spec: . -#[derive(Debug, thiserror::Error, Clone)] +#[derive(Debug, thiserror::Error, Clone, Serialize)] +#[serde(untagged)] #[repr(i32)] pub enum StarknetApiError { #[error("Failed to write transaction")] @@ -111,37 +112,29 @@ impl StarknetApiError { StarknetApiError::ProofLimitExceeded => 10000, } } -} -#[derive(serde::Serialize, serde::Deserialize)] -struct UnexpectedError { - reason: String, -} + pub fn message(&self) -> String { + self.to_string() + } -impl From for StarknetApiError { - fn from(value: ProviderError) -> Self { - StarknetApiError::UnexpectedError { reason: value.to_string() } + pub fn data(&self) -> Option { + match self { + StarknetApiError::ContractError { .. } + | StarknetApiError::UnexpectedError { .. } + | StarknetApiError::TransactionExecutionError { .. } => Some(serde_json::json!(self)), + _ => None, + } } } impl From for Error { fn from(err: StarknetApiError) -> Self { - let code = err.code(); - let message = err.to_string(); - - let err = match err { - StarknetApiError::ContractError { revert_error } => { - ErrorObject::owned(code, message, Some(ContractErrorData { revert_error })) - } - - StarknetApiError::UnexpectedError { reason } => { - ErrorObject::owned(code, message, Some(UnexpectedError { reason })) - } - - _ => ErrorObject::owned(code, message, None::<()>), - }; - - Error::Call(CallError::Custom(err)) + Error::Call(CallError::Custom(ErrorObject::owned(err.code(), err.message(), err.data()))) + } +} +impl From for StarknetApiError { + fn from(value: ProviderError) -> Self { + StarknetApiError::UnexpectedError { reason: value.to_string() } } } @@ -154,3 +147,110 @@ impl From for StarknetApiError { } } } + +#[cfg(test)] +mod tests { + use rstest::rstest; + use serde_json::json; + + use super::*; + + #[rustfmt::skip] + #[rstest] + #[case(StarknetApiError::NoBlocks, 32, "There are no blocks")] + #[case(StarknetApiError::BlockNotFound, 24, "Block not found")] + #[case(StarknetApiError::InvalidCallData, 22, "Invalid call data")] + #[case(StarknetApiError::ContractNotFound, 20, "Contract not found")] + #[case(StarknetApiError::CompilationFailed, 56, "Compilation failed")] + #[case(StarknetApiError::ClassHashNotFound, 28, "Class hash not found")] + #[case(StarknetApiError::TxnHashNotFound, 29, "Transaction hash not found")] + #[case(StarknetApiError::ValidationFailure, 55, "Account validation failed")] + #[case(StarknetApiError::ClassAlreadyDeclared, 51, "Class already declared")] + #[case(StarknetApiError::InvalidContractClass, 50, "Invalid contract class")] + #[case(StarknetApiError::PageSizeTooBig, 31, "Requested page size is too big")] + #[case(StarknetApiError::FailedToReceiveTxn, 1, "Failed to write transaction")] + #[case(StarknetApiError::InvalidMessageSelector, 21, "Invalid message selector")] + #[case(StarknetApiError::InvalidTransactionNonce, 52, "Invalid transaction nonce")] + #[case(StarknetApiError::NonAccount, 58, "Sender address in not an account contract")] + #[case(StarknetApiError::InvalidTxnIndex, 27, "Invalid transaction index in a block")] + #[case(StarknetApiError::ProofLimitExceeded, 10000, "Too many storage keys requested")] + #[case(StarknetApiError::TooManyKeysInFilter, 34, "Too many keys provided in a filter")] + #[case(StarknetApiError::ContractClassSizeIsTooLarge, 57, "Contract class size is too large")] + 
#[case(StarknetApiError::FailedToFetchPendingTransactions, 38, "Failed to fetch pending transactions")] + #[case(StarknetApiError::UnsupportedTransactionVersion, 61, "The transaction version is not supported")] + #[case(StarknetApiError::UnsupportedContractClassVersion, 62, "The contract class version is not supported")] + #[case(StarknetApiError::InvalidContinuationToken, 33, "The supplied continuation token is invalid or unknown")] + #[case(StarknetApiError::DuplicateTransaction, 59, "A transaction with the same hash already exists in the mempool")] + #[case(StarknetApiError::InsufficientAccountBalance, 54, "Account balance is smaller than the transaction's max_fee")] + #[case(StarknetApiError::CompiledClassHashMismatch, 60, "The compiled class hash did not match the one supplied in the transaction")] + #[case(StarknetApiError::InsufficientMaxFee, 53, "Max fee is smaller than the minimal transaction cost (validation plus fee transfer)")] + fn test_starknet_api_error_to_error_conversion_data_none( + #[case] starknet_error: StarknetApiError, + #[case] expected_code: i32, + #[case] expected_message: &str, + ) { + let error: Error = starknet_error.into(); + match error { + Error::Call(CallError::Custom(err)) => { + assert_eq!(err.code(), expected_code); + assert_eq!(err.message(), expected_message); + assert!(err.data().is_none(), "data should be None"); + } + _ => panic!("Unexpected error variant"), + } + } + + #[rstest] + #[case( + StarknetApiError::ContractError { + revert_error: "Contract error message".to_string(), + }, + 40, + "Contract error", + json!({ + "revert_error": "Contract error message".to_string() + }), + )] + #[case( + StarknetApiError::TransactionExecutionError { + transaction_index: 1, + execution_error: "Transaction execution error message".to_string(), + }, + 41, + "Transaction execution error", + json!({ + "transaction_index": 1, + "execution_error": "Transaction execution error message".to_string() + }), + )] + #[case( + StarknetApiError::UnexpectedError { + reason: "Unexpected error reason".to_string(), + }, + 63, + "An unexpected error occured", + json!({ + "reason": "Unexpected error reason".to_string() + }), + )] + fn test_starknet_api_error_to_error_conversion_data_some( + #[case] starknet_error: StarknetApiError, + #[case] expected_code: i32, + #[case] expected_message: &str, + #[case] expected_data: serde_json::Value, + ) { + let error: Error = starknet_error.into(); + match error { + Error::Call(CallError::Custom(err)) => { + assert_eq!(err.code(), expected_code); + assert_eq!(err.message(), expected_message); + assert_eq!( + err.data().unwrap().to_string(), + expected_data.to_string(), + "data should exist" + ); + } + _ => panic!("Unexpected error variant"), + } + } +} diff --git a/crates/katana/rpc/rpc-types/src/receipt.rs b/crates/katana/rpc/rpc-types/src/receipt.rs index 42112be1c3..a6ff5692e6 100644 --- a/crates/katana/rpc/rpc-types/src/receipt.rs +++ b/crates/katana/rpc/rpc-types/src/receipt.rs @@ -86,7 +86,7 @@ impl TxReceipt { transaction_hash, actual_fee: FeePayment { amount: rct.actual_fee.into(), unit: PriceUnit::Wei }, execution_resources: ExecutionResources::from(rct.execution_resources).0, - message_hash: Hash256::from_bytes(rct.message_hash.to_fixed_bytes()), + message_hash: Hash256::from_bytes(*rct.message_hash), execution_result: if let Some(reason) = rct.revert_error { ExecutionResult::Reverted { reason } } else { diff --git a/crates/katana/rpc/rpc/Cargo.toml b/crates/katana/rpc/rpc/Cargo.toml index 2feade480e..1cf0531841 100644 --- 
a/crates/katana/rpc/rpc/Cargo.toml +++ b/crates/katana/rpc/rpc/Cargo.toml @@ -7,22 +7,23 @@ repository.workspace = true version.workspace = true [dependencies] -katana-core = { path = "../../core" } -katana-executor = { path = "../../executor" } -katana-primitives = { path = "../../primitives" } -katana-provider = { path = "../../storage/provider" } -katana-rpc-api = { path = "../rpc-api" } -katana-rpc-types = { path = "../rpc-types" } -katana-rpc-types-builder = { path = "../rpc-types-builder" } -katana-tasks = { path = "../../tasks" } +dojo-metrics.workspace = true +katana-core.workspace = true +katana-executor.workspace = true +katana-primitives.workspace = true +katana-provider.workspace = true +katana-rpc-api.workspace = true +katana-rpc-types-builder.workspace = true +katana-rpc-types.workspace = true +katana-tasks.workspace = true anyhow.workspace = true -cairo-lang-starknet = "2.3.1" flate2.workspace = true futures.workspace = true hex = { version = "0.4.3", default-features = false } hyper = "0.14.20" -jsonrpsee = { version = "0.16.2", features = [ "macros", "server" ] } +jsonrpsee = { workspace = true, features = [ "server" ] } +metrics.workspace = true serde.workspace = true serde_json.workspace = true serde_with.workspace = true @@ -36,7 +37,8 @@ tracing.workspace = true [dev-dependencies] assert_matches = "1.5.0" -dojo-test-utils = { path = "../../../dojo-test-utils" } -jsonrpsee = { version = "0.16.2", features = [ "client" ] } +cairo-lang-starknet.workspace = true +dojo-test-utils.workspace = true +jsonrpsee = { workspace = true, features = [ "client" ] } katana-rpc-api = { workspace = true, features = [ "client" ] } url.workspace = true diff --git a/crates/katana/rpc/rpc/src/lib.rs b/crates/katana/rpc/rpc/src/lib.rs index d5974b69d8..22464622ce 100644 --- a/crates/katana/rpc/rpc/src/lib.rs +++ b/crates/katana/rpc/rpc/src/lib.rs @@ -1,22 +1,22 @@ +#![allow(clippy::blocks_in_conditions)] + pub mod config; pub mod dev; pub mod katana; +pub mod metrics; pub mod saya; pub mod starknet; pub mod torii; use std::net::SocketAddr; use std::sync::Arc; -use std::time::{Duration, Instant}; +use std::time::Duration; use anyhow::Result; use config::ServerConfig; use hyper::Method; -use jsonrpsee::server::logger::{Logger, MethodKind, TransportProtocol}; use jsonrpsee::server::middleware::proxy_get_request::ProxyGetRequestLayer; use jsonrpsee::server::{AllowHosts, ServerBuilder, ServerHandle}; -use jsonrpsee::tracing::debug; -use jsonrpsee::types::Params; use jsonrpsee::RpcModule; use katana_core::sequencer::KatanaSequencer; use katana_executor::ExecutorFactory; @@ -26,6 +26,7 @@ use katana_rpc_api::saya::SayaApiServer; use katana_rpc_api::starknet::StarknetApiServer; use katana_rpc_api::torii::ToriiApiServer; use katana_rpc_api::ApiKind; +use metrics::RpcServerMetrics; use tower_http::cors::{Any, CorsLayer}; use crate::dev::DevApi; @@ -74,7 +75,7 @@ pub async fn spawn( .timeout(Duration::from_secs(20)); let server = ServerBuilder::new() - .set_logger(RpcLogger) + .set_logger(RpcServerMetrics::new(&methods)) .set_host_filtering(AllowHosts::Any) .set_middleware(middleware) .max_connections(config.max_connections) @@ -93,50 +94,3 @@ pub struct NodeHandle { pub config: ServerConfig, pub handle: ServerHandle, } - -#[derive(Debug, Clone)] -pub struct RpcLogger; - -impl Logger for RpcLogger { - type Instant = std::time::Instant; - - fn on_connect( - &self, - _remote_addr: std::net::SocketAddr, - _request: &jsonrpsee::server::logger::HttpRequest, - _t: TransportProtocol, - ) { - } - - fn 
on_request(&self, _transport: TransportProtocol) -> Self::Instant { - Instant::now() - } - - fn on_call( - &self, - method_name: &str, - _params: Params<'_>, - _kind: MethodKind, - _transport: TransportProtocol, - ) { - debug!(target: "server", method = ?method_name); - } - - fn on_result( - &self, - _method_name: &str, - _success: bool, - _started_at: Self::Instant, - _transport: TransportProtocol, - ) { - } - - fn on_response( - &self, - _result: &str, - _started_at: Self::Instant, - _transport: TransportProtocol, - ) { - } - fn on_disconnect(&self, _remote_addr: std::net::SocketAddr, _transport: TransportProtocol) {} -} diff --git a/crates/katana/rpc/rpc/src/metrics.rs b/crates/katana/rpc/rpc/src/metrics.rs new file mode 100644 index 0000000000..501cc2fce3 --- /dev/null +++ b/crates/katana/rpc/rpc/src/metrics.rs @@ -0,0 +1,173 @@ +//! This module is responsible for managing and collecting metrics related to the RPC +//! server. The metrics collected are primarily focused on connections and method calls. +//! +//! ## Connections +//! +//! Metrics related to connections: +//! +//! - Number of connections opened +//! - Number of connections closed +//! - Number of requests started +//! - Number of requests finished +//! - Response time for each request/response pair +//! +//! ## Method Calls +//! +//! Metrics are collected for each method exposed by the RPC server. The metrics collected include: +//! +//! - Number of calls started for each method +//! - Number of successful calls for each method +//! - Number of failed calls for each method +//! - Response time for each method call + +use std::collections::HashMap; +use std::net::SocketAddr; +use std::sync::Arc; +use std::time::Instant; + +use dojo_metrics::metrics::{Counter, Histogram}; +use dojo_metrics::Metrics; +use jsonrpsee::server::logger::{HttpRequest, Logger, MethodKind, Params, TransportProtocol}; +use jsonrpsee::RpcModule; +use tracing::debug; + +/// Metrics for the RPC server. +#[derive(Default, Clone)] +pub(crate) struct RpcServerMetrics { + inner: Arc, +} + +impl RpcServerMetrics { + /// Creates a new instance of `RpcServerMetrics` for the given `RpcModule`. + /// This will create metrics for each method in the module.
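///
/// A minimal usage sketch, mirroring how the server wires this up in `spawn` in `lib.rs`
/// (where `methods` is the merged `RpcModule` and `ServerBuilder` comes from `jsonrpsee`):
///
///     let logger = RpcServerMetrics::new(&methods);
///     let builder = ServerBuilder::new().set_logger(logger);
///     // ...the rest of the server setup continues as in `spawn`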
+ pub(crate) fn new(module: &RpcModule<()>) -> Self { + let call_metrics = HashMap::from_iter(module.method_names().map(|method| { + let metrics = RpcServerCallMetrics::new_with_labels(&[("method", method)]); + (method, metrics) + })); + + Self { + inner: Arc::new(RpcServerMetricsInner { + call_metrics, + connection_metrics: ConnectionMetrics::default(), + }), + } + } +} + +#[derive(Default, Clone)] +struct RpcServerMetricsInner { + /// Connection metrics per transport type + connection_metrics: ConnectionMetrics, + /// Call metrics per RPC method + call_metrics: HashMap<&'static str, RpcServerCallMetrics>, +} + +#[derive(Clone)] +struct ConnectionMetrics { + /// Metrics for WebSocket connections + ws: RpcServerConnectionMetrics, + /// Metrics for HTTP connections + http: RpcServerConnectionMetrics, +} + +impl ConnectionMetrics { + /// Returns the metrics for the given transport protocol + fn get_metrics(&self, transport: TransportProtocol) -> &RpcServerConnectionMetrics { + match transport { + TransportProtocol::Http => &self.http, + TransportProtocol::WebSocket => &self.ws, + } + } +} + +impl Default for ConnectionMetrics { + fn default() -> Self { + Self { + ws: RpcServerConnectionMetrics::new_with_labels(&[("transport", "ws")]), + http: RpcServerConnectionMetrics::new_with_labels(&[("transport", "http")]), + } + } +} + +/// Metrics for the RPC connections +#[derive(Metrics, Clone)] +#[metrics(scope = "rpc_server.connections")] +struct RpcServerConnectionMetrics { + /// The number of connections opened + connections_opened: Counter, + /// The number of connections closed + connections_closed: Counter, + /// The number of requests started + requests_started: Counter, + /// The number of requests finished + requests_finished: Counter, + /// Response for a single request/response pair + request_time_seconds: Histogram, +} + +/// Metrics for the RPC calls +#[derive(Metrics, Clone)] +#[metrics(scope = "rpc_server.calls")] +struct RpcServerCallMetrics { + /// The number of calls started + started: Counter, + /// The number of successful calls + successful: Counter, + /// The number of failed calls + failed: Counter, + /// Response for a single call + time_seconds: Histogram, +} + +/// Implements the [Logger] trait so that we can collect metrics on each server request life-cycle. 
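// A rough picture of what a single successful HTTP call to a method such as `starknet_chainId`
// records, assuming the `Metrics` derive publishes metrics as `scope.field` with the labels
// configured above (an illustrative sketch, not exact exporter output):
//
//     rpc_server.connections.requests_started      {transport="http"}           +1
//     rpc_server.calls.started                     {method="starknet_chainId"}  +1
//     rpc_server.calls.successful                  {method="starknet_chainId"}  +1
//     rpc_server.calls.time_seconds                {method="starknet_chainId"}  latency recorded
//     rpc_server.connections.requests_finished     {transport="http"}           +1
//     rpc_server.connections.request_time_seconds  {transport="http"}           latency recorded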
+impl Logger for RpcServerMetrics { + type Instant = Instant; + + fn on_connect(&self, _: SocketAddr, _: &HttpRequest, transport: TransportProtocol) { + self.inner.connection_metrics.get_metrics(transport).connections_opened.increment(1) + } + + fn on_request(&self, transport: TransportProtocol) -> Self::Instant { + self.inner.connection_metrics.get_metrics(transport).requests_started.increment(1); + Instant::now() + } + + fn on_call(&self, method_name: &str, _: Params<'_>, _: MethodKind, _: TransportProtocol) { + debug!(target: "server", method = ?method_name); + let Some(call_metrics) = self.inner.call_metrics.get(method_name) else { return }; + call_metrics.started.increment(1); + } + + fn on_result( + &self, + method_name: &str, + success: bool, + started_at: Self::Instant, + _: TransportProtocol, + ) { + let Some(call_metrics) = self.inner.call_metrics.get(method_name) else { return }; + + // capture call latency + let time_taken = started_at.elapsed().as_secs_f64(); + call_metrics.time_seconds.record(time_taken); + + if success { + call_metrics.successful.increment(1); + } else { + call_metrics.failed.increment(1); + } + } + + fn on_response(&self, _: &str, started_at: Self::Instant, transport: TransportProtocol) { + let metrics = self.inner.connection_metrics.get_metrics(transport); + // capture request latency for this request/response pair + let time_taken = started_at.elapsed().as_secs_f64(); + metrics.request_time_seconds.record(time_taken); + metrics.requests_finished.increment(1); + } + + fn on_disconnect(&self, _: SocketAddr, transport: TransportProtocol) { + self.inner.connection_metrics.get_metrics(transport).connections_closed.increment(1) + } +} diff --git a/crates/katana/rpc/rpc/tests/saya.rs b/crates/katana/rpc/rpc/tests/saya.rs index dd1135c4b2..e706177157 100644 --- a/crates/katana/rpc/rpc/tests/saya.rs +++ b/crates/katana/rpc/rpc/tests/saya.rs @@ -23,7 +23,7 @@ mod common; async fn no_pending_support() { // Saya does not support the pending block and only work on sealed blocks. let sequencer = TestSequencer::start( - SequencerConfig { block_time: None, no_mining: true, ..Default::default() }, + SequencerConfig { no_mining: true, ..Default::default() }, get_default_test_starknet_config(), ) .await; @@ -47,7 +47,7 @@ async fn no_pending_support() { async fn process_sealed_block_only() { // Saya does not support the pending block and only work on sealed blocks. 
let sequencer = TestSequencer::start( - SequencerConfig { block_time: None, no_mining: true, ..Default::default() }, + SequencerConfig { no_mining: true, ..Default::default() }, get_default_test_starknet_config(), ) .await; @@ -68,39 +68,57 @@ async fn process_sealed_block_only() { client.get_transactions_executions(cursor).await.unwrap(); assert!(response.transactions_executions.is_empty()); - assert!(response.cursor.block_number == 1); - assert!(response.cursor.transaction_index == 0); - assert!(response.cursor.chunk_size == CHUNK_SIZE_DEFAULT); + assert_eq!(response.cursor.block_number, 1); + assert_eq!(response.cursor.transaction_index, 0); + assert_eq!(response.cursor.chunk_size, CHUNK_SIZE_DEFAULT); - let _declare_res = account.declare(contract.clone(), compiled_class_hash).send().await.unwrap(); + let declare_res = account.declare(contract.clone(), compiled_class_hash).send().await.unwrap(); + + let max_retry = 10; + let mut attempt = 0; + loop { + match client.transaction_status(declare_res.transaction_hash).await { + Ok(s) => { + if s != TransactionStatus::Received { + break; + } + } + Err(_) => { + assert!(attempt < max_retry); + sleep(Duration::from_millis(300)).await; + attempt += 1; + } + } + } // Should still return 0 transactions execution for the block 0. let response: TransactionsExecutionsPage = client.get_transactions_executions(cursor).await.unwrap(); assert!(response.transactions_executions.is_empty()); - assert!(response.cursor.block_number == 1); - assert!(response.cursor.transaction_index == 0); - assert!(response.cursor.chunk_size == CHUNK_SIZE_DEFAULT); + assert_eq!(response.cursor.block_number, 1); + assert_eq!(response.cursor.transaction_index, 0); + assert_eq!(response.cursor.chunk_size, CHUNK_SIZE_DEFAULT); // Create block 1. let _: () = client.generate_block().await.unwrap(); // Should now return 1 transaction from the mined block. cursor.block_number = 1; + let response: TransactionsExecutionsPage = client.get_transactions_executions(cursor).await.unwrap(); - assert!(response.transactions_executions.len() == 1); - assert!(response.cursor.block_number == 2); - assert!(response.cursor.transaction_index == 0); - assert!(response.cursor.chunk_size == CHUNK_SIZE_DEFAULT); + assert_eq!(response.transactions_executions.len(), 1); + assert_eq!(response.cursor.block_number, 2); + assert_eq!(response.cursor.transaction_index, 0); + assert_eq!(response.cursor.chunk_size, CHUNK_SIZE_DEFAULT); } #[tokio::test(flavor = "multi_thread")] async fn executions_chunks_logic_ok() { let sequencer = TestSequencer::start( - SequencerConfig { block_time: None, no_mining: true, ..Default::default() }, + SequencerConfig { no_mining: true, ..Default::default() }, get_default_test_starknet_config(), ) .await; @@ -160,17 +178,17 @@ async fn executions_chunks_logic_ok() { let response: TransactionsExecutionsPage = client.get_transactions_executions(cursor).await.unwrap(); - assert!(response.transactions_executions.len() == 15); - assert!(response.cursor.block_number == 1); - assert!(response.cursor.transaction_index == 15); + assert_eq!(response.transactions_executions.len(), 15); + assert_eq!(response.cursor.block_number, 1); + assert_eq!(response.cursor.transaction_index, 15); // Should get the remaining 15 transactions and cursor to the next block. 
let response: TransactionsExecutionsPage = client.get_transactions_executions(response.cursor).await.unwrap(); - assert!(response.transactions_executions.len() == 15); - assert!(response.cursor.block_number == 2); - assert!(response.cursor.transaction_index == 0); + assert_eq!(response.transactions_executions.len(), 15); + assert_eq!(response.cursor.block_number, 2); + assert_eq!(response.cursor.transaction_index, 0); // Create block 2. let _: () = client.generate_block().await.unwrap(); @@ -179,8 +197,8 @@ async fn executions_chunks_logic_ok() { client.get_transactions_executions(response.cursor).await.unwrap(); assert!(response.transactions_executions.is_empty()); - assert!(response.cursor.block_number == 3); - assert!(response.cursor.transaction_index == 0); + assert_eq!(response.cursor.block_number, 3); + assert_eq!(response.cursor.transaction_index, 0); sequencer.stop().expect("failed to stop sequencer"); } diff --git a/crates/katana/rpc/rpc/tests/torii.rs b/crates/katana/rpc/rpc/tests/torii.rs index c0e801a130..5a897ed8aa 100644 --- a/crates/katana/rpc/rpc/tests/torii.rs +++ b/crates/katana/rpc/rpc/tests/torii.rs @@ -6,10 +6,11 @@ use dojo_test_utils::sequencer::{get_default_test_starknet_config, TestSequencer use jsonrpsee::http_client::HttpClientBuilder; use katana_core::sequencer::SequencerConfig; use katana_rpc_api::dev::DevApiClient; +use katana_rpc_api::starknet::StarknetApiClient; use katana_rpc_api::torii::ToriiApiClient; use katana_rpc_types::transaction::{TransactionsPage, TransactionsPageCursor}; use starknet::accounts::{Account, Call}; -use starknet::core::types::FieldElement; +use starknet::core::types::{FieldElement, TransactionStatus}; use starknet::core::utils::get_selector_from_name; use tokio::time::sleep; @@ -22,7 +23,7 @@ pub const ENOUGH_GAS: &str = "0x100000000000000000"; #[tokio::test(flavor = "multi_thread")] async fn test_get_transactions() { let sequencer = TestSequencer::start( - SequencerConfig { block_time: None, no_mining: true, ..Default::default() }, + SequencerConfig { no_mining: true, ..Default::default() }, get_default_test_starknet_config(), ) .await; @@ -112,17 +113,39 @@ async fn test_get_transactions() { let max_fee = FieldElement::from_hex_be(ENOUGH_GAS).unwrap(); let mut nonce = FieldElement::THREE; + let mut last_tx_hash = FieldElement::ZERO; + // Test only returns first 100 txns from pending block for i in 0..101 { let deploy_call = build_deploy_contract_call(declare_res.class_hash, (i + 2_u32).into()); let deploy_txn = account.execute(vec![deploy_call]).nonce(nonce).max_fee(max_fee); - deploy_txn.send().await.unwrap(); + let res = deploy_txn.send().await.unwrap(); nonce += FieldElement::ONE; + + if i == 100 { + last_tx_hash = res.transaction_hash; + } } - // Wait until all pending txs have been mined. - // @kairy is there a more deterministic approach here? - sleep(Duration::from_millis(5000)).await; + assert!(last_tx_hash != FieldElement::ZERO); + + // Poll the status of the last tx sent.
+ let max_retry = 10; + let mut attempt = 0; + loop { + match client.transaction_status(last_tx_hash).await { + Ok(s) => { + if s != TransactionStatus::Received { + break; + } + } + Err(_) => { + assert!(attempt < max_retry); + sleep(Duration::from_millis(300)).await; + attempt += 1; + } + } + } let start_cursor = response.cursor; let response: TransactionsPage = client.get_transactions(start_cursor).await.unwrap(); @@ -156,7 +179,7 @@ async fn test_get_transactions() { #[tokio::test(flavor = "multi_thread")] async fn test_get_transactions_with_instant_mining() { let sequencer = TestSequencer::start( - SequencerConfig { block_time: None, no_mining: false, ..Default::default() }, + SequencerConfig { no_mining: false, ..Default::default() }, get_default_test_starknet_config(), ) .await; @@ -180,7 +203,7 @@ async fn test_get_transactions_with_instant_mining() { let response: TransactionsPage = client.get_transactions(cursor).await.unwrap(); assert_eq!(response.transactions.len(), 1); - assert_eq!(response.cursor.block_number, 2); + assert_eq!(response.cursor.block_number, 1); assert_eq!(response.cursor.transaction_index, 0); // Should block on cursor at end of page and return on new txn @@ -193,7 +216,7 @@ async fn test_get_transactions_with_instant_mining() { result = long_poll_future => { let long_poll_result = result.unwrap(); assert_eq!(long_poll_result.transactions.len(), 1); - assert_eq!(long_poll_result.cursor.block_number, 3); + assert_eq!(long_poll_result.cursor.block_number, 2); assert_eq!(long_poll_result.cursor.transaction_index, 0); } result = deploy_txn_future => { @@ -210,22 +233,10 @@ async fn test_get_transactions_with_instant_mining() { // Should properly increment to new pending block - // let response: TransactionsPage = client - // .get_transactions(TransactionsPageCursor { - // block_number: 3, - // transaction_index: 0, - // chunk_size: 100, - // }) - // .await - // .unwrap(); - - // assert_eq!(response.transactions.len(), 1); - // assert_eq!( - // response.transactions[0].0 .0.transaction_hash().clone(), - // deploy_txn_future.transaction_hash - // ); - // assert_eq!(response.cursor.block_number, 3); - // assert_eq!(response.cursor.transaction_index, 1); + assert_eq!(response.transactions.len(), 1); + assert_eq!(response.transactions[0].0.hash, deploy_txn_future.transaction_hash); + assert_eq!(response.cursor.block_number, 3); + assert_eq!(response.cursor.transaction_index, 1); sequencer.stop().expect("failed to stop sequencer"); } diff --git a/crates/katana/runner/runner-macro/Cargo.toml b/crates/katana/runner/runner-macro/Cargo.toml index 2dd6cd44f7..bfeeb542dc 100644 --- a/crates/katana/runner/runner-macro/Cargo.toml +++ b/crates/katana/runner/runner-macro/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "runner-macro" -version = "0.6.0-alpha.8" +version = "0.6.1-alpha.4" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/crates/katana/runner/src/prefunded.rs b/crates/katana/runner/src/prefunded.rs index 8050a82105..0a04515897 100644 --- a/crates/katana/runner/src/prefunded.rs +++ b/crates/katana/runner/src/prefunded.rs @@ -3,6 +3,7 @@ use katana_primitives::chain::ChainId; use katana_primitives::contract::ContractAddress; use katana_primitives::genesis::allocation::DevGenesisAccount; use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; +use starknet::core::types::{BlockId, BlockTag}; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; use 
starknet::signers::{LocalWallet, SigningKey}; @@ -36,12 +37,16 @@ impl KatanaRunner { debug_assert_eq!(Environment::default().chain_id, ChainId::parse("KATANA").unwrap()); let provider = self.owned_provider(); - SingleOwnerAccount::new( + let mut account = SingleOwnerAccount::new( provider, signer, account.0.into(), chain_id.into(), ExecutionEncoding::New, - ) + ); + + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + account } } diff --git a/crates/katana/runner/src/utils.rs b/crates/katana/runner/src/utils.rs index 169f43b1bf..d6072aeb51 100644 --- a/crates/katana/runner/src/utils.rs +++ b/crates/katana/runner/src/utils.rs @@ -23,7 +23,7 @@ pub fn wait_for_server_started_and_signal(path: &Path, stdout: ChildStdout, send let line = line.expect("failed to read line from subprocess stdout"); writeln!(log_writer, "{}", line).expect("failed to write to log file"); - if line.contains(r#""target":"katana""#) { + if line.contains(r#""target":"katana::cli""#) { sender.send(()).expect("failed to send start signal"); } } diff --git a/crates/katana/storage/codecs/Cargo.toml b/crates/katana/storage/codecs/Cargo.toml index 7af7355661..d64b0c3ea2 100644 --- a/crates/katana/storage/codecs/Cargo.toml +++ b/crates/katana/storage/codecs/Cargo.toml @@ -7,4 +7,4 @@ version.workspace = true [dependencies] bytes = "1.5.0" -katana-primitives = { path = "../../primitives" } +katana-primitives.workspace = true diff --git a/crates/katana/storage/db/Cargo.toml b/crates/katana/storage/db/Cargo.toml index 68c9ac316c..1c0efbe423 100644 --- a/crates/katana/storage/db/Cargo.toml +++ b/crates/katana/storage/db/Cargo.toml @@ -7,7 +7,7 @@ version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -katana-primitives = { path = "../../primitives" } +katana-primitives = { workspace = true } anyhow.workspace = true page_size = "0.6.0" @@ -18,6 +18,7 @@ tempfile = { version = "3.8.1", optional = true } thiserror.workspace = true cairo-vm.workspace = true +roaring = { version = "0.10.3", features = [ "serde" ] } starknet_api.workspace = true # codecs diff --git a/crates/katana/storage/db/src/codecs/postcard.rs b/crates/katana/storage/db/src/codecs/postcard.rs index 074bbec480..83f46f8d13 100644 --- a/crates/katana/storage/db/src/codecs/postcard.rs +++ b/crates/katana/storage/db/src/codecs/postcard.rs @@ -1,4 +1,4 @@ -use katana_primitives::block::{BlockNumber, Header}; +use katana_primitives::block::Header; use katana_primitives::contract::{ContractAddress, GenericContractInfo}; use katana_primitives::receipt::Receipt; use katana_primitives::trace::TxExecInfo; @@ -10,6 +10,7 @@ use super::{Compress, Decompress}; use crate::error::CodecError; use crate::models::block::StoredBlockBodyIndices; use crate::models::contract::ContractInfoChangeList; +use crate::models::list::BlockList; macro_rules! 
impl_compress_and_decompress_for_table_values { ($($name:ty),*) => { @@ -38,7 +39,7 @@ impl_compress_and_decompress_for_table_values!( Receipt, FieldElement, ContractAddress, - Vec, + BlockList, GenericContractInfo, StoredBlockBodyIndices, ContractInfoChangeList diff --git a/crates/katana/storage/db/src/models/block.rs b/crates/katana/storage/db/src/models/block.rs index be577651f3..7734b7f517 100644 --- a/crates/katana/storage/db/src/models/block.rs +++ b/crates/katana/storage/db/src/models/block.rs @@ -3,7 +3,7 @@ use std::ops::Range; use katana_primitives::transaction::TxNumber; use serde::{Deserialize, Serialize}; -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq, Eq)] pub struct StoredBlockBodyIndices { /// The offset in database of the first transaction in the block. /// diff --git a/crates/katana/storage/db/src/models/contract.rs b/crates/katana/storage/db/src/models/contract.rs index 73f5025d0c..241e29dd59 100644 --- a/crates/katana/storage/db/src/models/contract.rs +++ b/crates/katana/storage/db/src/models/contract.rs @@ -1,19 +1,17 @@ -use katana_primitives::block::BlockNumber; use katana_primitives::class::ClassHash; use katana_primitives::contract::{ContractAddress, Nonce}; use serde::{Deserialize, Serialize}; +use super::list::BlockList; use crate::codecs::{Compress, Decode, Decompress, Encode}; -pub type BlockList = Vec; - -#[derive(Debug, Default, Serialize, Deserialize)] +#[derive(Debug, Default, Serialize, Deserialize, PartialEq)] pub struct ContractInfoChangeList { pub class_change_list: BlockList, pub nonce_change_list: BlockList, } -#[derive(Debug)] +#[derive(Debug, Default, PartialEq, Eq)] pub struct ContractClassChange { pub contract_address: ContractAddress, /// The updated class hash of `contract_address`. @@ -39,7 +37,7 @@ impl Decompress for ContractClassChange { } } -#[derive(Debug)] +#[derive(Debug, Default, PartialEq, Eq)] pub struct ContractNonceChange { pub contract_address: ContractAddress, /// The updated nonce value of `contract_address`. diff --git a/crates/katana/storage/db/src/models/list.rs b/crates/katana/storage/db/src/models/list.rs new file mode 100644 index 0000000000..4623341732 --- /dev/null +++ b/crates/katana/storage/db/src/models/list.rs @@ -0,0 +1,47 @@ +use roaring::RoaringTreemap; +use serde::{Deserialize, Serialize}; + +/// Stores a list of block numbers. +/// Mainly used for changeset tables to store the list of block numbers where a change occurred. +pub type BlockList = IntegerSet; + +/// A set for storing integer values. +/// +/// The list is stored in a Roaring bitmap data structure as it uses less space compared to a normal +/// bitmap or even a naive array with similar cardinality. +/// +/// See . +#[derive(Debug, Default, Serialize, Deserialize, PartialEq)] +pub struct IntegerSet(RoaringTreemap); + +impl IntegerSet { + pub fn new() -> Self { + Self(RoaringTreemap::new()) + } + + /// Insert a new number to the set. + pub fn insert(&mut self, num: u64) { + self.0.insert(num); + } + + /// Checks if the set contains the given number. + pub fn contains(&self, num: u64) -> bool { + self.0.contains(num) + } + + /// Returns the number of elements in the set that are smaller or equal to the given `value`. + pub fn rank(&self, value: u64) -> u64 { + self.0.rank(value) + } + + /// Returns the `n`th integer in the set or `None` if `n >= len()`. 
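///
/// For example (a minimal sketch), with the set `{1, 5, 9}`:
///
///     let set = IntegerSet::from([1, 5, 9]);
///     assert_eq!(set.select(0), Some(1)); // smallest element
///     assert_eq!(set.select(2), Some(9)); // largest element
///     assert_eq!(set.select(3), None);    // out of range: the set only has three elements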
+ pub fn select(&self, n: u64) -> Option { + self.0.select(n) + } +} + +impl From<[u64; N]> for IntegerSet { + fn from(arr: [u64; N]) -> Self { + Self(RoaringTreemap::from_iter(arr)) + } +} diff --git a/crates/katana/storage/db/src/models/mod.rs b/crates/katana/storage/db/src/models/mod.rs index 66150ed28b..4279b48986 100644 --- a/crates/katana/storage/db/src/models/mod.rs +++ b/crates/katana/storage/db/src/models/mod.rs @@ -1,4 +1,5 @@ pub mod block; pub mod class; pub mod contract; +pub mod list; pub mod storage; diff --git a/crates/katana/storage/db/src/models/storage.rs b/crates/katana/storage/db/src/models/storage.rs index 6b1c3da54d..5412b22b1f 100644 --- a/crates/katana/storage/db/src/models/storage.rs +++ b/crates/katana/storage/db/src/models/storage.rs @@ -1,4 +1,3 @@ -use katana_primitives::block::BlockNumber; use katana_primitives::contract::{ContractAddress, StorageKey, StorageValue}; use crate::codecs::{Compress, Decode, Decompress, Encode}; @@ -35,31 +34,6 @@ impl Decompress for StorageEntry { } } -#[derive(Debug)] -pub struct StorageEntryChangeList { - pub key: StorageKey, - pub block_list: Vec, -} - -impl Compress for StorageEntryChangeList { - type Compressed = Vec; - fn compress(self) -> Self::Compressed { - let mut buf = Vec::new(); - buf.extend_from_slice(&self.key.encode()); - buf.extend_from_slice(&self.block_list.compress()); - buf - } -} - -impl Decompress for StorageEntryChangeList { - fn decompress>(bytes: B) -> Result { - let bytes = bytes.as_ref(); - let key = StorageKey::decode(&bytes[0..32])?; - let blocks = Vec::::decompress(&bytes[32..])?; - Ok(Self { key, block_list: blocks }) - } -} - #[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct ContractStorageKey { pub contract_address: ContractAddress, diff --git a/crates/katana/storage/db/src/tables.rs b/crates/katana/storage/db/src/tables.rs index b42f431e8e..47a6436791 100644 --- a/crates/katana/storage/db/src/tables.rs +++ b/crates/katana/storage/db/src/tables.rs @@ -8,9 +8,8 @@ use katana_primitives::transaction::{Tx, TxHash, TxNumber}; use crate::codecs::{Compress, Decode, Decompress, Encode}; use crate::models::block::StoredBlockBodyIndices; use crate::models::contract::{ContractClassChange, ContractInfoChangeList, ContractNonceChange}; -use crate::models::storage::{ - ContractStorageEntry, ContractStorageKey, StorageEntry, StorageEntryChangeList, -}; +use crate::models::list::BlockList; +use crate::models::storage::{ContractStorageEntry, ContractStorageKey, StorageEntry}; pub trait Key: Encode + Decode + Clone + std::fmt::Debug {} pub trait Value: Compress + Decompress + std::fmt::Debug {} @@ -45,7 +44,7 @@ pub enum TableType { DupSort, } -pub const NUM_TABLES: usize = 22; +pub const NUM_TABLES: usize = 23; /// Macro to declare `libmdbx` tables. #[macro_export] @@ -154,6 +153,7 @@ define_tables_enum! {[ (TxNumbers, TableType::Table), (TxBlocks, TableType::Table), (TxHashes, TableType::Table), + (TxTraces, TableType::Table), (Transactions, TableType::Table), (Receipts, TableType::Table), (CompiledClassHashes, TableType::Table), @@ -164,10 +164,10 @@ define_tables_enum! 
{[ (ClassDeclarationBlock, TableType::Table), (ClassDeclarations, TableType::DupSort), (ContractInfoChangeSet, TableType::Table), - (NonceChanges, TableType::DupSort), - (ContractClassChanges, TableType::DupSort), - (StorageChanges, TableType::DupSort), - (StorageChangeSet, TableType::DupSort) + (NonceChangeHistory, TableType::DupSort), + (ClassChangeHistory, TableType::DupSort), + (StorageChangeHistory, TableType::DupSort), + (StorageChangeSet, TableType::Table) ]} tables! { @@ -190,8 +190,8 @@ tables! { Transactions: (TxNumber) => Tx, /// Stores the block number of a transaction. TxBlocks: (TxNumber) => BlockNumber, - /// Stores the transaction's execution info. - TxExecutions: (TxNumber) => TxExecInfo, + /// Stores the transaction's traces. + TxTraces: (TxNumber) => TxExecInfo, /// Store transaction receipts Receipts: (TxNumber) => Receipt, /// Store compiled classes @@ -215,21 +215,21 @@ tables! { /// /// Stores the list of blocks where the contract info (nonce / class hash) has changed. ContractInfoChangeSet: (ContractAddress) => ContractInfoChangeList, - /// Contract nonce changes by block. - NonceChanges: (BlockNumber, ContractAddress) => ContractNonceChange, + NonceChangeHistory: (BlockNumber, ContractAddress) => ContractNonceChange, /// Contract class hash changes by block. - ContractClassChanges: (BlockNumber, ContractAddress) => ContractClassChange, + ClassChangeHistory: (BlockNumber, ContractAddress) => ContractClassChange, /// storage change set - StorageChangeSet: (ContractAddress, StorageKey) => StorageEntryChangeList, + StorageChangeSet: (ContractStorageKey) => BlockList, /// Account storage change set - StorageChanges: (BlockNumber, ContractStorageKey) => ContractStorageEntry + StorageChangeHistory: (BlockNumber, ContractStorageKey) => ContractStorageEntry } #[cfg(test)] mod tests { + #[test] fn test_tables() { use super::*; @@ -243,19 +243,129 @@ mod tests { assert_eq!(Tables::ALL[5].name(), TxNumbers::NAME); assert_eq!(Tables::ALL[6].name(), TxBlocks::NAME); assert_eq!(Tables::ALL[7].name(), TxHashes::NAME); - assert_eq!(Tables::ALL[8].name(), Transactions::NAME); - assert_eq!(Tables::ALL[9].name(), Receipts::NAME); - assert_eq!(Tables::ALL[10].name(), CompiledClassHashes::NAME); - assert_eq!(Tables::ALL[11].name(), CompiledClasses::NAME); - assert_eq!(Tables::ALL[12].name(), SierraClasses::NAME); - assert_eq!(Tables::ALL[13].name(), ContractInfo::NAME); - assert_eq!(Tables::ALL[14].name(), ContractStorage::NAME); - assert_eq!(Tables::ALL[15].name(), ClassDeclarationBlock::NAME); - assert_eq!(Tables::ALL[16].name(), ClassDeclarations::NAME); - assert_eq!(Tables::ALL[17].name(), ContractInfoChangeSet::NAME); - assert_eq!(Tables::ALL[18].name(), NonceChanges::NAME); - assert_eq!(Tables::ALL[19].name(), ContractClassChanges::NAME); - assert_eq!(Tables::ALL[20].name(), StorageChanges::NAME); - assert_eq!(Tables::ALL[21].name(), StorageChangeSet::NAME); + assert_eq!(Tables::ALL[8].name(), TxTraces::NAME); + assert_eq!(Tables::ALL[9].name(), Transactions::NAME); + assert_eq!(Tables::ALL[10].name(), Receipts::NAME); + assert_eq!(Tables::ALL[11].name(), CompiledClassHashes::NAME); + assert_eq!(Tables::ALL[12].name(), CompiledClasses::NAME); + assert_eq!(Tables::ALL[13].name(), SierraClasses::NAME); + assert_eq!(Tables::ALL[14].name(), ContractInfo::NAME); + assert_eq!(Tables::ALL[15].name(), ContractStorage::NAME); + assert_eq!(Tables::ALL[16].name(), ClassDeclarationBlock::NAME); + assert_eq!(Tables::ALL[17].name(), ClassDeclarations::NAME); + assert_eq!(Tables::ALL[18].name(), 
ContractInfoChangeSet::NAME); + assert_eq!(Tables::ALL[19].name(), NonceChangeHistory::NAME); + assert_eq!(Tables::ALL[20].name(), ClassChangeHistory::NAME); + assert_eq!(Tables::ALL[21].name(), StorageChangeHistory::NAME); + assert_eq!(Tables::ALL[22].name(), StorageChangeSet::NAME); + + assert_eq!(Tables::Headers.table_type(), TableType::Table); + assert_eq!(Tables::BlockHashes.table_type(), TableType::Table); + assert_eq!(Tables::BlockNumbers.table_type(), TableType::Table); + assert_eq!(Tables::BlockBodyIndices.table_type(), TableType::Table); + assert_eq!(Tables::BlockStatusses.table_type(), TableType::Table); + assert_eq!(Tables::TxNumbers.table_type(), TableType::Table); + assert_eq!(Tables::TxBlocks.table_type(), TableType::Table); + assert_eq!(Tables::TxHashes.table_type(), TableType::Table); + assert_eq!(Tables::TxTraces.table_type(), TableType::Table); + assert_eq!(Tables::Transactions.table_type(), TableType::Table); + assert_eq!(Tables::Receipts.table_type(), TableType::Table); + assert_eq!(Tables::CompiledClassHashes.table_type(), TableType::Table); + assert_eq!(Tables::CompiledClasses.table_type(), TableType::Table); + assert_eq!(Tables::SierraClasses.table_type(), TableType::Table); + assert_eq!(Tables::ContractInfo.table_type(), TableType::Table); + assert_eq!(Tables::ContractStorage.table_type(), TableType::DupSort); + assert_eq!(Tables::ClassDeclarationBlock.table_type(), TableType::Table); + assert_eq!(Tables::ClassDeclarations.table_type(), TableType::DupSort); + assert_eq!(Tables::ContractInfoChangeSet.table_type(), TableType::Table); + assert_eq!(Tables::NonceChangeHistory.table_type(), TableType::DupSort); + assert_eq!(Tables::ClassChangeHistory.table_type(), TableType::DupSort); + assert_eq!(Tables::StorageChangeHistory.table_type(), TableType::DupSort); + assert_eq!(Tables::StorageChangeSet.table_type(), TableType::Table); + } + + use katana_primitives::block::{BlockHash, BlockNumber, FinalityStatus, Header}; + use katana_primitives::class::{ClassHash, CompiledClass, CompiledClassHash}; + use katana_primitives::contract::{ContractAddress, GenericContractInfo}; + use katana_primitives::receipt::Receipt; + use katana_primitives::trace::TxExecInfo; + use katana_primitives::transaction::{InvokeTx, Tx, TxHash, TxNumber}; + use starknet::macros::felt; + + use crate::codecs::{Compress, Decode, Decompress, Encode}; + use crate::models::block::StoredBlockBodyIndices; + use crate::models::contract::{ + ContractClassChange, ContractInfoChangeList, ContractNonceChange, + }; + use crate::models::list::BlockList; + use crate::models::storage::{ContractStorageEntry, ContractStorageKey, StorageEntry}; + + macro_rules! assert_key_encode_decode { + { $( ($name:ty, $key:expr) ),* } => { + $( + { + let key: $name = $key; + let encoded = key.encode(); + let decoded = <$name as Decode>::decode(encoded.as_slice()).expect("decode failed"); + assert_eq!($key, decoded); + } + )* + }; + } + + macro_rules! assert_value_compress_decompress { + { $( ($name:ty, $value:expr) ),* } => { + $( + { + let value: $name = $value; + let compressed = value.compress(); + let decompressed = <$name as Decompress>::decompress(compressed.as_slice()).expect("decode failed"); + assert_eq!($value, decompressed); + } + )* + }; + } + + // Test that all key/subkey types can be encoded and decoded + // through the Encode and Decode traits + #[test] + fn test_key_encode_decode() { + assert_key_encode_decode! 
{ + (BlockNumber, 100), + (BlockHash, felt!("0x123456789")), + (TxHash, felt!("0x123456789")), + (TxNumber, 100), + (ClassHash, felt!("0x123456789")), + (ContractAddress, ContractAddress(felt!("0x123456789"))), + (ContractStorageKey, ContractStorageKey { contract_address : ContractAddress(felt!("0x123456789")), key : felt!("0x123456789")}) + } + } + + // Test that all value types can be compressed and decompressed + // through the Compress and Decompress traits + #[test] + fn test_value_compress_decompress() { + assert_value_compress_decompress! { + (Header, Header::default()), + (BlockHash, BlockHash::default()), + (BlockNumber, BlockNumber::default()), + (FinalityStatus, FinalityStatus::AcceptedOnL1), + (StoredBlockBodyIndices, StoredBlockBodyIndices::default()), + (TxNumber, 77), + (TxHash, felt!("0x123456789")), + (Tx, Tx::Invoke(InvokeTx::V1(Default::default()))), + (BlockNumber, 99), + (TxExecInfo, TxExecInfo::default()), + (Receipt, Receipt::Invoke(Default::default())), + (CompiledClassHash, felt!("211")), + (CompiledClass, CompiledClass::Deprecated(Default::default())), + (GenericContractInfo, GenericContractInfo::default()), + (StorageEntry, StorageEntry::default()), + (ContractInfoChangeList, ContractInfoChangeList::default()), + (ContractNonceChange, ContractNonceChange::default()), + (ContractClassChange, ContractClassChange::default()), + (BlockList, BlockList::default()), + (ContractStorageEntry, ContractStorageEntry::default()) + } } } diff --git a/crates/katana/storage/db/src/version.rs b/crates/katana/storage/db/src/version.rs index 58e68f6ae4..f77bc28b2f 100644 --- a/crates/katana/storage/db/src/version.rs +++ b/crates/katana/storage/db/src/version.rs @@ -5,7 +5,7 @@ use std::mem; use std::path::{Path, PathBuf}; /// Current version of the database. -pub const CURRENT_DB_VERSION: u32 = 0; +pub const CURRENT_DB_VERSION: u32 = 1; /// Name of the version file. 
const DB_VERSION_FILE_NAME: &str = "db.version"; @@ -74,3 +74,13 @@ pub(super) fn get_db_version(path: impl AsRef) -> Result PathBuf { path.join(DB_VERSION_FILE_NAME) } + +#[cfg(test)] +mod tests { + + #[test] + fn test_current_version() { + use super::CURRENT_DB_VERSION; + assert_eq!(CURRENT_DB_VERSION, 1, "Invalid current database version") + } +} diff --git a/crates/katana/storage/provider/Cargo.toml b/crates/katana/storage/provider/Cargo.toml index 39b0345385..9b7d44d6c7 100644 --- a/crates/katana/storage/provider/Cargo.toml +++ b/crates/katana/storage/provider/Cargo.toml @@ -7,11 +7,11 @@ version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -katana-db = { path = "../db", features = [ "test-utils" ] } -katana-primitives = { path = "../../primitives", features = [ "rpc" ] } +katana-db = { workspace = true, features = [ "test-utils" ] } +katana-primitives = { workspace = true, features = [ "rpc" ] } anyhow.workspace = true -auto_impl = "1.1.0" +auto_impl = "1.2.0" parking_lot.workspace = true thiserror.workspace = true tracing.workspace = true @@ -27,7 +27,7 @@ fork = [ "dep:futures", "dep:starknet", "dep:tokio", "in-memory" ] in-memory = [ ] [dev-dependencies] -katana-core = { path = "../../core" } +katana-core.workspace = true katana-runner.workspace = true lazy_static.workspace = true rand = "0.8.5" diff --git a/crates/katana/storage/provider/src/error.rs b/crates/katana/storage/provider/src/error.rs index 6fe75d4c6d..531c37dfc9 100644 --- a/crates/katana/storage/provider/src/error.rs +++ b/crates/katana/storage/provider/src/error.rs @@ -55,6 +55,10 @@ pub enum ProviderError { #[error("Missing transaction receipt for tx number {0}")] MissingTxReceipt(TxNumber), + /// Error when a transaction execution info is not found but the transaction exists. + #[error("Missing transaction execution for tx number {0}")] + MissingTxExecution(TxNumber), + /// Error when a compiled class hash is not found but the class hash exists. 
#[error("Missing compiled class hash for class hash {0:#x}")] MissingCompiledClassHash(ClassHash), diff --git a/crates/katana/storage/provider/src/providers/db/mod.rs b/crates/katana/storage/provider/src/providers/db/mod.rs index d59aa8b684..5421ba7eb7 100644 --- a/crates/katana/storage/provider/src/providers/db/mod.rs +++ b/crates/katana/storage/provider/src/providers/db/mod.rs @@ -10,9 +10,8 @@ use katana_db::models::block::StoredBlockBodyIndices; use katana_db::models::contract::{ ContractClassChange, ContractInfoChangeList, ContractNonceChange, }; -use katana_db::models::storage::{ - ContractStorageEntry, ContractStorageKey, StorageEntry, StorageEntryChangeList, -}; +use katana_db::models::list::BlockList; +use katana_db::models::storage::{ContractStorageEntry, ContractStorageKey, StorageEntry}; use katana_db::tables::{self, DupSort, Table}; use katana_db::utils::KeyValue; use katana_primitives::block::{ @@ -288,7 +287,7 @@ impl StateUpdateProvider for DbProvider { if let Some(block_num) = block_num { let nonce_updates = dup_entries::< - tables::NonceChanges, + tables::NonceChangeHistory, HashMap, _, >(&db_tx, block_num, |entry| { @@ -297,7 +296,7 @@ impl StateUpdateProvider for DbProvider { })?; let contract_updates = dup_entries::< - tables::ContractClassChanges, + tables::ClassChangeHistory, HashMap, _, >(&db_tx, block_num, |entry| { @@ -321,7 +320,7 @@ impl StateUpdateProvider for DbProvider { let storage_updates = { let entries = dup_entries::< - tables::StorageChanges, + tables::StorageChangeHistory, Vec<(ContractAddress, (StorageKey, StorageValue))>, _, >(&db_tx, block_num, |entry| { @@ -493,15 +492,40 @@ impl TransactionStatusProvider for DbProvider { } impl TransactionTraceProvider for DbProvider { - fn transaction_execution(&self, _hash: TxHash) -> ProviderResult> { - todo!() + fn transaction_execution(&self, hash: TxHash) -> ProviderResult> { + let db_tx = self.0.tx()?; + if let Some(num) = db_tx.get::(hash)? { + let execution = db_tx + .get::(num)? + .ok_or(ProviderError::MissingTxExecution(num))?; + + db_tx.commit()?; + Ok(Some(execution)) + } else { + Ok(None) + } } fn transactions_executions_by_block( &self, - _block_id: BlockHashOrNumber, + block_id: BlockHashOrNumber, ) -> ProviderResult>> { - todo!() + if let Some(indices) = self.block_body_indices(block_id)? { + let db_tx = self.0.tx()?; + let mut executions = Vec::with_capacity(indices.tx_count as usize); + + let range = Range::from(indices); + for i in range { + if let Some(execution) = db_tx.get::(i)? { + executions.push(execution); + } + } + + db_tx.commit()?; + Ok(Some(executions)) + } else { + Ok(None) + } } } @@ -509,9 +533,8 @@ impl ReceiptProvider for DbProvider { fn receipt_by_hash(&self, hash: TxHash) -> ProviderResult> { let db_tx = self.0.tx()?; if let Some(num) = db_tx.get::(hash)? { - let receipt = db_tx - .get::(num)? 
- .ok_or(ProviderError::MissingTxReceipt(num))?; + let receipt = + db_tx.get::(num)?.ok_or(ProviderError::MissingTxReceipt(num))?; db_tx.commit()?; Ok(Some(receipt)) @@ -562,7 +585,7 @@ impl BlockWriter for DbProvider { block: SealedBlockWithStatus, states: StateUpdatesWithDeclaredClasses, receipts: Vec, - _executions: Vec, + executions: Vec, ) -> ProviderResult<()> { self.0.update(move |db_tx| -> ProviderResult<()> { let block_hash = block.block.header.hash; @@ -582,7 +605,13 @@ impl BlockWriter for DbProvider { db_tx.put::(block_number, block_header)?; db_tx.put::(block_number, block_body_indices)?; - for (i, (transaction, receipt)) in transactions.into_iter().zip(receipts).enumerate() { + for (i, (transaction, receipt, execution)) in transactions + .into_iter() + .zip(receipts.into_iter()) + .zip(executions.into_iter()) + .map(|((transaction, receipt), execution)| (transaction, receipt, execution)) + .enumerate() + { let tx_number = tx_offset + i as u64; let tx_hash = transaction.hash; @@ -591,6 +620,7 @@ impl BlockWriter for DbProvider { db_tx.put::(tx_number, block_number)?; db_tx.put::(tx_number, transaction.transaction)?; db_tx.put::(tx_number, receipt)?; + db_tx.put::(tx_number, execution)?; } // insert classes @@ -626,34 +656,28 @@ impl BlockWriter for DbProvider { _ => {} } - let mut change_set_cursor = db_tx.cursor::()?; - let new_block_list = - match change_set_cursor.seek_by_key_subkey(addr, entry.key)? { - Some(StorageEntryChangeList { mut block_list, key }) - if key == entry.key => - { - change_set_cursor.delete_current()?; - - block_list.push(block_number); - block_list.sort(); - block_list - } - - _ => { - vec![block_number] - } - }; - - change_set_cursor.upsert( - addr, - StorageEntryChangeList { key: entry.key, block_list: new_block_list }, - )?; + // update block list in the change set + let changeset_key = + ContractStorageKey { contract_address: addr, key: entry.key }; + let list = db_tx.get::(changeset_key.clone())?; + + let updated_list = match list { + Some(mut list) => { + list.insert(block_number); + list + } + // create a new block list if it doesn't yet exist, and insert the block + // number + None => BlockList::from([block_number]), + }; + + db_tx.put::(changeset_key, updated_list)?; storage_cursor.upsert(addr, entry)?; let storage_change_sharded_key = ContractStorageKey { contract_address: addr, key: entry.key }; - db_tx.put::( + db_tx.put::( block_number, ContractStorageEntry { key: storage_change_sharded_key, @@ -676,12 +700,11 @@ impl BlockWriter for DbProvider { let new_change_set = if let Some(mut change_set) = db_tx.get::(addr)? { - change_set.class_change_list.push(block_number); - change_set.class_change_list.sort(); + change_set.class_change_list.insert(block_number); change_set } else { ContractInfoChangeList { - class_change_list: vec![block_number], + class_change_list: BlockList::from([block_number]), ..Default::default() } }; @@ -689,7 +712,7 @@ impl BlockWriter for DbProvider { db_tx.put::(addr, value)?; let class_change_key = ContractClassChange { contract_address: addr, class_hash }; - db_tx.put::(block_number, class_change_key)?; + db_tx.put::(block_number, class_change_key)?; db_tx.put::(addr, new_change_set)?; } @@ -703,12 +726,11 @@ impl BlockWriter for DbProvider { let new_change_set = if let Some(mut change_set) = db_tx.get::(addr)? 
{ - change_set.nonce_change_list.push(block_number); - change_set.nonce_change_list.sort(); + change_set.nonce_change_list.insert(block_number); change_set } else { ContractInfoChangeList { - nonce_change_list: vec![block_number], + nonce_change_list: BlockList::from([block_number]), ..Default::default() } }; @@ -716,7 +738,7 @@ impl BlockWriter for DbProvider { db_tx.put::(addr, value)?; let nonce_change_key = ContractNonceChange { contract_address: addr, nonce }; - db_tx.put::(block_number, nonce_change_key)?; + db_tx.put::(block_number, nonce_change_key)?; db_tx.put::(addr, new_change_set)?; } @@ -736,6 +758,7 @@ mod tests { use katana_primitives::contract::ContractAddress; use katana_primitives::receipt::Receipt; use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; + use katana_primitives::trace::TxExecInfo; use katana_primitives::transaction::{InvokeTx, Tx, TxHash, TxWithHash}; use starknet::macros::felt; @@ -821,7 +844,7 @@ mod tests { block.clone(), state_updates, vec![Receipt::Invoke(Default::default())], - vec![], + vec![TxExecInfo::default()], ) .expect("failed to insert block"); @@ -899,7 +922,7 @@ mod tests { block.clone(), state_updates1, vec![Receipt::Invoke(Default::default())], - vec![], + vec![TxExecInfo::default()], ) .expect("failed to insert block"); @@ -909,7 +932,7 @@ mod tests { block, state_updates2, vec![Receipt::Invoke(Default::default())], - vec![], + vec![TxExecInfo::default()], ) .expect("failed to insert block"); diff --git a/crates/katana/storage/provider/src/providers/db/state.rs b/crates/katana/storage/provider/src/providers/db/state.rs index 381e60063c..bb8f070646 100644 --- a/crates/katana/storage/provider/src/providers/db/state.rs +++ b/crates/katana/storage/provider/src/providers/db/state.rs @@ -1,7 +1,6 @@ -use std::cmp::Ordering; - use katana_db::mdbx::{self}; use katana_db::models::contract::ContractInfoChangeList; +use katana_db::models::list::BlockList; use katana_db::models::storage::{ContractStorageKey, StorageEntry}; use katana_db::tables; use katana_primitives::block::BlockNumber; @@ -168,41 +167,6 @@ impl HistoricalStateProvider { pub fn new(tx: mdbx::tx::TxRO, block_number: u64) -> Self { Self { tx, block_number } } - - // This looks ugly but it works and I will most likely forget how it works - // if I don't document it. But im lazy. - fn recent_block_change_relative_to_pinned_block_num( - block_number: BlockNumber, - block_list: &[BlockNumber], - ) -> Option { - if block_list.first().is_some_and(|num| block_number < *num) { - return None; - } - - // if the pinned block number is smaller than the first block number in the list, - // then that means there is no change happening before the pinned block number. 
- let pos = { - if let Some(pos) = block_list.last().and_then(|num| { - if block_number >= *num { Some(block_list.len() - 1) } else { None } - }) { - Some(pos) - } else { - block_list.iter().enumerate().find_map(|(i, num)| match block_number.cmp(num) { - Ordering::Equal => Some(i), - Ordering::Greater => None, - Ordering::Less => { - if i == 0 || block_number == 0 { - None - } else { - Some(i - 1) - } - } - }) - } - }?; - - block_list.get(pos).copied() - } } impl ContractClassProvider for HistoricalStateProvider { @@ -244,13 +208,10 @@ impl StateProvider for HistoricalStateProvider { fn nonce(&self, address: ContractAddress) -> ProviderResult> { let change_list = self.tx.get::(address)?; - if let Some(num) = change_list.and_then(|entry| { - Self::recent_block_change_relative_to_pinned_block_num( - self.block_number, - &entry.nonce_change_list, - ) - }) { - let mut cursor = self.tx.cursor::()?; + if let Some(num) = change_list + .and_then(|entry| recent_change_from_block(self.block_number, &entry.nonce_change_list)) + { + let mut cursor = self.tx.cursor::()?; let entry = cursor.seek_by_key_subkey(num, address)?.ok_or( ProviderError::MissingContractNonceChangeEntry { block: num, @@ -273,13 +234,10 @@ impl StateProvider for HistoricalStateProvider { let change_list: Option = self.tx.get::(address)?; - if let Some(num) = change_list.and_then(|entry| { - Self::recent_block_change_relative_to_pinned_block_num( - self.block_number, - &entry.class_change_list, - ) - }) { - let mut cursor = self.tx.cursor::()?; + if let Some(num) = change_list + .and_then(|entry| recent_change_from_block(self.block_number, &entry.class_change_list)) + { + let mut cursor = self.tx.cursor::()?; let entry = cursor.seek_by_key_subkey(num, address)?.ok_or( ProviderError::MissingContractClassChangeEntry { block: num, @@ -300,18 +258,14 @@ impl StateProvider for HistoricalStateProvider { address: ContractAddress, storage_key: StorageKey, ) -> ProviderResult> { - let mut cursor = self.tx.cursor::()?; - - if let Some(num) = cursor.seek_by_key_subkey(address, storage_key)?.and_then(|entry| { - Self::recent_block_change_relative_to_pinned_block_num( - self.block_number, - &entry.block_list, - ) - }) { - let mut cursor = self.tx.cursor::()?; - let sharded_key = ContractStorageKey { contract_address: address, key: storage_key }; - - let entry = cursor.seek_by_key_subkey(num, sharded_key)?.ok_or( + let key = ContractStorageKey { contract_address: address, key: storage_key }; + let block_list = self.tx.get::(key.clone())?; + + if let Some(num) = + block_list.and_then(|list| recent_change_from_block(self.block_number, &list)) + { + let mut cursor = self.tx.cursor::()?; + let entry = cursor.seek_by_key_subkey(num, key)?.ok_or( ProviderError::MissingStorageChangeEntry { block: num, storage_key, @@ -328,11 +282,22 @@ impl StateProvider for HistoricalStateProvider { } } +/// This is a helper function for getting the block number of the most +/// recent change that occurred relative to the given block number. +fn recent_change_from_block( + block_number: BlockNumber, + block_list: &BlockList, +) -> Option { + // if the rank is 0, then it's either; + // 1. the list is empty + // 2. 
there are no prior changes occured before/at `block_number` + let rank = block_list.rank(block_number); + if rank == 0 { None } else { block_list.select(rank - 1) } +} + #[cfg(test)] mod tests { - use super::HistoricalStateProvider; - - const BLOCK_LIST: [u64; 5] = [1, 2, 5, 6, 10]; + use katana_db::models::list::BlockList; #[rstest::rstest] #[case(0, None)] @@ -346,12 +311,8 @@ mod tests { #[case] block_num: u64, #[case] expected_block_num: Option, ) { - assert_eq!( - HistoricalStateProvider::recent_block_change_relative_to_pinned_block_num( - block_num, - &BLOCK_LIST, - ), - expected_block_num - ); + let list = BlockList::from([1, 2, 5, 6, 10]); + let actual_block_num = super::recent_change_from_block(block_num, &list); + assert_eq!(actual_block_num, expected_block_num); } } diff --git a/crates/katana/storage/provider/src/providers/fork/backend.rs b/crates/katana/storage/provider/src/providers/fork/backend.rs index 45052b8eef..a19e923946 100644 --- a/crates/katana/storage/provider/src/providers/fork/backend.rs +++ b/crates/katana/storage/provider/src/providers/fork/backend.rs @@ -36,6 +36,8 @@ type GetStorageResult = Result; type GetClassHashAtResult = Result; type GetClassAtResult = Result; +pub(crate) const LOG_TARGET: &str = "forked_backend"; + #[derive(Debug, thiserror::Error)] pub enum ForkedBackendError { #[error("Failed to send request to the forked backend: {0}")] @@ -218,7 +220,7 @@ impl ForkedBackend { .block_on(backend); })?; - trace!(target: "forked_backend", "fork backend thread spawned"); + trace!(target: LOG_TARGET, "Fork backend thread spawned."); Ok(handler) } @@ -248,7 +250,7 @@ impl ForkedBackend { &self, contract_address: ContractAddress, ) -> Result { - trace!(target: "forked_backend", "requesting nonce for contract address {contract_address}"); + trace!(target: LOG_TARGET, contract_address = %contract_address, "Requesting nonce for contract address."); let (sender, rx) = oneshot(); self.0 .lock() @@ -262,7 +264,12 @@ impl ForkedBackend { contract_address: ContractAddress, key: StorageKey, ) -> Result { - trace!(target: "forked_backend", "requesting storage for address {contract_address} at key {key:#x}" ); + trace!( + target: LOG_TARGET, + contract_address = %contract_address, + key = %format!("{:#x}", key), + "Requesting storage." + ); let (sender, rx) = oneshot(); self.0 .lock() @@ -275,7 +282,7 @@ impl ForkedBackend { &self, contract_address: ContractAddress, ) -> Result { - trace!(target: "forked_backend", "requesting class hash at address {contract_address}"); + trace!(target: LOG_TARGET, contract_address = %contract_address, "Requesting class hash at address."); let (sender, rx) = oneshot(); self.0 .lock() @@ -288,7 +295,11 @@ impl ForkedBackend { &self, class_hash: ClassHash, ) -> Result { - trace!(target: "forked_backend", "requesting class at hash {class_hash:#x}"); + trace!( + target: LOG_TARGET, + class_hash = %format!("{:#x}", class_hash), + "Requesting class." + ); let (sender, rx) = oneshot(); self.0 .lock() @@ -301,7 +312,11 @@ impl ForkedBackend { &self, class_hash: ClassHash, ) -> Result { - trace!(target: "forked_backend", "requesting compiled class hash at class {class_hash:#x}"); + trace!( + target: LOG_TARGET, + class_hash = %format!("{:#x}", class_hash), + "Requesting compiled class hash." + ); let class = self.do_get_class_at(class_hash)?; // if its a legacy class, then we just return back the class hash // else if sierra class, then we have to compile it and compute the compiled class hash. 
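The comment above summarizes how the compiled class hash is resolved once `do_get_class_at` has fetched the class from the forked network, but the branch itself lies outside this diff. Purely as an illustration of that comment, the following is a hedged sketch: `flattened_sierra_to_compiled_class` is the same conversion helper used by `SharedStateProvider` later in this change, while the standalone function shape, the use of plain `FieldElement` for the hash types, and the `anyhow` error type are assumptions rather than the verbatim implementation.

```rust
use starknet::core::types::{ContractClass, FieldElement};

// Assumed to be in scope: the Sierra -> CASM conversion helper referenced
// elsewhere in this diff, returning (class_hash, compiled_class_hash, casm).
// fn flattened_sierra_to_compiled_class(..) -> anyhow::Result<(FieldElement, FieldElement, CompiledClass)>

/// Sketch of the branch described in the comment above.
fn compiled_class_hash_of(
    class_hash: FieldElement,
    class: ContractClass,
) -> anyhow::Result<FieldElement> {
    match class {
        // Legacy (Cairo 0) classes have no separate compiled class hash,
        // so the class hash itself is returned unchanged.
        ContractClass::Legacy(_) => Ok(class_hash),
        // Sierra classes are compiled first; the compiled class hash is
        // taken from the compilation output.
        ContractClass::Sierra(sierra) => {
            let (_, compiled_class_hash, _casm) = flattened_sierra_to_compiled_class(&sierra)?;
            Ok(compiled_class_hash)
        }
    }
}
```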
@@ -338,14 +353,39 @@ impl ContractInfoProvider for SharedStateProvider { impl StateProvider for SharedStateProvider { fn nonce(&self, address: ContractAddress) -> ProviderResult> { - if let nonce @ Some(_) = self.contract(address)?.map(|i| i.nonce) { + // TEMP: + // + // The nonce and class hash are stored in the same struct, so if we call either `nonce` or + // `class_hash_of_contract` first, the other would be filled with the default value. + // Currently, the data types that we're using doesn't allow us to distinguish between + // 'not fetched' vs the actual value. + // + // Right now, if the nonce value is 0, we couldn't distinguish whether that is the actual + // value or just the default value. So this filter is a pessimistic approach to always + // invalidate 0 nonce value in the cache. + // + // Meaning, if the nonce is 0, we always fetch the nonce from the forked provider, even if + // we already fetched it before. + // + // Similar story with `class_hash_of_contract` + // + if let nonce @ Some(_) = + self.contract(address)?.map(|i| i.nonce).filter(|n| n != &Nonce::ZERO) + { return Ok(nonce); } - if let Some(nonce) = handle_contract_or_class_not_found_err(self.0.do_get_nonce(address)).map_err(|e| { - error!(target: "forked_backend", "error while fetching nonce of contract {address}: {e}"); - e - })? { + if let Some(nonce) = handle_contract_or_class_not_found_err(self.0.do_get_nonce(address)) + .map_err(|e| { + error!( + target: LOG_TARGET, + contract_address = %address, + error = %e, + "Fetching nonce." + ); + e + })? + { self.0.contract_state.write().entry(address).or_default().nonce = nonce; Ok(Some(nonce)) } else { @@ -364,10 +404,18 @@ impl StateProvider for SharedStateProvider { return Ok(value.copied()); } - let value = handle_contract_or_class_not_found_err(self.0.do_get_storage(address, storage_key)).map_err(|e| { - error!(target: "forked_backend", "error while fetching storage value of contract {address} at key {storage_key:#x}: {e}"); - e - })?; + let value = + handle_contract_or_class_not_found_err(self.0.do_get_storage(address, storage_key)) + .map_err(|e| { + error!( + target: LOG_TARGET, + address = %address, + storage_key = %format!("{:#x}", storage_key), + error = %e, + "Fetching storage value." + ); + e + })?; self.0 .storage @@ -383,12 +431,23 @@ impl StateProvider for SharedStateProvider { &self, address: ContractAddress, ) -> ProviderResult> { - if let hash @ Some(_) = self.contract(address)?.map(|i| i.class_hash) { + // See comment at `nonce` for the explanation of this filter. + if let hash @ Some(_) = + self.contract(address)?.map(|i| i.class_hash).filter(|h| h != &ClassHash::ZERO) + { return Ok(hash); } - if let Some(hash) = handle_contract_or_class_not_found_err(self.0.do_get_class_hash_at(address)).map_err(|e| { - error!(target: "forked_backend", "error while fetching class hash of contract {address}: {e}"); + if let Some(hash) = handle_contract_or_class_not_found_err( + self.0.do_get_class_hash_at(address), + ) + .map_err(|e| { + error!( + target: LOG_TARGET, + contract_address = %address, + error = %e, + "Fetching class hash." + ); e })? 
{ self.0.contract_state.write().entry(address).or_default().class_hash = hash; @@ -407,7 +466,12 @@ impl ContractClassProvider for SharedStateProvider { let Some(class) = handle_contract_or_class_not_found_err(self.0.do_get_class_at(hash)) .map_err(|e| { - error!(target: "forked_backend", "error while fetching sierra class {hash:#x}: {e}"); + error!( + target: LOG_TARGET, + hash = %format!("{:#x}", hash), + error = %e, + "Fetching sierra class." + ); e })? else { @@ -438,7 +502,12 @@ impl ContractClassProvider for SharedStateProvider { if let Some(hash) = handle_contract_or_class_not_found_err(self.0.do_get_compiled_class_hash(hash)) .map_err(|e| { - error!(target: "forked_backend", "error while fetching compiled class hash for class hash {hash:#x}: {e}"); + error!( + target: LOG_TARGET, + hash = %format!("{:#x}", hash), + error = %e, + "Fetching compiled class hash." + ); e })? { @@ -456,7 +525,12 @@ impl ContractClassProvider for SharedStateProvider { let Some(class) = handle_contract_or_class_not_found_err(self.0.do_get_class_at(hash)) .map_err(|e| { - error!(target: "forked_backend", "error while fetching class {hash:#x}: {e}"); + error!( + target: LOG_TARGET, + hash = %format!("{:#x}", hash), + error = %e, + "Fetching class." + ); e })? else { @@ -466,7 +540,12 @@ impl ContractClassProvider for SharedStateProvider { let (class_hash, compiled_class_hash, casm, sierra) = match class { ContractClass::Legacy(class) => { let (_, compiled_class) = legacy_rpc_to_compiled_class(&class).map_err(|e| { - error!(target: "forked_backend", "error while parsing legacy class {hash:#x}: {e}"); + error!( + target: LOG_TARGET, + hash = %format!("{:#x}", hash), + error = %e, + "Parsing legacy class." + ); ProviderError::ParsingError(e.to_string()) })?; @@ -474,10 +553,16 @@ impl ContractClassProvider for SharedStateProvider { } ContractClass::Sierra(sierra_class) => { - let (_, compiled_class_hash, compiled_class) = flattened_sierra_to_compiled_class(&sierra_class).map_err(|e|{ - error!(target: "forked_backend", "error while parsing sierra class {hash:#x}: {e}"); - ProviderError::ParsingError(e.to_string()) - })?; + let (_, compiled_class_hash, compiled_class) = + flattened_sierra_to_compiled_class(&sierra_class).map_err(|e| { + error!( + target: LOG_TARGET, + hash = %format!("{:#x}", hash), + error = %e, + "Parsing sierra class." 
+ ); + ProviderError::ParsingError(e.to_string()) + })?; (hash, compiled_class_hash, compiled_class, Some(sierra_class)) } diff --git a/crates/katana/storage/provider/src/providers/fork/mod.rs b/crates/katana/storage/provider/src/providers/fork/mod.rs index 98a2bd991e..16237cb7d3 100644 --- a/crates/katana/storage/provider/src/providers/fork/mod.rs +++ b/crates/katana/storage/provider/src/providers/fork/mod.rs @@ -456,7 +456,7 @@ impl BlockWriter for ForkedProvider { block: SealedBlockWithStatus, states: StateUpdatesWithDeclaredClasses, receipts: Vec, - _executions: Vec, + executions: Vec, ) -> ProviderResult<()> { let mut storage = self.storage.write(); @@ -494,6 +494,7 @@ impl BlockWriter for ForkedProvider { storage.transaction_numbers.extend(txs_num); storage.transaction_block.extend(txs_block); storage.receipts.extend(receipts); + storage.transactions_executions.extend(executions); storage.state_update.insert(block_number, states.state_updates.clone()); diff --git a/crates/katana/storage/provider/src/providers/fork/state.rs b/crates/katana/storage/provider/src/providers/fork/state.rs index ba35a010d5..e265388b02 100644 --- a/crates/katana/storage/provider/src/providers/fork/state.rs +++ b/crates/katana/storage/provider/src/providers/fork/state.rs @@ -38,14 +38,22 @@ impl StateProvider for ForkedStateDb { &self, address: ContractAddress, ) -> ProviderResult> { - if let hash @ Some(_) = self.contract_state.read().get(&address).map(|i| i.class_hash) { + if let hash @ Some(_) = self + .contract_state + .read() + .get(&address) + .map(|i| i.class_hash) + .filter(|h| h != &ClassHash::ZERO) + { return Ok(hash); } StateProvider::class_hash_of_contract(&self.db, address) } fn nonce(&self, address: ContractAddress) -> ProviderResult> { - if let nonce @ Some(_) = self.contract_state.read().get(&address).map(|i| i.nonce) { + if let nonce @ Some(_) = + self.contract_state.read().get(&address).map(|i| i.nonce).filter(|n| n != &Nonce::ZERO) + { return Ok(nonce); } StateProvider::nonce(&self.db, address) @@ -148,7 +156,13 @@ impl ContractInfoProvider for ForkedSnapshot { impl StateProvider for ForkedSnapshot { fn nonce(&self, address: ContractAddress) -> ProviderResult> { - if let nonce @ Some(_) = self.inner.contract_state.get(&address).map(|info| info.nonce) { + if let nonce @ Some(_) = self + .inner + .contract_state + .get(&address) + .map(|info| info.nonce) + .filter(|n| n != &Nonce::ZERO) + { return Ok(nonce); } StateProvider::nonce(&self.inner.db, address) @@ -171,8 +185,12 @@ impl StateProvider for ForkedSnapshot { &self, address: ContractAddress, ) -> ProviderResult> { - if let class_hash @ Some(_) = - self.inner.contract_state.get(&address).map(|info| info.class_hash) + if let class_hash @ Some(_) = self + .inner + .contract_state + .get(&address) + .map(|info| info.class_hash) + .filter(|h| h != &ClassHash::ZERO) { return Ok(class_hash); } diff --git a/crates/katana/storage/provider/tests/block.rs b/crates/katana/storage/provider/tests/block.rs index 83861e52a4..c2ff3b3a8b 100644 --- a/crates/katana/storage/provider/tests/block.rs +++ b/crates/katana/storage/provider/tests/block.rs @@ -15,7 +15,7 @@ use katana_provider::traits::env::BlockEnvProvider; use katana_provider::traits::state::StateRootProvider; use katana_provider::traits::state_update::StateUpdateProvider; use katana_provider::traits::transaction::{ - ReceiptProvider, TransactionProvider, TransactionStatusProvider, + ReceiptProvider, TransactionProvider, TransactionStatusProvider, TransactionTraceProvider, }; use 
katana_provider::BlockchainProvider; use rstest_reuse::{self, *}; @@ -27,7 +27,7 @@ use fixtures::{ db_provider, fork_provider, fork_provider_with_spawned_fork_network, in_memory_provider, mock_state_updates, provider_with_states, }; -use utils::generate_dummy_blocks_and_receipts; +use starknet::core::types::FieldElement; #[apply(insert_block_cases)] fn insert_block_with_in_memory_provider( @@ -53,6 +53,30 @@ fn insert_block_with_db_provider( insert_block_test_impl(provider, block_count) } +#[apply(insert_block_cases)] +fn insert_block_empty_with_in_memory_provider( + #[from(in_memory_provider)] provider: BlockchainProvider, + #[case] block_count: u64, +) -> Result<()> { + insert_block_empty_test_impl(provider, block_count) +} + +#[apply(insert_block_cases)] +fn insert_block_empty_with_fork_provider( + #[from(fork_provider)] provider: BlockchainProvider, + #[case] block_count: u64, +) -> Result<()> { + insert_block_empty_test_impl(provider, block_count) +} + +#[apply(insert_block_cases)] +fn insert_block_empty_with_db_provider( + #[from(db_provider)] provider: BlockchainProvider, + #[case] block_count: u64, +) -> Result<()> { + insert_block_empty_test_impl(provider, block_count) +} + fn insert_block_test_impl(provider: BlockchainProvider, count: u64) -> Result<()> where Db: BlockProvider @@ -60,19 +84,20 @@ where + ReceiptProvider + StateRootProvider + TransactionStatusProvider + + TransactionTraceProvider + BlockEnvProvider, { - let blocks = generate_dummy_blocks_and_receipts(count); + let blocks = utils::generate_dummy_blocks_and_receipts(count); let txs: Vec = - blocks.iter().flat_map(|(block, _)| block.block.body.clone()).collect(); + blocks.iter().flat_map(|(block, _, _)| block.block.body.clone()).collect(); let total_txs = txs.len() as u64; - for (block, receipts) in &blocks { + for (block, receipts, executions) in &blocks { provider.insert_block_with_states_and_receipts( block.clone(), Default::default(), receipts.clone(), - Default::default(), + executions.clone(), )?; assert_eq!(provider.latest_number().unwrap(), block.block.header.header.number); @@ -91,7 +116,7 @@ where blocks.clone().into_iter().map(|b| b.0.block.unseal()).collect::>() ); - for (block, receipts) in blocks { + for (block, receipts, executions) in blocks { let block_id = BlockHashOrNumber::Hash(block.block.header.hash); let expected_block_num = block.block.header.header.number; @@ -114,6 +139,7 @@ where let actual_block_tx_count = provider.transaction_count_by_block(block_id)?; let actual_receipts = provider.receipts_by_block(block_id)?; + let actual_executions = provider.transactions_executions_by_block(block_id)?; let expected_block_with_tx_hashes = BlockWithTxHashes { header: expected_block.header.clone(), @@ -128,6 +154,7 @@ where for (idx, tx) in expected_block.body.iter().enumerate() { let actual_receipt = provider.receipt_by_hash(tx.hash)?; + let actual_execution = provider.transaction_execution(tx.hash)?; let actual_tx = provider.transaction_by_hash(tx.hash)?; let actual_tx_status = provider.transaction_status(tx.hash)?; let actual_tx_block_num_hash = provider.transaction_block_num_and_hash(tx.hash)?; @@ -137,6 +164,7 @@ where assert_eq!(actual_tx_block_num_hash, Some((expected_block_num, expected_block_hash))); assert_eq!(actual_tx_status, Some(FinalityStatus::AcceptedOnL2)); assert_eq!(actual_receipt, Some(receipts[idx].clone())); + assert_eq!(actual_execution, Some(executions[idx].clone())); assert_eq!(actual_tx_by_block_idx, Some(tx.clone())); assert_eq!(actual_tx, Some(tx.clone())); } @@ -145,6 
+173,109 @@ where assert_eq!(actual_receipts.as_ref().map(|r| r.len()), Some(expected_block.body.len())); assert_eq!(actual_receipts, Some(receipts)); + assert_eq!(actual_executions, Some(executions)); + + assert_eq!(actual_block_tx_count, Some(expected_block.body.len() as u64)); + assert_eq!(actual_state_root, Some(expected_block.header.state_root)); + assert_eq!(actual_block_txs, Some(expected_block.body.clone())); + assert_eq!(actual_block_hash, Some(expected_block_hash)); + assert_eq!(actual_block, Some(expected_block)); + } + + Ok(()) +} + +fn insert_block_empty_test_impl(provider: BlockchainProvider, count: u64) -> Result<()> +where + Db: BlockProvider + + BlockWriter + + ReceiptProvider + + StateRootProvider + + TransactionStatusProvider + + TransactionTraceProvider + + BlockEnvProvider, +{ + let blocks = utils::generate_dummy_blocks_empty(count); + let txs: Vec = blocks.iter().flat_map(|block| block.block.body.clone()).collect(); + + let total_txs = txs.len() as u64; + assert_eq!(total_txs, 0); + + for block in &blocks { + provider.insert_block_with_states_and_receipts( + block.clone(), + Default::default(), + vec![], + vec![], + )?; + + assert_eq!(provider.latest_number().unwrap(), block.block.header.header.number); + assert_eq!(provider.latest_hash().unwrap(), block.block.header.hash); + } + + let actual_blocks_in_range = provider.blocks_in_range(0..=count)?; + + assert_eq!(actual_blocks_in_range.len(), count as usize); + assert_eq!( + actual_blocks_in_range, + blocks.clone().into_iter().map(|b| b.block.unseal()).collect::>() + ); + + for block in blocks { + let block_id = BlockHashOrNumber::Hash(block.block.header.hash); + + let expected_block_num = block.block.header.header.number; + let expected_block_hash = block.block.header.hash; + let expected_block = block.block.unseal(); + + let expected_block_env = BlockEnv { + number: expected_block_num, + timestamp: expected_block.header.timestamp, + l1_gas_prices: expected_block.header.gas_prices.clone(), + sequencer_address: expected_block.header.sequencer_address, + }; + + let actual_block_hash = provider.block_hash_by_num(expected_block_num)?; + + let actual_block = provider.block(block_id)?; + let actual_block_txs = provider.transactions_by_block(block_id)?; + let actual_status = provider.block_status(block_id)?; + let actual_state_root = provider.state_root(block_id)?; + + let actual_block_tx_count = provider.transaction_count_by_block(block_id)?; + let actual_receipts = provider.receipts_by_block(block_id)?; + let actual_executions = provider.transactions_executions_by_block(block_id)?; + + let expected_block_with_tx_hashes = + BlockWithTxHashes { header: expected_block.header.clone(), body: vec![] }; + + let actual_block_with_tx_hashes = provider.block_with_tx_hashes(block_id)?; + let actual_block_env = provider.block_env_at(block_id)?; + + assert_eq!(actual_status, Some(FinalityStatus::AcceptedOnL2)); + assert_eq!(actual_block_with_tx_hashes, Some(expected_block_with_tx_hashes)); + + let tx_hash = FieldElement::ZERO; + + let actual_receipt = provider.receipt_by_hash(tx_hash)?; + let actual_execution = provider.transaction_execution(tx_hash)?; + let actual_tx = provider.transaction_by_hash(tx_hash)?; + let actual_tx_status = provider.transaction_status(tx_hash)?; + let actual_tx_block_num_hash = provider.transaction_block_num_and_hash(tx_hash)?; + let actual_tx_by_block_idx = provider.transaction_by_block_and_idx(block_id, 0)?; + + assert_eq!(actual_tx_block_num_hash, None); + assert_eq!(actual_tx_status, None); + 
assert_eq!(actual_receipt, None); + assert_eq!(actual_execution, None); + assert_eq!(actual_tx_by_block_idx, None); + assert_eq!(actual_tx, None); + + assert_eq!(actual_block_env, Some(expected_block_env)); + + assert_eq!(actual_receipts.as_ref().map(|r| r.len()), Some(expected_block.body.len())); + assert_eq!(actual_receipts, Some(vec![])); + assert_eq!(actual_executions, Some(vec![])); assert_eq!(actual_block_tx_count, Some(expected_block.body.len() as u64)); assert_eq!(actual_state_root, Some(expected_block.header.state_root)); diff --git a/crates/katana/storage/provider/tests/utils.rs b/crates/katana/storage/provider/tests/utils.rs index aaff5161bf..17795922c0 100644 --- a/crates/katana/storage/provider/tests/utils.rs +++ b/crates/katana/storage/provider/tests/utils.rs @@ -1,11 +1,15 @@ use katana_primitives::block::{Block, BlockHash, FinalityStatus, Header, SealedBlockWithStatus}; use katana_primitives::receipt::{InvokeTxReceipt, Receipt}; +use katana_primitives::trace::TxExecInfo; use katana_primitives::transaction::{InvokeTx, Tx, TxHash, TxWithHash}; use katana_primitives::FieldElement; -pub fn generate_dummy_txs_and_receipts(count: usize) -> (Vec, Vec) { +pub fn generate_dummy_txs_and_receipts( + count: usize, +) -> (Vec, Vec, Vec) { let mut txs = Vec::with_capacity(count); let mut receipts = Vec::with_capacity(count); + let mut executions = Vec::with_capacity(count); // TODO: generate random txs and receipts variants for _ in 0..count { @@ -15,20 +19,21 @@ pub fn generate_dummy_txs_and_receipts(count: usize) -> (Vec, Vec Vec<(SealedBlockWithStatus, Vec)> { +) -> Vec<(SealedBlockWithStatus, Vec, Vec)> { let mut blocks = Vec::with_capacity(count as usize); let mut parent_hash: BlockHash = 0u8.into(); for i in 0..count { let tx_count = (rand::random::() % 10) as usize; - let (body, receipts) = generate_dummy_txs_and_receipts(tx_count); + let (body, receipts, executions) = generate_dummy_txs_and_receipts(tx_count); let header = Header { parent_hash, number: i, ..Default::default() }; let block = @@ -39,8 +44,28 @@ pub fn generate_dummy_blocks_and_receipts( blocks.push(( SealedBlockWithStatus { block, status: FinalityStatus::AcceptedOnL2 }, receipts, + executions, )); } blocks } + +pub fn generate_dummy_blocks_empty(count: u64) -> Vec { + let mut blocks = Vec::with_capacity(count as usize); + let mut parent_hash: BlockHash = 0u8.into(); + + for i in 0..count { + let header = Header { parent_hash, number: i, ..Default::default() }; + let body = vec![]; + + let block = + Block { header, body }.seal_with_hash(FieldElement::from(rand::random::())); + + parent_hash = block.header.hash; + + blocks.push(SealedBlockWithStatus { block, status: FinalityStatus::AcceptedOnL2 }); + } + + blocks +} diff --git a/crates/metrics/Cargo.toml b/crates/metrics/Cargo.toml index d351b39341..92fda92b98 100644 --- a/crates/metrics/Cargo.toml +++ b/crates/metrics/Cargo.toml @@ -1,22 +1,23 @@ [package] edition.workspace = true -name = "metrics" +name = "dojo-metrics" version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] anyhow.workspace = true -hyper.workspace = true +hyper = { workspace = true, features = [ "http1", "http2", "server", "tcp" ] } thiserror.workspace = true tokio.workspace = true tracing.workspace = true # Metrics -metrics = "0.21.1" metrics-exporter-prometheus = "0.12.1" -metrics-process.workspace = true +metrics-process = "=1.0.14" metrics-util = "0.15.0" +metrics.workspace = true +reth-metrics-derive = { git = 
"https://github.com/paradigmxyz/reth.git", tag = "v0.2.0-beta.4" } [target.'cfg(not(windows))'.dependencies] jemalloc-ctl = { version = "0.5.0", optional = true } diff --git a/crates/metrics/src/lib.rs b/crates/metrics/src/lib.rs index 2b3519af15..4a3a43cea9 100644 --- a/crates/metrics/src/lib.rs +++ b/crates/metrics/src/lib.rs @@ -2,6 +2,12 @@ pub mod prometheus_exporter; #[cfg(all(feature = "jemalloc", unix))] use jemallocator as _; +/// Re-export the metrics crate +pub use metrics; +/// Re-export the metrics-process crate +pub use metrics_process; +/// Re-export the metrics derive macro +pub use reth_metrics_derive::Metrics; // We use jemalloc for performance reasons #[cfg(all(feature = "jemalloc", unix))] diff --git a/crates/metrics/src/prometheus_exporter.rs b/crates/metrics/src/prometheus_exporter.rs index 82206da51e..19b8e29529 100644 --- a/crates/metrics/src/prometheus_exporter.rs +++ b/crates/metrics/src/prometheus_exporter.rs @@ -1,47 +1,72 @@ //! Prometheus exporter //! Adapted from Paradigm's [`reth`](https://github.com/paradigmxyz/reth/blob/c1d7d2bde398bcf410c7e2df13fd7151fc2a58b9/bin/reth/src/prometheus_exporter.rs) + use std::convert::Infallible; use std::net::SocketAddr; use std::sync::Arc; +use anyhow::{Context, Result}; use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Request, Response, Server}; use metrics::{describe_gauge, gauge}; use metrics_exporter_prometheus::{PrometheusBuilder, PrometheusHandle}; use metrics_util::layers::{PrefixLayer, Stack}; +pub(crate) const LOG_TARGET: &str = "metrics::prometheus_exporter"; + pub(crate) trait Hook: Fn() + Send + Sync {} impl Hook for T {} /// Installs Prometheus as the metrics recorder. -pub fn install_recorder(prefix: &str) -> anyhow::Result { +/// +/// ## Arguments +/// * `prefix` - Apply a prefix to all metrics keys. +pub fn install_recorder(prefix: &str) -> Result { let recorder = PrometheusBuilder::new().build_recorder(); let handle = recorder.handle(); - // Build metrics stack + // Build metrics stack and install the recorder Stack::new(recorder) .push(PrefixLayer::new(prefix)) .install() - .map_err(|e| anyhow::anyhow!("Couldn't set metrics recorder: {}", e))?; + .context("Couldn't set metrics recorder")?; Ok(handle) } +/// Serves Prometheus metrics over HTTP with database and process metrics. +pub async fn serve( + listen_addr: SocketAddr, + handle: PrometheusHandle, + process: metrics_process::Collector, +) -> Result<()> { + // Clone `process` to move it into the hook and use the original `process` for describe below. + let cloned_process = process.clone(); + let hooks: Vec>> = + vec![Box::new(move || cloned_process.collect()), Box::new(collect_memory_stats)]; + serve_with_hooks(listen_addr, handle, hooks).await?; + + process.describe(); + describe_memory_stats(); + + Ok(()) +} + /// Serves Prometheus metrics over HTTP with hooks. /// /// The hooks are called every time the metrics are requested at the given endpoint, and can be used /// to record values for pull-style metrics, i.e. metrics that are not automatically updated. 
-pub(crate) async fn serve_with_hooks( +async fn serve_with_hooks( listen_addr: SocketAddr, handle: PrometheusHandle, hooks: impl IntoIterator, -) -> anyhow::Result<()> { +) -> Result<()> { let hooks: Vec<_> = hooks.into_iter().collect(); // Start endpoint start_endpoint(listen_addr, handle, Arc::new(move || hooks.iter().for_each(|hook| hook()))) .await - .map_err(|e| anyhow::anyhow!("Could not start Prometheus endpoint: {}", e))?; + .context("Could not start Prometheus endpoint")?; Ok(()) } @@ -51,7 +76,7 @@ async fn start_endpoint( listen_addr: SocketAddr, handle: PrometheusHandle, hook: Arc, -) -> anyhow::Result<()> { +) -> Result<()> { let make_svc = make_service_fn(move |_| { let handle = handle.clone(); let hook = Arc::clone(&hook); @@ -64,7 +89,7 @@ async fn start_endpoint( } }); let server = Server::try_bind(&listen_addr) - .map_err(|e| anyhow::anyhow!("Could not bind to address: {}", e))? + .context(format!("Could not bind to address: {listen_addr}"))? .serve(make_svc); tokio::spawn(async move { server.await.expect("Metrics endpoint crashed") }); @@ -72,68 +97,80 @@ async fn start_endpoint( Ok(()) } -/// Serves Prometheus metrics over HTTP with database and process metrics. -pub async fn serve( - listen_addr: SocketAddr, - handle: PrometheusHandle, - process: metrics_process::Collector, -) -> anyhow::Result<()> { - // Clone `process` to move it into the hook and use the original `process` for describe below. - let cloned_process = process.clone(); - let hooks: Vec>> = - vec![Box::new(move || cloned_process.collect()), Box::new(collect_memory_stats)]; - serve_with_hooks(listen_addr, handle, hooks).await?; - - process.describe(); - describe_memory_stats(); - - Ok(()) -} - #[cfg(all(feature = "jemalloc", unix))] fn collect_memory_stats() { use jemalloc_ctl::{epoch, stats}; if epoch::advance() - .map_err(|error| tracing::error!(?error, "Failed to advance jemalloc epoch")) + .map_err(|error| { + tracing::error!( + target: LOG_TARGET, + error = %error, + "Advance jemalloc epoch." + ) + }) .is_err() { return; } - if let Ok(value) = stats::active::read() - .map_err(|error| tracing::error!(?error, "Failed to read jemalloc.stats.active")) - { + if let Ok(value) = stats::active::read().map_err(|error| { + tracing::error!( + target: LOG_TARGET, + error = %error, + "Read jemalloc.stats.active." + ) + }) { gauge!("jemalloc.active", value as f64); } - if let Ok(value) = stats::allocated::read() - .map_err(|error| tracing::error!(?error, "Failed to read jemalloc.stats.allocated")) - { + if let Ok(value) = stats::allocated::read().map_err(|error| { + tracing::error!( + target: LOG_TARGET, + error = %error, + "Read jemalloc.stats.allocated." + ) + }) { gauge!("jemalloc.allocated", value as f64); } - if let Ok(value) = stats::mapped::read() - .map_err(|error| tracing::error!(?error, "Failed to read jemalloc.stats.mapped")) - { + if let Ok(value) = stats::mapped::read().map_err(|error| { + tracing::error!( + target: LOG_TARGET, + error = %error, + "Read jemalloc.stats.mapped." + ) + }) { gauge!("jemalloc.mapped", value as f64); } - if let Ok(value) = stats::metadata::read() - .map_err(|error| tracing::error!(?error, "Failed to read jemalloc.stats.metadata")) - { + if let Ok(value) = stats::metadata::read().map_err(|error| { + tracing::error!( + target: LOG_TARGET, + error = %error, + "Read jemalloc.stats.metadata." 
+ ) + }) { gauge!("jemalloc.metadata", value as f64); } - if let Ok(value) = stats::resident::read() - .map_err(|error| tracing::error!(?error, "Failed to read jemalloc.stats.resident")) - { + if let Ok(value) = stats::resident::read().map_err(|error| { + tracing::error!( + target: LOG_TARGET, + error = %error, + "Read jemalloc.stats.resident." + ) + }) { gauge!("jemalloc.resident", value as f64); } - if let Ok(value) = stats::retained::read() - .map_err(|error| tracing::error!(?error, "Failed to read jemalloc.stats.retained")) - { + if let Ok(value) = stats::retained::read().map_err(|error| { + tracing::error!( + target: LOG_TARGET, + error = %error, + "Read jemalloc.stats.retained." + ) + }) { gauge!("jemalloc.retained", value as f64); } } diff --git a/crates/saya/README.md b/crates/saya/README.md index 112cc89d8a..5c0542d503 100644 --- a/crates/saya/README.md +++ b/crates/saya/README.md @@ -67,6 +67,14 @@ However, papyrus and blockifier which we depend on are still in `-dev` version, * cairo-lang (we should support `2.5` now) * scarb (breaking changes between 2.4 and 2.5 to be addresses, not required to only build saya and SNOS) +## Local Testing + +```bash +cargo run -r -p katana # Start an appchain +cargo run -r -p sozo -- build --manifest-path examples/spawn-and-move/Scarb.toml +cargo run -r -p sozo -- migrate --manifest-path examples/spawn-and-move/Scarb.toml # Make some transactions +cargo run -r --bin saya -- --rpc-url http://localhost:5050 # Run Saya +``` ## Additional documentation [Hackmd note](https://hackmd.io/@glihm/saya) diff --git a/crates/saya/core/Cargo.toml b/crates/saya/core/Cargo.toml index 754df6d8ee..1cdeccce37 100644 --- a/crates/saya/core/Cargo.toml +++ b/crates/saya/core/Cargo.toml @@ -16,24 +16,28 @@ saya-provider.workspace = true anyhow.workspace = true async-trait.workspace = true -convert_case.workspace = true +cairo-proof-parser.workspace = true cairo-vm.workspace = true +convert_case.workspace = true flate2.workspace = true futures.workspace = true -lazy_static = "1.4.0" +lazy_static.workspace = true parking_lot.workspace = true rand = { version = "0.8.5", features = [ "small_rng" ] } serde.workspace = true serde_json.workspace = true serde_with.workspace = true +starknet-types-core = { version = "0.0.9", default-features = false, features = [ "curve", "num-traits", "serde" ] } starknet.workspace = true starknet_api.workspace = true thiserror.workspace = true tokio.workspace = true tracing.workspace = true url.workspace = true -starknet-types-core = { version = "0.0.9", default-features = false, features = ["serde", "curve", "num-traits"] } # TODO: use features for each possible DA. 
celestia-rpc = "0.1.1" celestia-types = "0.1.1" + +cairo-felt = "0.9.1" +num-bigint = "0.4.4" diff --git a/crates/saya/core/src/lib.rs b/crates/saya/core/src/lib.rs index 8d229b9294..bf7d14bf2a 100644 --- a/crates/saya/core/src/lib.rs +++ b/crates/saya/core/src/lib.rs @@ -2,16 +2,23 @@ use std::sync::Arc; -use katana_primitives::block::{BlockNumber, FinalityStatus, SealedBlockWithStatus}; +use futures::future::join; +use katana_primitives::block::{BlockNumber, FinalityStatus, SealedBlock, SealedBlockWithStatus}; +use katana_primitives::transaction::Tx; +use katana_primitives::FieldElement; +use prover::ProverIdentifier; use saya_provider::rpc::JsonRpcProvider; use saya_provider::Provider as SayaProvider; use serde::{Deserialize, Serialize}; -use tracing::{error, trace}; +use tokio::io::AsyncWriteExt; +use tracing::{error, info, trace}; use url::Url; +use verifier::VerifierIdentifier; use crate::blockchain::Blockchain; use crate::data_availability::{DataAvailabilityClient, DataAvailabilityConfig}; use crate::error::SayaResult; +use crate::prover::{extract_messages, ProgramInput}; pub mod blockchain; pub mod data_availability; @@ -20,6 +27,8 @@ pub mod prover; pub mod starknet_os; pub mod verifier; +pub(crate) const LOG_TARGET: &str = "saya::core"; + /// Saya's main configuration. #[derive(Debug, Deserialize, Serialize)] pub struct SayaConfig { @@ -27,6 +36,8 @@ pub struct SayaConfig { pub katana_rpc: Url, pub start_block: u64, pub data_availability: Option, + pub prover: ProverIdentifier, + pub verifier: VerifierIdentifier, } fn url_deserializer<'de, D>(deserializer: D) -> Result @@ -79,26 +90,34 @@ impl Saya { /// Should be refacto in crates as necessary. pub async fn start(&mut self) -> SayaResult<()> { let poll_interval_secs = 1; - let mut block = self.config.start_block; + let mut block = self.config.start_block.max(1); // Genesis block is not proven. We advance to block 1 + + let (genesis_block, block_before_the_first) = + join(self.provider.fetch_block(0), self.provider.fetch_block(block - 1)).await; + let genesis_state_hash = genesis_block?.header.header.state_root; + let mut previous_block = block_before_the_first?; loop { let latest_block = match self.provider.block_number().await { Ok(block_number) => block_number, Err(e) => { - error!(?e, "fetch block number"); + error!(target: LOG_TARGET, error = ?e, "Fetching block."); tokio::time::sleep(tokio::time::Duration::from_secs(poll_interval_secs)).await; continue; } }; if block > latest_block { - trace!(block_number = block, "waiting block number"); + trace!(target: LOG_TARGET, block_number = block, "Waiting for block."); tokio::time::sleep(tokio::time::Duration::from_secs(poll_interval_secs)).await; continue; } - self.process_block(block).await?; + let fetched_block = self.provider.fetch_block(block).await?; + self.process_block(block, (&fetched_block, previous_block, genesis_state_hash)).await?; + + previous_block = fetched_block; block += 1; } } @@ -121,10 +140,15 @@ impl Saya { /// # Arguments /// /// * `block_number` - The block number. 
- async fn process_block(&mut self, block_number: BlockNumber) -> SayaResult<()> { - trace!(block_number, "processing block"); + async fn process_block( + &mut self, + block_number: BlockNumber, + blocks: (&SealedBlock, SealedBlock, FieldElement), + ) -> SayaResult<()> { + trace!(target: LOG_TARGET, block_number = %block_number, "Processing block."); + + let (block, prev_block, _genesis_state_hash) = blocks; - let block = self.provider.fetch_block(block_number).await?; let (state_updates, da_state_update) = self.provider.fetch_state_updates(block_number).await?; @@ -132,15 +156,69 @@ impl Saya { da.publish_state_diff_felts(&da_state_update).await?; } - let block = SealedBlockWithStatus { block, status: FinalityStatus::AcceptedOnL2 }; + let block = + SealedBlockWithStatus { block: block.clone(), status: FinalityStatus::AcceptedOnL2 }; + let state_updates_to_prove = state_updates.state_updates.clone(); self.blockchain.update_state_with_block(block.clone(), state_updates)?; if block_number == 0 { return Ok(()); } - let _exec_infos = self.provider.fetch_transactions_executions(block_number).await?; + let exec_infos = self.provider.fetch_transactions_executions(block_number).await?; + + if exec_infos.is_empty() { + trace!(target: "saya_core", block_number, "Skipping empty block."); + return Ok(()); + } + + let transactions = block + .block + .body + .iter() + .filter_map(|t| match &t.transaction { + Tx::L1Handler(tx) => Some(tx), + _ => None, + }) + .collect::>(); + + let (message_to_starknet_segment, message_to_appchain_segment) = + extract_messages(&exec_infos, transactions); + + let new_program_input = ProgramInput { + prev_state_root: prev_block.header.header.state_root, + block_number: FieldElement::from(block_number), + block_hash: block.block.header.hash, + config_hash: FieldElement::from(0u64), + message_to_starknet_segment, + message_to_appchain_segment, + state_updates: state_updates_to_prove, + }; + + println!("Program input: {}", new_program_input.serialize()?); + + // let to_prove = ProvedStateDiff { + // genesis_state_hash, + // prev_state_hash: prev_block.header.header.state_root, + // state_updates: state_updates_to_prove, + // }; + + trace!(target: "saya_core", "Proving block {block_number}."); + let proof = prover::prove(new_program_input.serialize()?, self.config.prover).await?; + info!(target: "saya_core", block_number, "Block proven."); + + // save proof to file + tokio::fs::File::create(format!("proof_{}.json", block_number)) + .await + .unwrap() + .write_all(proof.as_bytes()) + .await + .unwrap(); + + trace!(target: "saya_core", "Verifying block {block_number}."); + let transaction_hash = verifier::verify(proof, self.config.verifier).await?; + info!(target: "saya_core", block_number, transaction_hash, "Block verified."); Ok(()) } @@ -151,3 +229,25 @@ impl From for error::Error { Self::KatanaClient(format!("Katana client RPC provider error: {e}")) } } + +// CI is not allowing to fetch images from inside the docker itself. +// Need to be addressed, so tests by pulling prover and verifier are for now +// disabled here, but can be uncommented to test locally. 
+// #[cfg(test)] +// mod tests { +// use crate::prover::state_diff::EXAMPLE_STATE_DIFF; +// use crate::prover::{prove, ProverIdentifier}; +// use crate::verifier::{verify, VerifierIdentifier}; + +// #[tokio::test] +// async fn test_herodotus_verify() { +// let proof = prove(EXAMPLE_STATE_DIFF.into(), ProverIdentifier::Stone).await.unwrap(); +// let _tx = verify(proof, VerifierIdentifier::HerodotusStarknetSepolia).await.unwrap(); +// } + +// #[tokio::test] +// async fn test_local_verify() { +// let proof = prove(EXAMPLE_STATE_DIFF.into(), ProverIdentifier::Stone).await.unwrap(); +// let _res = verify(proof, VerifierIdentifier::StoneLocal).await.unwrap(); +// } +// } diff --git a/crates/saya/core/src/prover/mod.rs b/crates/saya/core/src/prover/mod.rs index 6e5ebc13dd..3214a7f4ae 100644 --- a/crates/saya/core/src/prover/mod.rs +++ b/crates/saya/core/src/prover/mod.rs @@ -1,18 +1,60 @@ //! Prover backends. //! //! The prover is in charge of generating a proof from the cairo execution trace. +use std::str::FromStr; + +use anyhow::bail; use async_trait::async_trait; +mod program_input; +mod serializer; +pub mod state_diff; +mod stone_image; +mod vec252; + +pub use program_input::*; +use serde::{Deserialize, Serialize}; +pub use serializer::parse_proof; +pub use stone_image::*; + /// The prover used to generate the proof. -#[derive(Debug)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] pub enum ProverIdentifier { - Sharp, + #[default] Stone, + Sharp, Platinum, } +pub async fn prove(input: String, prover: ProverIdentifier) -> anyhow::Result { + match prover { + ProverIdentifier::Sharp => todo!(), + ProverIdentifier::Stone => prove_stone(input).await, + ProverIdentifier::Platinum => todo!(), + } +} + /// The prover client. in charge of producing the proof. #[async_trait] pub trait ProverClient { fn identifier() -> ProverIdentifier; + + /// Generates the proof from the given trace. + /// At the moment prover is coupled with the program it proves. Because of this input should + /// correspond to the program. + async fn prove(&self, input: String) -> anyhow::Result; + async fn local_verify(&self, proof: String) -> anyhow::Result<()>; +} + +impl FromStr for ProverIdentifier { + type Err = anyhow::Error; + + fn from_str(prover: &str) -> anyhow::Result { + Ok(match prover { + "stone" => ProverIdentifier::Stone, + "sharp" => ProverIdentifier::Sharp, + "platinum" => ProverIdentifier::Platinum, + _ => bail!("Unknown prover: `{}`.", prover), + }) + } } diff --git a/crates/saya/core/src/prover/program_input.rs b/crates/saya/core/src/prover/program_input.rs new file mode 100644 index 0000000000..89e3ac92a6 --- /dev/null +++ b/crates/saya/core/src/prover/program_input.rs @@ -0,0 +1,221 @@ +use katana_primitives::contract::ContractAddress; +use katana_primitives::state::StateUpdates; +use katana_primitives::trace::{CallInfo, EntryPointType, TxExecInfo}; +use katana_primitives::transaction::L1HandlerTx; +use katana_primitives::utils::transaction::compute_l1_message_hash; +use starknet::core::types::FieldElement; + +use super::state_diff::state_updates_to_json_like; + +/// Based on https://github.com/cartridge-gg/piltover/blob/2be9d46f00c9c71e2217ab74341f77b09f034c81/src/snos_output.cairo#L19-L20 +/// With the new state root computed by the prover. 
+pub struct ProgramInput { + pub prev_state_root: FieldElement, + pub block_number: FieldElement, + pub block_hash: FieldElement, + pub config_hash: FieldElement, + pub message_to_starknet_segment: Vec, + pub message_to_appchain_segment: Vec, + pub state_updates: StateUpdates, +} + +fn get_messages_recursively(info: &CallInfo) -> Vec { + let mut messages = vec![]; + + // By default, `from_address` must correspond to the contract address that + // is sending the message. In the case of library calls, `code_address` is `None`, + // we then use the `caller_address` instead (which can also be an account). + let from_address = + if let Some(code_address) = info.code_address { code_address } else { info.caller_address }; + + messages.extend(info.l2_to_l1_messages.iter().map(|m| MessageToStarknet { + from_address, + to_address: ContractAddress::from(m.to_address), + payload: m.payload.clone(), + })); + + info.inner_calls.iter().for_each(|call| { + messages.extend(get_messages_recursively(call)); + }); + + messages +} + +pub fn extract_messages( + exec_infos: &[TxExecInfo], + mut transactions: Vec<&L1HandlerTx>, +) -> (Vec, Vec) { + let message_to_starknet_segment = exec_infos + .iter() + .flat_map(|t| t.execute_call_info.iter().chain(t.validate_call_info.iter()).chain(t.fee_transfer_call_info.iter())) // Take into account both validate and execute calls. + .flat_map(get_messages_recursively) + .collect(); + + let message_to_appchain_segment = exec_infos + .iter() + .flat_map(|t| t.execute_call_info.iter()) + .filter(|c| c.entry_point_type == EntryPointType::L1Handler) + .map(|c| { + let message_hash = + compute_l1_message_hash(*c.caller_address, *c.contract_address, &c.calldata[..]); + + // Matching execution to a transaction to extract nonce. + let matching = transactions + .iter() + .enumerate() + .find(|(_, &t)| { + t.message_hash == message_hash + && c.contract_address == t.contract_address + && c.calldata == t.calldata + }) + .unwrap_or_else(|| { + panic!("No matching transaction found for message hash: {}", message_hash) + }) + .0; + + // Removing, to have different nonces, even for the same message content. + let removed = transactions.remove(matching); + + (c, removed) + }) + .map(|(c, t)| MessageToAppchain { + from_address: c.caller_address, + to_address: c.contract_address, + nonce: t.nonce, + selector: c.entry_point_selector, + payload: c.calldata.clone(), + }) + .collect(); + + (message_to_starknet_segment, message_to_appchain_segment) +} + +impl ProgramInput { + pub fn serialize(&self) -> anyhow::Result { + let message_to_starknet = self + .message_to_starknet_segment + .iter() + .map(MessageToStarknet::serialize) + .collect::>>()? + .into_iter() + .flatten() + .map(|e| format!("{}", e)) + .collect::>() + .join(","); + + let message_to_appchain = self + .message_to_appchain_segment + .iter() + .map(|m| m.serialize()) + .collect::>>()? 
+ .into_iter() + .flatten() + .map(|e| format!("{}", e)) + .collect::>() + .join(","); + + let mut result = String::from('{'); + result.push_str(&format!(r#""prev_state_root":{},"#, self.prev_state_root)); + result.push_str(&format!(r#""block_number":{},"#, self.block_number)); + result.push_str(&format!(r#""block_hash":{},"#, self.block_hash)); + result.push_str(&format!(r#""config_hash":{},"#, self.config_hash)); + + result.push_str(&format!(r#""message_to_starknet_segment":[{}],"#, message_to_starknet)); + result.push_str(&format!(r#""message_to_appchain_segment":[{}],"#, message_to_appchain)); + + result.push_str(&state_updates_to_json_like(&self.state_updates)); + + result.push('}'); + + Ok(result) + } +} + +/// Based on https://github.com/cartridge-gg/piltover/blob/2be9d46f00c9c71e2217ab74341f77b09f034c81/src/messaging/output_process.cairo#L16 +pub struct MessageToStarknet { + pub from_address: ContractAddress, + pub to_address: ContractAddress, + pub payload: Vec, +} + +impl MessageToStarknet { + pub fn serialize(&self) -> anyhow::Result> { + let mut result = vec![*self.from_address, *self.to_address]; + result.push(FieldElement::from(self.payload.len())); + result.extend(self.payload.iter().cloned()); + Ok(result) + } +} + +/// Based on https://github.com/cartridge-gg/piltover/blob/2be9d46f00c9c71e2217ab74341f77b09f034c81/src/messaging/output_process.cairo#L28 +pub struct MessageToAppchain { + pub from_address: ContractAddress, + pub to_address: ContractAddress, + pub nonce: FieldElement, + pub selector: FieldElement, + pub payload: Vec, +} + +impl MessageToAppchain { + pub fn serialize(&self) -> anyhow::Result> { + let mut result = vec![*self.from_address, *self.to_address, self.nonce, self.selector]; + result.push(FieldElement::from(self.payload.len())); + result.extend(self.payload.iter().cloned()); + Ok(result) + } +} + +#[test] +fn test_program_input() -> anyhow::Result<()> { + use std::str::FromStr; + + let input = ProgramInput { + prev_state_root: FieldElement::from_str("101")?, + block_number: FieldElement::from_str("102")?, + block_hash: FieldElement::from_str("103")?, + config_hash: FieldElement::from_str("104")?, + message_to_starknet_segment: vec![MessageToStarknet { + from_address: ContractAddress::from(FieldElement::from_str("105")?), + to_address: ContractAddress::from(FieldElement::from_str("106")?), + payload: vec![FieldElement::from_str("107")?], + }], + message_to_appchain_segment: vec![MessageToAppchain { + from_address: ContractAddress::from(FieldElement::from_str("108")?), + to_address: ContractAddress::from(FieldElement::from_str("109")?), + nonce: FieldElement::from_str("110")?, + selector: FieldElement::from_str("111")?, + payload: vec![FieldElement::from_str("112")?], + }], + state_updates: StateUpdates { + nonce_updates: std::collections::HashMap::new(), + storage_updates: std::collections::HashMap::new(), + contract_updates: std::collections::HashMap::new(), + declared_classes: std::collections::HashMap::new(), + }, + }; + + let serialized = input.serialize().unwrap(); + + println!("Serialized: {}", serialized); + + pub const EXPECTED: &str = r#"{ + "prev_state_root": 101, + "block_number": 102, + "block_hash": 103, + "config_hash": 104, + "message_to_starknet_segment": [105,106,1,107], + "message_to_appchain_segment": [108,109,110,111,1,112], + "nonce_updates": {}, + "storage_updates": {}, + "contract_updates": {}, + "declared_classes": {} + }"#; + + let expected = EXPECTED.chars().filter(|c| !c.is_whitespace()).collect::(); + + 
println!("{}", expected); + + assert_eq!(serialized, expected); + + Ok(()) +} diff --git a/crates/saya/core/src/prover/serializer.rs b/crates/saya/core/src/prover/serializer.rs new file mode 100644 index 0000000000..b026e6ca6b --- /dev/null +++ b/crates/saya/core/src/prover/serializer.rs @@ -0,0 +1,27 @@ +use anyhow::Ok; +use cairo_proof_parser::parse; +use starknet::core::types::FieldElement; + +use super::vec252::VecFelt252; + +pub fn parse_proof(proof: String) -> anyhow::Result> { + let parsed = parse(proof)?; + + let config: VecFelt252 = serde_json::from_str(&parsed.config.to_string()).unwrap(); + let public_input: VecFelt252 = serde_json::from_str(&parsed.public_input.to_string()).unwrap(); + let unsent_commitment: VecFelt252 = + serde_json::from_str(&parsed.unsent_commitment.to_string()).unwrap(); + let witness: VecFelt252 = serde_json::from_str(&parsed.witness.to_string()).unwrap(); + + let serialized = config + .iter() + .cloned() + .chain(public_input.to_vec()) + .chain(unsent_commitment.iter().cloned()) + .chain(witness.iter().cloned()) + .map(|x| FieldElement::from_dec_str(&x.to_string())) + .map(Result::unwrap) + .collect(); + + Ok(serialized) +} diff --git a/crates/saya/core/src/prover/state_diff.rs b/crates/saya/core/src/prover/state_diff.rs new file mode 100644 index 0000000000..ad0572a322 --- /dev/null +++ b/crates/saya/core/src/prover/state_diff.rs @@ -0,0 +1,164 @@ +use katana_primitives::state::StateUpdates; +use starknet::core::types::FieldElement; + +pub struct ProvedStateDiff { + pub genesis_state_hash: FieldElement, + pub prev_state_hash: FieldElement, + pub state_updates: StateUpdates, +} + +#[cfg(test)] +pub const EXAMPLE_STATE_DIFF: &str = r#"{ + "genesis_state_hash": 12312321313, + "prev_state_hash": 34343434343, + "nonce_updates": { + "1": 12, + "2": 1337 + }, + "storage_updates": { + "1": { + "123456789": 89, + "987654321": 98 + }, + "2": { + "123456789": 899, + "987654321": 98 + } + }, + "contract_updates": { + "3": 437267489 + }, + "declared_classes": { + "1234": 12345, + "12345": 123456, + "123456": 1234567 + } +}"#; + +#[cfg(test)] +pub const EXAMPLE_KATANA_DIFF: &str = r#"{ + "genesis_state_hash": 0, + "prev_state_hash": 0, + "nonce_updates": { + "2753027862869584298471002046734263971941226372316454331586763888183773261315": 1 + }, + "storage_updates": { + "2087021424722619777119509474943472645767659996348769578120564519014510906823": { + "2080372569135727803323277605537468839623406868880224375222092136867736091483": 9999999366500000000000, + "3488041066649332616440110253331181934927363442882040970594983370166361489161": 633500000000000 + } + }, + "contract_updates": {}, + "declared_classes": { + "2927827620326415540917522810963695348790596370636511605071677066526091865974": 3454128523693959991357220485501659129201494257878487792088502805686335557901 + } +}"#; + +pub fn state_updates_to_json_like(state_updates: &StateUpdates) -> String { + let mut result = String::new(); + + result.push_str(&format!(r#""nonce_updates":{}"#, "{")); + let nonce_updates = state_updates + .nonce_updates + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k.0, v)) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", nonce_updates, "}")); + + result.push_str(&format!(r#","storage_updates":{}"#, "{")); + let storage_updates = state_updates + .storage_updates + .iter() + .map(|(k, v)| { + let storage = + v.iter().map(|(k, v)| format!(r#""{}":{}"#, k, v)).collect::>().join(","); + + format!(r#""{}":{{{}}}"#, k.0, storage) + }) + .collect::>() + .join(","); + 
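+    // Close the "storage_updates" object; the JSON is assembled by hand because of the dynamic (felt) keys.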
result.push_str(&format!("{}{}", storage_updates, "}")); + + result.push_str(&format!(r#","contract_updates":{}"#, "{")); + let contract_updates = state_updates + .contract_updates + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k.0, v)) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", contract_updates, "}")); + + result.push_str(&format!(r#","declared_classes":{}"#, "{")); + let declared_classes = state_updates + .declared_classes + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k, v)) + .collect::>() + .join(","); + + result.push_str(&format!("{}{}", declared_classes, "}")); + + result +} + +/// We need custom implementation because of dynamic keys in json +impl ProvedStateDiff { + pub fn serialize(&self) -> String { + let mut result = String::from('{'); + result.push_str(&format!(r#""genesis_state_hash":{}"#, self.genesis_state_hash)); + result.push_str(&format!(r#","prev_state_hash":{}"#, self.prev_state_hash)); + + result.push_str(&format!(r#","nonce_updates":{}"#, "{")); + let nonce_updates = self + .state_updates + .nonce_updates + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k.0, v)) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", nonce_updates, "}")); + + result.push_str(&format!(r#","storage_updates":{}"#, "{")); + let storage_updates = self + .state_updates + .storage_updates + .iter() + .map(|(k, v)| { + let storage = v + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k, v)) + .collect::>() + .join(","); + + format!(r#""{}":{{{}}}"#, k.0, storage) + }) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", storage_updates, "}")); + + result.push_str(&format!(r#","contract_updates":{}"#, "{")); + let contract_updates = self + .state_updates + .contract_updates + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k.0, v)) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", contract_updates, "}")); + + result.push_str(&format!(r#","declared_classes":{}"#, "{")); + let declared_classes = self + .state_updates + .declared_classes + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k, v)) + .collect::>() + .join(","); + + result.push_str(&format!("{}{}", declared_classes, "}")); + result.push('}'); + + result + } +} diff --git a/crates/saya/core/src/prover/stone_image.rs b/crates/saya/core/src/prover/stone_image.rs new file mode 100644 index 0000000000..c5ad5de5d9 --- /dev/null +++ b/crates/saya/core/src/prover/stone_image.rs @@ -0,0 +1,118 @@ +use std::process::Stdio; + +use anyhow::{bail, Context}; +use async_trait::async_trait; +use tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}; +use tokio::process::Command; +use tokio::sync::OnceCell; +use tracing::warn; + +use super::{ProverClient, ProverIdentifier}; + +#[derive(Clone)] +pub struct StoneProver(pub String); + +pub async fn prove_stone(input: String) -> anyhow::Result { + let prover = StoneProver::new().await?; + prover.prove(input).await +} + +pub async fn local_verify(input: String) -> anyhow::Result { + let prover = StoneProver::new().await?; + prover.local_verify(input).await?; + Ok(String::from("ok")) +} + +#[async_trait] +impl ProverClient for StoneProver { + fn identifier() -> ProverIdentifier { + ProverIdentifier::Stone + } + + async fn prove(&self, input: String) -> anyhow::Result { + let mut command = Command::new("podman"); + command.arg("run").arg("-i").arg("--rm").arg(&self.0); + + run(command, Some(input)).await + } + + async fn local_verify(&self, proof: String) -> anyhow::Result<()> { + let mut command = Command::new("podman"); + 
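+        // Run the verifier container image; the proof string is piped to its stdin via `run` below.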
command.arg("run").arg("-i").arg("--rm").arg("verifier"); + + run(command, Some(proof)).await?; + + Ok(()) + } +} + +impl StoneProver { + async fn new() -> anyhow::Result { + static STONE_PROVER: OnceCell<(anyhow::Result, anyhow::Result)> = + OnceCell::const_new(); + + let source = "piniom/state-diff-commitment"; + let verifier = "piniom/verifier:latest"; + + let result = STONE_PROVER + .get_or_init(|| async { + let mut command = Command::new("podman"); + command.arg("pull").arg(format!("docker.io/{}", source)); + + let mut verifier_command = Command::new("podman"); + verifier_command.arg("pull").arg(format!("docker.io/{}", verifier)); + + ( + run(command, None).await.context("Failed to pull prover"), + run(verifier_command, None).await.context("Failed to pull prover"), + ) + }) + .await; + + if result.0.is_err() { + bail!("Failed to pull prover"); + } else if result.1.is_err() { + warn!("Failed to pull verifier"); + } + + Ok(StoneProver(source.to_string())) + } +} + +async fn run(mut command: Command, input: Option) -> anyhow::Result { + command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::piped()); + + let mut child = command.spawn()?; + + if let Some(input) = input { + let mut stdin = child.stdin.take().context("failed to open stdin")?; + + tokio::spawn(async move { + stdin.write_all(input.as_bytes()).await.unwrap(); + }); + } + + let stdout = child.stdout.take().context("failed to open stdout")?; + let reader = BufReader::new(stdout); + let mut lines = reader.lines(); + let mut out = String::new(); + while let Some(line) = lines.next_line().await? { + out.push_str(&line); + } + + let status = child.wait().await?; + + if !status.success() { + if let Some(mut output) = child.stderr.take() { + let mut err = Vec::new(); + output.read_to_end(&mut err).await?; + + // Handle error output + let err = String::from_utf8(err).context("failed to parse stderr")?; + bail!("Podman error: {}", err) + }; + bail!("Error without stderr") + } + + Ok(out) +} diff --git a/crates/saya/core/src/prover/vec252.rs b/crates/saya/core/src/prover/vec252.rs new file mode 100644 index 0000000000..2d6c0f979b --- /dev/null +++ b/crates/saya/core/src/prover/vec252.rs @@ -0,0 +1,120 @@ +use std::ops::Deref; +use std::str::FromStr; + +use cairo_felt::Felt252; +use serde::de::Visitor; +use serde::Deserialize; +use serde_json::Value; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum VecFelt252Error { + #[error("failed to parse number: {0}")] + NumberParseError(#[from] std::num::ParseIntError), + #[error("failed to parse bigint: {0}")] + BigIntParseError(#[from] num_bigint::ParseBigIntError), + #[error("number out of range")] + NumberOutOfRange, +} + +/// `VecFelt252` is a wrapper around a vector of `Arg`. +/// +/// It provides convenience methods for working with a vector of `Arg` and implements +/// `Deref` to allow it to be treated like a vector of `Arg`. +#[derive(Debug, Clone)] +pub struct VecFelt252(Vec); + +impl VecFelt252 { + /// Creates a new `VecFelt252` from a vector of `Arg`. + /// + /// # Arguments + /// + /// * `args` - A vector of `Arg`. + /// + /// # Returns + /// + /// * `VecFelt252` - A new `VecFelt252` instance. 
+ #[must_use] + pub fn new(args: Vec) -> Self { + Self(args) + } +} + +impl Deref for VecFelt252 { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for Vec { + fn from(args: VecFelt252) -> Self { + args.0 + } +} + +impl From> for VecFelt252 { + fn from(args: Vec) -> Self { + Self(args) + } +} + +impl VecFelt252 { + fn visit_seq_helper(seq: &[Value]) -> Result { + let iterator = seq.iter(); + let mut args = Vec::new(); + + for arg in iterator { + match arg { + Value::Number(n) => { + let n = n.as_u64().ok_or(VecFelt252Error::NumberOutOfRange)?; + args.push(Felt252::from(n)); + } + Value::String(n) => { + let n = num_bigint::BigUint::from_str(n)?; + args.push(Felt252::from_bytes_be(&n.to_bytes_be())); + } + Value::Array(a) => { + args.push(Felt252::from(a.len())); + let result = Self::visit_seq_helper(a)?; + args.extend(result.0); + } + _ => (), + } + } + + Ok(Self::new(args)) + } +} + +impl<'de> Visitor<'de> for VecFelt252 { + type Value = VecFelt252; + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("a list of arguments") + } + fn visit_seq(self, mut seq: A) -> Result + where + A: serde::de::SeqAccess<'de>, + { + let mut args = Vec::new(); + while let Some(arg) = seq.next_element()? { + match arg { + Value::Number(n) => args.push(Value::Number(n)), + Value::String(n) => args.push(Value::String(n)), + Value::Array(a) => args.push(Value::Array(a)), + _ => return Err(serde::de::Error::custom("Invalid type")), + } + } + + Self::visit_seq_helper(&args).map_err(|e| serde::de::Error::custom(e.to_string())) + } +} + +impl<'de> Deserialize<'de> for VecFelt252 { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + deserializer.deserialize_seq(VecFelt252(Vec::new())) + } +} diff --git a/crates/saya/core/src/verifier/mod.rs b/crates/saya/core/src/verifier/mod.rs index f6e770f11d..c6fd5eef31 100644 --- a/crates/saya/core/src/verifier/mod.rs +++ b/crates/saya/core/src/verifier/mod.rs @@ -7,9 +7,45 @@ //! an interface to query the on-chain verifier, but also //! submitting facts and proofs. +use std::str::FromStr; + +use anyhow::bail; +use serde::{Deserialize, Serialize}; + +use crate::prover::parse_proof; +mod starknet; + /// Supported verifiers. 
-#[derive(Debug)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] pub enum VerifierIdentifier { + #[default] + HerodotusStarknetSepolia, + StoneLocal, StarkwareEthereum, - HerodotusStarknet, +} + +pub async fn verify(proof: String, verifier: VerifierIdentifier) -> anyhow::Result { + match verifier { + VerifierIdentifier::HerodotusStarknetSepolia => { + let serialized_proof = parse_proof(proof).unwrap(); + starknet::starknet_verify(serialized_proof).await + } + VerifierIdentifier::StoneLocal => crate::prover::local_verify(proof).await, + VerifierIdentifier::StarkwareEthereum => { + unimplemented!("Herodotus Starknet not yet supported") + } + } +} + +impl FromStr for VerifierIdentifier { + type Err = anyhow::Error; + + fn from_str(verifier: &str) -> anyhow::Result { + Ok(match verifier { + "herodotus" => VerifierIdentifier::HerodotusStarknetSepolia, + "local" => VerifierIdentifier::StoneLocal, + "starkware" => VerifierIdentifier::StarkwareEthereum, + _ => bail!("Unknown verifier: `{}`.", verifier), + }) + } } diff --git a/crates/saya/core/src/verifier/starknet.rs b/crates/saya/core/src/verifier/starknet.rs new file mode 100644 index 0000000000..457c280283 --- /dev/null +++ b/crates/saya/core/src/verifier/starknet.rs @@ -0,0 +1,49 @@ +use starknet::accounts::{Account, Call, ExecutionEncoding, SingleOwnerAccount}; +use starknet::core::types::{BlockId, BlockTag, FieldElement}; +use starknet::core::utils::get_selector_from_name; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::{LocalWallet, SigningKey}; +use url::Url; + +// will need to be read from the environment for chains other than sepoia +const STARKNET_URL: &str = "https://free-rpc.nethermind.io/sepolia-juno/v0_6"; +const CHAIN_ID: &str = "0x00000000000000000000000000000000000000000000534e5f5345504f4c4941"; +const SIGNER_ADDRESS: &str = "0x76372bcb1d993b9ab059e542a93004962fb70d743b0f10e611df9ffe13c6d64"; +const SIGNER_KEY: &str = "0x710d3218ae70bf7ec580c620ec81e601a6258ceec2494c4261f916f42667000"; + +lazy_static::lazy_static!( + static ref STARKNET_ACCOUNT: SingleOwnerAccount, LocalWallet> = { + let provider = JsonRpcClient::new(HttpTransport::new( + Url::parse(STARKNET_URL).unwrap(), + )); + + let signer = FieldElement::from_hex_be(SIGNER_KEY).expect("invalid signer hex"); + let signer = LocalWallet::from(SigningKey::from_secret_scalar(signer)); + + let address = FieldElement::from_hex_be(SIGNER_ADDRESS).expect("invalid signer address"); + let chain_id = FieldElement::from_hex_be(CHAIN_ID).expect("invalid chain id"); + + let mut account = SingleOwnerAccount::new(provider, signer, address, chain_id, ExecutionEncoding::Legacy); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + account + }; + +); + +pub async fn starknet_verify(serialized_proof: Vec) -> anyhow::Result { + let tx = STARKNET_ACCOUNT + .execute(vec![Call { + to: FieldElement::from_hex_be( + "0x1b9c4e973ca9af0456eb6ae4c4576c5134905d8a560e0dfa1b977359e2c40ec", + ) + .expect("invalid verifier address"), + selector: get_selector_from_name("verify_and_register_fact").expect("invalid selector"), + calldata: serialized_proof, + }]) + .max_fee(starknet::macros::felt!("1000000000000000")) // sometimes failing without this line + .send() + .await?; + + Ok(format!("{:#x}", tx.transaction_hash)) +} diff --git a/crates/saya/provider/Cargo.toml b/crates/saya/provider/Cargo.toml index 13e9108e4a..884164ed1b 100644 --- a/crates/saya/provider/Cargo.toml +++ 
b/crates/saya/provider/Cargo.toml @@ -18,7 +18,7 @@ anyhow.workspace = true auto_impl = "1.1.0" async-trait.workspace = true convert_case.workspace = true -ethers = "2.0.11" +alloy-primitives.workspace = true flate2.workspace = true futures.workspace = true jsonrpsee = { workspace = true, features = ["client"] } diff --git a/crates/saya/provider/src/rpc/mod.rs b/crates/saya/provider/src/rpc/mod.rs index ffb0b93e64..43c4e5ef1b 100644 --- a/crates/saya/provider/src/rpc/mod.rs +++ b/crates/saya/provider/src/rpc/mod.rs @@ -135,13 +135,13 @@ impl Provider for JsonRpcProvider { .await? { ContractClass::Legacy(legacy) => { - trace!(target: LOG_TARGET, version = "cairo 0", %class_hash, "set contract class"); + trace!(target: LOG_TARGET, version = "cairo 0", %class_hash, "Set contract class."); let (hash, class) = rpc_converter::legacy_rpc_to_compiled_class(&legacy)?; state_updates_with_classes.declared_compiled_classes.insert(hash, class); } ContractClass::Sierra(s) => { - trace!(target: LOG_TARGET, version = "cairo 1", %class_hash, "set contract class"); + trace!(target: LOG_TARGET, version = "cairo 1", %class_hash, "Set contract class."); state_updates_with_classes .declared_sierra_classes @@ -157,7 +157,7 @@ impl Provider for JsonRpcProvider { &self, block_number: u64, ) -> ProviderResult> { - trace!(target: LOG_TARGET, block_number, "fetch transactions executions"); + trace!(target: LOG_TARGET, block_number = %block_number, "Fetching transactions executions."); let cursor = TransactionsPageCursor { block_number, chunk_size: 50, ..Default::default() }; let client = HttpClientBuilder::default().build(&self.rpc_url).unwrap(); diff --git a/crates/saya/provider/src/rpc/state.rs b/crates/saya/provider/src/rpc/state.rs index 890a88dd52..072b30c939 100644 --- a/crates/saya/provider/src/rpc/state.rs +++ b/crates/saya/provider/src/rpc/state.rs @@ -3,7 +3,7 @@ //! For data availability format, all the specification is available here: //! . //! -//! We use `U256` from ethers for easier computation (than working with felts). +//! We use `U256` from alloy for easier computation (than working with felts). //! //! Optims: //! Currently, the serialize functions are using `iter().find()` on arrays @@ -12,7 +12,7 @@ //! arrays to then have O(1) search. use std::collections::{HashMap, HashSet}; -use ethers::types::U256; +use alloy_primitives::U256; use katana_primitives::contract::ContractAddress; use katana_primitives::state::StateUpdates; use starknet::core::types::{ @@ -23,7 +23,7 @@ use starknet::core::types::{ use crate::ProviderResult; // 2 ^ 128 -const CLASS_INFO_FLAG_TRUE: &str = "0x100000000000000000000000000000000"; +const CLASS_INFO_FLAG_TRUE: &str = "100000000000000000000000000000000"; /// Converts the [`StateUpdate`] RPC type into [`StateUpdate`] Katana primitive. /// @@ -230,10 +230,10 @@ fn compute_update_meta_info( if let Some(nonce) = new_nonce { // At the moment, v0.11 and forward are packing the nonce into 64 bits. 
let nonce_u64: u64 = nonce.try_into().expect("Nonce too large for DA serialization"); - meta += ((nonce_u64 as u128) << 64).into() + meta += U256::from((nonce_u64 as u128) << 64) } - meta += (n_storage_updates as u128).into(); + meta += U256::from(n_storage_updates); FieldElement::from_hex_be(format!("0x{:064x}", meta).as_str()).unwrap() } diff --git a/crates/saya/provider/src/rpc/state_diff.rs b/crates/saya/provider/src/rpc/state_diff.rs index 959eb414a1..aee6e87b70 100644 --- a/crates/saya/provider/src/rpc/state_diff.rs +++ b/crates/saya/provider/src/rpc/state_diff.rs @@ -1,7 +1,5 @@ - use std::collections::HashSet; -use ethers::types::U256; use starknet::core::types::{ ContractStorageDiffItem, DeclaredClassItem, DeployedContractItem, FieldElement, NonceUpdate, StateDiff, @@ -9,4 +7,3 @@ use starknet::core::types::{ // 2 ^ 128 const CLASS_INFO_FLAG_TRUE: &str = "0x100000000000000000000000000000000"; - diff --git a/crates/sozo/ops/Cargo.toml b/crates/sozo/ops/Cargo.toml index 2472bb87fe..231d08bf14 100644 --- a/crates/sozo/ops/Cargo.toml +++ b/crates/sozo/ops/Cargo.toml @@ -8,6 +8,7 @@ version.workspace = true [dependencies] anyhow.workspace = true async-trait.workspace = true +cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } cairo-lang-compiler.workspace = true cairo-lang-defs.workspace = true cairo-lang-filesystem.workspace = true @@ -23,6 +24,8 @@ camino.workspace = true clap-verbosity-flag = "2.0.1" clap.workspace = true clap_complete.workspace = true +colored = "2.0.0" +colored_json = "3.2.0" console.workspace = true dojo-bindgen.workspace = true dojo-lang.workspace = true @@ -31,11 +34,13 @@ dojo-world = { workspace = true, features = [ "contracts", "metadata", "migratio futures.workspace = true notify = "6.0.1" notify-debouncer-mini = "0.3.0" +rpassword.workspace = true scarb-ui.workspace = true scarb.workspace = true semver.workspace = true serde.workspace = true serde_json.workspace = true +serde_with.workspace = true smol_str.workspace = true starknet-crypto.workspace = true starknet.workspace = true @@ -44,10 +49,10 @@ tokio.workspace = true tracing-log = "0.1.3" tracing.workspace = true url.workspace = true -cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } [dev-dependencies] assert_fs = "1.0.10" dojo-test-utils = { workspace = true, features = [ "build-examples" ] } +ipfs-api-backend-hyper = { git = "https://github.com/ferristseng/rust-ipfs-api", rev = "af2c17f7b19ef5b9898f458d97a90055c3605633", features = [ "with-hyper-rustls" ] } katana-runner.workspace = true snapbox = "0.4.6" diff --git a/crates/sozo/ops/src/account.rs b/crates/sozo/ops/src/account.rs new file mode 100644 index 0000000000..ce1ebcce0d --- /dev/null +++ b/crates/sozo/ops/src/account.rs @@ -0,0 +1,470 @@ +use core::panic; +use std::io::Write; +use std::path::PathBuf; + +use anyhow::Result; +use colored::Colorize; +use colored_json::{ColorMode, Output}; +use dojo_world::utils::TransactionWaiter; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; +use starknet::accounts::{AccountFactory, AccountFactoryError, OpenZeppelinAccountFactory}; +use starknet::core::serde::unsigned_field_element::UfeHex; +use starknet::core::types::{ + BlockId, BlockTag, FunctionCall, StarknetError, TransactionFinalityStatus, +}; +use starknet::core::utils::get_contract_address; +use starknet::macros::{felt, selector}; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::{JsonRpcClient, Provider, ProviderError}; +use 
starknet::signers::{LocalWallet, Signer, SigningKey}; +use starknet_crypto::FieldElement; + +/// The canonical hash of a contract class. This is the class hash value of a contract instance. +pub type ClassHash = FieldElement; + +/// The class hash of DEFAULT_OZ_ACCOUNT_CONTRACT. +/// Corresponds to 0x05400e90f7e0ae78bd02c77cd75527280470e2fe19c54970dd79dc37a9d3645c +pub const DEFAULT_OZ_ACCOUNT_CONTRACT_CLASS_HASH: ClassHash = FieldElement::from_mont([ + 8460675502047588988, + 17729791148444280953, + 7171298771336181387, + 292243705759714441, +]); + +#[derive(Serialize, Deserialize)] +pub struct AccountConfig { + pub version: u64, + pub variant: AccountVariant, + pub deployment: DeploymentStatus, +} + +impl AccountConfig { + pub fn deploy_account_address(&self) -> Result { + let undeployed_status = match &self.deployment { + DeploymentStatus::Undeployed(value) => value, + DeploymentStatus::Deployed(_) => { + anyhow::bail!("account already deployed"); + } + }; + + match &self.variant { + AccountVariant::OpenZeppelin(oz) => Ok(get_contract_address( + undeployed_status.salt, + undeployed_status.class_hash, + &[oz.public_key], + FieldElement::ZERO, + )), + } + } +} + +#[derive(Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum AccountVariant { + OpenZeppelin(OzAccountConfig), +} + +#[serde_as] +#[derive(Serialize, Deserialize)] +pub struct OzAccountConfig { + pub version: u64, + #[serde_as(as = "UfeHex")] + pub public_key: FieldElement, + #[serde(default = "true_as_default")] + pub legacy: bool, +} + +#[derive(Serialize, Deserialize)] +#[serde(tag = "status", rename_all = "snake_case")] +pub enum DeploymentStatus { + Undeployed(UndeployedStatus), + Deployed(DeployedStatus), +} + +impl DeploymentStatus { + pub fn to_deployed(&mut self, address: FieldElement) { + match self { + DeploymentStatus::Undeployed(status) => { + *self = DeploymentStatus::Deployed(DeployedStatus { + class_hash: status.class_hash, + address, + }); + } + DeploymentStatus::Deployed(_) => { + panic!("Already deployed!") + } + } + } +} + +#[serde_as] +#[derive(Serialize, Deserialize)] +pub struct UndeployedStatus { + #[serde_as(as = "UfeHex")] + pub class_hash: FieldElement, + #[serde_as(as = "UfeHex")] + pub salt: FieldElement, +} + +#[serde_as] +#[derive(Serialize, Deserialize)] +pub struct DeployedStatus { + #[serde_as(as = "UfeHex")] + pub class_hash: FieldElement, + #[serde_as(as = "UfeHex")] + pub address: FieldElement, +} + +enum MaxFeeType { + Manual { max_fee: FieldElement }, + Estimated { estimate: FieldElement, estimate_with_buffer: FieldElement }, +} + +impl MaxFeeType { + pub fn max_fee(&self) -> FieldElement { + match self { + Self::Manual { max_fee } => *max_fee, + Self::Estimated { estimate_with_buffer, .. 
} => *estimate_with_buffer, + } + } +} + +#[derive(Debug)] +pub enum FeeSetting { + Manual(FieldElement), + EstimateOnly, + None, +} + +impl FeeSetting { + pub fn is_estimate_only(&self) -> bool { + matches!(self, FeeSetting::EstimateOnly) + } +} + +pub async fn new(signer: LocalWallet, force: bool, file: PathBuf) -> Result<()> { + if file.exists() && !force { + anyhow::bail!("account config file already exists"); + } + + let salt = SigningKey::from_random().secret_scalar(); + + let account_config = AccountConfig { + version: 1, + variant: AccountVariant::OpenZeppelin(OzAccountConfig { + version: 1, + public_key: signer.get_public_key().await?.scalar(), + legacy: false, + }), + deployment: DeploymentStatus::Undeployed(UndeployedStatus { + class_hash: DEFAULT_OZ_ACCOUNT_CONTRACT_CLASS_HASH, + salt, + }), + }; + + let deployed_address = account_config.deploy_account_address()?; + + let file_path = file; + let mut file = std::fs::File::create(&file_path)?; + serde_json::to_writer_pretty(&mut file, &account_config)?; + file.write_all(b"\n")?; + + eprintln!("Created new account config file: {}", std::fs::canonicalize(&file_path)?.display()); + eprintln!(); + eprintln!( + "Once deployed, this account will be available at:\n {}", + format!("{:#064x}", deployed_address).bright_yellow() + ); + eprintln!(); + eprintln!( + "Deploy this account by running:\n {}", + format!("sozo account deploy {}", file_path.display()).bright_yellow() + ); + + Ok(()) +} + +#[allow(clippy::too_many_arguments)] +pub async fn deploy( + provider: JsonRpcClient, + signer: LocalWallet, + fee_setting: FeeSetting, + simulate: bool, + nonce: Option, + poll_interval: u64, + file: PathBuf, + no_confirmation: bool, +) -> Result<()> { + if simulate && fee_setting.is_estimate_only() { + anyhow::bail!("--simulate cannot be used with --estimate-only"); + } + + if !file.exists() { + anyhow::bail!("account config file not found"); + } + + let mut account: AccountConfig = serde_json::from_reader(&mut std::fs::File::open(&file)?)?; + + let undeployed_status = match &account.deployment { + DeploymentStatus::Undeployed(inner) => inner, + DeploymentStatus::Deployed(_) => { + anyhow::bail!("account already deployed"); + } + }; + + let chain_id = provider.chain_id().await?; + + let factory = match &account.variant { + AccountVariant::OpenZeppelin(oz_config) => { + // Makes sure we're using the right key + let signer_public_key = signer.get_public_key().await?.scalar(); + if signer_public_key != oz_config.public_key { + anyhow::bail!( + "public key mismatch. Expected: {:#064x}; actual: {:#064x}.", + oz_config.public_key, + signer_public_key + ); + } + + let mut factory = OpenZeppelinAccountFactory::new( + undeployed_status.class_hash, + chain_id, + signer, + &provider, + ) + .await?; + factory.set_block_id(BlockId::Tag(BlockTag::Pending)); + + factory + } + }; + + let account_deployment = factory.deploy(undeployed_status.salt); + + let target_deployment_address = account.deploy_account_address()?; + + // Sanity check. 
We don't really need to check again here actually + if account_deployment.address() != target_deployment_address { + panic!("Unexpected account deployment address mismatch"); + } + + let max_fee = match fee_setting { + FeeSetting::Manual(fee) => MaxFeeType::Manual { max_fee: fee }, + FeeSetting::EstimateOnly | FeeSetting::None => { + let estimated_fee = account_deployment + .estimate_fee() + .await + .map_err(|err| match err { + AccountFactoryError::Provider(ProviderError::StarknetError(err)) => { + map_starknet_error(err) + } + err => anyhow::anyhow!("{}", err), + })? + .overall_fee; + + let estimated_fee_with_buffer = (estimated_fee * felt!("3")).floor_div(felt!("2")); + + if fee_setting.is_estimate_only() { + println!("{} ETH", format!("{}", estimated_fee.to_big_decimal(18)).bright_yellow(),); + return Ok(()); + } + + MaxFeeType::Estimated { + estimate: estimated_fee, + estimate_with_buffer: estimated_fee_with_buffer, + } + } + }; + + let account_deployment = match nonce { + Some(nonce) => account_deployment.nonce(nonce), + None => account_deployment, + }; + let account_deployment = account_deployment.max_fee(max_fee.max_fee()); + + if simulate { + simulate_account_deploy(&account_deployment).await?; + Ok(()) + } else { + do_account_deploy( + max_fee, + target_deployment_address, + no_confirmation, + account_deployment, + &provider, + poll_interval, + &mut account, + ) + .await?; + + write_account_to_file(file, account)?; + + Ok(()) + } +} + +async fn do_account_deploy( + max_fee: MaxFeeType, + target_deployment_address: FieldElement, + no_confirmation: bool, + account_deployment: starknet::accounts::AccountDeployment< + '_, + OpenZeppelinAccountFactory>, + >, + provider: &JsonRpcClient, + poll_interval: u64, + account: &mut AccountConfig, +) -> Result<(), anyhow::Error> { + match max_fee { + MaxFeeType::Manual { max_fee } => { + eprintln!( + "You've manually specified the account deployment fee to be {}. Therefore, fund \ + at least:\n {}", + format!("{} ETH", max_fee.to_big_decimal(18)).bright_yellow(), + format!("{} ETH", max_fee.to_big_decimal(18)).bright_yellow(), + ); + } + MaxFeeType::Estimated { estimate, estimate_with_buffer } => { + eprintln!( + "The estimated account deployment fee is {}. However, to avoid failure, fund at \ + least:\n {}", + format!("{} ETH", estimate.to_big_decimal(18)).bright_yellow(), + format!("{} ETH", estimate_with_buffer.to_big_decimal(18)).bright_yellow() + ); + } + } + eprintln!( + "to the following address:\n {}", + format!("{:#064x}", target_deployment_address).bright_yellow() + ); + if !no_confirmation { + eprint!("Press [ENTER] once you've funded the address."); + std::io::stdin().read_line(&mut String::new())?; + } + let account_deployment_tx = account_deployment.send().await?.transaction_hash; + eprintln!( + "Account deployment transaction: {}", + format!("{:#064x}", account_deployment_tx).bright_yellow() + ); + eprintln!( + "Waiting for transaction {} to confirm. 
If this process is interrupted, you will need to \ + run `{}` to update the account file.", + format!("{:#064x}", account_deployment_tx).bright_yellow(), + "sozo account fetch".bright_yellow(), + ); + TransactionWaiter::new(account_deployment_tx, &provider) + .with_tx_status(TransactionFinalityStatus::AcceptedOnL2) + .with_interval(poll_interval) + .await?; + eprintln!( + "Transaction {} confirmed", + format!("{:#064x}", account_deployment_tx).bright_yellow() + ); + + account.deployment.to_deployed(target_deployment_address); + + Ok(()) +} + +fn write_account_to_file(file: PathBuf, account: AccountConfig) -> Result<(), anyhow::Error> { + let mut temp_file_name = file + .file_name() + .ok_or_else(|| anyhow::anyhow!("unable to determine file name"))? + .to_owned(); + + // Never write directly to the original file to avoid data loss + temp_file_name.push(".tmp"); + + let mut temp_path = file.clone(); + temp_path.set_file_name(temp_file_name); + + let mut temp_file = std::fs::File::create(&temp_path)?; + serde_json::to_writer_pretty(&mut temp_file, &account)?; + // temp_file.write_all(b"\n")?; + + std::fs::rename(temp_path, file)?; + Ok(()) +} + +async fn simulate_account_deploy( + account_deployment: &starknet::accounts::AccountDeployment< + '_, + OpenZeppelinAccountFactory>, + >, +) -> Result<(), anyhow::Error> { + let simulation = account_deployment.simulate(false, false).await?; + let simulation_json = serde_json::to_value(simulation)?; + let simulation_json = + colored_json::to_colored_json(&simulation_json, ColorMode::Auto(Output::StdOut))?; + + println!("{simulation_json}"); + Ok(()) +} + +pub async fn fetch( + provider: JsonRpcClient, + force: bool, + output: PathBuf, + address: FieldElement, +) -> Result<()> { + if output.exists() && !force { + anyhow::bail!("account config file already exists"); + } + + let class_hash = provider.get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await?; + + let public_key = provider + .call( + FunctionCall { + contract_address: address, + entry_point_selector: selector!("get_public_key"), + calldata: vec![], + }, + BlockId::Tag(BlockTag::Pending), + ) + .await?[0]; + + let variant = + AccountVariant::OpenZeppelin(OzAccountConfig { version: 1, public_key, legacy: false }); + + let account = AccountConfig { + version: 1, + variant, + deployment: DeploymentStatus::Deployed(DeployedStatus { class_hash, address }), + }; + + let mut file = std::fs::File::create(&output)?; + serde_json::to_writer_pretty(&mut file, &account)?; + file.write_all(b"\n")?; + + eprintln!("Downloaded new account config file: {}", std::fs::canonicalize(&output)?.display()); + + Ok(()) +} + +fn true_as_default() -> bool { + true +} + +fn map_starknet_error(err: StarknetError) -> anyhow::Error { + match err { + StarknetError::ContractError(err) => { + anyhow::anyhow!("ContractError: {}", err.revert_error.trim()) + } + StarknetError::TransactionExecutionError(err) => { + anyhow::anyhow!( + "TransactionExecutionError (tx index {}): {}", + err.transaction_index, + err.execution_error.trim() + ) + } + StarknetError::ValidationFailure(err) => { + anyhow::anyhow!("ValidationFailure: {}", err.trim()) + } + StarknetError::UnexpectedError(err) => { + anyhow::anyhow!("UnexpectedError: {}", err.trim()) + } + err => anyhow::anyhow!("{}", err), + } +} diff --git a/crates/sozo/ops/src/auth.rs b/crates/sozo/ops/src/auth.rs index 759a799893..0b26cc2144 100644 --- a/crates/sozo/ops/src/auth.rs +++ b/crates/sozo/ops/src/auth.rs @@ -4,14 +4,14 @@ use anyhow::{Context, Result}; use 
dojo_world::contracts::model::ModelError; use dojo_world::contracts::world::WorldContract; use dojo_world::contracts::{cairo_utils, WorldContractReader}; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionExt; use starknet::accounts::ConnectedAccount; +use starknet::core::types::{BlockId, BlockTag}; use starknet::core::utils::parse_cairo_short_string; -use starknet::providers::Provider; use starknet_crypto::FieldElement; -use super::get_contract_address; -use crate::utils::handle_transaction_result; +use crate::utils; #[derive(Debug, Clone, PartialEq)] pub enum ResourceType { @@ -87,23 +87,24 @@ impl FromStr for OwnerResource { } } -pub async fn grant_writer( +pub async fn grant_writer( world: &WorldContract, models_contracts: Vec, - world_reader: WorldContractReader
<P>
, - transaction: TxConfig, + txn_config: TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, - P: Provider + Sync + Send, { let mut calls = Vec::new(); + let world_reader = WorldContractReader::new(world.address, world.account.provider()) + .with_block(BlockId::Tag(BlockTag::Pending)); + for mc in models_contracts { let model_name = parse_cairo_short_string(&mc.model)?; match world_reader.model_reader(&model_name).await { Ok(_) => { - let contract = get_contract_address(world, mc.contract).await?; + let contract = utils::get_contract_address(world, mc.contract).await?; calls.push(world.grant_writer_getcall(&mc.model, &contract.into())); } @@ -121,15 +122,15 @@ where let res = world .account .execute(calls) - .send() + .send_with_cfg(&txn_config) .await .with_context(|| "Failed to send transaction")?; - handle_transaction_result( + utils::handle_transaction_result( &world.account.provider(), res, - transaction.wait, - transaction.receipt, + txn_config.wait, + txn_config.receipt, ) .await?; } @@ -138,9 +139,9 @@ where } pub async fn grant_owner( - world: WorldContract, + world: &WorldContract, owners_resources: Vec, - transaction: TxConfig, + txn_config: TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -151,21 +152,115 @@ where let resource = match &or.resource { ResourceType::Model(name) => *name, ResourceType::Contract(name_or_address) => { - get_contract_address(&world, name_or_address.clone()).await? + utils::get_contract_address(world, name_or_address.clone()).await? } }; calls.push(world.grant_owner_getcall(&or.owner.into(), &resource)); } - let res = - world.account.execute(calls).send().await.with_context(|| "Failed to send transaction")?; + let res = world + .account + .execute(calls) + .send_with_cfg(&txn_config) + .await + .with_context(|| "Failed to send transaction")?; + + utils::handle_transaction_result( + &world.account.provider(), + res, + txn_config.wait, + txn_config.receipt, + ) + .await?; + + Ok(()) +} + +pub async fn revoke_writer( + world: &WorldContract, + models_contracts: Vec, + txn_config: TxnConfig, +) -> Result<()> +where + A: ConnectedAccount + Sync + Send + 'static, +{ + let mut calls = Vec::new(); + + let world_reader = WorldContractReader::new(world.address, world.account.provider()) + .with_block(BlockId::Tag(BlockTag::Pending)); + + for mc in models_contracts { + let model_name = parse_cairo_short_string(&mc.model)?; + match world_reader.model_reader(&model_name).await { + Ok(_) => { + let contract = utils::get_contract_address(world, mc.contract).await?; + calls.push(world.revoke_writer_getcall(&mc.model, &contract.into())); + } + + Err(ModelError::ModelNotFound) => { + println!("Unknown model '{}' => IGNORED", model_name); + } + + Err(err) => { + return Err(err.into()); + } + } + } + + if !calls.is_empty() { + let res = world + .account + .execute(calls) + .send_with_cfg(&txn_config) + .await + .with_context(|| "Failed to send transaction")?; + + utils::handle_transaction_result( + &world.account.provider(), + res, + txn_config.wait, + txn_config.receipt, + ) + .await?; + } + + Ok(()) +} + +pub async fn revoke_owner( + world: &WorldContract, + owners_resources: Vec, + txn_config: TxnConfig, +) -> Result<()> +where + A: ConnectedAccount + Sync + Send + 'static, +{ + let mut calls = Vec::new(); + + for or in owners_resources { + let resource = match &or.resource { + ResourceType::Model(name) => *name, + ResourceType::Contract(name_or_address) => { + utils::get_contract_address(world, 
name_or_address.clone()).await? + } + }; + + calls.push(world.revoke_owner_getcall(&or.owner.into(), &resource)); + } + + let res = world + .account + .execute(calls) + .send_with_cfg(&txn_config) + .await + .with_context(|| "Failed to send transaction")?; - handle_transaction_result( + utils::handle_transaction_result( &world.account.provider(), res, - transaction.wait, - transaction.receipt, + txn_config.wait, + txn_config.receipt, ) .await?; diff --git a/crates/sozo/ops/src/call.rs b/crates/sozo/ops/src/call.rs new file mode 100644 index 0000000000..14bb487b5b --- /dev/null +++ b/crates/sozo/ops/src/call.rs @@ -0,0 +1,39 @@ +use anyhow::{Context, Result}; +use dojo_world::contracts::WorldContractReader; +use starknet::core::types::{BlockId, BlockTag, FieldElement, FunctionCall}; +use starknet::core::utils::get_selector_from_name; +use starknet::providers::Provider; + +use crate::utils::{get_contract_address_from_reader, parse_block_id}; + +pub async fn call( + world_reader: WorldContractReader
<P>
, + contract: String, + entrypoint: String, + calldata: Vec, + block_id: Option, +) -> Result<()> { + let contract_address = get_contract_address_from_reader(&world_reader, contract).await?; + let block_id = if let Some(block_id) = block_id { + parse_block_id(block_id)? + } else { + BlockId::Tag(BlockTag::Pending) + }; + + let output = world_reader + .provider() + .call( + FunctionCall { + contract_address, + entry_point_selector: get_selector_from_name(&entrypoint)?, + calldata, + }, + block_id, + ) + .await + .with_context(|| format!("Failed to call {entrypoint}"))?; + + println!("[ {} ]", output.iter().map(|o| format!("0x{:x}", o)).collect::>().join(" ")); + + Ok(()) +} diff --git a/crates/sozo/ops/src/events.rs b/crates/sozo/ops/src/events.rs index bca1ab3a67..10bf993f2d 100644 --- a/crates/sozo/ops/src/events.rs +++ b/crates/sozo/ops/src/events.rs @@ -1,11 +1,11 @@ use std::collections::{HashMap, VecDeque}; use std::fs; -use anyhow::{anyhow, Context, Result}; +use anyhow::{anyhow, Result}; use cainome::parser::tokens::{CompositeInner, CompositeInnerKind, CoreBasic, Token}; use cainome::parser::AbiParser; use camino::Utf8PathBuf; -use dojo_lang::compiler::{DEPLOYMENTS_DIR, MANIFESTS_DIR}; +use dojo_lang::compiler::MANIFESTS_DIR; use dojo_world::manifest::{AbiFormat, DeploymentManifest, ManifestMethods}; use starknet::core::types::{BlockId, EventFilter, FieldElement}; use starknet::core::utils::{parse_cairo_short_string, starknet_keccak}; @@ -35,16 +35,13 @@ pub async fn parse( event_filter: EventFilter, json: bool, manifest_dir: &Utf8PathBuf, + profile_name: &str, ) -> Result<()> { - let chain_id = provider.chain_id().await?; - let chain_id = - parse_cairo_short_string(&chain_id).with_context(|| "Cannot parse chain_id as string")?; - let events_map = if !json { let deployed_manifest = manifest_dir .join(MANIFESTS_DIR) - .join(DEPLOYMENTS_DIR) - .join(chain_id) + .join(profile_name) + .join("manifest") .with_extension("toml"); if !deployed_manifest.exists() { @@ -118,7 +115,7 @@ fn extract_events( } // Read the world and base ABI from scarb artifacts as the - // manifest does not include them. + // manifest does not include them (at least base is not included). 
let world_abi_path = manifest_dir.join("target/dev/dojo::world::world.json"); process_abi(&mut events_map, &world_abi_path)?; @@ -249,28 +246,29 @@ fn process_inners( #[cfg(test)] mod tests { + use cainome::parser::tokens::{Array, Composite, CompositeInner, CompositeType}; use camino::Utf8Path; use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; use dojo_world::manifest::BaseManifest; + use starknet::core::types::EmittedEvent; + + use super::*; #[test] fn extract_events_work_as_expected() { + let profile_name = "dev"; let manifest_dir = Utf8Path::new("../../../examples/spawn-and-move").to_path_buf(); - let manifest = - BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR)) - .unwrap() - .into(); + let manifest = BaseManifest::load_from_path( + &manifest_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + ) + .unwrap() + .into(); let result = extract_events(&manifest, &manifest_dir).unwrap(); // we are just collecting all events from manifest file so just verifying count should work - assert_eq!(result.len(), 12); + assert_eq!(result.len(), 11); } - use cainome::parser::tokens::{Array, Composite, CompositeInner, CompositeType}; - use starknet::core::types::EmittedEvent; - - use super::*; - #[test] fn test_core_basic() { let composite = Composite { diff --git a/crates/sozo/ops/src/execute.rs b/crates/sozo/ops/src/execute.rs index a496a2910c..8c7f403100 100644 --- a/crates/sozo/ops/src/execute.rs +++ b/crates/sozo/ops/src/execute.rs @@ -1,24 +1,24 @@ use anyhow::{Context, Result}; use dojo_world::contracts::world::WorldContract; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionExt; use starknet::accounts::{Call, ConnectedAccount}; use starknet::core::types::FieldElement; use starknet::core::utils::get_selector_from_name; -use super::get_contract_address; -use crate::utils::handle_transaction_result; +use crate::utils; pub async fn execute( contract: String, entrypoint: String, calldata: Vec, - world: WorldContract, - transaction: TxConfig, + world: &WorldContract, + txn_config: &TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, { - let contract_address = get_contract_address(&world, contract).await?; + let contract_address = utils::get_contract_address(world, contract).await?; let res = world .account .execute(vec![Call { @@ -26,10 +26,15 @@ where to: contract_address, selector: get_selector_from_name(&entrypoint)?, }]) - .send() + .send_with_cfg(txn_config) .await .with_context(|| "Failed to send transaction")?; - handle_transaction_result(&world.account.provider(), res, transaction.wait, transaction.receipt) - .await + utils::handle_transaction_result( + &world.account.provider(), + res, + txn_config.wait, + txn_config.receipt, + ) + .await } diff --git a/crates/sozo/ops/src/keystore.rs b/crates/sozo/ops/src/keystore.rs new file mode 100644 index 0000000000..a8d853ca80 --- /dev/null +++ b/crates/sozo/ops/src/keystore.rs @@ -0,0 +1,94 @@ +use std::path::PathBuf; + +use anyhow::Result; +use colored::Colorize; +use starknet::signers::SigningKey; +use starknet_crypto::FieldElement; + +pub fn new(password: Option, force: bool, file: PathBuf) -> Result<()> { + if file.exists() && !force { + anyhow::bail!("keystore file already exists"); + } + + let password = get_password(password)?; + + let key = SigningKey::from_random(); + key.save_as_keystore(&file, &password)?; + + println!("Created new encrypted keystore file: {}", std::fs::canonicalize(file)?.display()); + 
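+    // Only the encrypted private key is written to the keystore file; echo the derived public key for reference.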
println!("Public key: {}", format!("{:#064x}", key.verifying_key().scalar()).bright_yellow()); + + Ok(()) +} + +pub fn from_key( + force: bool, + private_key: Option, + password: Option, + file: PathBuf, +) -> Result<()> { + if file.exists() && !force { + anyhow::bail!("keystore file already exists"); + } + + let private_key = if let Some(private_key) = private_key { + private_key + } else { + rpassword::prompt_password("Enter private key: ")? + }; + let private_key = FieldElement::from_hex_be(private_key.trim())?; + + let password = get_password(password)?; + + let key = SigningKey::from_secret_scalar(private_key); + key.save_as_keystore(&file, &password)?; + + println!("Created new encrypted keystore file: {}", std::fs::canonicalize(file)?.display()); + println!("Public key: {:#064x}", key.verifying_key().scalar()); + + Ok(()) +} + +pub fn inspect(password: Option, raw: bool, file: PathBuf) -> Result<()> { + if !file.exists() { + anyhow::bail!("keystore file not found"); + } + + let password = get_password(password)?; + + let key = SigningKey::from_keystore(file, &password)?; + + if raw { + println!("{:#064x}", key.verifying_key().scalar()); + } else { + println!("Public key: {:#064x}", key.verifying_key().scalar()); + } + + Ok(()) +} + +pub fn inspect_private(password: Option, raw: bool, file: PathBuf) -> Result<()> { + if !file.exists() { + anyhow::bail!("keystore file not found"); + } + + let password = get_password(password)?; + + let key = SigningKey::from_keystore(file, &password)?; + + if raw { + println!("{:#064x}", key.secret_scalar()); + } else { + println!("Private key: {:#064x}", key.secret_scalar()); + } + + Ok(()) +} + +fn get_password(password: Option) -> std::io::Result { + if let Some(password) = password { + Ok(password) + } else { + rpassword::prompt_password("Enter password: ") + } +} diff --git a/crates/sozo/ops/src/lib.rs b/crates/sozo/ops/src/lib.rs index 676e8e86b8..4cd0b02a58 100644 --- a/crates/sozo/ops/src/lib.rs +++ b/crates/sozo/ops/src/lib.rs @@ -1,30 +1,14 @@ -use anyhow::Result; -use dojo_world::contracts::world::WorldContract; -use dojo_world::migration::strategy::generate_salt; -use starknet::accounts::ConnectedAccount; -use starknet::core::types::FieldElement; - +pub mod account; pub mod auth; +pub mod call; pub mod events; pub mod execute; +pub mod keystore; pub mod migration; pub mod model; pub mod register; +pub mod statistics; pub mod utils; -pub async fn get_contract_address( - world: &WorldContract, - name_or_address: String, -) -> Result { - if name_or_address.starts_with("0x") { - FieldElement::from_hex_be(&name_or_address).map_err(anyhow::Error::from) - } else { - let contract_class_hash = world.base().call().await?; - Ok(starknet::core::utils::get_contract_address( - generate_salt(&name_or_address), - contract_class_hash.into(), - &[], - world.address, - )) - } -} +#[cfg(test)] +pub mod tests; diff --git a/crates/sozo/ops/src/migration/migrate.rs b/crates/sozo/ops/src/migration/migrate.rs new file mode 100644 index 0000000000..1dd49310c4 --- /dev/null +++ b/crates/sozo/ops/src/migration/migrate.rs @@ -0,0 +1,867 @@ +use std::path::Path; + +use anyhow::{anyhow, bail, Context, Result}; +use camino::Utf8PathBuf; +use dojo_lang::compiler::{ABIS_DIR, BASE_DIR, DEPLOYMENTS_DIR, MANIFESTS_DIR}; +use dojo_world::contracts::abi::world; +use dojo_world::contracts::{cairo_utils, WorldContract}; +use dojo_world::manifest::{ + AbiFormat, BaseManifest, DeploymentManifest, DojoContract, DojoModel, Manifest, + ManifestMethods, WorldContract as 
ManifestWorldContract, WorldMetadata, +}; +use dojo_world::metadata::{dojo_metadata_from_workspace, ArtifactMetadata}; +use dojo_world::migration::class::ClassMigration; +use dojo_world::migration::contract::ContractMigration; +use dojo_world::migration::strategy::{generate_salt, prepare_for_migration, MigrationStrategy}; +use dojo_world::migration::world::WorldDiff; +use dojo_world::migration::{ + Declarable, Deployable, MigrationError, RegisterOutput, TxnConfig, Upgradable, +}; +use dojo_world::utils::{TransactionExt, TransactionWaiter}; +use futures::future; +use scarb::core::Workspace; +use scarb_ui::Ui; +use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; +use starknet::core::types::{ + BlockId, BlockTag, FunctionCall, InvokeTransactionResult, StarknetError, +}; +use starknet::core::utils::{ + cairo_short_string_to_felt, get_contract_address, get_selector_from_name, +}; +use starknet::providers::{Provider, ProviderError}; +use starknet::signers::Signer; +use starknet_crypto::FieldElement; +use tokio::fs; + +use super::ui::{bold_message, italic_message, MigrationUi}; +use super::{ + ContractDeploymentOutput, ContractMigrationOutput, ContractUpgradeOutput, MigrationOutput, +}; + +pub fn prepare_migration( + target_dir: &Utf8PathBuf, + diff: WorldDiff, + name: &str, + world_address: Option, + ui: &Ui, +) -> Result { + ui.print_step(3, "📦", "Preparing for migration..."); + + let name = cairo_short_string_to_felt(name).with_context(|| "Failed to parse World name.")?; + + let migration = prepare_for_migration(world_address, name, target_dir, diff) + .with_context(|| "Problem preparing for migration.")?; + + let info = migration.info(); + + ui.print_sub(format!( + "Total items to be migrated ({}): New {} Update {}", + info.new + info.update, + info.new, + info.update + )); + + Ok(migration) +} + +pub async fn apply_diff( + ws: &Workspace<'_>, + account: &SingleOwnerAccount, + txn_config: TxnConfig, + strategy: &mut MigrationStrategy, +) -> Result +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + let ui = ws.config().ui(); + + ui.print_step(4, "🛠", "Migrating..."); + ui.print(" "); + + let migration_output = execute_strategy(ws, strategy, account, txn_config) + .await + .map_err(|e| anyhow!(e)) + .with_context(|| "Problem trying to migrate.")?; + + if migration_output.full { + if let Some(block_number) = migration_output.world_block_number { + ui.print(format!( + "\n🎉 Successfully migrated World on block #{} at address {}\n", + block_number, + bold_message(format!( + "{:#x}", + strategy.world_address().expect("world address must exist") + )) + )); + } else { + ui.print(format!( + "\n🎉 Successfully migrated World at address {}\n", + bold_message(format!( + "{:#x}", + strategy.world_address().expect("world address must exist") + )) + )); + } + } else { + ui.print(format!( + "\n🚨 Partially migrated World at address {}", + bold_message(format!( + "{:#x}", + strategy.world_address().expect("world address must exist") + )) + )); + } + + Ok(migration_output) +} + +pub async fn execute_strategy( + ws: &Workspace<'_>, + strategy: &MigrationStrategy, + migrator: &SingleOwnerAccount, + txn_config: TxnConfig, +) -> Result +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + let ui = ws.config().ui(); + let mut world_tx_hash: Option = None; + let mut world_block_number: Option = None; + + match &strategy.base { + Some(base) => { + ui.print_header("# Base Contract"); + + match base.declare(migrator, 
&txn_config).await { + Ok(res) => { + ui.print_sub(format!("Class Hash: {:#x}", res.class_hash)); + } + Err(MigrationError::ClassAlreadyDeclared) => { + ui.print_sub(format!("Already declared: {:#x}", base.diff.local_class_hash)); + } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(&ui, base.artifact_path(), e)); + } + Err(e) => { + ui.verbose(format!("{e:?}")); + return Err(e.into()); + } + }; + } + None => {} + }; + + match &strategy.world { + Some(world) => { + ui.print_header("# World"); + + // If a migration is pending for the world, we upgrade only if the remote world + // already exists. + if world.diff.remote_class_hash.is_some() { + let _deploy_result = upgrade_contract( + world, + "world", + world.diff.original_class_hash, + strategy.base.as_ref().unwrap().diff.original_class_hash, + migrator, + &ui, + &txn_config, + ) + .await + .map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to upgrade world: {e}") + })?; + + ui.print_sub(format!( + "Upgraded Contract at address: {:#x}", + world.contract_address + )); + } else { + let calldata = vec![strategy.base.as_ref().unwrap().diff.local_class_hash]; + let deploy_result = + deploy_contract(world, "world", calldata.clone(), migrator, &ui, &txn_config) + .await + .map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to deploy world: {e}") + })?; + + (world_tx_hash, world_block_number) = + if let ContractDeploymentOutput::Output(deploy_result) = deploy_result { + (Some(deploy_result.transaction_hash), deploy_result.block_number) + } else { + (None, None) + }; + + ui.print_sub(format!("Contract address: {:#x}", world.contract_address)); + } + } + None => {} + }; + + let mut migration_output = MigrationOutput { + world_address: strategy.world_address()?, + world_tx_hash, + world_block_number, + full: false, + models: vec![], + contracts: vec![], + }; + + let world_address = strategy.world_address()?; + + // Once Torii supports indexing arrays, we should declare and register the + // ResourceMetadata model. + match register_dojo_models(&strategy.models, world_address, migrator, &ui, &txn_config).await { + Ok(output) => { + migration_output.models = output.registered_model_names; + } + Err(e) => { + ui.anyhow(&e); + return Ok(migration_output); + } + }; + + match register_dojo_contracts(&strategy.contracts, world_address, migrator, &ui, &txn_config) + .await + { + Ok(output) => { + migration_output.contracts = output; + } + Err(e) => { + ui.anyhow(&e); + return Ok(migration_output); + } + }; + + migration_output.full = true; + + Ok(migration_output) +} + +/// Upload a metadata as a IPFS artifact and then create a resource to register +/// into the Dojo resource registry. +/// +/// # Arguments +/// * `element_name` - fully qualified name of the element linked to the metadata +/// * `resource_id` - the id of the resource to create. +/// * `artifact` - the artifact to upload on IPFS. +/// +/// # Returns +/// A [`ResourceData`] object to register in the Dojo resource register +/// on success. +async fn upload_on_ipfs_and_create_resource( + ui: &Ui, + element_name: String, + resource_id: FieldElement, + artifact: ArtifactMetadata, +) -> Result { + match artifact.upload().await { + Ok(hash) => { + ui.print_sub(format!("{}: ipfs://{}", element_name, hash)); + create_resource_metadata(resource_id, hash) + } + Err(_) => Err(anyhow!("Failed to upload IPFS resource.")), + } +} + +/// Create a resource to register in the Dojo resource registry. 
+/// +/// # Arguments +/// * `resource_id` - the ID of the resource +/// * `hash` - the IPFS hash +/// +/// # Returns +/// A [`ResourceData`] object to register in the Dojo resource register +/// on success. +fn create_resource_metadata( + resource_id: FieldElement, + hash: String, +) -> Result { + let mut encoded_uri = cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; + + // Metadata is expecting an array of capacity 3. + if encoded_uri.len() < 3 { + encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); + } + + Ok(world::ResourceMetadata { resource_id, metadata_uri: encoded_uri }) +} + +/// Upload metadata of the world/models/contracts as IPFS artifacts and then +/// register them in the Dojo resource registry. +/// +/// # Arguments +/// +/// * `ws` - the workspace +/// * `migrator` - the account used to migrate +/// * `migration_output` - the output after having applied the migration plan. +pub async fn upload_metadata( + ws: &Workspace<'_>, + migrator: &SingleOwnerAccount, + migration_output: MigrationOutput, + txn_config: TxnConfig, +) -> Result<()> +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + let ui = ws.config().ui(); + + ui.print(" "); + ui.print_step(6, "🌐", "Uploading metadata..."); + ui.print(" "); + + let dojo_metadata = dojo_metadata_from_workspace(ws); + let mut ipfs = vec![]; + let mut resources = vec![]; + + // world + if migration_output.world_tx_hash.is_some() { + match dojo_metadata.world.upload().await { + Ok(hash) => { + let resource = create_resource_metadata(FieldElement::ZERO, hash.clone())?; + ui.print_sub(format!("world: ipfs://{}", hash)); + resources.push(resource); + } + Err(err) => { + ui.print_sub(format!("Failed to upload World metadata:\n{err}")); + } + } + } + + // models + if !migration_output.models.is_empty() { + for model_name in migration_output.models { + if let Some(m) = dojo_metadata.artifacts.get(&model_name) { + ipfs.push(upload_on_ipfs_and_create_resource( + &ui, + model_name.clone(), + get_selector_from_name(&model_name).expect("ASCII model name"), + m.clone(), + )); + } + } + } + + // contracts + let migrated_contracts = migration_output.contracts.into_iter().flatten().collect::>(); + + if !migrated_contracts.is_empty() { + for contract in migrated_contracts { + if let Some(m) = dojo_metadata.artifacts.get(&contract.name) { + ipfs.push(upload_on_ipfs_and_create_resource( + &ui, + contract.name.clone(), + contract.contract_address, + m.clone(), + )); + } + } + } + + // upload IPFS + resources.extend( + future::try_join_all(ipfs) + .await + .map_err(|_| anyhow!("Unable to upload IPFS artifacts."))?, + ); + + ui.print("> All IPFS artifacts have been successfully uploaded.".to_string()); + + // update the resource registry + let world = WorldContract::new(migration_output.world_address, migrator); + + let calls = resources.iter().map(|r| world.set_metadata_getcall(r)).collect::>(); + + let InvokeTransactionResult { transaction_hash } = + migrator.execute(calls).send_with_cfg(&txn_config).await.map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to register metadata into the resource registry: {e}") + })?; + + TransactionWaiter::new(transaction_hash, migrator.provider()).await?; + + ui.print(format!( + "> All metadata have been registered in the resource registry (tx hash: \ + {transaction_hash:#x})" + )); + + ui.print(""); + ui.print("\n✨ Done."); + + Ok(()) +} + +async fn register_dojo_models( + models: &[ClassMigration], + world_address: FieldElement, + migrator: 
&SingleOwnerAccount, + ui: &Ui, + txn_config: &TxnConfig, +) -> Result +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + if models.is_empty() { + return Ok(RegisterOutput { + transaction_hash: FieldElement::ZERO, + declare_output: vec![], + registered_model_names: vec![], + }); + } + + ui.print_header(format!("# Models ({})", models.len())); + + let mut declare_output = vec![]; + let mut registered_model_names = vec![]; + + for c in models.iter() { + ui.print(italic_message(&c.diff.name).to_string()); + + let res = c.declare(migrator, txn_config).await; + match res { + Ok(output) => { + ui.print_hidden_sub(format!("Declare transaction: {:#x}", output.transaction_hash)); + + declare_output.push(output); + } + + // Continue if model is already declared + Err(MigrationError::ClassAlreadyDeclared) => { + ui.print_sub(format!("Already declared: {:#x}", c.diff.local_class_hash)); + continue; + } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(ui, c.artifact_path(), e)); + } + Err(e) => { + ui.verbose(format!("{e:?}")); + bail!("Failed to declare model {}: {e}", c.diff.name) + } + } + + ui.print_sub(format!("Class hash: {:#x}", c.diff.local_class_hash)); + } + + let world = WorldContract::new(world_address, migrator); + + let calls = models + .iter() + .map(|c| { + registered_model_names.push(c.diff.name.clone()); + world.register_model_getcall(&c.diff.local_class_hash.into()) + }) + .collect::>(); + + let InvokeTransactionResult { transaction_hash } = + world.account.execute(calls).send_with_cfg(txn_config).await.map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to register models to World: {e}") + })?; + + TransactionWaiter::new(transaction_hash, migrator.provider()).await?; + + ui.print(format!("All models are registered at: {transaction_hash:#x}")); + + Ok(RegisterOutput { transaction_hash, declare_output, registered_model_names }) +} + +async fn register_dojo_contracts( + contracts: &Vec, + world_address: FieldElement, + migrator: &SingleOwnerAccount, + ui: &Ui, + txn_config: &TxnConfig, +) -> Result>> +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + if contracts.is_empty() { + return Ok(vec![]); + } + + ui.print_header(format!("# Contracts ({})", contracts.len())); + + let mut deploy_output = vec![]; + + for contract in contracts { + let name = &contract.diff.name; + ui.print(italic_message(name).to_string()); + match contract + .deploy_dojo_contract( + world_address, + contract.diff.local_class_hash, + contract.diff.base_class_hash, + migrator, + txn_config, + ) + .await + { + Ok(output) => { + if let Some(ref declare) = output.declare { + ui.print_hidden_sub(format!( + "Declare transaction: {:#x}", + declare.transaction_hash + )); + } + + // NOTE: this assignment may not look useful since we are dropping + // `MigrationStrategy` without actually using this value from it. 
+ // but some tests depend on this behaviour + // contract.contract_address = output.contract_address; + + if output.was_upgraded { + ui.print_hidden_sub(format!( + "Invoke transaction to upgrade: {:#x}", + output.transaction_hash + )); + ui.print_sub(format!( + "Contract address [upgraded]: {:#x}", + output.contract_address + )); + } else { + ui.print_hidden_sub(format!( + "Deploy transaction: {:#x}", + output.transaction_hash + )); + ui.print_sub(format!("Contract address: {:#x}", output.contract_address)); + } + deploy_output.push(Some(ContractMigrationOutput { + name: name.to_string(), + contract_address: output.contract_address, + base_class_hash: output.base_class_hash, + })); + } + Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { + ui.print_sub(format!("Already deployed: {:#x}", contract_address)); + deploy_output.push(None); + } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(ui, contract.artifact_path(), e)); + } + Err(e) => { + ui.verbose(format!("{e:?}")); + return Err(anyhow!("Failed to migrate {name}: {e}")); + } + } + } + + Ok(deploy_output) +} + +async fn deploy_contract( + contract: &ContractMigration, + contract_id: &str, + constructor_calldata: Vec, + migrator: &SingleOwnerAccount, + ui: &Ui, + txn_config: &TxnConfig, +) -> Result +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + match contract + .deploy(contract.diff.local_class_hash, constructor_calldata, migrator, txn_config) + .await + { + Ok(mut val) => { + if let Some(declare) = val.clone().declare { + ui.print_hidden_sub(format!( + "Declare transaction: {:#x}", + declare.transaction_hash + )); + } + + ui.print_hidden_sub(format!("Deploy transaction: {:#x}", val.transaction_hash)); + + val.name = Some(contract.diff.name.clone()); + Ok(ContractDeploymentOutput::Output(val)) + } + Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { + Ok(ContractDeploymentOutput::AlreadyDeployed(contract_address)) + } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(ui, contract.artifact_path(), e)); + } + Err(e) => { + ui.verbose(format!("{e:?}")); + Err(anyhow!("Failed to migrate {contract_id}: {e}")) + } + } +} + +async fn upgrade_contract( + contract: &ContractMigration, + contract_id: &str, + original_class_hash: FieldElement, + original_base_class_hash: FieldElement, + migrator: &SingleOwnerAccount, + ui: &Ui, + txn_config: &TxnConfig, +) -> Result +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + match contract + .upgrade_world( + contract.diff.local_class_hash, + original_class_hash, + original_base_class_hash, + migrator, + txn_config, + ) + .await + { + Ok(val) => { + if let Some(declare) = val.clone().declare { + ui.print_hidden_sub(format!( + "Declare transaction: {:#x}", + declare.transaction_hash + )); + } + + ui.print_hidden_sub(format!("Upgrade transaction: {:#x}", val.transaction_hash)); + + Ok(ContractUpgradeOutput::Output(val)) + } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(ui, contract.artifact_path(), e)); + } + Err(e) => { + ui.verbose(format!("{e:?}")); + Err(anyhow!("Failed to upgrade {contract_id}: {e}")) + } + } +} + +pub fn handle_artifact_error(ui: &Ui, artifact_path: &Path, error: anyhow::Error) -> anyhow::Error { + let path = artifact_path.to_string_lossy(); + let name = artifact_path.file_name().unwrap().to_string_lossy(); + ui.verbose(format!("{path}: {error:?}")); + + anyhow!( + "Discrepancy 
detected in {name}.\nUse `sozo clean` to clean your project or `sozo clean \ + --artifacts` to clean artifacts only.\nThen, rebuild your project with `sozo build`." + ) +} + +pub async fn get_contract_operation_name
<P>
( + provider: &P, + contract: &ContractMigration, + world_address: Option, +) -> String +where + P: Provider + Sync + Send + 'static, +{ + if let Some(world_address) = world_address { + if let Ok(base_class_hash) = provider + .call( + FunctionCall { + contract_address: world_address, + calldata: vec![], + entry_point_selector: get_selector_from_name("base").unwrap(), + }, + BlockId::Tag(BlockTag::Pending), + ) + .await + { + let contract_address = + get_contract_address(contract.salt, base_class_hash[0], &[], world_address); + + match provider + .get_class_hash_at(BlockId::Tag(BlockTag::Pending), contract_address) + .await + { + Ok(current_class_hash) if current_class_hash != contract.diff.local_class_hash => { + return format!("upgrade {}", contract.diff.name); + } + Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { + return format!("deploy {}", contract.diff.name); + } + Ok(_) => return "already deployed".to_string(), + Err(_) => return format!("deploy {}", contract.diff.name), + } + } + } + format!("deploy {}", contract.diff.name) +} + +pub async fn print_strategy
<P>
(ui: &Ui, provider: &P, strategy: &MigrationStrategy) +where + P: Provider + Sync + Send + 'static, +{ + ui.print("\n📋 Migration Strategy\n"); + + if let Some(base) = &strategy.base { + ui.print_header("# Base Contract"); + ui.print_sub(format!("declare (class hash: {:#x})\n", base.diff.local_class_hash)); + } + + if let Some(world) = &strategy.world { + ui.print_header("# World"); + ui.print_sub(format!("declare (class hash: {:#x})\n", world.diff.local_class_hash)); + } + + if !&strategy.models.is_empty() { + ui.print_header(format!("# Models ({})", &strategy.models.len())); + for m in &strategy.models { + ui.print_sub(format!( + "register {} (class hash: {:#x})", + m.diff.name, m.diff.local_class_hash + )); + } + ui.print(" "); + } + + if !&strategy.contracts.is_empty() { + ui.print_header(format!("# Contracts ({})", &strategy.contracts.len())); + for c in &strategy.contracts { + let op_name = get_contract_operation_name(provider, c, strategy.world_address).await; + ui.print_sub(format!("{op_name} (class hash: {:#x})", c.diff.local_class_hash)); + } + ui.print(" "); + } +} + +#[allow(clippy::too_many_arguments)] +pub async fn update_manifests_and_abis( + ws: &Workspace<'_>, + local_manifest: BaseManifest, + profile_dir: &Utf8PathBuf, + profile_name: &str, + rpc_url: &str, + world_address: FieldElement, + migration_output: Option, + salt: &str, +) -> Result<()> { + let ui = ws.config().ui(); + ui.print_step(5, "✨", "Updating manifests..."); + + let deployed_path = profile_dir.join("manifest").with_extension("toml"); + let deployed_path_json = profile_dir.join("manifest").with_extension("json"); + + let mut local_manifest: DeploymentManifest = local_manifest.into(); + + local_manifest.world.inner.metadata = Some(WorldMetadata { + profile_name: profile_name.to_string(), + rpc_url: rpc_url.to_string(), + }); + + if deployed_path.exists() { + let previous_manifest = DeploymentManifest::load_from_path(&deployed_path)?; + local_manifest.merge_from_previous(previous_manifest); + }; + + local_manifest.world.inner.address = Some(world_address); + local_manifest.world.inner.seed = salt.to_owned(); + + // when the migration has not been applied because in `plan` mode or because of an error, + // the `migration_output` is empty. 
+ if let Some(migration_output) = migration_output { + if migration_output.world_tx_hash.is_some() { + local_manifest.world.inner.transaction_hash = migration_output.world_tx_hash; + } + if migration_output.world_block_number.is_some() { + local_manifest.world.inner.block_number = migration_output.world_block_number; + } + + migration_output.contracts.iter().for_each(|contract_output| { + // ignore failed migration which are represented by None + if let Some(output) = contract_output { + // find the contract in local manifest and update its address and base class hash + let local = local_manifest + .contracts + .iter_mut() + .find(|c| c.name == output.name) + .expect("contract got migrated, means it should be present here"); + + local.inner.base_class_hash = output.base_class_hash; + } + }); + } + + local_manifest.contracts.iter_mut().for_each(|contract| { + let salt = generate_salt(&contract.name); + contract.inner.address = + Some(get_contract_address(salt, contract.inner.base_class_hash, &[], world_address)); + }); + + // copy abi files from `abi/base` to `abi/deployments/{chain_id}` and update abi path in + // local_manifest + update_manifest_abis(&mut local_manifest, profile_dir, profile_name).await; + + local_manifest.write_to_path_toml(&deployed_path)?; + local_manifest.write_to_path_json(&deployed_path_json, profile_dir)?; + ui.print("\n✨ Done."); + + Ok(()) +} + +async fn update_manifest_abis( + local_manifest: &mut DeploymentManifest, + profile_dir: &Utf8PathBuf, + profile_name: &str, +) { + fs::create_dir_all(profile_dir.join(ABIS_DIR).join(DEPLOYMENTS_DIR)) + .await + .expect("Failed to create folder"); + + async fn inner_helper( + profile_dir: &Utf8PathBuf, + profile_name: &str, + manifest: &mut Manifest, + ) where + T: ManifestMethods, + { + // Unwraps in call to abi is safe because we always write abis for DojoContracts as relative + // path. + // In this relative path, we only what the root from + // ABI directory. + let base_relative_path = manifest + .inner + .abi() + .unwrap() + .to_path() + .unwrap() + .strip_prefix(Utf8PathBuf::new().join(MANIFESTS_DIR).join(profile_name)) + .unwrap(); + + // The filename is safe to unwrap as it's always + // present in the base relative path. 
+ let deployed_relative_path = Utf8PathBuf::new().join(ABIS_DIR).join(DEPLOYMENTS_DIR).join( + base_relative_path + .strip_prefix(Utf8PathBuf::new().join(ABIS_DIR).join(BASE_DIR)) + .unwrap(), + ); + + let full_base_path = profile_dir.join(base_relative_path); + let full_deployed_path = profile_dir.join(deployed_relative_path.clone()); + + fs::create_dir_all(full_deployed_path.parent().unwrap()) + .await + .expect("Failed to create folder"); + + fs::copy(full_base_path, full_deployed_path).await.expect("Failed to copy abi file"); + + manifest.inner.set_abi(Some(AbiFormat::Path(deployed_relative_path))); + } + + inner_helper::(profile_dir, profile_name, &mut local_manifest.world) + .await; + + for contract in local_manifest.contracts.iter_mut() { + inner_helper::(profile_dir, profile_name, contract).await; + } + + for model in local_manifest.models.iter_mut() { + inner_helper::(profile_dir, profile_name, model).await; + } +} diff --git a/crates/sozo/ops/src/migration/migration_test.rs b/crates/sozo/ops/src/migration/migration_test.rs deleted file mode 100644 index 5bff89e52c..0000000000 --- a/crates/sozo/ops/src/migration/migration_test.rs +++ /dev/null @@ -1,172 +0,0 @@ -use camino::Utf8Path; -use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; -use dojo_test_utils::compiler::build_test_config; -use dojo_test_utils::migration::prepare_migration; -use dojo_test_utils::sequencer::{ - get_default_test_starknet_config, SequencerConfig, StarknetConfig, TestSequencer, -}; -use dojo_world::manifest::{BaseManifest, DeploymentManifest}; -use dojo_world::migration::strategy::prepare_for_migration; -use dojo_world::migration::world::WorldDiff; -use dojo_world::migration::TxConfig; -use scarb::ops; -use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; -use starknet::core::chain_id; -use starknet::core::types::{BlockId, BlockTag}; -use starknet::macros::felt; -use starknet::providers::jsonrpc::HttpTransport; -use starknet::providers::JsonRpcClient; -use starknet::signers::{LocalWallet, SigningKey}; - -use crate::migration::execute_strategy; - -#[tokio::test(flavor = "multi_thread")] -async fn migrate_with_auto_mine() { - let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); - let ws = ops::read_workspace(config.manifest_path(), &config) - .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - - let base_dir = "../../../examples/spawn-and-move"; - let target_dir = format!("{}/target/dev", base_dir); - let migration = prepare_migration(base_dir.into(), target_dir.into()).unwrap(); - - let sequencer = - TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; - - let mut account = sequencer.account(); - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - execute_strategy(&ws, &migration, &account, None).await.unwrap(); - - sequencer.stop().unwrap(); -} - -#[tokio::test(flavor = "multi_thread")] -async fn migrate_with_block_time() { - let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); - let ws = ops::read_workspace(config.manifest_path(), &config) - .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - - let base = "../../../examples/spawn-and-move"; - let target_dir = format!("{}/target/dev", base); - let migration = prepare_migration(base.into(), target_dir.into()).unwrap(); - - let sequencer = TestSequencer::start( - SequencerConfig { block_time: Some(1000), ..Default::default() }, - get_default_test_starknet_config(), - ) - .await; - - let mut account = 
sequencer.account(); - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - execute_strategy(&ws, &migration, &account, None).await.unwrap(); - sequencer.stop().unwrap(); -} - -#[tokio::test(flavor = "multi_thread")] -async fn migrate_with_small_fee_multiplier_will_fail() { - let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); - let ws = ops::read_workspace(config.manifest_path(), &config) - .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - - let base = "../../../examples/spawn-and-move"; - let target_dir = format!("{}/target/dev", base); - let migration = prepare_migration(base.into(), target_dir.into()).unwrap(); - - let sequencer = TestSequencer::start( - Default::default(), - StarknetConfig { disable_fee: false, ..Default::default() }, - ) - .await; - - let account = SingleOwnerAccount::new( - JsonRpcClient::new(HttpTransport::new(sequencer.url())), - LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - sequencer.raw_account().private_key, - )), - sequencer.raw_account().account_address, - chain_id::TESTNET, - ExecutionEncoding::New, - ); - - assert!( - execute_strategy( - &ws, - &migration, - &account, - Some(TxConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, receipt: false }), - ) - .await - .is_err() - ); - sequencer.stop().unwrap(); -} - -#[test] -fn migrate_world_without_seed_will_fail() { - let base = "../../../examples/spawn-and-move"; - let target_dir = format!("{}/target/dev", base); - let manifest = BaseManifest::load_from_path( - &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(BASE_DIR), - ) - .unwrap(); - let world = WorldDiff::compute(manifest, None); - let res = prepare_for_migration(None, None, &Utf8Path::new(&target_dir).to_path_buf(), world); - assert!(res.is_err_and(|e| e.to_string().contains("Missing seed for World deployment."))) -} - -#[ignore] -#[tokio::test] -async fn migration_from_remote() { - let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); - let ws = ops::read_workspace(config.manifest_path(), &config) - .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - let base = "../../../examples/spawn-and-move"; - let target_dir = format!("{}/target/dev", base); - - let sequencer = - TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; - - let account = SingleOwnerAccount::new( - JsonRpcClient::new(HttpTransport::new(sequencer.url())), - LocalWallet::from_signing_key(SigningKey::from_secret_scalar( - sequencer.raw_account().private_key, - )), - sequencer.raw_account().account_address, - chain_id::TESTNET, - ExecutionEncoding::New, - ); - - let manifest = BaseManifest::load_from_path( - &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(BASE_DIR), - ) - .unwrap(); - let world = WorldDiff::compute(manifest, None); - - let migration = prepare_for_migration( - None, - Some(felt!("0x12345")), - &Utf8Path::new(&target_dir).to_path_buf(), - world, - ) - .unwrap(); - - execute_strategy(&ws, &migration, &account, None).await.unwrap(); - - let local_manifest = BaseManifest::load_from_path( - &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(BASE_DIR), - ) - .unwrap(); - let remote_manifest = DeploymentManifest::load_from_remote( - JsonRpcClient::new(HttpTransport::new(sequencer.url())), - migration.world_address().unwrap(), - ) - .await - .unwrap(); - - sequencer.stop().unwrap(); - - assert_eq!(local_manifest.world.inner.class_hash, remote_manifest.world.inner.class_hash); - 
assert_eq!(local_manifest.models.len(), remote_manifest.models.len()); -} diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index 679855a2a4..b193826a06 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -1,44 +1,22 @@ -use std::path::Path; - -use anyhow::{anyhow, bail, Context, Result}; -use camino::Utf8PathBuf; -use dojo_lang::compiler::{ABIS_DIR, BASE_DIR, DEPLOYMENTS_DIR, MANIFESTS_DIR, OVERLAYS_DIR}; -use dojo_world::contracts::abi::world::ResourceMetadata; -use dojo_world::contracts::cairo_utils; -use dojo_world::contracts::world::WorldContract; -use dojo_world::manifest::{ - AbiFormat, AbstractManifestError, BaseManifest, DeploymentManifest, DojoContract, Manifest, - ManifestMethods, OverlayManifest, -}; -use dojo_world::metadata::dojo_metadata_from_workspace; -use dojo_world::migration::contract::ContractMigration; -use dojo_world::migration::strategy::{generate_salt, prepare_for_migration, MigrationStrategy}; +use anyhow::{anyhow, Result}; +use dojo_lang::compiler::MANIFESTS_DIR; use dojo_world::migration::world::WorldDiff; -use dojo_world::migration::{ - Declarable, DeployOutput, Deployable, MigrationError, RegisterOutput, StateDiff, TxConfig, -}; -use dojo_world::utils::TransactionWaiter; +use dojo_world::migration::{DeployOutput, TxnConfig, UpgradeOutput}; use scarb::core::Workspace; -use scarb_ui::Ui; -use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; -use starknet::core::types::{ - BlockId, BlockTag, FieldElement, FunctionCall, InvokeTransactionResult, StarknetError, -}; -use starknet::core::utils::{ - cairo_short_string_to_felt, get_contract_address, get_selector_from_name, -}; -use starknet::providers::{Provider, ProviderError}; -use tokio::fs; +use starknet::accounts::{ConnectedAccount, SingleOwnerAccount}; +use starknet::core::types::FieldElement; +use starknet::providers::Provider; +use starknet::signers::Signer; -#[cfg(test)] -#[path = "migration_test.rs"] -mod migration_test; +mod migrate; mod ui; +mod utils; -use starknet::signers::Signer; -use ui::MigrationUi; - -use self::ui::{bold_message, italic_message}; +use self::migrate::update_manifests_and_abis; +pub use self::migrate::{ + apply_diff, execute_strategy, prepare_migration, print_strategy, upload_metadata, +}; +use self::ui::MigrationUi; #[derive(Debug, Default, Clone)] pub struct MigrationOutput { @@ -48,15 +26,27 @@ pub struct MigrationOutput { // Represents if full migration got completeled. // If false that means migration got partially completed. pub full: bool, + + pub models: Vec, + pub contracts: Vec>, } +#[derive(Debug, Default, Clone)] +pub struct ContractMigrationOutput { + pub name: String, + pub contract_address: FieldElement, + pub base_class_hash: FieldElement, +} + +#[allow(clippy::too_many_arguments)] pub async fn migrate( ws: &Workspace<'_>, world_address: Option, - chain_id: String, + rpc_url: String, account: &SingleOwnerAccount, - name: Option, + name: &str, dry_run: bool, + txn_config: TxnConfig, ) -> Result<()> where P: Provider + Sync + Send + 'static, @@ -64,24 +54,28 @@ where { let ui = ws.config().ui(); - // Setup account for migration and fetch world address if it exists. 
- ui.print(format!("Chain ID: {}\n", &chain_id)); - // its path to a file so `parent` should never return `None` let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let profile_name = + ws.current_profile().expect("Scarb profile expected to be defined.").to_string(); + let profile_dir = manifest_dir.join(MANIFESTS_DIR).join(&profile_name); + let target_dir = ws.target_dir().path_existent().unwrap(); let target_dir = target_dir.join(ws.config().profile().as_str()); // Load local and remote World manifests. let (local_manifest, remote_manifest) = - load_world_manifests(&manifest_dir, account, world_address, &ui).await.map_err(|e| { - ui.error(e.to_string()); - anyhow!( - "\n Use `sozo clean` to clean your project, or `sozo clean --manifests-abis` to \ - clean manifest and abi files only.\nThen, rebuild your project with `sozo build`.", - ) - })?; + utils::load_world_manifests(&profile_dir, account, world_address, &ui).await.map_err( + |e| { + ui.error(e.to_string()); + anyhow!( + "\n Use `sozo clean` to clean your project, or `sozo clean --manifests-abis` \ + to clean manifest and abi files only.\nThen, rebuild your project with `sozo \ + build`.", + ) + }, + )?; // Calculate diff between local and remote World manifests. ui.print_step(2, "🧰", "Evaluating Worlds diff..."); @@ -94,417 +88,63 @@ where return Ok(()); } - let strategy = prepare_migration(&target_dir, diff, name.clone(), world_address, &ui)?; + let mut strategy = prepare_migration(&target_dir, diff, name, world_address, &ui)?; let world_address = strategy.world_address().expect("world address must exist"); if dry_run { print_strategy(&ui, account.provider(), &strategy).await; + + update_manifests_and_abis( + ws, + local_manifest, + &profile_dir, + &profile_name, + &rpc_url, + world_address, + None, + name, + ) + .await?; } else { // Migrate according to the diff. 
- match apply_diff(ws, account, None, &strategy).await { - Ok(migration_output) => { - update_manifests_and_abis( - ws, - local_manifest, - remote_manifest, - &manifest_dir, - migration_output, - &chain_id, - name.as_ref(), - ) - .await?; - } + let migration_output = match apply_diff(ws, account, txn_config, &mut strategy).await { + Ok(migration_output) => Some(migration_output), Err(e) => { update_manifests_and_abis( ws, local_manifest, - remote_manifest, - &manifest_dir, - MigrationOutput { world_address, ..Default::default() }, - &chain_id, - name.as_ref(), + &profile_dir, + &profile_name, + &rpc_url, + world_address, + None, + name, ) .await?; return Err(e)?; } - } - }; - - Ok(()) -} - -fn build_deployed_path(manifest_dir: &Utf8PathBuf, chain_id: &str, extension: &str) -> Utf8PathBuf { - manifest_dir.join(MANIFESTS_DIR).join(DEPLOYMENTS_DIR).join(chain_id).with_extension(extension) -} - -async fn update_manifests_and_abis( - ws: &Workspace<'_>, - local_manifest: BaseManifest, - remote_manifest: Option, - manifest_dir: &Utf8PathBuf, - migration_output: MigrationOutput, - chain_id: &str, - salt: Option<&String>, -) -> Result<()> { - let ui = ws.config().ui(); - ui.print("\n✨ Updating manifests..."); - - let deployed_path = build_deployed_path(manifest_dir, chain_id, "toml"); - let deployed_path_json = build_deployed_path(manifest_dir, chain_id, "json"); - - let mut local_manifest: DeploymentManifest = local_manifest.into(); - - if deployed_path.exists() { - let previous_manifest = DeploymentManifest::load_from_path(&deployed_path)?; - local_manifest.merge_from_previous(previous_manifest); - }; - - local_manifest.world.inner.address = Some(migration_output.world_address); - if let Some(salt) = salt { - local_manifest.world.inner.seed = Some(salt.to_owned()); - } - - if migration_output.world_tx_hash.is_some() { - local_manifest.world.inner.transaction_hash = migration_output.world_tx_hash; - } - if migration_output.world_block_number.is_some() { - local_manifest.world.inner.block_number = migration_output.world_block_number; - } - - let base_class_hash = match remote_manifest { - Some(manifest) => *manifest.base.inner.class_hash(), - None => *local_manifest.base.inner.class_hash(), - }; - - local_manifest.contracts.iter_mut().for_each(|c| { - let salt = generate_salt(&c.name); - c.inner.address = - Some(get_contract_address(salt, base_class_hash, &[], migration_output.world_address)); - }); - - // copy abi files from `abi/base` to `abi/deployments/{chain_id}` and update abi path in - // local_manifest - update_manifest_abis(&mut local_manifest, manifest_dir, chain_id).await; - - local_manifest.write_to_path_toml(&deployed_path)?; - local_manifest.write_to_path_json(&deployed_path_json, manifest_dir)?; - ui.print("\n✨ Done."); - - Ok(()) -} - -async fn update_manifest_abis( - local_manifest: &mut DeploymentManifest, - manifest_dir: &Utf8PathBuf, - chain_id: &str, -) { - fs::create_dir_all(manifest_dir.join(ABIS_DIR).join(DEPLOYMENTS_DIR)) - .await - .expect("Failed to create folder"); - - async fn inner_helper(manifest_dir: &Utf8PathBuf, manifest: &mut Manifest, chain_id: &str) - where - T: ManifestMethods, - { - // unwraps in call to abi is safe because we always write abis for DojoContracts as relative - // path. 
- let base_relative_path = manifest.inner.abi().unwrap().to_path().unwrap(); - let deployed_relative_path = - Utf8PathBuf::new().join(ABIS_DIR).join(DEPLOYMENTS_DIR).join(chain_id).join( - base_relative_path - .strip_prefix(Utf8PathBuf::new().join(ABIS_DIR).join(BASE_DIR)) - .unwrap(), - ); - - let full_base_path = manifest_dir.join(base_relative_path); - let full_deployed_path = manifest_dir.join(deployed_relative_path.clone()); - - fs::create_dir_all(full_deployed_path.parent().unwrap()) - .await - .expect("Failed to create folder"); - fs::copy(full_base_path, full_deployed_path).await.expect("Failed to copy abi file"); - manifest.inner.set_abi(Some(AbiFormat::Path(deployed_relative_path))); - } - - for contract in local_manifest.contracts.iter_mut() { - inner_helper::(manifest_dir, contract, chain_id).await; - } -} - -pub async fn apply_diff( - ws: &Workspace<'_>, - account: &SingleOwnerAccount, - txn_config: Option, - strategy: &MigrationStrategy, -) -> Result -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - let ui = ws.config().ui(); - - println!(" "); - - let migration_output = execute_strategy(ws, strategy, account, txn_config) - .await - .map_err(|e| anyhow!(e)) - .with_context(|| "Problem trying to migrate.")?; - - if migration_output.full { - if let Some(block_number) = migration_output.world_block_number { - ui.print(format!( - "\n🎉 Successfully migrated World on block #{} at address {}", - block_number, - bold_message(format!( - "{:#x}", - strategy.world_address().expect("world address must exist") - )) - )); - } else { - ui.print(format!( - "\n🎉 Successfully migrated World at address {}", - bold_message(format!( - "{:#x}", - strategy.world_address().expect("world address must exist") - )) - )); - } - } else { - ui.print(format!( - "\n🚨 Partially migrated World at address {}", - bold_message(format!( - "{:#x}", - strategy.world_address().expect("world address must exist") - )) - )); - } - - Ok(migration_output) -} - -async fn load_world_manifests( - manifest_dir: &Utf8PathBuf, - account: &SingleOwnerAccount, - world_address: Option, - ui: &Ui, -) -> Result<(BaseManifest, Option)> -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - ui.print_step(1, "🌎", "Building World state..."); - - let mut local_manifest = - BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR)) - .map_err(|_| anyhow!("Fail to load local manifest file."))?; - - let overlay_path = manifest_dir.join(MANIFESTS_DIR).join(OVERLAYS_DIR); - if overlay_path.exists() { - let overlay_manifest = - OverlayManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(OVERLAYS_DIR)) - .map_err(|_| anyhow!("Fail to load overlay manifest file."))?; - - // merge user defined changes to base manifest - local_manifest.merge(overlay_manifest); - } - - let remote_manifest = if let Some(address) = world_address { - match DeploymentManifest::load_from_remote(account.provider(), address).await { - Ok(manifest) => { - ui.print_sub(format!("Found remote World: {address:#x}")); - Some(manifest) - } - Err(AbstractManifestError::RemoteWorldNotFound) => None, - Err(e) => { - ui.verbose(format!("{e:?}")); - return Err(anyhow!("Failed to build remote World state: {e}")); + }; + + update_manifests_and_abis( + ws, + local_manifest.clone(), + &profile_dir, + &profile_name, + &rpc_url, + world_address, + migration_output.clone(), + name, + ) + .await?; + + if let Some(migration_output) = migration_output { + if !ws.config().offline() { + 
upload_metadata(ws, account, migration_output, txn_config).await?; } } - } else { - None - }; - - if remote_manifest.is_none() { - ui.print_sub("No remote World found"); - } - - Ok((local_manifest, remote_manifest)) -} - -pub fn prepare_migration( - target_dir: &Utf8PathBuf, - diff: WorldDiff, - name: Option, - world_address: Option, - ui: &Ui, -) -> Result { - ui.print_step(3, "📦", "Preparing for migration..."); - - if name.is_none() && !diff.world.is_same() { - bail!( - "World name is required when attempting to migrate the World contract. Please provide \ - it using `--name`." - ); - } - - let name = if let Some(name) = name { - Some(cairo_short_string_to_felt(&name).with_context(|| "Failed to parse World name.")?) - } else { - None }; - let migration = prepare_for_migration(world_address, name, target_dir, diff) - .with_context(|| "Problem preparing for migration.")?; - - let info = migration.info(); - - ui.print_sub(format!( - "Total items to be migrated ({}): New {} Update {}", - info.new + info.update, - info.new, - info.update - )); - - Ok(migration) -} - -pub async fn execute_strategy( - ws: &Workspace<'_>, - strategy: &MigrationStrategy, - migrator: &SingleOwnerAccount, - txn_config: Option, -) -> Result -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - let ui = ws.config().ui(); - let mut world_tx_hash: Option = None; - let mut world_block_number: Option = None; - - match &strategy.base { - Some(base) => { - ui.print_header("# Base Contract"); - - match base.declare(migrator, txn_config.unwrap_or_default()).await { - Ok(res) => { - ui.print_sub(format!("Class Hash: {:#x}", res.class_hash)); - } - Err(MigrationError::ClassAlreadyDeclared) => { - ui.print_sub(format!("Already declared: {:#x}", base.diff.local)); - } - Err(MigrationError::ArtifactError(e)) => { - return Err(handle_artifact_error(&ui, base.artifact_path(), e)); - } - Err(e) => { - ui.verbose(format!("{e:?}")); - return Err(e.into()); - } - }; - } - None => {} - }; - - match &strategy.world { - Some(world) => { - ui.print_header("# World"); - - let calldata = vec![strategy.base.as_ref().unwrap().diff.local]; - let deploy_result = - deploy_contract(world, "world", calldata.clone(), migrator, &ui, &txn_config) - .await - .map_err(|e| { - ui.verbose(format!("{e:?}")); - anyhow!("Failed to deploy world: {e}") - })?; - - (world_tx_hash, world_block_number) = - if let ContractDeploymentOutput::Output(deploy_result) = deploy_result { - (Some(deploy_result.transaction_hash), deploy_result.block_number) - } else { - (None, None) - }; - - ui.print_sub(format!("Contract address: {:#x}", world.contract_address)); - - let offline = ws.config().offline(); - - if offline { - ui.print_sub("Skipping metadata upload because of offline mode"); - } else { - let metadata = dojo_metadata_from_workspace(ws); - if let Some(meta) = metadata.as_ref().and_then(|inner| inner.world()) { - match meta.upload().await { - Ok(hash) => { - let mut encoded_uri = - cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; - - // Metadata is expecting an array of capacity 3. 
- if encoded_uri.len() < 3 { - encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); - } - - let world_metadata = ResourceMetadata { - resource_id: FieldElement::ZERO, - metadata_uri: encoded_uri, - }; - - let InvokeTransactionResult { transaction_hash } = - WorldContract::new(world.contract_address, migrator) - .set_metadata(&world_metadata) - .send() - .await - .map_err(|e| { - ui.verbose(format!("{e:?}")); - anyhow!("Failed to set World metadata: {e}") - })?; - - TransactionWaiter::new(transaction_hash, migrator.provider()).await?; - - ui.print_sub(format!( - "Set Metadata transaction: {:#x}", - transaction_hash - )); - ui.print_sub(format!("Metadata uri: ipfs://{hash}")); - } - Err(err) => { - ui.print_sub(format!("Failed to set World metadata:\n{err}")); - } - } - } - } - } - None => {} - }; - - let mut migration_output = MigrationOutput { - world_address: strategy.world_address()?, - world_tx_hash, - world_block_number, - full: false, - }; - - // Once Torii supports indexing arrays, we should declare and register the - // ResourceMetadata model. - - match register_models(strategy, migrator, &ui, txn_config).await { - Ok(_) => (), - Err(e) => { - ui.anyhow(&e); - return Ok(migration_output); - } - } - match deploy_contracts(strategy, migrator, &ui, txn_config).await { - Ok(_) => (), - Err(e) => { - ui.anyhow(&e); - return Ok(migration_output); - } - }; - - migration_output.full = true; - - Ok(migration_output) + Ok(()) } enum ContractDeploymentOutput { @@ -512,261 +152,6 @@ enum ContractDeploymentOutput { Output(DeployOutput), } -async fn deploy_contract( - contract: &ContractMigration, - contract_id: &str, - constructor_calldata: Vec, - migrator: &SingleOwnerAccount, - ui: &Ui, - txn_config: &Option, -) -> Result -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - match contract - .deploy(contract.diff.local, constructor_calldata, migrator, txn_config.unwrap_or_default()) - .await - { - Ok(val) => { - if let Some(declare) = val.clone().declare { - ui.print_hidden_sub(format!( - "Declare transaction: {:#x}", - declare.transaction_hash - )); - } - - ui.print_hidden_sub(format!("Deploy transaction: {:#x}", val.transaction_hash)); - - Ok(ContractDeploymentOutput::Output(val)) - } - Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { - Ok(ContractDeploymentOutput::AlreadyDeployed(contract_address)) - } - Err(MigrationError::ArtifactError(e)) => { - return Err(handle_artifact_error(ui, contract.artifact_path(), e)); - } - Err(e) => { - ui.verbose(format!("{e:?}")); - Err(anyhow!("Failed to migrate {contract_id}: {e}")) - } - } -} - -async fn register_models( - strategy: &MigrationStrategy, - migrator: &SingleOwnerAccount, - ui: &Ui, - txn_config: Option, -) -> Result> -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - let models = &strategy.models; - - if models.is_empty() { - return Ok(None); - } - - ui.print_header(format!("# Models ({})", models.len())); - - let mut declare_output = vec![]; - - for c in models.iter() { - ui.print(italic_message(&c.diff.name).to_string()); - - let res = c.declare(migrator, txn_config.unwrap_or_default()).await; - match res { - Ok(output) => { - ui.print_hidden_sub(format!("Declare transaction: {:#x}", output.transaction_hash)); - - declare_output.push(output); - } - - // Continue if model is already declared - Err(MigrationError::ClassAlreadyDeclared) => { - ui.print_sub(format!("Already declared: {:#x}", c.diff.local)); - continue; - } - 
Err(MigrationError::ArtifactError(e)) => { - return Err(handle_artifact_error(ui, c.artifact_path(), e)); - } - Err(e) => { - ui.verbose(format!("{e:?}")); - bail!("Failed to declare model {}: {e}", c.diff.name) - } - } - - ui.print_sub(format!("Class hash: {:#x}", c.diff.local)); - } - - let world_address = strategy.world_address()?; - let world = WorldContract::new(world_address, migrator); - - let calls = models - .iter() - .map(|c| world.register_model_getcall(&c.diff.local.into())) - .collect::>(); - - let InvokeTransactionResult { transaction_hash } = - migrator.execute(calls).send().await.map_err(|e| { - ui.verbose(format!("{e:?}")); - anyhow!("Failed to register models to World: {e}") - })?; - - TransactionWaiter::new(transaction_hash, migrator.provider()).await?; - - ui.print(format!("All models are registered at: {transaction_hash:#x}")); - - Ok(Some(RegisterOutput { transaction_hash, declare_output })) -} - -async fn deploy_contracts( - strategy: &MigrationStrategy, - migrator: &SingleOwnerAccount, - ui: &Ui, - txn_config: Option, -) -> Result>> -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - let contracts = &strategy.contracts; - - if contracts.is_empty() { - return Ok(vec![]); - } - - ui.print_header(format!("# Contracts ({})", contracts.len())); - - let mut deploy_output = vec![]; - - let world_address = strategy.world_address()?; - - for contract in strategy.contracts.iter() { - let name = &contract.diff.name; - ui.print(italic_message(name).to_string()); - match contract - .world_deploy( - world_address, - contract.diff.local, - migrator, - txn_config.unwrap_or_default(), - ) - .await - { - Ok(output) => { - if let Some(ref declare) = output.declare { - ui.print_hidden_sub(format!( - "Declare transaction: {:#x}", - declare.transaction_hash - )); - } - - ui.print_hidden_sub(format!("Deploy transaction: {:#x}", output.transaction_hash)); - ui.print_sub(format!("Contract address: {:#x}", output.contract_address)); - deploy_output.push(Some(output)); - } - Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { - ui.print_sub(format!("Already deployed: {:#x}", contract_address)); - deploy_output.push(None); - } - Err(MigrationError::ArtifactError(e)) => { - return Err(handle_artifact_error(ui, contract.artifact_path(), e)); - } - Err(e) => { - ui.verbose(format!("{e:?}")); - return Err(anyhow!("Failed to migrate {name}: {e}")); - } - } - } - - Ok(deploy_output) -} - -pub fn handle_artifact_error(ui: &Ui, artifact_path: &Path, error: anyhow::Error) -> anyhow::Error { - let path = artifact_path.to_string_lossy(); - let name = artifact_path.file_name().unwrap().to_string_lossy(); - ui.verbose(format!("{path}: {error:?}")); - - anyhow!( - "Discrepancy detected in {name}.\nUse `sozo clean` to clean your project or `sozo clean \ - --artifacts` to clean artifacts only.\nThen, rebuild your project with `sozo build`." - ) -} - -pub async fn get_contract_operation_name
<P>
( - provider: &P, - contract: &ContractMigration, - world_address: Option, -) -> String -where - P: Provider + Sync + Send + 'static, -{ - if let Some(world_address) = world_address { - if let Ok(base_class_hash) = provider - .call( - FunctionCall { - contract_address: world_address, - calldata: vec![], - entry_point_selector: get_selector_from_name("base").unwrap(), - }, - BlockId::Tag(BlockTag::Pending), - ) - .await - { - let contract_address = - get_contract_address(contract.salt, base_class_hash[0], &[], world_address); - - match provider - .get_class_hash_at(BlockId::Tag(BlockTag::Pending), contract_address) - .await - { - Ok(current_class_hash) if current_class_hash != contract.diff.local => { - return format!("upgrade {}", contract.diff.name); - } - Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { - return format!("deploy {}", contract.diff.name); - } - Ok(_) => return "already deployed".to_string(), - Err(_) => return format!("deploy {}", contract.diff.name), - } - } - } - format!("deploy {}", contract.diff.name) -} - -pub async fn print_strategy
<P>
(ui: &Ui, provider: &P, strategy: &MigrationStrategy) -where - P: Provider + Sync + Send + 'static, -{ - ui.print("\n📋 Migration Strategy\n"); - - if let Some(base) = &strategy.base { - ui.print_header("# Base Contract"); - ui.print_sub(format!("declare (class hash: {:#x})\n", base.diff.local)); - } - - if let Some(world) = &strategy.world { - ui.print_header("# World"); - ui.print_sub(format!("declare (class hash: {:#x})\n", world.diff.local)); - } - - if !&strategy.models.is_empty() { - ui.print_header(format!("# Models ({})", &strategy.models.len())); - for m in &strategy.models { - ui.print_sub(format!("register {} (class hash: {:#x})", m.diff.name, m.diff.local)); - } - ui.print(" "); - } - - if !&strategy.contracts.is_empty() { - ui.print_header(format!("# Contracts ({})", &strategy.contracts.len())); - for c in &strategy.contracts { - let op_name = get_contract_operation_name(provider, c, strategy.world_address).await; - ui.print_sub(format!("{op_name} (class hash: {:#x})", c.diff.local)); - } - ui.print(" "); - } +enum ContractUpgradeOutput { + Output(UpgradeOutput), } diff --git a/crates/sozo/ops/src/migration/utils.rs b/crates/sozo/ops/src/migration/utils.rs new file mode 100644 index 0000000000..d21e7c4c29 --- /dev/null +++ b/crates/sozo/ops/src/migration/utils.rs @@ -0,0 +1,63 @@ +use anyhow::{anyhow, Result}; +use camino::Utf8PathBuf; +use dojo_lang::compiler::{BASE_DIR, OVERLAYS_DIR}; +use dojo_world::manifest::{ + AbstractManifestError, BaseManifest, DeploymentManifest, OverlayManifest, +}; +use scarb_ui::Ui; +use starknet::accounts::{ConnectedAccount, SingleOwnerAccount}; +use starknet::providers::Provider; +use starknet::signers::Signer; +use starknet_crypto::FieldElement; + +use super::ui::MigrationUi; + +/// Loads: +/// - `BaseManifest` from filesystem +/// - `DeployedManifest` from onchain dataa if `world_address` is `Some` +pub(super) async fn load_world_manifests( + profile_dir: &Utf8PathBuf, + account: &SingleOwnerAccount, + world_address: Option, + ui: &Ui, +) -> Result<(BaseManifest, Option)> +where + P: Provider + Sync + Send, + S: Signer + Sync + Send, +{ + ui.print_step(1, "🌎", "Building World state..."); + + let mut local_manifest = BaseManifest::load_from_path(&profile_dir.join(BASE_DIR)) + .map_err(|e| anyhow!("Fail to load local manifest file: {e}."))?; + + let overlay_path = profile_dir.join(OVERLAYS_DIR); + if overlay_path.exists() { + let overlay_manifest = OverlayManifest::load_from_path(&profile_dir.join(OVERLAYS_DIR)) + .map_err(|e| anyhow!("Fail to load overlay manifest file: {e}."))?; + + // merge user defined changes to base manifest + local_manifest.merge(overlay_manifest); + } + + let remote_manifest = if let Some(address) = world_address { + match DeploymentManifest::load_from_remote(account.provider(), address).await { + Ok(manifest) => { + ui.print_sub(format!("Found remote World: {address:#x}")); + Some(manifest) + } + Err(AbstractManifestError::RemoteWorldNotFound) => None, + Err(e) => { + ui.verbose(format!("{e:?}")); + return Err(anyhow!("Failed to build remote World state: {e}")); + } + } + } else { + None + }; + + if remote_manifest.is_none() { + ui.print_sub("No remote World found"); + } + + Ok((local_manifest, remote_manifest)) +} diff --git a/crates/sozo/ops/src/register.rs b/crates/sozo/ops/src/register.rs index 882ca55c8a..df86b27b0e 100644 --- a/crates/sozo/ops/src/register.rs +++ b/crates/sozo/ops/src/register.rs @@ -4,7 +4,8 @@ use anyhow::{Context, Result}; use dojo_world::contracts::model::ModelReader; use 
dojo_world::contracts::{WorldContract, WorldContractReader}; use dojo_world::manifest::DeploymentManifest; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionExt; use scarb::core::Config; use starknet::accounts::ConnectedAccount; use starknet::providers::Provider; @@ -15,7 +16,7 @@ use crate::utils::handle_transaction_result; pub async fn model_register( models: Vec, world: &WorldContract, - transaction: TxConfig, + txn_config: TxnConfig, world_reader: WorldContractReader
<P>
, world_address: FieldElement, config: &Config, @@ -63,16 +64,15 @@ where .map(|c| world.register_model_getcall(&(*c).into())) .collect::>(); - let res = - world.account.execute(calls).send().await.with_context(|| "Failed to send transaction")?; + let res = world + .account + .execute(calls) + .send_with_cfg(&txn_config) + .await + .with_context(|| "Failed to send transaction")?; - handle_transaction_result( - &world.account.provider(), - res, - transaction.wait, - transaction.receipt, - ) - .await?; + handle_transaction_result(&world.account.provider(), res, txn_config.wait, txn_config.receipt) + .await?; Ok(()) } diff --git a/crates/sozo/ops/src/statistics.rs b/crates/sozo/ops/src/statistics.rs new file mode 100644 index 0000000000..81b77a15b7 --- /dev/null +++ b/crates/sozo/ops/src/statistics.rs @@ -0,0 +1,166 @@ +use std::fs::{self, File}; +use std::io::{self, BufReader}; +use std::path::PathBuf; + +use anyhow::{Context, Result}; +use camino::Utf8PathBuf; +use starknet::core::types::contract::SierraClass; +use starknet::core::types::FlattenedSierraClass; + +#[derive(Debug, PartialEq)] +pub struct ContractStatistics { + pub contract_name: String, + pub number_felts: u64, + pub file_size: u64, +} + +fn read_sierra_json_program(file: &File) -> Result { + let contract_artifact: SierraClass = serde_json::from_reader(BufReader::new(file))?; + let contract_artifact: FlattenedSierraClass = contract_artifact.flatten()?; + + Ok(contract_artifact) +} + +fn get_sierra_byte_code_size(contract_artifact: FlattenedSierraClass) -> u64 { + contract_artifact.sierra_program.len() as u64 +} + +fn get_file_size(file: &File) -> Result { + file.metadata().map(|metadata| metadata.len()) +} + +fn get_contract_statistics_for_file( + contract_name: String, + sierra_json_file: File, + contract_artifact: FlattenedSierraClass, +) -> Result { + let file_size = get_file_size(&sierra_json_file).context("Error getting file size")?; + let number_felts = get_sierra_byte_code_size(contract_artifact); + Ok(ContractStatistics { file_size, contract_name, number_felts }) +} + +pub fn get_contract_statistics_for_dir( + target_directory: &Utf8PathBuf, +) -> Result> { + let mut contract_statistics = Vec::new(); + let target_directory = target_directory.as_str(); + let dir: fs::ReadDir = fs::read_dir(target_directory)?; + for entry in dir { + let path: PathBuf = entry?.path(); + + if path.is_dir() { + continue; + } + + let contract_name: String = + path.file_stem().context("Error getting file name")?.to_string_lossy().to_string(); + + let sierra_json_file: File = + File::open(&path).context(format!("Error opening file: {}", path.to_string_lossy()))?; + + let contract_artifact: FlattenedSierraClass = read_sierra_json_program(&sierra_json_file) + .context(format!( + "Error parsing Sierra class artifact: {}", + path.to_string_lossy() + ))?; + + contract_statistics.push(get_contract_statistics_for_file( + contract_name, + sierra_json_file, + contract_artifact, + )?); + } + Ok(contract_statistics) +} + +#[cfg(test)] +mod tests { + use std::fs::File; + use std::path::Path; + + use camino::Utf8PathBuf; + + use super::{ + get_contract_statistics_for_dir, get_contract_statistics_for_file, get_file_size, + get_sierra_byte_code_size, read_sierra_json_program, ContractStatistics, + }; + + const TEST_SIERRA_JSON_CONTRACT: &str = + "../../../bin/sozo/tests/test_data/compiled_contracts/test_contract.json"; + const TEST_SIERRA_FOLDER_CONTRACTS: &str = + "../../../bin/sozo/tests/test_data/compiled_contracts/"; + + #[test] + fn 
get_sierra_byte_code_size_returns_correct_size() { + let sierra_json_file = File::open(TEST_SIERRA_JSON_CONTRACT) + .unwrap_or_else(|err| panic!("Failed to open file: {}", err)); + let flattened_sierra_class = read_sierra_json_program(&sierra_json_file) + .unwrap_or_else(|err| panic!("Failed to read JSON program: {}", err)); + const EXPECTED_NUMBER_OF_FELTS: u64 = 2175; + + let number_of_felts = get_sierra_byte_code_size(flattened_sierra_class); + + assert_eq!( + number_of_felts, EXPECTED_NUMBER_OF_FELTS, + "Number of felts mismatch. Expected {}, got {}", + EXPECTED_NUMBER_OF_FELTS, number_of_felts + ); + } + + #[test] + fn get_contract_statistics_for_file_returns_correct_statistics() { + let sierra_json_file = File::open(TEST_SIERRA_JSON_CONTRACT) + .unwrap_or_else(|err| panic!("Failed to open file: {}", err)); + let contract_artifact = read_sierra_json_program(&sierra_json_file) + .unwrap_or_else(|err| panic!("Failed to read JSON program: {}", err)); + let filename = Path::new(TEST_SIERRA_JSON_CONTRACT) + .file_stem() + .expect("Error getting file name") + .to_string_lossy() + .to_string(); + let expected_contract_statistics: ContractStatistics = ContractStatistics { + contract_name: String::from("test_contract"), + number_felts: 2175, + file_size: 114925, + }; + + let statistics = + get_contract_statistics_for_file(filename.clone(), sierra_json_file, contract_artifact) + .expect("Error getting contract statistics for file"); + + assert_eq!(statistics, expected_contract_statistics); + } + + #[test] + fn get_contract_statistics_for_dir_returns_correct_statistics() { + let target_dir = Utf8PathBuf::from(TEST_SIERRA_FOLDER_CONTRACTS); + + let contract_statistics = get_contract_statistics_for_dir(&target_dir) + .unwrap_or_else(|_| panic!("Error getting contracts in dir {target_dir}")); + + assert_eq!(contract_statistics.len(), 1, "Mismatch number of contract statistics"); + } + + #[test] + fn get_file_size_returns_correct_size() { + let sierra_json_file = File::open(TEST_SIERRA_JSON_CONTRACT) + .unwrap_or_else(|err| panic!("Failed to open test file: {}", err)); + const EXPECTED_SIZE: u64 = 114925; + + let file_size = get_file_size(&sierra_json_file) + .unwrap_or_else(|_| panic!("Error getting file size for test file")); + + assert_eq!(file_size, EXPECTED_SIZE, "File size mismatch"); + } + + #[test] + fn read_sierra_json_program_returns_ok_when_successful() { + // Arrange + let sierra_json_file = File::open(TEST_SIERRA_JSON_CONTRACT) + .unwrap_or_else(|err| panic!("Failed to open test file: {}", err)); + + let result = read_sierra_json_program(&sierra_json_file); + + assert!(result.is_ok(), "Expected Ok result"); + } +} diff --git a/crates/sozo/ops/src/tests/auth.rs b/crates/sozo/ops/src/tests/auth.rs new file mode 100644 index 0000000000..0535b56384 --- /dev/null +++ b/crates/sozo/ops/src/tests/auth.rs @@ -0,0 +1,213 @@ +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, TestSequencer, +}; +use dojo_world::contracts::world::WorldContract; +use dojo_world::migration::TxnConfig; +use starknet::accounts::{Account, ConnectedAccount}; +use starknet::core::utils::cairo_short_string_to_felt; + +use super::setup; +use crate::auth::{self, ModelContract, OwnerResource, ResourceType}; +use crate::execute; + +const ACTION_CONTRACT_NAME: &str = "dojo_examples::actions::actions"; + +#[tokio::test(flavor = "multi_thread")] +async fn auth_grant_writer_ok() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + 
let world = setup::setup(&sequencer).await.unwrap(); + + // Shouldn't have any permission at this point. + let account2 = sequencer.account_at_index(2); + + // Setup new world contract handler with account 2. + let world_2 = WorldContract::new(world.address, account2); + + assert!(!execute_spawn(&world_2).await); + + // Account2 does not have the permission to write, but granting + // writer to the actions contract allows the execution of it's systems by + // any account. + let moves_mc = ModelContract { + model: cairo_short_string_to_felt("Moves").unwrap(), + contract: ACTION_CONTRACT_NAME.to_string(), + }; + + let position_mc = ModelContract { + model: cairo_short_string_to_felt("Position").unwrap(), + contract: ACTION_CONTRACT_NAME.to_string(), + }; + + auth::grant_writer( + &world, + vec![moves_mc, position_mc], + TxnConfig { wait: true, ..Default::default() }, + ) + .await + .unwrap(); + + assert!(execute_spawn(&world_2).await); +} + +#[tokio::test(flavor = "multi_thread")] +async fn auth_revoke_writer_ok() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + + // Shouldn't have any permission at this point. + let account2 = sequencer.account_at_index(2); + + // Setup new world contract handler with account 2. + let world_2 = WorldContract::new(world.address, account2); + + assert!(!execute_spawn(&world_2).await); + + // Account2 does not have the permission to write, but granting + // writer to the actions contract allows the execution of it's systems by + // any account. + let moves_mc = ModelContract { + model: cairo_short_string_to_felt("Moves").unwrap(), + contract: ACTION_CONTRACT_NAME.to_string(), + }; + + let position_mc = ModelContract { + model: cairo_short_string_to_felt("Position").unwrap(), + contract: ACTION_CONTRACT_NAME.to_string(), + }; + + // Here we are granting the permission to write + auth::grant_writer( + &world, + vec![moves_mc.clone(), position_mc.clone()], + TxnConfig { wait: true, ..Default::default() }, + ) + .await + .unwrap(); + + // This should be executable now + assert!(execute_spawn(&world_2).await); + + // Here we are revoking the access again. + auth::revoke_writer( + &world, + vec![moves_mc, position_mc], + TxnConfig { wait: true, ..Default::default() }, + ) + .await + .unwrap(); + + // Here it shouldn't be executable. + assert!(!execute_spawn(&world_2).await); +} + +#[tokio::test(flavor = "multi_thread")] +async fn auth_grant_owner_ok() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + + // Shouldn't have any permission at this point. + let account_2 = sequencer.account_at_index(2); + let account_2_addr = account_2.address(); + + // Setup new world contract handler with account 2. + let world_2 = WorldContract::new(world.address, account_2); + + assert!(!execute_spawn(&world_2).await); + + // Account2 does not have the permission to write, let's give this account + // ownership of both models. 
+ let moves = OwnerResource { + resource: ResourceType::Model(cairo_short_string_to_felt("Moves").unwrap()), + owner: account_2_addr, + }; + + let position = OwnerResource { + resource: ResourceType::Model(cairo_short_string_to_felt("Position").unwrap()), + owner: account_2_addr, + }; + + auth::grant_owner( + &world, + vec![moves, position], + TxnConfig { wait: true, ..Default::default() }, + ) + .await + .unwrap(); + + assert!(execute_spawn(&world_2).await); +} + +#[tokio::test(flavor = "multi_thread")] +async fn auth_revoke_owner_ok() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + + // Shouldn't have any permission at this point. + let account_2 = sequencer.account_at_index(2); + let account_2_addr = account_2.address(); + + // Setup new world contract handler with account 2. + let world_2 = WorldContract::new(world.address, account_2); + + assert!(!execute_spawn(&world_2).await); + + // Account2 does not have the permission to write, let's give this account + // ownership of both models. + let moves = OwnerResource { + resource: ResourceType::Model(cairo_short_string_to_felt("Moves").unwrap()), + owner: account_2_addr, + }; + + let position = OwnerResource { + resource: ResourceType::Model(cairo_short_string_to_felt("Position").unwrap()), + owner: account_2_addr, + }; + + auth::grant_owner( + &world, + vec![moves.clone(), position.clone()], + TxnConfig { wait: true, ..Default::default() }, + ) + .await + .unwrap(); + + assert!(execute_spawn(&world_2).await); + auth::revoke_owner( + &world, + vec![moves, position], + TxnConfig { wait: true, ..Default::default() }, + ) + .await + .unwrap(); + + assert!(!execute_spawn(&world_2).await); +} +/// Executes the `spawn` system on `actions` contract. +/// +/// # Returns +/// +/// True if the execution was successful, false otherwise. 
+async fn execute_spawn( + world: &WorldContract, +) -> bool { + let contract_actions = ACTION_CONTRACT_NAME.to_string(); + let system_spawn = "spawn".to_string(); + + execute::execute( + contract_actions, + system_spawn, + vec![], + world, + &TxnConfig { wait: true, ..Default::default() }, + ) + .await + .is_ok() +} diff --git a/crates/sozo/ops/src/tests/call.rs b/crates/sozo/ops/src/tests/call.rs new file mode 100644 index 0000000000..331215c03f --- /dev/null +++ b/crates/sozo/ops/src/tests/call.rs @@ -0,0 +1,147 @@ +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, TestSequencer, +}; +use dojo_world::contracts::WorldContractReader; +use starknet::accounts::SingleOwnerAccount; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::LocalWallet; +use starknet_crypto::FieldElement; + +use super::setup; +use crate::{call, utils}; + +const CONTRACT_NAME: &str = "dojo_examples::actions::actions"; +const ENTRYPOINT: &str = "tile_terrain"; + +#[tokio::test] +async fn call_with_bad_address() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(world.address, provider); + + assert!( + call::call( + world_reader, + "0xBadCoffeeBadCode".to_string(), + ENTRYPOINT.to_string(), + vec![FieldElement::ZERO, FieldElement::ZERO], + None + ) + .await + .is_err() + ); +} + +#[tokio::test] +async fn call_with_bad_name() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(world.address, provider); + + assert!( + call::call( + world_reader, + "BadName".to_string(), + ENTRYPOINT.to_string(), + vec![FieldElement::ZERO, FieldElement::ZERO], + None + ) + .await + .is_err() + ); +} + +#[tokio::test] +async fn call_with_bad_entrypoint() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(world.address, provider); + + assert!( + call::call( + world_reader, + CONTRACT_NAME.to_string(), + "BadEntryPoint".to_string(), + vec![FieldElement::ZERO, FieldElement::ZERO], + None + ) + .await + .is_err() + ); +} + +#[tokio::test] +async fn call_with_bad_calldata() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(world.address, provider); + + assert!( + call::call(world_reader, CONTRACT_NAME.to_string(), ENTRYPOINT.to_string(), vec![], None) + .await + .is_err() + ); +} + +#[tokio::test] +async fn call_with_contract_name() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(world.address, provider); + + assert!( + call::call( + world_reader, + CONTRACT_NAME.to_string(), + ENTRYPOINT.to_string(), + vec![FieldElement::ZERO, 
FieldElement::ZERO], + None, + ) + .await + .is_ok() + ); +} + +#[tokio::test] +async fn call_with_contract_address() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(world.address, provider); + + let contract_address = utils::get_contract_address::< + SingleOwnerAccount, LocalWallet>, + >(&world, CONTRACT_NAME.to_string()) + .await + .unwrap(); + + assert!( + call::call( + world_reader, + format!("{:#x}", contract_address), + ENTRYPOINT.to_string(), + vec![FieldElement::ZERO, FieldElement::ZERO], + None, + ) + .await + .is_ok() + ); +} diff --git a/crates/sozo/ops/src/tests/migration.rs b/crates/sozo/ops/src/tests/migration.rs new file mode 100644 index 0000000000..54f25c79e7 --- /dev/null +++ b/crates/sozo/ops/src/tests/migration.rs @@ -0,0 +1,502 @@ +use std::str; + +use camino::Utf8Path; +use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; +use dojo_test_utils::compiler::build_full_test_config; +use dojo_test_utils::migration::prepare_migration_with_world_and_seed; +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, StarknetConfig, TestSequencer, +}; +use dojo_world::contracts::WorldContractReader; +use dojo_world::manifest::{BaseManifest, DeploymentManifest, WORLD_CONTRACT_NAME}; +use dojo_world::metadata::{ + dojo_metadata_from_workspace, ArtifactMetadata, DojoMetadata, Uri, WorldMetadata, + IPFS_CLIENT_URL, IPFS_PASSWORD, IPFS_USERNAME, +}; +use dojo_world::migration::strategy::prepare_for_migration; +use dojo_world::migration::world::WorldDiff; +use dojo_world::migration::TxnConfig; +use futures::TryStreamExt; +use ipfs_api_backend_hyper::{HyperBackend, IpfsApi, IpfsClient, TryFromUri}; +use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; +use starknet::core::chain_id; +use starknet::core::types::{BlockId, BlockTag}; +use starknet::core::utils::{get_selector_from_name, parse_cairo_short_string}; +use starknet::macros::felt; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::{LocalWallet, SigningKey}; +use starknet_crypto::FieldElement; + +use super::setup::{load_config, setup_migration, setup_ws}; +use crate::migration::{execute_strategy, upload_metadata}; +use crate::utils::get_contract_address_from_reader; + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_auto_mine() { + let config = load_config(); + let ws = setup_ws(&config); + + let migration = setup_migration().unwrap(); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); + + sequencer.stop().unwrap(); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_block_time() { + let config = load_config(); + let ws = setup_ws(&config); + + let migration = setup_migration().unwrap(); + + let sequencer = TestSequencer::start( + SequencerConfig { block_time: Some(1000), ..Default::default() }, + get_default_test_starknet_config(), + ) + .await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); + 
sequencer.stop().unwrap(); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_small_fee_multiplier_will_fail() { + let config = load_config(); + let ws = setup_ws(&config); + + let migration = setup_migration().unwrap(); + + let sequencer = TestSequencer::start( + Default::default(), + StarknetConfig { disable_fee: false, ..Default::default() }, + ) + .await; + + let account = SingleOwnerAccount::new( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + sequencer.raw_account().private_key, + )), + sequencer.raw_account().account_address, + chain_id::TESTNET, + ExecutionEncoding::New, + ); + + assert!( + execute_strategy( + &ws, + &migration, + &account, + TxnConfig { fee_estimate_multiplier: Some(0.2f64), ..Default::default() }, + ) + .await + .is_err() + ); + sequencer.stop().unwrap(); +} + +#[tokio::test] +async fn migration_from_remote() { + let config = load_config(); + let ws = setup_ws(&config); + + let base = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let account = SingleOwnerAccount::new( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + sequencer.raw_account().private_key, + )), + sequencer.raw_account().account_address, + chain_id::TESTNET, + ExecutionEncoding::New, + ); + + let profile_name = ws.current_profile().unwrap().to_string(); + + let manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), + ) + .unwrap(); + + let world = WorldDiff::compute(manifest, None); + + let migration = prepare_for_migration( + None, + felt!("0x12345"), + &Utf8Path::new(&target_dir).to_path_buf(), + world, + ) + .unwrap(); + + execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); + + let local_manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), + ) + .unwrap(); + + let remote_manifest = DeploymentManifest::load_from_remote( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + migration.world_address().unwrap(), + ) + .await + .unwrap(); + + sequencer.stop().unwrap(); + + assert_eq!(local_manifest.world.inner.class_hash, remote_manifest.world.inner.class_hash); + assert_eq!(local_manifest.models.len(), remote_manifest.models.len()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_metadata() { + let config = build_full_test_config("../../../examples/spawn-and-move/Scarb.toml", false) + .unwrap_or_else(|c| panic!("Error loading config: {c:?}")); + let ws = setup_ws(&config); + + let migration = setup_migration().unwrap(); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let output = execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); + + let res = upload_metadata(&ws, &account, output.clone(), TxnConfig::default()).await; + assert!(res.is_ok()); + + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(output.world_address, &provider); + + let client = IpfsClient::from_str(IPFS_CLIENT_URL) + .unwrap_or_else(|_| panic!("Unable to initialize the 
IPFS Client")) + .with_credentials(IPFS_USERNAME, IPFS_PASSWORD); + + let dojo_metadata = dojo_metadata_from_workspace(&ws); + + // check world metadata + let resource = world_reader.metadata(&FieldElement::ZERO).call().await.unwrap(); + let element_name = WORLD_CONTRACT_NAME.to_string(); + + let full_uri = get_and_check_metadata_uri(&element_name, &resource.metadata_uri); + let resource_bytes = get_ipfs_resource_data(&client, &element_name, &full_uri).await; + + let metadata = resource_bytes_to_world_metadata(&resource_bytes, &element_name); + + assert_eq!(metadata.name, dojo_metadata.world.name, ""); + assert_eq!(metadata.description, dojo_metadata.world.description, ""); + assert_eq!(metadata.cover_uri, dojo_metadata.world.cover_uri, ""); + assert_eq!(metadata.icon_uri, dojo_metadata.world.icon_uri, ""); + assert_eq!(metadata.website, dojo_metadata.world.website, ""); + assert_eq!(metadata.socials, dojo_metadata.world.socials, ""); + + check_artifact_fields( + &client, + &metadata.artifacts, + &dojo_metadata.world.artifacts, + &element_name, + ) + .await; + + // check model metadata + for m in migration.models { + let selector = get_selector_from_name(&m.diff.name).unwrap(); + check_artifact_metadata(&client, &world_reader, selector, &m.diff.name, &dojo_metadata) + .await; + } + + // check contract metadata + for c in migration.contracts { + let contract_address = + get_contract_address_from_reader(&world_reader, c.diff.name.clone()).await.unwrap(); + check_artifact_metadata( + &client, + &world_reader, + contract_address, + &c.diff.name, + &dojo_metadata, + ) + .await; + } +} + +#[tokio::test(flavor = "multi_thread")] +async fn migration_with_mismatching_world_address_and_seed() { + let base_dir = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base_dir); + + let result = prepare_migration_with_world_and_seed( + base_dir.into(), + target_dir.into(), + Some(felt!("0x1")), + "sozo_test", + ); + + assert!(result.is_err_and(|e| e.to_string().contains( + "Calculated world address doesn't match provided world address.\nIf you are deploying \ + with custom seed make sure `world_address` is correctly configured (or not set) \ + `Scarb.toml`" + ))); +} + +/// Get the hash from a IPFS URI +/// +/// # Arguments +/// +/// * `uri` - a full IPFS URI +/// +/// # Returns +/// +/// A [`String`] containing the hash from the URI. +fn get_hash_from_uri(uri: &str) -> String { + let hash = match uri.strip_prefix("ipfs://") { + Some(s) => s.to_string(), + None => uri.to_owned(), + }; + match hash.strip_suffix('/') { + Some(s) => s.to_string(), + None => hash, + } +} + +/// Check a metadata field which refers to a file. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `uri` - the IPFS URI of the abi field. +/// * `expected_uri` - the URI of the expected file. +/// * `field_name` - the field name. +/// * `element_name` - the fully qualified name of the element linked to this field. 
+async fn check_file_field( + client: &HyperBackend, + uri: &Uri, + expected_uri: &Uri, + field_name: String, + element_name: &String, +) { + if let Uri::Ipfs(uri) = uri { + let resource_data = get_ipfs_resource_data(client, element_name, uri).await; + assert!( + !resource_data.is_empty(), + "{field_name} IPFS artifact for {} is empty", + element_name + ); + + if let Uri::File(f) = expected_uri { + let file_content = std::fs::read_to_string(f).unwrap(); + let resource_content = std::str::from_utf8(&resource_data).unwrap_or_else(|_| { + panic!( + "Unable to stringify resource data for field '{}' of {}", + field_name, element_name + ) + }); + + assert!( + file_content.eq(&resource_content), + "local '{field_name}' content differs from the one uploaded on IPFS for {}", + element_name + ); + } else { + panic!( + "The field '{field_name}' of {} is not a file (Should never happen !)", + element_name + ); + } + } else { + panic!("The '{field_name}' field is not an IPFS artifact for {}", element_name); + } +} + +/// Convert resource bytes to a ArtifactMetadata object. +/// +/// # Arguments +/// +/// * `raw_data` - resource data as bytes. +/// * `element_name` - name of the element linked to this resource. +/// +/// # Returns +/// +/// A [`ArtifactMetadata`] object. +fn resource_bytes_to_metadata(raw_data: &[u8], element_name: &String) -> ArtifactMetadata { + let data = std::str::from_utf8(raw_data) + .unwrap_or_else(|_| panic!("Unable to stringify raw metadata for {}", element_name)); + serde_json::from_str(data) + .unwrap_or_else(|_| panic!("Unable to deserialize metadata for {}", element_name)) +} + +/// Convert resource bytes to a WorldMetadata object. +/// +/// # Arguments +/// +/// * `raw_data` - resource data as bytes. +/// * `element_name` - name of the element linked to this resource. +/// +/// # Returns +/// +/// A [`WorldMetadata`] object. +fn resource_bytes_to_world_metadata(raw_data: &[u8], element_name: &String) -> WorldMetadata { + let data = std::str::from_utf8(raw_data) + .unwrap_or_else(|_| panic!("Unable to stringify raw metadata for {}", element_name)); + serde_json::from_str(data) + .unwrap_or_else(|_| panic!("Unable to deserialize metadata for {}", element_name)) +} + +/// Read the content of a resource identified by its IPFS URI. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `element_name` - the name of the element (model or contract) linked to this artifact. +/// * `uri` - the IPFS resource URI. +/// +/// # Returns +/// +/// A [`Vec`] containing the resource content as bytes. +async fn get_ipfs_resource_data( + client: &HyperBackend, + element_name: &String, + uri: &String, +) -> Vec { + let hash = get_hash_from_uri(uri); + + let res = client.cat(&hash).map_ok(|chunk| chunk.to_vec()).try_concat().await; + assert!(res.is_ok(), "Unable to read the IPFS artifact {} for {}", uri, element_name); + + res.unwrap() +} + +/// Check the validity of artifact metadata fields. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `metadata` - the metadata to check. +/// * `expected_metadata` - the metadata values coming from local Dojo metadata. +/// * `element_name` - the name of the element linked to this metadata. 
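+///
+/// Call sketch (`uploaded_artifact` and `local_artifact` are illustrative
+/// names; in the test above they come from IPFS and the local Dojo metadata
+/// respectively):
+///
+/// ```ignore
+/// check_artifact_fields(&client, &uploaded_artifact, &local_artifact, &element_name).await;
+/// ```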
+async fn check_artifact_fields( + client: &HyperBackend, + metadata: &ArtifactMetadata, + expected_metadata: &ArtifactMetadata, + element_name: &String, +) { + assert!(metadata.abi.is_some(), "'abi' field not set for {}", element_name); + let abi = metadata.abi.as_ref().unwrap(); + let expected_abi = expected_metadata.abi.as_ref().unwrap(); + check_file_field(client, abi, expected_abi, "abi".to_string(), element_name).await; + + assert!(metadata.source.is_some(), "'source' field not set for {}", element_name); + let source = metadata.source.as_ref().unwrap(); + let expected_source = expected_metadata.source.as_ref().unwrap(); + check_file_field(client, source, expected_source, "source".to_string(), element_name).await; +} + +/// Check the validity of a IPFS artifact metadata. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `element_name` - the fully qualified name of the element linked to the artifact. +/// * `uri` - the full metadata URI. +/// * `expected_metadata` - the expected metadata values coming from local Dojo metadata. +async fn check_ipfs_metadata( + client: &HyperBackend, + element_name: &String, + uri: &String, + expected_metadata: &ArtifactMetadata, +) { + let resource_bytes = get_ipfs_resource_data(client, element_name, uri).await; + let metadata = resource_bytes_to_metadata(&resource_bytes, element_name); + + check_artifact_fields(client, &metadata, expected_metadata, element_name).await; +} + +/// Rebuild the full metadata URI from an array of 3 FieldElement. +/// +/// # Arguments +/// +/// * `element_name` - name of the element (model or contract) linked to the metadata URI. +/// * `uri` - uri as an array of 3 FieldElement. +/// +/// # Returns +/// +/// A [`String`] containing the full metadata URI. +fn get_and_check_metadata_uri(element_name: &String, uri: &Vec) -> String { + assert!(uri.len() == 3, "bad metadata URI length for {} ({:#?})", element_name, uri); + + let mut i = 0; + let mut full_uri = "".to_string(); + + while i < uri.len() && uri[i] != FieldElement::ZERO { + let uri_str = parse_cairo_short_string(&uri[i]); + assert!( + uri_str.is_ok(), + "unable to parse the part {} of the metadata URI for {}", + i + 1, + element_name + ); + + full_uri = format!("{}{}", full_uri, uri_str.unwrap()); + + i += 1; + } + + assert!(!full_uri.is_empty(), "metadata URI is empty for {}", element_name); + + assert!( + full_uri.starts_with("ipfs://"), + "metadata URI for {} is not an IPFS artifact", + element_name + ); + + full_uri +} + +/// Check an artifact metadata read from the resource registry against its value +/// in the local Dojo metadata. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `world_reader` - a world reader object. +/// * `resource_id` - the resource ID in the resource registry. +/// * `element_name` - the fully qualified name of the element linked to this metadata. +/// * `dojo_metadata` - local Dojo metadata. +async fn check_artifact_metadata( + client: &HyperBackend, + world_reader: &WorldContractReader
<P>
, + resource_id: FieldElement, + element_name: &String, + dojo_metadata: &DojoMetadata, +) { + let resource = world_reader.metadata(&resource_id).call().await.unwrap(); + + let expected_artifact = dojo_metadata.artifacts.get(element_name); + assert!( + expected_artifact.is_some(), + "Unable to find local artifact metadata for {}", + element_name + ); + let expected_artifact = expected_artifact.unwrap(); + + let full_uri = get_and_check_metadata_uri(element_name, &resource.metadata_uri); + check_ipfs_metadata(client, element_name, &full_uri, expected_artifact).await; +} diff --git a/crates/sozo/ops/src/tests/mod.rs b/crates/sozo/ops/src/tests/mod.rs new file mode 100644 index 0000000000..f754ddc5a6 --- /dev/null +++ b/crates/sozo/ops/src/tests/mod.rs @@ -0,0 +1,5 @@ +pub mod auth; +pub mod call; +pub mod migration; +pub mod setup; +pub mod utils; diff --git a/crates/sozo/ops/src/tests/setup.rs b/crates/sozo/ops/src/tests/setup.rs new file mode 100644 index 0000000000..7c0777d937 --- /dev/null +++ b/crates/sozo/ops/src/tests/setup.rs @@ -0,0 +1,84 @@ +use anyhow::Result; +use dojo_test_utils::compiler::build_test_config; +use dojo_test_utils::migration::prepare_migration_with_world_and_seed; +use dojo_test_utils::sequencer::TestSequencer; +use dojo_world::contracts::world::WorldContract; +use dojo_world::migration::strategy::MigrationStrategy; +use dojo_world::migration::TxnConfig; +use scarb::core::{Config, Workspace}; +use scarb::ops; +use starknet::accounts::SingleOwnerAccount; +use starknet::core::types::{BlockId, BlockTag}; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::LocalWallet; + +use crate::migration; + +/// Load the spawn-and-moves project configuration. +/// +/// # Returns +/// +/// A [`Config`] object loaded from the spawn-and-moves Scarb.toml file. +pub fn load_config() -> Config { + build_test_config("../../../examples/spawn-and-move/Scarb.toml") + .unwrap_or_else(|c| panic!("Error loading config: {c:?}")) +} + +/// Setups the workspace for the spawn-and-moves project. +/// +/// # Arguments +/// * `config` - the project configuration. +/// +/// # Returns +/// +/// A [`Workspace`] loaded from the spawn-and-moves project. +pub fn setup_ws(config: &Config) -> Workspace<'_> { + ops::read_workspace(config.manifest_path(), config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")) +} + +/// Prepare the migration for the spawn-and-moves project. +/// +/// # Returns +/// +/// A [`MigrationStrategy`] to execute to migrate the full spawn-and-moves project. +pub fn setup_migration() -> Result { + let base_dir = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base_dir); + + prepare_migration_with_world_and_seed(base_dir.into(), target_dir.into(), None, "sozo_test") +} + +/// Setups the project by migrating the full spawn-and-moves project. +/// +/// # Arguments +/// +/// * `sequencer` - The sequencer used for tests. +/// +/// # Returns +/// +/// A [`WorldContract`] initialized with the migrator account, +/// the account 0 of the sequencer. 
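+///
+/// Typical usage from a test (sketch mirroring the tests in this crate):
+///
+/// ```ignore
+/// let sequencer =
+///     TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await;
+/// let world = setup(&sequencer).await.unwrap();
+/// ```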
+pub async fn setup( + sequencer: &TestSequencer, +) -> Result, LocalWallet>>> { + let config = load_config(); + let ws = setup_ws(&config); + + let migration = setup_migration()?; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let output = migration::execute_strategy( + &ws, + &migration, + &account, + TxnConfig { wait: true, ..Default::default() }, + ) + .await?; + let world = WorldContract::new(output.world_address, account); + + Ok(world) +} diff --git a/crates/sozo/ops/src/tests/utils.rs b/crates/sozo/ops/src/tests/utils.rs new file mode 100644 index 0000000000..71859f079e --- /dev/null +++ b/crates/sozo/ops/src/tests/utils.rs @@ -0,0 +1,105 @@ +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, TestSequencer, +}; +use dojo_world::contracts::world::WorldContract; +use dojo_world::contracts::WorldContractReader; +use starknet::accounts::ConnectedAccount; +use starknet::core::types::{BlockId, BlockTag, FieldElement}; + +use super::setup; +use crate::utils; + +const ACTION_CONTRACT_NAME: &str = "dojo_examples::actions::actions"; + +#[tokio::test(flavor = "multi_thread")] +async fn get_contract_address_from_world() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + + let contract_address = + utils::get_contract_address(&world, ACTION_CONTRACT_NAME.to_string()).await.unwrap(); + + assert!(contract_address != FieldElement::ZERO); +} + +#[tokio::test(flavor = "multi_thread")] +async fn get_contract_address_from_string() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let account = sequencer.account(); + let world = WorldContract::new(FieldElement::ZERO, account); + + let contract_address = utils::get_contract_address(&world, "0x1234".to_string()).await.unwrap(); + + assert_eq!(contract_address, FieldElement::from_hex_be("0x1234").unwrap()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn get_contract_address_from_world_with_world_reader() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let world = setup::setup(&sequencer).await.unwrap(); + let account = sequencer.account(); + let provider = account.provider(); + let world_reader = WorldContractReader::new(world.address, provider); + + let contract_address = + utils::get_contract_address_from_reader(&world_reader, ACTION_CONTRACT_NAME.to_string()) + .await + .unwrap(); + + assert!(contract_address != FieldElement::ZERO); +} + +#[tokio::test(flavor = "multi_thread")] +async fn get_contract_address_from_string_with_world_reader() { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(FieldElement::ZERO, provider); + + let contract_address = + utils::get_contract_address_from_reader(&world_reader, "0x1234".to_string()).await.unwrap(); + + assert_eq!(contract_address, FieldElement::from_hex_be("0x1234").unwrap()); +} + +#[test] +fn parse_block_id_bad_hash() { + assert!(utils::parse_block_id("0xBadHash".to_string()).is_err()); +} + +#[test] +fn parse_block_id_bad_string() { + assert!(utils::parse_block_id("BadString".to_string()).is_err()); +} + +#[test] +fn parse_block_id_hash() { + assert!( + utils::parse_block_id("0x1234".to_string()).unwrap() + 
== BlockId::Hash(FieldElement::from_hex_be("0x1234").unwrap()) + ); +} + +#[test] +fn parse_block_id_pending() { + assert!( + utils::parse_block_id("pending".to_string()).unwrap() == BlockId::Tag(BlockTag::Pending) + ); +} + +#[test] +fn parse_block_id_latest() { + assert!(utils::parse_block_id("latest".to_string()).unwrap() == BlockId::Tag(BlockTag::Latest)); +} + +#[test] +fn parse_block_id_number() { + assert!(utils::parse_block_id("42".to_string()).unwrap() == BlockId::Number(42)); +} diff --git a/crates/sozo/ops/src/utils.rs b/crates/sozo/ops/src/utils.rs index a2aaf99f96..00735e59a4 100644 --- a/crates/sozo/ops/src/utils.rs +++ b/crates/sozo/ops/src/utils.rs @@ -1,8 +1,80 @@ -use anyhow::Result; +use anyhow::{anyhow, Result}; +use dojo_world::contracts::world::{WorldContract, WorldContractReader}; +use dojo_world::migration::strategy::generate_salt; use dojo_world::utils::{execution_status_from_maybe_pending_receipt, TransactionWaiter}; -use starknet::core::types::{ExecutionResult, InvokeTransactionResult}; +use starknet::accounts::ConnectedAccount; +use starknet::core::types::{ + BlockId, BlockTag, ExecutionResult, FieldElement, InvokeTransactionResult, +}; use starknet::providers::Provider; +/// Retrieves a contract address from it's name +/// using the world's data, or parses a hex string into +/// a [`FieldElement`]. +/// +/// # Arguments +/// +/// * `world` - The world's contract connector. +/// * `name_or_address` - A string with a contract name or a hexadecimal address. +/// +/// # Returns +/// +/// A [`FieldElement`] with the address of the contract on success. +pub async fn get_contract_address( + world: &WorldContract, + name_or_address: String, +) -> Result { + if name_or_address.starts_with("0x") { + FieldElement::from_hex_be(&name_or_address).map_err(anyhow::Error::from) + } else { + let contract_class_hash = world.base().call().await?; + Ok(starknet::core::utils::get_contract_address( + generate_salt(&name_or_address), + contract_class_hash.into(), + &[], + world.address, + )) + } +} + +/// Retrieves a contract address from its name +/// using a world contract reader, or parses a hex string into +/// a [`FieldElement`]. +/// +/// # Arguments +/// +/// * `world_reader` - The world contract reader. +/// * `name_or_address` - A string with a contract name or a hexadecimal address. +/// +/// # Returns +/// +/// A [`FieldElement`] with the address of the contract on success. +pub async fn get_contract_address_from_reader( + world_reader: &WorldContractReader
<P>
, + name_or_address: String, +) -> Result { + if name_or_address.starts_with("0x") { + FieldElement::from_hex_be(&name_or_address).map_err(anyhow::Error::from) + } else { + let contract_class_hash = world_reader.base().call().await?; + Ok(starknet::core::utils::get_contract_address( + generate_salt(&name_or_address), + contract_class_hash.into(), + &[], + world_reader.address, + )) + } +} + +/// Handles a transaction result configuring a +/// [`TransactionWaiter`] if required. +/// +/// # Arguments +/// +/// * `provider` - Starknet provider to fetch transaction status. +/// * `transaction_result` - Result of the transaction to handle. +/// * `wait_for_tx` - Wait for the transaction to be mined. +/// * `show_receipt` - If the receipt of the transaction should be displayed on stdout. pub async fn handle_transaction_result
<P>
( provider: P, transaction_result: InvokeTransactionResult, @@ -35,3 +107,30 @@ where Ok(()) } + +/// Parses a string into a [`BlockId`]. +/// +/// # Arguments +/// +/// * `block_str` - a string representing a block ID. It could be a +/// block hash starting with 0x, a block number, 'pending' or 'latest'. +/// +/// # Returns +/// +/// The parsed [`BlockId`] on success. +pub fn parse_block_id(block_str: String) -> Result { + if block_str.starts_with("0x") { + let hash = FieldElement::from_hex_be(&block_str) + .map_err(|_| anyhow!("Unable to parse block hash: {}", block_str))?; + Ok(BlockId::Hash(hash)) + } else if block_str.eq("pending") { + Ok(BlockId::Tag(BlockTag::Pending)) + } else if block_str.eq("latest") { + Ok(BlockId::Tag(BlockTag::Latest)) + } else { + match block_str.parse::() { + Ok(n) => Ok(BlockId::Number(n)), + Err(_) => Err(anyhow!("Unable to parse block ID: {}", block_str)), + } + } +} diff --git a/crates/torii/client/src/client/error.rs b/crates/torii/client/src/client/error.rs index 9f9b831939..1efc06c429 100644 --- a/crates/torii/client/src/client/error.rs +++ b/crates/torii/client/src/client/error.rs @@ -1,5 +1,6 @@ use dojo_world::contracts::model::ModelError; use starknet::core::utils::{CairoShortStringToFeltError, ParseCairoShortStringError}; +use torii_grpc::types::schema::SchemaError; #[derive(Debug, thiserror::Error)] pub enum Error { @@ -22,6 +23,8 @@ pub enum Error { Model(#[from] ModelError), #[error("Unsupported query")] UnsupportedQuery, + #[error(transparent)] + Schema(#[from] SchemaError), } #[derive(Debug, thiserror::Error)] diff --git a/crates/torii/client/src/client/mod.rs b/crates/torii/client/src/client/mod.rs index aad72ef099..a87415bb95 100644 --- a/crates/torii/client/src/client/mod.rs +++ b/crates/torii/client/src/client/mod.rs @@ -10,6 +10,7 @@ use dojo_types::packing::unpack; use dojo_types::schema::Ty; use dojo_types::WorldMetadata; use dojo_world::contracts::WorldContractReader; +use futures::lock::Mutex; use parking_lot::{RwLock, RwLockReadGuard}; use starknet::core::utils::cairo_short_string_to_felt; use starknet::providers::jsonrpc::HttpTransport; @@ -20,6 +21,7 @@ use torii_grpc::client::{EntityUpdateStreaming, ModelDiffsStreaming}; use torii_grpc::proto::world::RetrieveEntitiesResponse; use torii_grpc::types::schema::Entity; use torii_grpc::types::{KeysClause, Query}; +use torii_relay::client::EventLoop; use torii_relay::types::Message; use crate::client::error::{Error, ParseError}; @@ -99,14 +101,15 @@ impl Client { }) } - /// Waits for the relay to be ready and listening for messages. - pub async fn wait_for_relay(&mut self) -> Result<(), Error> { - self.relay_client.command_sender.wait_for_relay().await.map_err(Error::RelayClient) + /// Starts the relay client event loop. + /// This is a blocking call. Spawn this on a separate task. + pub fn relay_runner(&self) -> Arc> { + self.relay_client.event_loop.clone() } /// Publishes a message to a topic. /// Returns the message id. - pub async fn publish_message(&mut self, message: Message) -> Result, Error> { + pub async fn publish_message(&self, message: Message) -> Result, Error> { self.relay_client .command_sender .publish(message) @@ -137,6 +140,14 @@ impl Client { Ok(entities.into_iter().map(TryInto::try_into).collect::, _>>()?) } + /// Similary to entities, this function retrieves event messages matching the query parameter. 
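+ ///
+ /// Usage sketch (assuming a connected `client` and a `query` built the same
+ /// way as for `entities`):
+ ///
+ /// ```ignore
+ /// let event_messages = client.event_messages(query).await?;
+ /// ```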
+ pub async fn event_messages(&self, query: Query) -> Result, Error> { + let mut grpc_client = self.inner.write().await; + let RetrieveEntitiesResponse { entities, total_count: _ } = + grpc_client.retrieve_event_messages(query).await?; + Ok(entities.into_iter().map(TryInto::try_into).collect::, _>>()?) + } + /// A direct stream to grpc subscribe entities pub async fn on_entity_updated( &self, @@ -147,6 +158,16 @@ impl Client { Ok(stream) } + /// A direct stream to grpc subscribe event messages + pub async fn on_event_message_updated( + &self, + ids: Vec, + ) -> Result { + let mut grpc_client = self.inner.write().await; + let stream = grpc_client.subscribe_event_messages(ids).await?; + Ok(stream) + } + /// Returns the value of a model. /// /// This function will only return `None`, if `model` doesn't exist. If there is no model with diff --git a/crates/torii/core/src/cache.rs b/crates/torii/core/src/cache.rs index 0a3ae7c1ec..b795a6f1bf 100644 --- a/crates/torii/core/src/cache.rs +++ b/crates/torii/core/src/cache.rs @@ -40,6 +40,10 @@ impl ModelCache { } async fn update_schema(&self, model: &str) -> Result { + let model_name: String = sqlx::query_scalar("SELECT name FROM models WHERE id = ?") + .bind(model) + .fetch_one(&self.pool) + .await?; let model_members: Vec = sqlx::query_as( "SELECT id, model_idx, member_idx, name, type, type_enum, enum_options, key FROM \ model_members WHERE model_id = ? ORDER BY model_idx ASC, member_idx ASC", @@ -52,7 +56,7 @@ impl ModelCache { return Err(QueryError::ModelNotFound(model.into()).into()); } - let ty = parse_sql_model_members(model, &model_members); + let ty = parse_sql_model_members(&model_name, &model_members); let mut cache = self.cache.write().await; cache.insert(model.into(), ty.clone()); diff --git a/crates/torii/core/src/engine.rs b/crates/torii/core/src/engine.rs index 014852b268..6763db01a0 100644 --- a/crates/torii/core/src/engine.rs +++ b/crates/torii/core/src/engine.rs @@ -4,8 +4,8 @@ use std::time::Duration; use anyhow::Result; use dojo_world::contracts::world::WorldContractReader; use starknet::core::types::{ - BlockId, EmittedEvent, Event, EventFilter, MaybePendingBlockWithTxHashes, - MaybePendingTransactionReceipt, Transaction, TransactionReceipt, + BlockId, BlockTag, Event, EventFilter, MaybePendingBlockWithTxHashes, MaybePendingBlockWithTxs, + MaybePendingTransactionReceipt, PendingTransactionReceipt, Transaction, TransactionReceipt, }; use starknet::core::utils::get_selector_from_name; use starknet::providers::Provider; @@ -31,6 +31,8 @@ impl Default for Processors

{ } } +pub(crate) const LOG_TARGET: &str = "tori_core::engine"; + #[derive(Debug)] pub struct EngineConfig { pub block_time: Duration, @@ -93,11 +95,11 @@ impl Engine { } pub async fn start(&mut self) -> Result<()> { - let mut head = self.db.head().await?; + let (mut head, mut pending_block_tx) = self.db.head().await?; if head == 0 { head = self.config.start_block; } else if self.config.start_block != 0 { - warn!("start block ignored, stored head exists and will be used instead"); + warn!(target: LOG_TARGET, "Start block ignored, stored head exists and will be used instead."); } // Sync the first page of transactions to determine if the provider supports katana. @@ -129,16 +131,18 @@ impl Engine { break Ok(()); } _ = async { - match self.sync_to_head(head, cursor.clone()).await { - Ok((latest_block_number, next_cursor)) => { - if let Some(next_cursor) = next_cursor { - cursor = Some(next_cursor); + match self.sync_to_head(head, pending_block_tx, cursor.clone()).await { + Ok((latest_block_number, latest_pending_tx, new_cursor)) => { + if let Some(_) = new_cursor { + cursor = new_cursor; } + + pending_block_tx = latest_pending_tx; head = latest_block_number; backoff_delay = Duration::from_secs(1); } Err(e) => { - error!("getting block: {}", e); + error!(target: LOG_TARGET, error = %e, "Syncing to head."); sleep(backoff_delay).await; if backoff_delay < max_backoff_delay { backoff_delay *= 2; @@ -154,29 +158,103 @@ impl Engine { pub async fn sync_to_head( &mut self, from: u64, - cursor: Option, - ) -> Result<(u64, Option)> { + mut pending_block_tx: Option, + mut katana_cursor: Option, + ) -> Result<(u64, Option, Option)> { let latest_block_number = self.provider.block_hash_and_number().await?.block_number; - let mut new_cursor = None; - if let Some(cursor) = cursor { - if cursor.block_number <= latest_block_number { - // we fetch pending block too - new_cursor = Some(self.sync_range_katana(&cursor).await?); - } + // katana sync + if let Some(cursor) = katana_cursor { + katana_cursor = Some(self.sync_range_katana(&cursor).await?); } else { + // default sync if from < latest_block_number { // if `from` == 0, then the block may or may not be processed yet. let from = if from == 0 { from } else { from + 1 }; + pending_block_tx = self.sync_range(from, latest_block_number, pending_block_tx).await?; + } else { + // pending block sync + pending_block_tx = self.sync_pending(latest_block_number + 1, pending_block_tx).await?; + } + } - self.sync_range(from, latest_block_number).await?; - }; + + Ok((latest_block_number, pending_block_tx, katana_cursor)) + } + + pub async fn sync_pending( + &mut self, + block_number: u64, + mut pending_block_tx: Option, + ) -> Result> { + let block = if let MaybePendingBlockWithTxs::PendingBlock(pending) = + self.provider.get_block_with_txs(BlockId::Tag(BlockTag::Pending)).await? 
+ { + pending + } else { + return Ok(None); + }; + + // Skip transactions that have been processed already + // Our cursor is the last processed transaction + let mut pending_block_tx_cursor = pending_block_tx; + for transaction in block.transactions { + if let Some(tx) = pending_block_tx_cursor { + if transaction.transaction_hash() != &tx { + continue; + } + + pending_block_tx_cursor = None; + continue; + } + + match self + .process_transaction_and_receipt( + &transaction, + None, + block_number, + block.timestamp, + ) + .await + { + Err(e) => { + match e.to_string().as_str() { + "TransactionHashNotFound" => { + warn!(target: LOG_TARGET, error = %e, transaction_hash = %format!("{:#x}", transaction.transaction_hash()), "Processing pending transaction."); + // We failed to fetch the transaction, which might be due to us indexing + // the pending transaction too fast. We will + // fail silently and retry processing the transaction in the next + // iteration. + return Ok(pending_block_tx); + } + _ => { + error!(target: LOG_TARGET, error = %e, transaction_hash = %format!("{:#x}", transaction.transaction_hash()), "Processing pending transaction."); + return Err(e); + } + } + } + Ok(_) => { + info!(target: LOG_TARGET, transaction_hash = %format!("{:#x}", transaction.transaction_hash()), "Processed pending transaction.") + } + } + + pending_block_tx = Some(*transaction.transaction_hash()); } - Ok((latest_block_number, new_cursor)) + // Set the head to the last processed pending transaction + // Head block number should still be latest block number + self.db.set_head(block_number - 1, pending_block_tx); + + self.db.execute().await?; + Ok(pending_block_tx) } - pub async fn sync_range(&mut self, from: u64, to: u64) -> Result<()> { + pub async fn sync_range( + &mut self, + from: u64, + to: u64, + mut pending_block_tx: Option, + ) -> Result> { // Process all blocks from current to latest. 
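+ // In outline: events are fetched page by page, block metadata is resolved
+ // once per block, each event is mapped to a (block_number, transaction_hash)
+ // pair (skipping everything up to the last processed pending transaction and
+ // deduplicating repeated hashes), and the resulting transactions are then
+ // processed one by one before the head is updated.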
let get_events = |token: Option| { self.provider.get_events( @@ -197,16 +275,72 @@ impl Engine { while let Some(token) = &events_pages.last().unwrap().continuation_token { events_pages.push(get_events(Some(token.clone())).await?); } - let mut last_block: u64 = 0; - for events_page in events_pages { - for event in events_page.events { - self.process(event, &mut last_block).await?; + + // Flatten events pages and events according to the pending block cursor + // to array of (block_number, transaction_hash) + let mut transactions = vec![]; + for events_page in &events_pages { + for event in &events_page.events { + let block_number = match event.block_number { + Some(block_number) => block_number, + None => return Err(anyhow::anyhow!("Event without block number.")), + }; + + // Keep track of last block number and fetch block timestamp + if let None = self.processed_blocks.get(&block_number) { + let block = self.get_block_metadata(block_number).await?; + + if let Some(ref block_tx) = self.block_tx { + block_tx.send(block.block_number).await?; + } + + self.process_block(&block).await?; + info!(target: LOG_TARGET, block_number = %block_number, "Processed block."); + + } + + // Then we skip all transactions until we reach the last pending processed + // transaction (if any) + if let Some(tx) = pending_block_tx { + if event.transaction_hash != tx { + continue; + } + + // Then we skip that processed transaction + pending_block_tx = None; + continue; + } + + if let Some((_, last_tx_hash)) = transactions.last() { + // Dedup transactions + // As me might have multiple events for the same transaction + if *last_tx_hash == event.transaction_hash { + continue; + } + } + transactions.push((block_number, event.transaction_hash)); } } + // Process all transactions + for (block_number, transaction_hash) in transactions { + // Process transaction + let transaction = self.provider.get_transaction_by_hash(transaction_hash).await?; + + self.process_transaction_and_receipt( + &transaction, + None, + block_number, + self.processed_blocks[&block_number].timestamp, + ) + .await?; + } + + self.db.set_head(to, pending_block_tx); + self.db.execute().await?; - Ok(()) + Ok(pending_block_tx) } async fn sync_range_katana( @@ -215,38 +349,7 @@ impl Engine { ) -> Result { let transactions = self.provider.get_transactions(cursor.clone()).await?; - self.process_katana( - transactions.transactions, - cursor.block_number, - transactions.cursor.block_number - 1, - ) - .await?; - self.db.execute().await?; - - Ok(transactions.cursor) - } - - async fn get_block_metadata(&self, block_number: u64) -> Result { - match self.provider.get_block_with_tx_hashes(BlockId::Number(block_number)).await? 
{ - MaybePendingBlockWithTxHashes::Block(block) => Ok(Block { - block_number: block.block_number, - _parent_hash: block.parent_hash, - timestamp: block.timestamp, - }), - MaybePendingBlockWithTxHashes::PendingBlock(block) => Ok(Block { - block_number, - _parent_hash: block.parent_hash, - timestamp: block.timestamp, - }), - } - } - - async fn process_katana( - &mut self, - transactions: Vec<(Transaction, MaybePendingTransactionReceipt)>, - from: u64, - to: u64, - ) -> Result<()> { + let (from, to) = (cursor.block_number, transactions.cursor.block_number-1); for block_number in from..=to { if let Some(ref block_tx) = self.block_tx { block_tx.send(block_number).await?; @@ -256,9 +359,9 @@ impl Engine { self.process_block(&block).await?; } - self.db.set_head(to); + self.db.set_head(to, None); - for (transaction, receipt) in transactions { + for (transaction, receipt) in transactions.transactions { let block_number = match &receipt { MaybePendingTransactionReceipt::Receipt(receipt) => match receipt { TransactionReceipt::Invoke(receipt) => receipt.block_number, @@ -290,38 +393,24 @@ impl Engine { .await?; } - Ok(()) - } - - async fn process(&mut self, event: EmittedEvent, last_block: &mut u64) -> Result<()> { - let block_number = match event.block_number { - Some(block_number) => block_number, - None => { - let error = anyhow::anyhow!("event has no block number"); - error!("processing event: {}", error); - - return Err(error); - } - }; - let block = self.get_block_metadata(block_number).await?; - - if block_number > *last_block { - *last_block = block_number; - - if let Some(ref block_tx) = self.block_tx { - block_tx.send(block_number).await?; - } + self.db.execute().await?; - self.process_block(&block).await?; + Ok(transactions.cursor) + } - self.db.set_head(block_number); + async fn get_block_metadata(&self, block_number: u64) -> Result { + match self.provider.get_block_with_tx_hashes(BlockId::Number(block_number)).await? 
{ + MaybePendingBlockWithTxHashes::Block(block) => Ok(Block { + block_number: block.block_number, + _parent_hash: block.parent_hash, + timestamp: block.timestamp, + }), + MaybePendingBlockWithTxHashes::PendingBlock(block) => Ok(Block { + block_number, + _parent_hash: block.parent_hash, + timestamp: block.timestamp, + }), } - - let transaction = self.provider.get_transaction_by_hash(event.transaction_hash).await?; - self.process_transaction_and_receipt(&transaction, None, block_number, block.timestamp) - .await?; - - Ok(()) } async fn process_transaction_and_receipt( @@ -332,34 +421,29 @@ impl Engine { block_timestamp: u64, ) -> Result<()> { let transaction_hash = transaction.transaction_hash(); - let receipt = receipt.unwrap_or( - match self.provider.get_transaction_receipt(transaction_hash).await { - Ok(receipt) => receipt, - Err(e) => { - error!("getting transaction receipt: {}", e); - return Err(e.into()); - } - }, + self.provider + .get_transaction_receipt(transaction_hash) + .await?, ); - let receipt = match receipt { + let events = match &receipt { MaybePendingTransactionReceipt::Receipt(TransactionReceipt::Invoke(receipt)) => { - Some(TransactionReceipt::Invoke(receipt.clone())) + Some(&receipt.events) } MaybePendingTransactionReceipt::Receipt(TransactionReceipt::L1Handler(receipt)) => { - Some(TransactionReceipt::L1Handler(receipt.clone())) + Some(&receipt.events) } + MaybePendingTransactionReceipt::PendingReceipt(PendingTransactionReceipt::Invoke( + receipt, + )) => Some(&receipt.events), + MaybePendingTransactionReceipt::PendingReceipt( + PendingTransactionReceipt::L1Handler(receipt), + ) => Some(&receipt.events), _ => None, }; - if let Some(receipt) = receipt { - let events = match &receipt { - TransactionReceipt::Invoke(invoke_receipt) => &invoke_receipt.events, - TransactionReceipt::L1Handler(l1_handler_receipt) => &l1_handler_receipt.events, - _ => return Ok(()), - }; - + if let Some(events) = events { let mut world_event = false; for (event_idx, event) in events.iter().enumerate() { if event.from_address != self.world.address { @@ -414,7 +498,7 @@ impl Engine { &mut self, block_number: u64, block_timestamp: u64, - transaction_receipt: &TransactionReceipt, + transaction_receipt: &MaybePendingTransactionReceipt, transaction_hash: FieldElement, transaction: &Transaction, ) -> Result<()> { @@ -439,18 +523,16 @@ impl Engine { &mut self, block_number: u64, block_timestamp: u64, - transaction_receipt: &TransactionReceipt, + transaction_receipt: &MaybePendingTransactionReceipt, event_id: &str, event: &Event, ) -> Result<()> { - let transaction_hash = match transaction_receipt { - TransactionReceipt::Invoke(invoke_receipt) => invoke_receipt.transaction_hash, - TransactionReceipt::L1Handler(l1_handler_receipt) => { - l1_handler_receipt.transaction_hash - } - _ => return Ok(()), - }; - self.db.store_event(event_id, event, transaction_hash, block_timestamp); + self.db.store_event( + event_id, + event, + *transaction_receipt.transaction_hash(), + block_timestamp, + ); for processor in &self.processors.event { // If the processor has no event_key, means it's a catch-all processor. 
// We also validate the event @@ -476,9 +558,10 @@ impl Engine { }; trace!( + target: LOG_TARGET, keys = ?unprocessed_event.keys, data = ?unprocessed_event.data, - "unprocessed event", + "Unprocessed event.", ); } } diff --git a/crates/torii/core/src/model.rs b/crates/torii/core/src/model.rs index 59b3fe1044..2c60fd344e 100644 --- a/crates/torii/core/src/model.rs +++ b/crates/torii/core/src/model.rs @@ -173,7 +173,11 @@ pub fn parse_sql_model_members(model: &str, model_members_all: &[SqlModelMember] } /// Creates a query that fetches all models and their nested data. -pub fn build_sql_query(model_schemas: &Vec) -> Result { +pub fn build_sql_query( + model_schemas: &Vec, + entities_table: &str, + entity_relation_column: &str, +) -> Result { fn parse_struct( path: &str, schema: &Struct, @@ -223,11 +227,16 @@ pub fn build_sql_query(model_schemas: &Vec) -> Result { let selections_clause = global_selections.join(", "); let join_clause = global_tables .into_iter() - .map(|table| format!(" JOIN {table} ON entities.id = {table}.entity_id")) + .map(|table| { + format!(" JOIN {table} ON {entities_table}.id = {table}.{entity_relation_column}") + }) .collect::>() .join(" "); - Ok(format!("SELECT entities.id, entities.keys, {selections_clause} FROM entities{join_clause}")) + Ok(format!( + "SELECT {entities_table}.id, {entities_table}.keys, {selections_clause} FROM \ + {entities_table}{join_clause}" + )) } /// Populate the values of a Ty (schema) from SQLite row. @@ -528,7 +537,7 @@ mod tests { ], }); - let query = build_sql_query(&vec![ty]).unwrap(); + let query = build_sql_query(&vec![ty], "entities", "entity_id").unwrap(); assert_eq!( query, r#"SELECT entities.id, entities.keys, Position.external_name AS "Position.name", Position.external_age AS "Position.age", Position$vec.external_x AS "Position$vec.x", Position$vec.external_y AS "Position$vec.y" FROM entities JOIN Position ON entities.id = Position.entity_id JOIN Position$vec ON entities.id = Position$vec.entity_id"# diff --git a/crates/torii/core/src/processors/event_message.rs b/crates/torii/core/src/processors/event_message.rs index ce68f574ba..f74bc730a9 100644 --- a/crates/torii/core/src/processors/event_message.rs +++ b/crates/torii/core/src/processors/event_message.rs @@ -2,7 +2,7 @@ use anyhow::{Error, Result}; use async_trait::async_trait; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, TransactionReceipt}; +use starknet::core::types::{Event, MaybePendingTransactionReceipt}; use starknet::providers::Provider; use tracing::info; @@ -10,6 +10,8 @@ use super::EventProcessor; use crate::processors::MODEL_INDEX; use crate::sql::Sql; +pub(crate) const LOG_TARGET: &str = "torii_core::processors::event_message"; + #[derive(Default)] pub struct EventMessageProcessor; @@ -40,7 +42,7 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceipt, + _transaction_receipt: &MaybePendingTransactionReceipt, event_id: &str, event: &Event, ) -> Result<(), Error> { @@ -50,7 +52,11 @@ where Err(_) => return Ok(()), }; - info!("store event message: {}", model.name()); + info!( + target: LOG_TARGET, + model = %model.name(), + "Store event message." 
+ ); // skip the first key, as its the event selector // and dont include last key as its the system key diff --git a/crates/torii/core/src/processors/metadata_update.rs b/crates/torii/core/src/processors/metadata_update.rs index 2df4ac4e9a..07859743b2 100644 --- a/crates/torii/core/src/processors/metadata_update.rs +++ b/crates/torii/core/src/processors/metadata_update.rs @@ -7,7 +7,7 @@ use base64::Engine as _; use dojo_world::contracts::world::WorldContractReader; use dojo_world::metadata::{Uri, WorldMetadata}; use reqwest::Client; -use starknet::core::types::{Event, TransactionReceipt}; +use starknet::core::types::{Event, MaybePendingTransactionReceipt}; use starknet::core::utils::parse_cairo_short_string; use starknet::providers::Provider; use starknet_crypto::FieldElement; @@ -20,6 +20,8 @@ use crate::sql::Sql; const IPFS_URL: &str = "https://cartridge.infura-ipfs.io/ipfs/"; const MAX_RETRY: u8 = 3; +pub(crate) const LOG_TARGET: &str = "torii_core::processors::metadata_update"; + #[derive(Default)] pub struct MetadataUpdateProcessor; @@ -35,9 +37,10 @@ where fn validate(&self, event: &Event) -> bool { if event.keys.len() > 1 { info!( - "invalid keys for event {}: {}", - >::event_key(self), - >::event_keys_as_string(self, event), + target: LOG_TARGET, + event_key = %>::event_key(self), + invalid_keys = %>::event_keys_as_string(self, event), + "Invalid event keys." ); return false; } @@ -50,7 +53,7 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceipt, + _transaction_receipt: &MaybePendingTransactionReceipt, _event_id: &str, event: &Event, ) -> Result<(), Error> { @@ -67,7 +70,12 @@ where "".to_string() }; - info!("Resource {:#x} metadata set: {}", resource, uri_str); + info!( + target: LOG_TARGET, + resource = %format!("{:#x}", resource), + uri = %uri_str, + "Resource metadata set." + ); db.set_metadata(resource, &uri_str, block_timestamp); let db = db.clone(); @@ -86,10 +94,20 @@ async fn try_retrieve(mut db: Sql, resource: FieldElement, uri_str: String) { db.update_metadata(&resource, &uri_str, &metadata, &icon_img, &cover_img) .await .unwrap(); - info!("Updated resource {resource:#x} metadata from ipfs"); + info!( + target: LOG_TARGET, + resource = %format!("{:#x}", resource), + "Updated resource metadata from ipfs." + ); } Err(e) => { - error!("Error retrieving resource {resource:#x} uri {uri_str}: {e}") + error!( + target: LOG_TARGET, + resource = %format!("{:#x}", resource), + uri = %uri_str, + error = %e, + "Retrieving resource uri." + ); } } } @@ -126,7 +144,11 @@ async fn fetch_content(cid: &str, mut retries: u8) -> Result { Err(e) => { retries -= 1; if retries > 0 { - info!("Fetch uri failure: {}", e); + info!( + target: LOG_TARGET, + error = %e, + "Fetch uri." 
+ ); tokio::time::sleep(Duration::from_secs(3)).await; } } diff --git a/crates/torii/core/src/processors/mod.rs b/crates/torii/core/src/processors/mod.rs index 670dc782af..5dfd43c766 100644 --- a/crates/torii/core/src/processors/mod.rs +++ b/crates/torii/core/src/processors/mod.rs @@ -1,7 +1,7 @@ use anyhow::{Error, Result}; use async_trait::async_trait; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, Transaction, TransactionReceipt}; +use starknet::core::types::{Event, MaybePendingTransactionReceipt, Transaction}; use starknet::providers::Provider; use starknet_crypto::FieldElement; @@ -37,7 +37,7 @@ where db: &mut Sql, block_number: u64, block_timestamp: u64, - transaction_receipt: &TransactionReceipt, + transaction_receipt: &MaybePendingTransactionReceipt, event_id: &str, event: &Event, ) -> Result<(), Error>; @@ -64,7 +64,7 @@ pub trait TransactionProcessor { provider: &P, block_number: u64, block_timestamp: u64, - transaction_receipt: &TransactionReceipt, + transaction_receipt: &MaybePendingTransactionReceipt, transaction_hash: FieldElement, transaction: &Transaction, ) -> Result<(), Error>; diff --git a/crates/torii/core/src/processors/register_model.rs b/crates/torii/core/src/processors/register_model.rs index f5facb99e5..5b097ab40c 100644 --- a/crates/torii/core/src/processors/register_model.rs +++ b/crates/torii/core/src/processors/register_model.rs @@ -2,7 +2,7 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, TransactionReceipt}; +use starknet::core::types::{Event, MaybePendingTransactionReceipt}; use starknet::core::utils::parse_cairo_short_string; use starknet::providers::Provider; use tracing::{debug, info}; @@ -10,6 +10,8 @@ use tracing::{debug, info}; use super::EventProcessor; use crate::sql::Sql; +pub(crate) const LOG_TARGET: &str = "torii_core::processors::register_model"; + #[derive(Default)] pub struct RegisterModelProcessor; @@ -25,9 +27,10 @@ where fn validate(&self, event: &Event) -> bool { if event.keys.len() > 1 { info!( - "invalid keys for event {}: {}", - >::event_key(self), - >::event_keys_as_string(self, event), + target: LOG_TARGET, + event_key = %>::event_key(self), + invalid_keys = %>::event_keys_as_string(self, event), + "Invalid event keys." ); return false; } @@ -40,7 +43,7 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceipt, + _transaction_receipt: &MaybePendingTransactionReceipt, _event_id: &str, event: &Event, ) -> Result<(), Error> { @@ -56,16 +59,21 @@ where let class_hash = event.data[1]; let contract_address = event.data[3]; - info!(name, "Registered model"); + info!( + target: LOG_TARGET, + name = %name, + "Registered model." + ); debug!( - name, - ?schema, - ?layout, - ?class_hash, - ?contract_address, - packed_size, - unpacked_size, - "Registered model content" + target: LOG_TARGET, + name = %name, + schema = ?schema, + layout = ?layout, + class_hash = ?class_hash, + contract_address = ?contract_address, + packed_size = %packed_size, + unpacked_size = %unpacked_size, + "Registered model content." 
); db.register_model( diff --git a/crates/torii/core/src/processors/store_del_record.rs b/crates/torii/core/src/processors/store_del_record.rs index f889a45517..c7802fef77 100644 --- a/crates/torii/core/src/processors/store_del_record.rs +++ b/crates/torii/core/src/processors/store_del_record.rs @@ -2,7 +2,7 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, TransactionReceipt}; +use starknet::core::types::{Event, MaybePendingTransactionReceipt}; use starknet::core::utils::{get_selector_from_name, parse_cairo_short_string}; use starknet::providers::Provider; use tracing::info; @@ -11,6 +11,8 @@ use super::EventProcessor; use crate::processors::{MODEL_INDEX, NUM_KEYS_INDEX}; use crate::sql::Sql; +pub(crate) const LOG_TARGET: &str = "torii_core::processors::store_del_record"; + #[derive(Default)] pub struct StoreDelRecordProcessor; @@ -26,9 +28,10 @@ where fn validate(&self, event: &Event) -> bool { if event.keys.len() > 1 { info!( - "invalid keys for event {}: {}", - >::event_key(self), - >::event_keys_as_string(self, event), + target: LOG_TARGET, + event_key = %>::event_key(self), + invalid_keys = %>::event_keys_as_string(self, event), + "Invalid event keys." ); return false; } @@ -41,12 +44,16 @@ where db: &mut Sql, _block_number: u64, _block_timestamp: u64, - _transaction_receipt: &TransactionReceipt, + _transaction_receipt: &MaybePendingTransactionReceipt, _event_id: &str, event: &Event, ) -> Result<(), Error> { let name = parse_cairo_short_string(&event.data[MODEL_INDEX])?; - info!("store delete record: {}", name); + info!( + target: LOG_TARGET, + name = %name, + "Store delete record." + ); // this is temporary until the model name hash is precomputed let model = db.model(&format!("{:#x}", get_selector_from_name(&name)?)).await?; diff --git a/crates/torii/core/src/processors/store_set_record.rs b/crates/torii/core/src/processors/store_set_record.rs index 428f31858f..03c9b71e49 100644 --- a/crates/torii/core/src/processors/store_set_record.rs +++ b/crates/torii/core/src/processors/store_set_record.rs @@ -2,7 +2,7 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, TransactionReceipt}; +use starknet::core::types::{Event, MaybePendingTransactionReceipt}; use starknet::core::utils::{get_selector_from_name, parse_cairo_short_string}; use starknet::providers::Provider; use tracing::info; @@ -11,6 +11,8 @@ use super::EventProcessor; use crate::processors::{MODEL_INDEX, NUM_KEYS_INDEX}; use crate::sql::Sql; +pub(crate) const LOG_TARGET: &str = "torii_core::processors::store_set_record"; + #[derive(Default)] pub struct StoreSetRecordProcessor; @@ -26,9 +28,10 @@ where fn validate(&self, event: &Event) -> bool { if event.keys.len() > 1 { info!( - "invalid keys for event {}: {}", - >::event_key(self), - >::event_keys_as_string(self, event), + target: LOG_TARGET, + event_key = %>::event_key(self), + invalid_keys = %>::event_keys_as_string(self, event), + "Invalid event keys." 
); return false; } @@ -41,12 +44,16 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceipt, + _transaction_receipt: &MaybePendingTransactionReceipt, event_id: &str, event: &Event, ) -> Result<(), Error> { let name = parse_cairo_short_string(&event.data[MODEL_INDEX])?; - info!("store set record: {}", name); + info!( + target: LOG_TARGET, + name = %name, + "Store set record.", + ); // this is temporary until the model name hash is precomputed let model = db.model(&format!("{:#x}", get_selector_from_name(&name)?)).await?; diff --git a/crates/torii/core/src/processors/store_transaction.rs b/crates/torii/core/src/processors/store_transaction.rs index 448de5f6ff..7ceaaf0d89 100644 --- a/crates/torii/core/src/processors/store_transaction.rs +++ b/crates/torii/core/src/processors/store_transaction.rs @@ -1,6 +1,6 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; -use starknet::core::types::{Transaction, TransactionReceipt}; +use starknet::core::types::{MaybePendingTransactionReceipt, Transaction}; use starknet::providers::Provider; use starknet_crypto::FieldElement; @@ -18,7 +18,7 @@ impl TransactionProcessor
for StoreTransactionProcessor { _provider: &P, block_number: u64, block_timestamp: u64, - _receipt: &TransactionReceipt, + _receipt: &MaybePendingTransactionReceipt, transaction_hash: FieldElement, transaction: &Transaction, ) -> Result<(), Error> { diff --git a/crates/torii/core/src/sql.rs b/crates/torii/core/src/sql.rs index 1dc24e454d..51e0eb5d60 100644 --- a/crates/torii/core/src/sql.rs +++ b/crates/torii/core/src/sql.rs @@ -16,7 +16,10 @@ use super::World; use crate::model::ModelSQLReader; use crate::query_queue::{Argument, QueryQueue}; use crate::simple_broker::SimpleBroker; -use crate::types::{Entity as EntityUpdated, Event as EventEmitted, Model as ModelRegistered}; +use crate::types::{ + Entity as EntityUpdated, Event as EventEmitted, EventMessage as EventMessageUpdated, + Model as ModelRegistered, +}; use crate::utils::{must_utc_datetime_from_timestamp, utc_dt_string_from_timestamp}; pub const FELT_DELIMITER: &str = "/"; @@ -50,20 +53,33 @@ impl Sql { Ok(Self { pool, world_address, query_queue }) } - pub async fn head(&self) -> Result { + pub async fn head(&self) -> Result<(u64, Option)> { let mut conn: PoolConnection = self.pool.acquire().await?; - let indexer_query = sqlx::query_as::<_, (i64,)>("SELECT head FROM indexers WHERE id = ?") - .bind(format!("{:#x}", self.world_address)); - - let indexer: (i64,) = indexer_query.fetch_one(&mut *conn).await?; - Ok(indexer.0.try_into().expect("doesn't fit in u64")) + let indexer_query = sqlx::query_as::<_, (i64, Option)>( + "SELECT head, pending_block_tx FROM indexers WHERE id = ?", + ) + .bind(format!("{:#x}", self.world_address)); + + let indexer: (i64, Option) = indexer_query.fetch_one(&mut *conn).await?; + Ok(( + indexer.0.try_into().expect("doesn't fit in u64"), + indexer.1.map(|f| FieldElement::from_str(&f)).transpose()?, + )) } - pub fn set_head(&mut self, head: u64) { + pub fn set_head(&mut self, head: u64, pending_block_tx: Option) { let head = Argument::Int(head.try_into().expect("doesn't fit in u64")); - let id = Argument::String(format!("{:#x}", self.world_address)); + let id = Argument::FieldElement(self.world_address); + let pending_block_tx = if let Some(f) = pending_block_tx { + Argument::String(format!("{:#x}", f)) + } else { + Argument::Null + }; - self.query_queue.enqueue("UPDATE indexers SET head = ? WHERE id = ?", vec![head, id]); + self.query_queue.enqueue( + "UPDATE indexers SET head = ?, pending_block_tx = ? WHERE id = ?", + vec![head, pending_block_tx, id], + ); } pub async fn world(&self) -> Result { @@ -212,7 +228,7 @@ impl Sql { VALUES (?, ?, ?, ?) 
ON CONFLICT(id) DO UPDATE SET \ updated_at=CURRENT_TIMESTAMP, event_id=EXCLUDED.event_id RETURNING \ *"; - let entity_updated: EntityUpdated = sqlx::query_as(insert_entities) + let event_message_updated: EventMessageUpdated = sqlx::query_as(insert_entities) .bind(&entity_id) .bind(&keys_str) .bind(event_id) @@ -231,7 +247,7 @@ impl Sql { ); self.query_queue.execute_all().await?; - SimpleBroker::publish(entity_updated); + SimpleBroker::publish(event_message_updated); Ok(()) } diff --git a/crates/torii/core/src/sql_test.rs b/crates/torii/core/src/sql_test.rs index c6742621c8..bd8c74226b 100644 --- a/crates/torii/core/src/sql_test.rs +++ b/crates/torii/core/src/sql_test.rs @@ -6,14 +6,17 @@ use dojo_test_utils::sequencer::{ get_default_test_starknet_config, SequencerConfig, TestSequencer, }; use dojo_world::contracts::world::WorldContractReader; -use dojo_world::migration::strategy::MigrationStrategy; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionWaiter; use scarb::ops; use sozo_ops::migration::execute_strategy; use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; -use starknet::core::types::{BlockId, BlockTag, Event, FieldElement}; +use starknet::accounts::{Account, Call}; +use starknet::core::types::{BlockId, BlockTag}; use starknet::core::utils::get_selector_from_name; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::{JsonRpcClient, Provider}; +use starknet_crypto::poseidon_hash_many; use tokio::sync::broadcast; use crate::engine::{Engine, EngineConfig, Processors}; @@ -22,27 +25,16 @@ use crate::processors::store_set_record::StoreSetRecordProcessor; use crate::provider::provider::KatanaProvider; use crate::provider::KatanaClient; use crate::sql::Sql; -use crate::utils::utc_dt_string_from_timestamp; pub async fn bootstrap_engine( world: WorldContractReader
, db: Sql, provider: R, - migration: MigrationStrategy, - sequencer: TestSequencer, ) -> Result, Box> where P: Provider + Send + Sync, R: KatanaProvider + Send + Sync, { - let mut account = sequencer.account(); - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); - let ws = ops::read_workspace(config.manifest_path(), &config) - .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - execute_strategy(&ws, &migration, &account, None).await.unwrap(); - let (shutdown_tx, _) = broadcast::channel(1); let mut engine = Engine::new( world, @@ -57,7 +49,7 @@ where None, ); - let _ = engine.sync_to_head(0, None).await?; + let _ = engine.sync_to_head(0, None, None).await?; Ok(engine) } @@ -76,19 +68,43 @@ async fn test_load_from_remote() { let provider = JsonRpcClient::new(HttpTransport::new(sequencer.url())); let world = WorldContractReader::new(migration.world_address().unwrap(), &provider); - let mut db = Sql::new(pool.clone(), migration.world_address().unwrap()).await.unwrap(); - let _ = bootstrap_engine( - world, - db.clone(), - &KatanaClient::new(crate::provider::http::HttpTransport::new(sequencer.url())), - migration, - sequencer, - ) - .await; + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); - let block_timestamp = 1710754478_u64; + let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); + let ws = ops::read_workspace(config.manifest_path(), &config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); + let migration_output = + execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); + let world_address = migration_output.world_address; + + assert!(migration.world_address().unwrap() == world_address); + + // spawn + let tx = account + .execute(vec![Call { + to: migration_output + .contracts + .first() + .expect("shouldn't be empty") + .as_ref() + .expect("should be deployed") + .contract_address, + selector: get_selector_from_name("spawn").unwrap(), + calldata: vec![], + }]) + .send() + .await + .unwrap(); + + TransactionWaiter::new(tx.transaction_hash, &provider).await.unwrap(); + + let mut db = Sql::new(pool.clone(), world_address).await.unwrap(); + let _ = bootstrap_engine(world, db.clone(), &KatanaClient::new(crate::provider::http::HttpTransport::new(sequencer.url())),).await; + + let _block_timestamp = 1710754478_u64; let models = sqlx::query("SELECT * FROM models").fetch_all(&pool).await.unwrap(); - assert_eq!(models.len(), 3); + assert_eq!(models.len(), 4); let (id, name, packed_size, unpacked_size): (String, String, u8, u8) = sqlx::query_as( "SELECT id, name, packed_size, unpacked_size FROM models WHERE name = 'Position'", @@ -114,29 +130,23 @@ async fn test_load_from_remote() { assert_eq!(packed_size, 1); assert_eq!(unpacked_size, 2); - let event_id = format!("0x{:064x}:0x{:04x}:0x{:04x}", 0, 42, 69); - db.store_event( - &event_id, - &Event { - from_address: FieldElement::ONE, - keys: Vec::from([FieldElement::TWO]), - data: Vec::from([FieldElement::TWO, FieldElement::THREE]), - }, - FieldElement::THREE, - block_timestamp, - ); + // print all entities + let entities = sqlx::query("SELECT * FROM entities").fetch_all(&pool).await.unwrap(); + assert_eq!(entities.len(), 1); - db.execute().await.unwrap(); + let (id, keys): (String, String) = sqlx::query_as( + format!( + "SELECT id, keys FROM entities WHERE id = '{:#x}'", + poseidon_hash_many(&[account.address()]) + 
) + .as_str(), + ) + .fetch_one(&pool) + .await + .unwrap(); - let query = format!( - "SELECT keys, data, transaction_hash, executed_at FROM events WHERE id = '{}'", - event_id - ); - let (keys, data, tx_hash, executed_at): (String, String, String, String) = - sqlx::query_as(&query).fetch_one(&pool).await.unwrap(); + assert_eq!(id, format!("{:#x}", poseidon_hash_many(&[account.address()]))); + assert_eq!(keys, format!("{:#x}/", account.address())); - assert_eq!(keys, format!("{:#x}/", FieldElement::TWO)); - assert_eq!(data, format!("{:#x}/{:#x}/", FieldElement::TWO, FieldElement::THREE)); - assert_eq!(tx_hash, format!("{:#x}", FieldElement::THREE)); - assert_eq!(executed_at, utc_dt_string_from_timestamp(block_timestamp)); + db.execute().await.unwrap(); } diff --git a/crates/torii/core/src/types.rs b/crates/torii/core/src/types.rs index 0e0fc35cac..24f982f1e2 100644 --- a/crates/torii/core/src/types.rs +++ b/crates/torii/core/src/types.rs @@ -39,6 +39,17 @@ pub struct Entity { pub updated_at: DateTime, } +#[derive(FromRow, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct EventMessage { + pub id: String, + pub keys: String, + pub event_id: String, + pub executed_at: DateTime, + pub created_at: DateTime, + pub updated_at: DateTime, +} + #[derive(FromRow, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] pub struct Model { diff --git a/crates/torii/graphql/src/object/entity.rs b/crates/torii/graphql/src/object/entity.rs index 52d0f89494..dd73c9c6fd 100644 --- a/crates/torii/graphql/src/object/entity.rs +++ b/crates/torii/graphql/src/object/entity.rs @@ -123,7 +123,7 @@ fn model_union_field() -> Field { let model_ids: Vec<(String, String)> = sqlx::query_as( "SELECT id, name FROM models - WHERE id = ( + WHERE id IN ( SELECT model_id FROM entity_model WHERE entity_id = ? 
diff --git a/crates/torii/graphql/src/object/event_message.rs b/crates/torii/graphql/src/object/event_message.rs index 7af1e311e4..007573b3ca 100644 --- a/crates/torii/graphql/src/object/event_message.rs +++ b/crates/torii/graphql/src/object/event_message.rs @@ -8,7 +8,7 @@ use sqlx::pool::PoolConnection; use sqlx::{Pool, Sqlite}; use tokio_stream::StreamExt; use torii_core::simple_broker::SimpleBroker; -use torii_core::types::Entity; +use torii_core::types::EventMessage; use super::inputs::keys_input::keys_argument; use super::{BasicObject, ResolvableObject, TypeMapping, ValueMapping}; @@ -75,14 +75,18 @@ impl ResolvableObject for EventMessageObject { }; // if id is None, then subscribe to all entities // if id is Some, then subscribe to only the entity with that id - Ok(SimpleBroker::::subscribe().filter_map(move |entity: Entity| { - if id.is_none() || id == Some(entity.id.clone()) { - Some(Ok(Value::Object(EventMessageObject::value_mapping(entity)))) - } else { - // id != entity.id , then don't send anything, still listening - None - } - })) + Ok(SimpleBroker::::subscribe().filter_map( + move |entity: EventMessage| { + if id.is_none() || id == Some(entity.id.clone()) { + Some(Ok(Value::Object(EventMessageObject::value_mapping( + entity, + )))) + } else { + // id != entity.id , then don't send anything, still listening + None + } + }, + )) }) }, ) @@ -92,7 +96,7 @@ impl ResolvableObject for EventMessageObject { } impl EventMessageObject { - pub fn value_mapping(entity: Entity) -> ValueMapping { + pub fn value_mapping(entity: EventMessage) -> ValueMapping { let keys: Vec<&str> = entity.keys.split('/').filter(|&k| !k.is_empty()).collect(); IndexMap::from([ (Name::new("id"), Value::from(entity.id)), @@ -123,7 +127,7 @@ fn model_union_field() -> Field { let model_ids: Vec<(String, String)> = sqlx::query_as( "SELECT id, name FROM models - WHERE id = ( + WHERE id IN ( SELECT model_id FROM event_model WHERE entity_id = ? diff --git a/crates/torii/graphql/src/server.rs b/crates/torii/graphql/src/server.rs index a6d81a35e4..4ac5f3dd78 100644 --- a/crates/torii/graphql/src/server.rs +++ b/crates/torii/graphql/src/server.rs @@ -49,7 +49,17 @@ fn graphql_filter( ); let subscription_endpoint = if let Some(external_url) = external_url { - format!("{external_url}/graphql/ws").replace("http", "ws") + let mut websocket_url = external_url.clone(); + websocket_url.set_path("/graphql/ws"); + + let websocket_scheme = match websocket_url.scheme() { + "http" => "ws", + "https" => "wss", + _ => panic!("Invalid URL scheme"), // URL validated on input so this never hits + }; + + let _ = websocket_url.set_scheme(websocket_scheme); + websocket_url.to_string() } else { "/graphql/ws".to_string() }; diff --git a/crates/torii/graphql/src/tests/entities_test.rs b/crates/torii/graphql/src/tests/entities_test.rs index 2948b58da0..c6efd000d9 100644 --- a/crates/torii/graphql/src/tests/entities_test.rs +++ b/crates/torii/graphql/src/tests/entities_test.rs @@ -7,7 +7,7 @@ mod tests { use crate::schema::build_schema; use crate::tests::{ - run_graphql_query, spinup_types_test, Connection, Entity, Record, Subrecord, + run_graphql_query, spinup_types_test, Connection, Entity, Record, RecordSibling, Subrecord, }; async fn entities_query(schema: &Schema, arg: &str) -> Value { @@ -62,6 +62,11 @@ mod tests { random_u8 random_u128 }} + ... on RecordSibling {{ + __typename + record_id + random_u8 + }} ... 
on Subrecord {{ __typename record_id @@ -224,10 +229,16 @@ mod tests { let id = poseidon_hash_many(&[FieldElement::ZERO]); let entity = entity_model_query(&schema, &id).await; let models = entity.get("models").ok_or("no models found").unwrap(); + + // models should contain record & recordsibling let record: Record = serde_json::from_value(models[0].clone()).unwrap(); assert_eq!(&record.__typename, "Record"); assert_eq!(record.record_id, 0); + let record_sibling: RecordSibling = serde_json::from_value(models[1].clone()).unwrap(); + assert_eq!(&record_sibling.__typename, "RecordSibling"); + assert_eq!(record_sibling.record_id, 0); + let id = poseidon_hash_many(&[FieldElement::ZERO, FieldElement::ONE]); let entity = entity_model_query(&schema, &id).await; let models = entity.get("models").ok_or("no models found").unwrap(); diff --git a/crates/torii/graphql/src/tests/metadata_test.rs b/crates/torii/graphql/src/tests/metadata_test.rs index 01914abfb7..c834ea1d3c 100644 --- a/crates/torii/graphql/src/tests/metadata_test.rs +++ b/crates/torii/graphql/src/tests/metadata_test.rs @@ -1,6 +1,6 @@ #[cfg(test)] mod tests { - use dojo_world::metadata::Metadata as DojoMetadata; + use dojo_world::metadata::{project_to_world_metadata, ProjectMetadata}; use sqlx::SqlitePool; use starknet_crypto::FieldElement; use torii_core::sql::Sql; @@ -51,7 +51,7 @@ mod tests { let schema = build_schema(&pool).await.unwrap(); let cover_img = "QWxsIHlvdXIgYmFzZSBiZWxvbmcgdG8gdXM="; - let dojo_metadata: DojoMetadata = toml::from_str( + let project_metadata: ProjectMetadata = toml::from_str( r#" [world] name = "example" @@ -62,7 +62,7 @@ mod tests { "#, ) .unwrap(); - let world_metadata = dojo_metadata.world.unwrap(); + let world_metadata = project_to_world_metadata(project_metadata.world); db.set_metadata(&RESOURCE, URI, BLOCK_TIMESTAMP); db.update_metadata(&RESOURCE, URI, &world_metadata, &None, &Some(cover_img.to_string())) .await diff --git a/crates/torii/graphql/src/tests/mod.rs b/crates/torii/graphql/src/tests/mod.rs index 1d40053613..55e8bc0a16 100644 --- a/crates/torii/graphql/src/tests/mod.rs +++ b/crates/torii/graphql/src/tests/mod.rs @@ -11,6 +11,7 @@ use dojo_types::primitive::Primitive; use dojo_types::schema::{Enum, EnumOption, Member, Struct, Ty}; use dojo_world::contracts::WorldContractReader; use dojo_world::manifest::DeploymentManifest; +use dojo_world::migration::TxnConfig; use dojo_world::utils::TransactionWaiter; use scarb::ops; use serde::Deserialize; @@ -293,7 +294,7 @@ pub async fn spinup_types_test() -> Result { let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - execute_strategy(&ws, &migration, &account, None).await.unwrap(); + execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); let manifest = DeploymentManifest::load_from_remote(&provider, migration.world_address().unwrap()) diff --git a/crates/torii/graphql/src/tests/models_test.rs b/crates/torii/graphql/src/tests/models_test.rs index ff3e825ec5..c5d18c8466 100644 --- a/crates/torii/graphql/src/tests/models_test.rs +++ b/crates/torii/graphql/src/tests/models_test.rs @@ -151,6 +151,7 @@ mod tests { // End to end test spins up a test sequencer and deploys types-test project, this takes a while // to run so combine all related tests into one + #[allow(clippy::get_first)] #[tokio::test(flavor = "multi_thread")] async fn models_test() -> Result<()> { let pool = spinup_types_test().await?; diff --git a/crates/torii/grpc/Cargo.toml 
b/crates/torii/grpc/Cargo.toml index 24987b5534..d0d19105d2 100644 --- a/crates/torii/grpc/Cargo.toml +++ b/crates/torii/grpc/Cargo.toml @@ -30,6 +30,12 @@ serde_json.workspace = true tower = "0.4.13" tracing.workspace = true +[dev-dependencies] +scarb.workspace = true +dojo-test-utils.workspace = true +sozo-ops.workspace = true +dojo-world = { path = "../../dojo-world", features = [ "contracts" ] } + [target.'cfg(target_arch = "wasm32")'.dependencies] tonic-web-wasm-client.workspace = true wasm-prost.workspace = true diff --git a/crates/torii/grpc/proto/world.proto b/crates/torii/grpc/proto/world.proto index 2ad33e2d15..4bf5b181a9 100644 --- a/crates/torii/grpc/proto/world.proto +++ b/crates/torii/grpc/proto/world.proto @@ -70,16 +70,6 @@ message RetrieveEntitiesResponse { uint32 total_count = 2; } -message RetrieveEventMessagesRequest { - // The entities to retrieve - types.Query query = 1; -} - -message RetrieveEventMessagesResponse { - repeated types.Entity events = 1; - uint32 total_count = 2; -} - message RetrieveEventsRequest { // The events to retrieve types.EventQuery query = 1; diff --git a/crates/torii/grpc/src/client.rs b/crates/torii/grpc/src/client.rs index 90852a0701..4a11e04777 100644 --- a/crates/torii/grpc/src/client.rs +++ b/crates/torii/grpc/src/client.rs @@ -3,7 +3,7 @@ use std::num::ParseIntError; use futures_util::stream::MapOk; use futures_util::{Stream, StreamExt, TryStreamExt}; -use starknet::core::types::{FromByteSliceError, FromStrError, StateUpdate}; +use starknet::core::types::{FromStrError, StateUpdate}; use starknet_crypto::FieldElement; use crate::proto::world::{ @@ -11,27 +11,22 @@ use crate::proto::world::{ SubscribeEntitiesRequest, SubscribeEntityResponse, SubscribeModelsRequest, SubscribeModelsResponse, }; -use crate::types::schema::Entity; +use crate::types::schema::{self, Entity, SchemaError}; use crate::types::{KeysClause, Query}; #[derive(Debug, thiserror::Error)] pub enum Error { #[error(transparent)] Grpc(tonic::Status), - #[error("Missing expected data")] - MissingExpectedData, - #[error("Unsupported type")] - UnsupportedType, #[error(transparent)] ParseStr(FromStrError), #[error(transparent)] - SliceError(FromByteSliceError), - #[error(transparent)] ParseInt(ParseIntError), - #[cfg(not(target_arch = "wasm32"))] #[error(transparent)] Transport(tonic::transport::Error), + #[error(transparent)] + Schema(#[from] schema::SchemaError), } /// A lightweight wrapper around the grpc client. @@ -71,7 +66,9 @@ impl WorldClient { .world_metadata(MetadataRequest {}) .await .map_err(Error::Grpc) - .and_then(|res| res.into_inner().metadata.ok_or(Error::MissingExpectedData)) + .and_then(|res| { + res.into_inner().metadata.ok_or(Error::Schema(SchemaError::MissingExpectedData)) + }) .and_then(|metadata| metadata.try_into().map_err(Error::ParseStr)) } @@ -83,6 +80,18 @@ impl WorldClient { self.inner.retrieve_entities(request).await.map_err(Error::Grpc).map(|res| res.into_inner()) } + pub async fn retrieve_event_messages( + &mut self, + query: Query, + ) -> Result { + let request = RetrieveEntitiesRequest { query: Some(query.into()) }; + self.inner + .retrieve_event_messages(request) + .await + .map_err(Error::Grpc) + .map(|res| res.into_inner()) + } + /// Subscribe to entities updates of a World. pub async fn subscribe_entities( &mut self, @@ -102,6 +111,25 @@ impl WorldClient { })))) } + /// Subscribe to event messages of a World. 
+ pub async fn subscribe_event_messages( + &mut self, + hashed_keys: Vec, + ) -> Result { + let hashed_keys = hashed_keys.iter().map(|hashed| hashed.to_bytes_be().to_vec()).collect(); + let stream = self + .inner + .subscribe_event_messages(SubscribeEntitiesRequest { hashed_keys }) + .await + .map_err(Error::Grpc) + .map(|res| res.into_inner())?; + + Ok(EntityUpdateStreaming(stream.map_ok(Box::new(|res| { + let entity = res.entity.expect("entity must exist"); + entity.try_into().expect("must able to serialize") + })))) + } + /// Subscribe to the model diff for a set of models of a World. pub async fn subscribe_model_diffs( &mut self, diff --git a/crates/torii/grpc/src/server/logger.rs b/crates/torii/grpc/src/server/logger.rs index 093a5bb50d..c49fbd1c7b 100644 --- a/crates/torii/grpc/src/server/logger.rs +++ b/crates/torii/grpc/src/server/logger.rs @@ -5,6 +5,8 @@ use tonic::body::BoxBody; use tower::{Layer, Service}; use tracing::info; +pub(crate) const LOG_TARGET: &str = "torii::grpc::server::logger"; + #[derive(Debug, Clone, Default)] pub struct Logger { inner: S, @@ -42,7 +44,12 @@ where let uri = req.uri().path(); let method = req.method(); - info!(target: "grpc", ?method, ?uri); + info!( + target: LOG_TARGET, + method = ?method, + uri = ?uri, + "gRPC request." + ); inner.call(req).await }) } diff --git a/crates/torii/grpc/src/server/mod.rs b/crates/torii/grpc/src/server/mod.rs index b5332acb27..0a572cc9f8 100644 --- a/crates/torii/grpc/src/server/mod.rs +++ b/crates/torii/grpc/src/server/mod.rs @@ -1,6 +1,9 @@ pub mod logger; pub mod subscriptions; +#[cfg(test)] +mod tests; + use std::future::Future; use std::net::SocketAddr; use std::pin::Pin; @@ -16,7 +19,7 @@ use proto::world::{ }; use sqlx::sqlite::SqliteRow; use sqlx::{Pool, Row, Sqlite}; -use starknet::core::utils::cairo_short_string_to_felt; +use starknet::core::utils::{cairo_short_string_to_felt, get_selector_from_name}; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; use starknet_crypto::FieldElement; @@ -38,6 +41,14 @@ use crate::proto::world::{SubscribeEntitiesRequest, SubscribeEntityResponse}; use crate::proto::{self}; use crate::types::ComparisonOperator; +pub(crate) static ENTITIES_TABLE: &str = "entities"; +pub(crate) static ENTITIES_MODEL_RELATION_TABLE: &str = "entity_model"; +pub(crate) static ENTITIES_ENTITY_RELATION_COLUMN: &str = "entity_id"; + +pub(crate) static EVENTS_MESSAGES_TABLE: &str = "events_messages"; +pub(crate) static EVENTS_MESSAGES_MODEL_RELATION_TABLE: &str = "event_model"; +pub(crate) static EVENTS_MESSAGES_ENTITY_RELATION_COLUMN: &str = "event_message_id"; + #[derive(Clone)] pub struct DojoWorld { pool: Pool, @@ -73,6 +84,12 @@ impl DojoWorld { Arc::clone(&model_cache), )); + tokio::task::spawn(subscriptions::event_message::Service::new( + pool.clone(), + Arc::clone(&event_message_manager), + Arc::clone(&model_cache), + )); + Self { pool, world_address, @@ -99,9 +116,9 @@ impl DojoWorld { .fetch_one(&self.pool) .await?; - let models: Vec<(String, String, String, u32, u32, String)> = sqlx::query_as( - "SELECT name, class_hash, contract_address, packed_size, unpacked_size, layout FROM \ - models", + let models: Vec<(String, String, String, String, u32, u32, String)> = sqlx::query_as( + "SELECT id, name, class_hash, contract_address, packed_size, unpacked_size, layout \ + FROM models", ) .fetch_all(&self.pool) .await?; @@ -110,12 +127,12 @@ impl DojoWorld { for model in models { let schema = self.model_cache.schema(&model.0).await?; 
models_metadata.push(proto::types::ModelMetadata { - name: model.0, - class_hash: model.1, - contract_address: model.2, - packed_size: model.3, - unpacked_size: model.4, - layout: hex::decode(&model.5).unwrap(), + name: model.1, + class_hash: model.2, + contract_address: model.3, + packed_size: model.4, + unpacked_size: model.5, + layout: hex::decode(&model.6).unwrap(), schema: serde_json::to_vec(&schema).unwrap(), }); } @@ -134,13 +151,21 @@ impl DojoWorld { limit: u32, offset: u32, ) -> Result<(Vec, u32), Error> { - self.query_by_hashed_keys("entities", "entity_model", None, limit, offset).await + self.query_by_hashed_keys( + ENTITIES_TABLE, + ENTITIES_MODEL_RELATION_TABLE, + ENTITIES_ENTITY_RELATION_COLUMN, + None, + limit, + offset, + ) + .await } async fn events_all(&self, limit: u32, offset: u32) -> Result, Error> { let query = r#" SELECT keys, data, transaction_hash - FROM events + FROM events ORDER BY id DESC LIMIT ? OFFSET ? "# @@ -151,10 +176,11 @@ impl DojoWorld { row_events.iter().map(map_row_to_event).collect() } - async fn query_by_hashed_keys( + pub(crate) async fn query_by_hashed_keys( &self, table: &str, model_relation_table: &str, + entity_relation_column: &str, hashed_keys: Option, limit: u32, offset: u32, @@ -191,7 +217,7 @@ impl DojoWorld { // query to filter with limit and offset let query = format!( r#" - SELECT {table}.id, group_concat({model_relation_table}.model_id) as model_names + SELECT {table}.id, group_concat({model_relation_table}.model_id) as model_ids FROM {table} JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id {filter_ids} @@ -206,10 +232,13 @@ impl DojoWorld { let mut entities = Vec::with_capacity(db_entities.len()); for (entity_id, models_str) in db_entities { - let model_names: Vec<&str> = models_str.split(',').collect(); - let schemas = self.model_cache.schemas(model_names).await?; + let model_ids: Vec<&str> = models_str.split(',').collect(); + let schemas = self.model_cache.schemas(model_ids).await?; - let entity_query = format!("{} WHERE {table}.id = ?", build_sql_query(&schemas)?); + let entity_query = format!( + "{} WHERE {table}.id = ?", + build_sql_query(&schemas, table, entity_relation_column)? + ); let row = sqlx::query(&entity_query).bind(&entity_id).fetch_one(&self.pool).await?; let models = schemas @@ -232,10 +261,11 @@ impl DojoWorld { Ok((entities, total_count)) } - async fn query_by_keys( + pub(crate) async fn query_by_keys( &self, table: &str, model_relation_table: &str, + entity_relation_column: &str, keys_clause: proto::types::KeysClause, limit: u32, offset: u32, @@ -259,9 +289,9 @@ impl DojoWorld { SELECT count(*) FROM {table} JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id - WHERE {model_relation_table}.model_id = '{}' and {table}.keys LIKE ? + WHERE {model_relation_table}.model_id = '{:#x}' and {table}.keys LIKE ? "#, - keys_clause.model + get_selector_from_name(&keys_clause.model).map_err(ParseError::NonAsciiName)? ); // total count of rows that matches keys_pattern without limit and offset @@ -270,26 +300,30 @@ impl DojoWorld { let models_query = format!( r#" - SELECT group_concat({model_relation_table}.model_id) as model_names + SELECT group_concat({model_relation_table}.model_id) as model_ids FROM {table} JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id WHERE {table}.keys LIKE ? 
GROUP BY {table}.id - HAVING model_names REGEXP '(^|,){}(,|$)' + HAVING INSTR(model_ids, '{:#x}') > 0 LIMIT 1 "#, - keys_clause.model + get_selector_from_name(&keys_clause.model).map_err(ParseError::NonAsciiName)? ); let (models_str,): (String,) = sqlx::query_as(&models_query).bind(&keys_pattern).fetch_one(&self.pool).await?; - let model_names = models_str.split(',').collect::>(); - let schemas = self.model_cache.schemas(model_names).await?; + println!("models_str: {}", models_str); + + let model_ids = models_str.split(',').collect::>(); + let schemas = self.model_cache.schemas(model_ids).await?; + + println!("schemas: {:?}", schemas); // query to filter with limit and offset let entities_query = format!( "{} WHERE {table}.keys LIKE ? ORDER BY {table}.event_id DESC LIMIT ? OFFSET ?", - build_sql_query(&schemas)? + build_sql_query(&schemas, table, entity_relation_column)? ); let db_entities = sqlx::query(&entities_query) .bind(&keys_pattern) @@ -307,7 +341,7 @@ impl DojoWorld { )) } - async fn events_by_keys( + pub(crate) async fn events_by_keys( &self, keys_clause: proto::types::EventKeysClause, limit: u32, @@ -344,10 +378,11 @@ impl DojoWorld { row_events.iter().map(map_row_to_event).collect() } - async fn query_by_member( + pub(crate) async fn query_by_member( &self, table: &str, model_relation_table: &str, + entity_relation_column: &str, member_clause: proto::types::MemberClause, _limit: u32, _offset: u32, @@ -377,25 +412,25 @@ impl DojoWorld { let models_query = format!( r#" - SELECT group_concat({model_relation_table}.model_id) as model_names + SELECT group_concat({model_relation_table}.model_id) as model_ids FROM {table} JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id GROUP BY {table}.id - HAVING model_names REGEXP '(^|,){}(,|$)' + HAVING INSTR(model_ids, '{:#x}') > 0 LIMIT 1 "#, - member_clause.model + get_selector_from_name(&member_clause.model).map_err(ParseError::NonAsciiName)? ); let (models_str,): (String,) = sqlx::query_as(&models_query).fetch_one(&self.pool).await?; - let model_names = models_str.split(',').collect::>(); - let schemas = self.model_cache.schemas(model_names).await?; + let model_ids = models_str.split(',').collect::>(); + let schemas = self.model_cache.schemas(model_ids).await?; let table_name = member_clause.model; let column_name = format!("external_{}", member_clause.member); let member_query = format!( "{} WHERE {table_name}.{column_name} {comparison_operator} ?", - build_sql_query(&schemas)? + build_sql_query(&schemas, table, entity_relation_column)? 
); let db_entities = @@ -413,6 +448,7 @@ impl DojoWorld { &self, _table: &str, _model_relation_table: &str, + _entity_relation_column: &str, _composite: proto::types::CompositeClause, _limit: u32, _offset: u32, @@ -422,6 +458,10 @@ impl DojoWorld { } pub async fn model_metadata(&self, model: &str) -> Result { + // selector + let model = + format!("{:#x}", get_selector_from_name(model).map_err(ParseError::NonAsciiName)?); + let (name, class_hash, contract_address, packed_size, unpacked_size, layout): ( String, String, @@ -433,11 +473,11 @@ impl DojoWorld { "SELECT name, class_hash, contract_address, packed_size, unpacked_size, layout FROM \ models WHERE id = ?", ) - .bind(model) + .bind(&model) .fetch_one(&self.pool) .await?; - let schema = self.model_cache.schema(model).await?; + let schema = self.model_cache.schema(&model).await?; let layout = hex::decode(&layout).unwrap(); Ok(proto::types::ModelMetadata { @@ -499,8 +539,9 @@ impl DojoWorld { } self.query_by_hashed_keys( - "entities", - "entity_model", + ENTITIES_TABLE, + ENTITIES_MODEL_RELATION_TABLE, + ENTITIES_ENTITY_RELATION_COLUMN, Some(hashed_keys), query.limit, query.offset, @@ -517,8 +558,9 @@ impl DojoWorld { } self.query_by_keys( - "entities", - "entity_model", + ENTITIES_TABLE, + ENTITIES_MODEL_RELATION_TABLE, + ENTITIES_ENTITY_RELATION_COLUMN, keys, query.limit, query.offset, @@ -527,8 +569,9 @@ impl DojoWorld { } ClauseType::Member(member) => { self.query_by_member( - "entities", - "entity_model", + ENTITIES_TABLE, + ENTITIES_MODEL_RELATION_TABLE, + ENTITIES_ENTITY_RELATION_COLUMN, member, query.limit, query.offset, @@ -537,8 +580,9 @@ impl DojoWorld { } ClauseType::Composite(composite) => { self.query_by_composite( - "entities", - "entity_model", + ENTITIES_TABLE, + ENTITIES_MODEL_RELATION_TABLE, + ENTITIES_ENTITY_RELATION_COLUMN, composite, query.limit, query.offset, @@ -576,8 +620,9 @@ impl DojoWorld { } self.query_by_hashed_keys( - "event_messages", - "event_model", + EVENTS_MESSAGES_TABLE, + EVENTS_MESSAGES_MODEL_RELATION_TABLE, + EVENTS_MESSAGES_ENTITY_RELATION_COLUMN, Some(hashed_keys), query.limit, query.offset, @@ -594,8 +639,9 @@ impl DojoWorld { } self.query_by_keys( - "event_messages", - "event_model", + EVENTS_MESSAGES_TABLE, + EVENTS_MESSAGES_MODEL_RELATION_TABLE, + EVENTS_MESSAGES_ENTITY_RELATION_COLUMN, keys, query.limit, query.offset, @@ -604,8 +650,9 @@ impl DojoWorld { } ClauseType::Member(member) => { self.query_by_member( - "event_messages", - "event_model", + EVENTS_MESSAGES_TABLE, + EVENTS_MESSAGES_MODEL_RELATION_TABLE, + EVENTS_MESSAGES_ENTITY_RELATION_COLUMN, member, query.limit, query.offset, @@ -614,8 +661,9 @@ impl DojoWorld { } ClauseType::Composite(composite) => { self.query_by_composite( - "event_messages", - "event_model", + EVENTS_MESSAGES_TABLE, + EVENTS_MESSAGES_MODEL_RELATION_TABLE, + ENTITIES_ENTITY_RELATION_COLUMN, composite, query.limit, query.offset, diff --git a/crates/torii/grpc/src/server/subscriptions/entity.rs b/crates/torii/grpc/src/server/subscriptions/entity.rs index e4a86176c8..a92b88543d 100644 --- a/crates/torii/grpc/src/server/subscriptions/entity.rs +++ b/crates/torii/grpc/src/server/subscriptions/entity.rs @@ -21,6 +21,8 @@ use tracing::{error, trace}; use crate::proto; +pub(crate) const LOG_TARGET: &str = "torii::grpc::server::subscriptions::entity"; + pub struct EntitiesSubscriber { /// Entity ids that the subscriber is interested in hashed_keys: HashSet, @@ -89,18 +91,21 @@ impl Service { // publish all updates if ids is empty or only ids that are subscribed to if 
sub.hashed_keys.is_empty() || sub.hashed_keys.contains(&hashed) { let models_query = r#" - SELECT group_concat(entity_model.model_id) as model_names + SELECT group_concat(entity_model.model_id) as model_ids FROM entities JOIN entity_model ON entities.id = entity_model.entity_id WHERE entities.id = ? GROUP BY entities.id "#; - let (model_names,): (String,) = + let (model_ids,): (String,) = sqlx::query_as(models_query).bind(hashed_keys).fetch_one(&pool).await?; - let model_names: Vec<&str> = model_names.split(',').collect(); - let schemas = cache.schemas(model_names).await?; + let model_ids: Vec<&str> = model_ids.split(',').collect(); + let schemas = cache.schemas(model_ids).await?; - let entity_query = format!("{} WHERE entities.id = ?", build_sql_query(&schemas)?); + let entity_query = format!( + "{} WHERE entities.id = ?", + build_sql_query(&schemas, "entities", "entity_id")? + ); let row = sqlx::query(&entity_query).bind(hashed_keys).fetch_one(&pool).await?; let models = schemas @@ -128,7 +133,7 @@ impl Service { } for id in closed_stream { - trace!(target = "subscription", "closing entity stream idx: {id}"); + trace!(target = LOG_TARGET, id = %id, "Closing entity stream."); subs.remove_subscriber(id).await } @@ -148,7 +153,7 @@ impl Future for Service { let pool = pin.pool.clone(); tokio::spawn(async move { if let Err(e) = Service::publish_updates(subs, cache, pool, &entity.id).await { - error!(target = "subscription", "error when publishing entity update: {e}"); + error!(target = LOG_TARGET, error = %e, "Publishing entity update."); } }); } diff --git a/crates/torii/grpc/src/server/subscriptions/event_message.rs b/crates/torii/grpc/src/server/subscriptions/event_message.rs index ce3987618c..76796e30de 100644 --- a/crates/torii/grpc/src/server/subscriptions/event_message.rs +++ b/crates/torii/grpc/src/server/subscriptions/event_message.rs @@ -16,11 +16,12 @@ use torii_core::cache::ModelCache; use torii_core::error::{Error, ParseError}; use torii_core::model::{build_sql_query, map_row_to_ty}; use torii_core::simple_broker::SimpleBroker; -use torii_core::types::Entity; +use torii_core::types::EventMessage; use tracing::{error, trace}; use crate::proto; +pub(crate) const LOG_TARGET: &str = "torii::grpc::server::subscriptions::event_message"; pub struct EventMessagesSubscriber { /// Entity ids that the subscriber is interested in hashed_keys: HashSet, @@ -59,7 +60,7 @@ pub struct Service { pool: Pool, subs_manager: Arc, model_cache: Arc, - simple_broker: Pin + Send>>, + simple_broker: Pin + Send>>, } impl Service { @@ -72,7 +73,7 @@ impl Service { pool, subs_manager, model_cache, - simple_broker: Box::pin(SimpleBroker::::subscribe()), + simple_broker: Box::pin(SimpleBroker::::subscribe()), } } @@ -89,19 +90,21 @@ impl Service { // publish all updates if ids is empty or only ids that are subscribed to if sub.hashed_keys.is_empty() || sub.hashed_keys.contains(&hashed) { let models_query = r#" - SELECT group_concat(event_model.model_id) as model_names + SELECT group_concat(event_model.model_id) as model_ids FROM event_messages JOIN event_model ON event_messages.id = event_model.entity_id WHERE event_messages.id = ? 
GROUP BY event_messages.id "#; - let (model_names,): (String,) = + let (model_ids,): (String,) = sqlx::query_as(models_query).bind(hashed_keys).fetch_one(&pool).await?; - let model_names: Vec<&str> = model_names.split(',').collect(); - let schemas = cache.schemas(model_names).await?; + let model_ids: Vec<&str> = model_ids.split(',').collect(); + let schemas = cache.schemas(model_ids).await?; - let entity_query = - format!("{} WHERE event_messages.id = ?", build_sql_query(&schemas)?); + let entity_query = format!( + "{} WHERE event_messages.id = ?", + build_sql_query(&schemas, "event_messages", "event_message_id")? + ); let row = sqlx::query(&entity_query).bind(hashed_keys).fetch_one(&pool).await?; let models = schemas @@ -129,7 +132,7 @@ impl Service { } for id in closed_stream { - trace!(target = "subscription", "closing entity stream idx: {id}"); + trace!(target = LOG_TARGET, id = %id, "Closing entity stream."); subs.remove_subscriber(id).await } @@ -149,7 +152,7 @@ impl Future for Service { let pool = pin.pool.clone(); tokio::spawn(async move { if let Err(e) = Service::publish_updates(subs, cache, pool, &entity.id).await { - error!(target = "subscription", "error when publishing entity update: {e}"); + error!(target = LOG_TARGET, error = %e, "Publishing entity update."); } }); } diff --git a/crates/torii/grpc/src/server/subscriptions/model_diff.rs b/crates/torii/grpc/src/server/subscriptions/model_diff.rs index 2aa15cc1eb..ad257c719c 100644 --- a/crates/torii/grpc/src/server/subscriptions/model_diff.rs +++ b/crates/torii/grpc/src/server/subscriptions/model_diff.rs @@ -22,6 +22,8 @@ use super::error::SubscriptionError; use crate::proto; use crate::types::KeysClause; +pub(crate) const LOG_TARGET: &str = "torii::grpc::server::subscriptions::model_diff"; + pub struct ModelMetadata { pub name: FieldElement, pub packed_size: usize, @@ -180,7 +182,7 @@ where } for id in closed_stream { - trace!(target = "subscription", "closing stream idx: {id}"); + trace!(target = LOG_TARGET, id = %id, "Closing stream."); subs.remove_subscriber(id).await; } @@ -209,7 +211,7 @@ where if let Some(provider) = pin.idle_provider.take() { if let Some(block_num) = pin.state_update_queue.pop_front() { - debug!(target = "subscription", "fetching state update for block {block_num}"); + debug!(target = LOG_TARGET, block_number = %block_num, "Fetching state update."); pin.state_update_req_fut = Some(Box::pin(Self::fetch_state_update(provider, block_num))); } else { @@ -231,13 +233,15 @@ where } Ok(MaybePendingStateUpdate::PendingUpdate(_)) => { - debug!(target = "subscription", "ignoring pending state update {block_num}") + debug!(target = LOG_TARGET, block_number = %block_num, "Ignoring pending state update.") } Err(e) => { error!( - target = "subscription", - "failed to fetch state update for block {block_num}: {e}" + target = LOG_TARGET, + block_num = %block_num, + error = %e, + "Fetching state update for block." 
); } } @@ -253,7 +257,7 @@ where pin.state_update_queue.pop_front(); } Err(e) => { - error!(target = "subscription", "error when publishing state update: {e}") + error!(target = LOG_TARGET, error = %e, "Publishing state update.") } } } else { diff --git a/crates/torii/grpc/src/server/tests/entities_test.rs b/crates/torii/grpc/src/server/tests/entities_test.rs new file mode 100644 index 0000000000..9c9a871280 --- /dev/null +++ b/crates/torii/grpc/src/server/tests/entities_test.rs @@ -0,0 +1,115 @@ +use std::str::FromStr; +use std::sync::Arc; + +use dojo_test_utils::compiler::build_test_config; +use dojo_test_utils::migration::prepare_migration; +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, TestSequencer, +}; +use dojo_world::contracts::WorldContractReader; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionWaiter; +use scarb::ops; +use sozo_ops::migration::execute_strategy; +use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; +use starknet::accounts::{Account, Call}; +use starknet::core::types::{BlockId, BlockTag}; +use starknet::core::utils::get_selector_from_name; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet_crypto::poseidon_hash_many; +use tokio::sync::broadcast; +use torii_core::engine::{Engine, EngineConfig, Processors}; +use torii_core::processors::register_model::RegisterModelProcessor; +use torii_core::processors::store_set_record::StoreSetRecordProcessor; +use torii_core::sql::Sql; + +use crate::server::DojoWorld; +use crate::types::schema::Entity; +use crate::types::KeysClause; + +#[tokio::test(flavor = "multi_thread")] +async fn test_entities_queries() { + let options = + SqliteConnectOptions::from_str("sqlite::memory:").unwrap().create_if_missing(true); + let pool = SqlitePoolOptions::new().max_connections(5).connect_with(options).await.unwrap(); + sqlx::migrate!("../migrations").run(&pool).await.unwrap(); + let base_path = "../../../examples/spawn-and-move"; + let target_path = format!("{}/target/dev", base_path); + let migration = prepare_migration(base_path.into(), target_path.into()).unwrap(); + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + let provider = Arc::new(JsonRpcClient::new(HttpTransport::new(sequencer.url()))); + let world = WorldContractReader::new(migration.world_address().unwrap(), &provider); + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); + let ws = ops::read_workspace(config.manifest_path(), &config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); + let migration_output = + execute_strategy(&ws, &migration, &account, TxnConfig::default()).await.unwrap(); + + let world_address = migration_output.world_address; + + // spawn + let tx = account + .execute(vec![Call { + to: migration_output + .contracts + .first() + .expect("shouldn't be empty") + .as_ref() + .expect("should be deployed") + .contract_address, + selector: get_selector_from_name("spawn").unwrap(), + calldata: vec![], + }]) + .send() + .await + .unwrap(); + + TransactionWaiter::new(tx.transaction_hash, &provider).await.unwrap(); + + let db = Sql::new(pool.clone(), world_address).await.unwrap(); + + let (shutdown_tx, _) = broadcast::channel(1); + let mut engine = Engine::new( + world, + db.clone(), + &provider, + Processors { + event: 
vec![Box::new(RegisterModelProcessor), Box::new(StoreSetRecordProcessor)], + ..Processors::default() + }, + EngineConfig::default(), + shutdown_tx, + None, + ); + + let _ = engine.sync_to_head(0, None).await.unwrap(); + + let (_, receiver) = tokio::sync::mpsc::channel(1); + let grpc = DojoWorld::new(db.pool, receiver, world_address, provider.clone()); + + let entities = grpc + .query_by_keys( + "entities", + "entity_model", + "entity_id", + KeysClause { model: "Moves".to_string(), keys: vec![account.address()] }.into(), + 1, + 0, + ) + .await + .unwrap() + .0; + + assert_eq!(entities.len(), 1); + + let entity: Entity = entities.first().unwrap().clone().try_into().unwrap(); + assert_eq!(entity.models.first().unwrap().name, "Position"); + assert_eq!(entity.models.get(1).unwrap().name, "Moves"); + assert_eq!(entity.hashed_keys, poseidon_hash_many(&[account.address()])); +} diff --git a/crates/torii/grpc/src/server/tests/mod.rs b/crates/torii/grpc/src/server/tests/mod.rs new file mode 100644 index 0000000000..4e714887c0 --- /dev/null +++ b/crates/torii/grpc/src/server/tests/mod.rs @@ -0,0 +1 @@ +mod entities_test; diff --git a/crates/torii/grpc/src/types/schema.rs b/crates/torii/grpc/src/types/schema.rs index 894ea4e85b..a02aa330db 100644 --- a/crates/torii/grpc/src/types/schema.rs +++ b/crates/torii/grpc/src/types/schema.rs @@ -2,11 +2,21 @@ use crypto_bigint::{Encoding, U256}; use dojo_types::primitive::Primitive; use dojo_types::schema::{Enum, EnumOption, Member, Struct, Ty}; use serde::{Deserialize, Serialize}; +use starknet::core::types::FromByteSliceError; use starknet_crypto::FieldElement; -use crate::client::Error as ClientError; use crate::proto::{self}; +#[derive(Debug, thiserror::Error)] +pub enum SchemaError { + #[error("Missing expected data")] + MissingExpectedData, + #[error("Unsupported type")] + UnsupportedType, + #[error(transparent)] + SliceError(#[from] FromByteSliceError), +} + #[derive(Debug, Serialize, Deserialize, PartialEq, Hash, Eq, Clone)] pub struct Entity { pub hashed_keys: FieldElement, @@ -20,11 +30,10 @@ pub struct Model { } impl TryFrom for Entity { - type Error = ClientError; + type Error = SchemaError; fn try_from(entity: proto::types::Entity) -> Result { Ok(Self { - hashed_keys: FieldElement::from_byte_slice_be(&entity.hashed_keys) - .map_err(ClientError::SliceError)?, + hashed_keys: FieldElement::from_byte_slice_be(&entity.hashed_keys)?, models: entity .models .into_iter() @@ -35,7 +44,7 @@ impl TryFrom for Entity { } impl TryFrom for Model { - type Error = ClientError; + type Error = SchemaError; fn try_from(model: proto::types::Model) -> Result { Ok(Self { name: model.name, @@ -49,7 +58,7 @@ impl TryFrom for Model { } impl TryFrom for proto::types::Ty { - type Error = ClientError; + type Error = SchemaError; fn try_from(ty: Ty) -> Result { let ty_type = match ty { Ty::Primitive(primitive) => { @@ -65,18 +74,18 @@ impl TryFrom for proto::types::Ty { } impl TryFrom for Member { - type Error = ClientError; + type Error = SchemaError; fn try_from(member: proto::types::Member) -> Result { Ok(Member { name: member.name, - ty: member.ty.ok_or(ClientError::MissingExpectedData)?.try_into()?, + ty: member.ty.ok_or(SchemaError::MissingExpectedData)?.try_into()?, key: member.key, }) } } impl TryFrom for proto::types::Member { - type Error = ClientError; + type Error = SchemaError; fn try_from(member: Member) -> Result { Ok(proto::types::Member { name: member.name, @@ -119,7 +128,7 @@ impl From for proto::types::Enum { } impl TryFrom for Struct { - type Error = 
ClientError; + type Error = SchemaError; fn try_from(r#struct: proto::types::Struct) -> Result { Ok(Struct { name: r#struct.name, @@ -133,7 +142,7 @@ impl TryFrom for Struct { } impl TryFrom for proto::types::Struct { - type Error = ClientError; + type Error = SchemaError; fn try_from(r#struct: Struct) -> Result { Ok(proto::types::Struct { name: r#struct.name, @@ -147,7 +156,7 @@ impl TryFrom for proto::types::Struct { } impl TryFrom for proto::types::Model { - type Error = ClientError; + type Error = SchemaError; fn try_from(r#struct: Struct) -> Result { let r#struct: proto::types::Struct = r#struct.try_into()?; @@ -167,14 +176,14 @@ impl From for proto::types::Model { // warning. #[allow(deprecated)] impl TryFrom for Primitive { - type Error = ClientError; + type Error = SchemaError; fn try_from(primitive: proto::types::Primitive) -> Result { let primitive_type = primitive.r#type; let value_type = primitive .value - .ok_or(ClientError::MissingExpectedData)? + .ok_or(SchemaError::MissingExpectedData)? .value_type - .ok_or(ClientError::MissingExpectedData)?; + .ok_or(SchemaError::MissingExpectedData)?; let primitive = match &value_type { proto::types::value::ValueType::BoolValue(bool) => Primitive::Bool(Some(*bool)), @@ -185,7 +194,7 @@ impl TryFrom for Primitive { Some(proto::types::PrimitiveType::U32) => Primitive::U32(Some(*int as u32)), Some(proto::types::PrimitiveType::U64) => Primitive::U64(Some(*int)), Some(proto::types::PrimitiveType::Usize) => Primitive::USize(Some(*int as u32)), - _ => return Err(ClientError::UnsupportedType), + _ => return Err(SchemaError::UnsupportedType), } } proto::types::value::ValueType::ByteValue(bytes) => { @@ -196,17 +205,17 @@ impl TryFrom for Primitive { | Some(proto::types::PrimitiveType::ContractAddress) => { Primitive::Felt252(Some( FieldElement::from_byte_slice_be(bytes) - .map_err(ClientError::SliceError)?, + .map_err(SchemaError::SliceError)?, )) } Some(proto::types::PrimitiveType::U256) => { Primitive::U256(Some(U256::from_be_slice(bytes))) } - _ => return Err(ClientError::UnsupportedType), + _ => return Err(SchemaError::UnsupportedType), } } _ => { - return Err(ClientError::UnsupportedType); + return Err(SchemaError::UnsupportedType); } }; @@ -215,7 +224,7 @@ impl TryFrom for Primitive { } impl TryFrom for proto::types::Primitive { - type Error = ClientError; + type Error = SchemaError; fn try_from(primitive: Primitive) -> Result { use proto::types::value::ValueType; @@ -252,9 +261,9 @@ impl TryFrom for proto::types::Primitive { } impl TryFrom for Ty { - type Error = ClientError; + type Error = SchemaError; fn try_from(ty: proto::types::Ty) -> Result { - match ty.ty_type.ok_or(ClientError::MissingExpectedData)? { + match ty.ty_type.ok_or(SchemaError::MissingExpectedData)? 
{ proto::types::ty::TyType::Primitive(primitive) => { Ok(Ty::Primitive(primitive.try_into()?)) } diff --git a/crates/torii/libp2p/Cargo.toml b/crates/torii/libp2p/Cargo.toml index e25d2a47bf..98e64b5cb6 100644 --- a/crates/torii/libp2p/Cargo.toml +++ b/crates/torii/libp2p/Cargo.toml @@ -18,18 +18,18 @@ crypto-bigint.workspace = true dojo-types.workspace = true regex = "1.10.3" serde_json = { version = "1.0.114", features = [ "preserve_order" ] } -starknet-core = "0.9.0" +starknet.workspace = true starknet-crypto.workspace = true -starknet-ff = "0.3.6" thiserror.workspace = true tracing-subscriber = { version = "0.3", features = [ "env-filter" ] } tracing.workspace = true indexmap = "2.2.5" chrono.workspace = true +dojo-world = { path = "../../dojo-world", features = [ "contracts" ] } [dev-dependencies] -dojo-world = { path = "../../dojo-world", features = [ "metadata" ] } tempfile = "3.9.0" +dojo-test-utils.workspace = true [target.'cfg(not(target_arch = "wasm32"))'.dependencies] libp2p = { git = "https://github.com/libp2p/rust-libp2p", features = [ "ed25519", "gossipsub", "identify", "macros", "noise", "ping", "quic", "relay", "tcp", "tokio", "yamux" ] } diff --git a/crates/torii/libp2p/src/client/mod.rs b/crates/torii/libp2p/src/client/mod.rs index 2efbb9c655..5412c0b9ec 100644 --- a/crates/torii/libp2p/src/client/mod.rs +++ b/crates/torii/libp2p/src/client/mod.rs @@ -18,6 +18,8 @@ use crate::constants; use crate::errors::Error; use crate::types::Message; +pub(crate) const LOG_TARGET: &str = "torii::relay::client"; + #[derive(NetworkBehaviour)] #[behaviour(out_event = "ClientEvent")] struct Behaviour { @@ -39,7 +41,6 @@ pub struct EventLoop { #[derive(Debug)] enum Command { Publish(Message, oneshot::Sender>), - WaitForRelay(oneshot::Sender>), } impl RelayClient { @@ -48,7 +49,7 @@ impl RelayClient { let local_key = identity::Keypair::generate_ed25519(); let peer_id = PeerId::from(local_key.public()); - info!(target: "torii::relay::client", peer_id = %peer_id, "Local peer id"); + info!(target: LOG_TARGET, peer_id = %peer_id, "Local peer id."); let mut swarm = libp2p::SwarmBuilder::with_existing_identity(local_key) .with_tokio() @@ -82,7 +83,7 @@ impl RelayClient { }) .build(); - info!(target: "torii::relay::client", addr = %relay_addr, "Dialing relay"); + info!(target: LOG_TARGET, addr = %relay_addr, "Dialing relay."); swarm.dial(relay_addr.parse::()?)?; let (command_sender, command_receiver) = futures::channel::mpsc::unbounded(); @@ -97,7 +98,7 @@ impl RelayClient { let local_key = identity::Keypair::generate_ed25519(); let peer_id = PeerId::from(local_key.public()); - info!(target: "torii::relay::client", peer_id = %peer_id, "Local peer id"); + info!(target: LOG_TARGET, peer_id = %peer_id, "Local peer id."); let mut swarm = libp2p::SwarmBuilder::with_existing_identity(local_key) .with_wasm_bindgen() @@ -133,7 +134,7 @@ impl RelayClient { }) .build(); - info!(target: "torii::relay::client", addr = %relay_addr, "Dialing relay"); + info!(target: LOG_TARGET, addr = %relay_addr, "Dialing relay."); swarm.dial(relay_addr.parse::()?)?; let (command_sender, command_receiver) = futures::channel::mpsc::unbounded(); @@ -153,61 +154,64 @@ impl CommandSender { Self { sender } } - pub async fn publish(&mut self, data: Message) -> Result { + pub async fn publish(&self, data: Message) -> Result { let (tx, rx) = oneshot::channel(); self.sender.unbounded_send(Command::Publish(data, tx)).expect("Failed to send command"); rx.await.expect("Failed to receive response") } - - pub async fn 
wait_for_relay(&mut self) -> Result<(), Error> { - let (tx, rx) = oneshot::channel(); - - self.sender.unbounded_send(Command::WaitForRelay(tx)).expect("Failed to send command"); - - rx.await.expect("Failed to receive response") - } } impl EventLoop { + async fn handle_command( + &mut self, + command: Command, + is_relay_ready: bool, + commands_queue: Arc>>, + ) { + match command { + Command::Publish(data, sender) => { + // if the relay is not ready yet, add the message to the queue + if !is_relay_ready { + commands_queue.lock().await.push(Command::Publish(data, sender)); + } else { + sender.send(self.publish(&data)).expect("Failed to send response"); + } + } + } + } + pub async fn run(&mut self) { let mut is_relay_ready = false; - let mut relay_ready_tx = None; + let commands_queue = Arc::new(Mutex::new(Vec::new())); loop { // Poll the swarm for new events. select! { command = self.command_receiver.select_next_some() => { - match command { - Command::Publish(data, sender) => { - sender.send(self.publish(&data)).expect("Failed to send response"); - } - Command::WaitForRelay(sender) => { - if is_relay_ready { - sender.send(Ok(())).expect("Failed to send response"); - } else { - relay_ready_tx = Some(sender); - } - } - } + self.handle_command(command, is_relay_ready, commands_queue.clone()).await; }, event = self.swarm.select_next_some() => { match event { SwarmEvent::Behaviour(ClientEvent::Gossipsub(gossipsub::Event::Subscribed { topic, .. })) => { // Handle behaviour events. - info!(target: "torii::relay::client::gossipsub", topic = ?topic, "Relay ready. Received subscription confirmation"); + info!(target: LOG_TARGET, topic = ?topic, "Relay ready. Received subscription confirmation."); + + if !is_relay_ready { + is_relay_ready = true; - is_relay_ready = true; - if let Some(tx) = relay_ready_tx.take() { - tx.send(Ok(())).expect("Failed to send response"); + // Execute all the commands that were queued while the relay was not ready. + for command in commands_queue.lock().await.drain(..) { + self.handle_command(command, is_relay_ready, commands_queue.clone()).await; + } } } SwarmEvent::ConnectionClosed { cause: Some(cause), .. } => { - info!(target: "torii::relay::client", cause = ?cause, "Connection closed"); + info!(target: LOG_TARGET, cause = ?cause, "Connection closed."); if let libp2p::swarm::ConnectionError::KeepAliveTimeout = cause { - info!(target: "torii::relay::client", "Connection closed due to keep alive timeout. Shutting down client."); + info!(target: LOG_TARGET, "Connection closed due to keep alive timeout. 
Shutting down client."); return; } } diff --git a/crates/torii/libp2p/src/server/mod.rs b/crates/torii/libp2p/src/server/mod.rs index 6a04b0c0b2..8806c46040 100644 --- a/crates/torii/libp2p/src/server/mod.rs +++ b/crates/torii/libp2p/src/server/mod.rs @@ -9,7 +9,7 @@ use std::{fs, io}; use chrono::Utc; use crypto_bigint::U256; use dojo_types::primitive::Primitive; -use dojo_types::schema::{Member, Struct, Ty}; +use dojo_types::schema::{Struct, Ty}; use futures::StreamExt; use indexmap::IndexMap; use libp2p::core::multiaddr::Protocol; @@ -21,8 +21,10 @@ use libp2p::{identify, identity, noise, ping, relay, tcp, yamux, PeerId, Swarm, use libp2p_webrtc as webrtc; use rand::thread_rng; use serde_json::Number; -use starknet_crypto::{poseidon_hash_many, verify}; -use starknet_ff::FieldElement; +use starknet::core::types::{BlockId, BlockTag, FunctionCall}; +use starknet::core::utils::get_selector_from_name; +use starknet::providers::Provider; +use starknet_crypto::{poseidon_hash_many, verify, FieldElement}; use torii_core::sql::Sql; use tracing::{info, warn}; use webrtc::tokio::Certificate; @@ -32,12 +34,14 @@ use crate::errors::Error; mod events; -use sqlx::Row; +use dojo_world::contracts::model::ModelReader; use crate::server::events::ServerEvent; use crate::typed_data::PrimitiveType; use crate::types::Message; +pub(crate) const LOG_TARGET: &str = "torii::relay::server"; + #[derive(NetworkBehaviour)] #[behaviour(out_event = "ServerEvent")] pub struct Behaviour { @@ -47,14 +51,16 @@ pub struct Behaviour { gossipsub: gossipsub::Behaviour, } -pub struct Relay { +pub struct Relay { swarm: Swarm, db: Sql, + provider: Box

<P>, } -impl Relay { +impl<P: Provider + Sync> Relay<P>
{ pub fn new( pool: Sql, + provider: P, port: u16, port_webrtc: u16, local_key_path: Option, @@ -74,7 +80,7 @@ impl Relay { Certificate::generate(&mut thread_rng()).unwrap() }; - info!(target: "torii::relay::server", peer_id = %PeerId::from(local_key.public()), "Relay peer id"); + info!(target: LOG_TARGET, peer_id = %PeerId::from(local_key.public()), "Relay peer id."); let mut swarm = libp2p::SwarmBuilder::with_existing_identity(local_key) .with_tokio() @@ -86,7 +92,9 @@ impl Relay { }) .expect("Failed to create WebRTC transport") .with_behaviour(|key| { - let message_id_fn = |message: &gossipsub::Message| { + // Hash messages by their content. No two messages of the same content will be + // propagated. + let _message_id_fn = |message: &gossipsub::Message| { let mut s = DefaultHasher::new(); message.data.hash(&mut s); gossipsub::MessageId::from(s.finish().to_string()) @@ -94,7 +102,8 @@ impl Relay { let gossipsub_config = gossipsub::ConfigBuilder::default() .heartbeat_interval(Duration::from_secs(constants::GOSSIPSUB_HEARTBEAT_INTERVAL_SECS)) // This is set to aid debugging by not cluttering the log space .validation_mode(gossipsub::ValidationMode::Strict) // This sets the kind of message validation. The default is Strict (enforce message signing) - .message_id_fn(message_id_fn) // content-address messages. No two messages of the same content will be propagated. + // TODO: Use this once we incorporate nonces in the message model? + // .message_id_fn(message_id_fn) // content-address messages. No two messages of the same content will be propagated. .build() .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg)).unwrap(); // Temporary hack because `build` does not return a proper `std::error::Error`. @@ -144,7 +153,7 @@ impl Relay { .subscribe(&IdentTopic::new(constants::MESSAGING_TOPIC)) .unwrap(); - Ok(Self { swarm, db: pool }) + Ok(Self { swarm, db: pool, provider: Box::new(provider) }) } pub async fn run(&mut self) { @@ -163,32 +172,32 @@ impl Relay { Ok(message) => message, Err(e) => { info!( - target: "torii::relay::server::gossipsub", + target: LOG_TARGET, error = %e, - "Failed to deserialize message" + "Deserializing message." ); continue; } }; - let ty = match validate_message(&data.message.message) { + let ty = match validate_message(&self.db, &data.message.message).await { Ok(parsed_message) => parsed_message, Err(e) => { info!( - target: "torii::relay::server::gossipsub", + target: LOG_TARGET, error = %e, - "Failed to validate message" + "Validating message." ); continue; } }; info!( - target: "torii::relay::server", + target: LOG_TARGET, message_id = %message_id, peer_id = %peer_id, data = ?data, - "Received message" + "Received message." ); // retrieve entity identity from db @@ -196,9 +205,9 @@ impl Relay { Ok(pool) => pool, Err(e) => { warn!( - target: "torii::relay::server", + target: LOG_TARGET, error = %e, - "Failed to acquire pool" + "Acquiring pool." ); continue; } @@ -208,17 +217,18 @@ impl Relay { Ok(keys) => keys, Err(e) => { warn!( - target: "torii::relay::server", + target: LOG_TARGET, error = %e, - "Failed to get message model keys" + "Retrieving message model keys." ); continue; } }; // select only identity field, if doesn't exist, empty string - let entity = match sqlx::query("SELECT * FROM ? 
WHERE id = ?") - .bind(&ty.as_struct().unwrap().name) + let query = + format!("SELECT external_identity FROM {} WHERE id = ?", ty.name()); + let entity_identity: Option = match sqlx::query_scalar(&query) .bind(format!("{:#x}", poseidon_hash_many(&keys))) .fetch_optional(&mut *pool) .await @@ -226,15 +236,15 @@ impl Relay { Ok(entity_identity) => entity_identity, Err(e) => { warn!( - target: "torii::relay::server", + target: LOG_TARGET, error = %e, - "Failed to fetch entity" + "Fetching entity." ); continue; } }; - if entity.is_none() { + if entity_identity.is_none() { // we can set the entity without checking identity if let Err(e) = self .db @@ -246,80 +256,98 @@ impl Relay { .await { info!( - target: "torii::relay::server", + target: LOG_TARGET, error = %e, - "Failed to set message" + "Setting message." ); continue; } else { info!( - target: "torii::relay::server", + target: LOG_TARGET, message_id = %message_id, peer_id = %peer_id, - "Message set" + "Message set." ); continue; } } - let entity = entity.unwrap(); - let identity = match FieldElement::from_str(&match entity - .try_get::("identity") - { - Ok(identity) => identity, - Err(e) => { - warn!( - target: "torii::relay::server", - error = %e, - "Failed to get identity from model" + let entity_identity = + match FieldElement::from_str(&entity_identity.unwrap()) { + Ok(identity) => identity, + Err(e) => { + warn!( + target: LOG_TARGET, + error = %e, + "Parsing identity." + ); + continue; + } + }; + + // TODO: have a nonce in model to check + // against entity nonce and message nonce + // to prevent replay attacks. + + // Verify the signature + let message_hash = + if let Ok(message) = data.message.encode(entity_identity) { + message + } else { + info!( + target: LOG_TARGET, + "Encoding message." ); continue; - } - }) { - Ok(identity) => identity, + }; + + let public_key = match self + .provider + .call( + FunctionCall { + contract_address: entity_identity, + entry_point_selector: get_selector_from_name( + "getPublicKey", + ) + .unwrap(), + calldata: vec![], + }, + BlockId::Tag(BlockTag::Pending), + ) + .await + { + Ok(res) => res[0], Err(e) => { warn!( - target: "torii::relay::server", + target: LOG_TARGET, error = %e, - "Failed to parse identity" + "Fetching public key." ); continue; } }; - // TODO: have a nonce in model to check - // against entity nonce and message nonce - // to prevent replay attacks. - - // Verify the signature - let message_hash = if let Ok(message) = data.message.encode(identity) { - message - } else { - info!( - target: "torii::relay::server", - "Failed to encode message" - ); - continue; - }; - - // for the public key used for verification; use identity from model - if let Ok(valid) = verify( - &identity, + if !match verify( + &public_key, &message_hash, &data.signature_r, &data.signature_s, ) { - if !valid { - info!( - target: "torii::relay::server", - "Invalid signature" + Ok(valid) => valid, + Err(e) => { + warn!( + target: LOG_TARGET, + error = %e, + "Verifying signature." ); continue; } - } else { + } { info!( - target: "torii::relay::server", - "Failed to verify signature" + target: LOG_TARGET, + message_id = %message_id, + peer_id = %peer_id, + "Invalid signature." ); continue; } @@ -335,25 +363,25 @@ impl Relay { .await { info!( - target: "torii::relay::server", + target: LOG_TARGET, error = %e, - "Failed to set message" + "Setting message." 
); } info!( - target: "torii::relay::server", + target: LOG_TARGET, message_id = %message_id, peer_id = %peer_id, - "Message verified and set" + "Message verified and set." ); } ServerEvent::Gossipsub(gossipsub::Event::Subscribed { peer_id, topic }) => { info!( - target: "torii::relay::server::gossipsub", + target: LOG_TARGET, peer_id = %peer_id, topic = %topic, - "Subscribed to topic" + "Subscribed to topic." ); } ServerEvent::Gossipsub(gossipsub::Event::Unsubscribed { @@ -361,10 +389,10 @@ impl Relay { topic, }) => { info!( - target: "torii::relay::server::gossipsub", + target: LOG_TARGET, peer_id = %peer_id, topic = %topic, - "Unsubscribed from topic" + "Unsubscribed from topic." ); } ServerEvent::Identify(identify::Event::Received { @@ -372,28 +400,30 @@ impl Relay { peer_id, }) => { info!( - target: "torii::relay::server::identify", + target: LOG_TARGET, peer_id = %peer_id, observed_addr = %observed_addr, - "Received identify event" + "Received identify event." ); self.swarm.add_external_address(observed_addr.clone()); } ServerEvent::Ping(ping::Event { peer, result, .. }) => { info!( - target: "torii::relay::server::ping", + target: LOG_TARGET, peer_id = %peer, result = ?result, - "Received ping event" + "Received ping event." ); } _ => {} } } SwarmEvent::NewListenAddr { address, .. } => { - info!(target: "torii::relay::server", address = %address, "New listen address"); + info!(target: LOG_TARGET, address = %address, "New listen address."); + } + event => { + info!(target: LOG_TARGET, event = ?event, "Unhandled event."); } - _ => {} } } } @@ -443,7 +473,7 @@ pub fn ty_to_string_type(ty: &Ty) -> String { Primitive::U64(_) => "u64".to_string(), Primitive::U128(_) => "u128".to_string(), Primitive::U256(_) => "u256".to_string(), - Primitive::Felt252(_) => "felt".to_string(), + Primitive::Felt252(_) => "felt252".to_string(), Primitive::ClassHash(_) => "class_hash".to_string(), Primitive::ContractAddress(_) => "contract_address".to_string(), Primitive::Bool(_) => "bool".to_string(), @@ -494,162 +524,87 @@ pub fn parse_ty_to_primitive(ty: &Ty) -> Result { } pub fn parse_object_to_ty( - name: String, + model: &mut Struct, object: &IndexMap, -) -> Result { - let mut ty_struct = Struct { name, children: vec![] }; - +) -> Result<(), Error> { for (field_name, value) in object { - // value has to be of type object - let object = if let PrimitiveType::Object(object) = value { - object - } else { - return Err(Error::InvalidMessageError("Value is not an object".to_string())); - }; - - let r#type = if let Some(r#type) = object.get("type") { - if let PrimitiveType::String(r#type) = r#type { - r#type - } else { - return Err(Error::InvalidMessageError("Type is not a string".to_string())); - } - } else { - return Err(Error::InvalidMessageError("Type is missing".to_string())); - }; - - let value = if let Some(value) = object.get("value") { - value - } else { - return Err(Error::InvalidMessageError("Value is missing".to_string())); - }; - - let key = if let Some(key) = object.get("key") { - if let PrimitiveType::Bool(key) = key { - *key - } else { - return Err(Error::InvalidMessageError("Key is not a boolean".to_string())); - } - } else { - return Err(Error::InvalidMessageError("Key is missing".to_string())); - }; + let field = model.children.iter_mut().find(|m| m.name == *field_name).ok_or_else(|| { + Error::InvalidMessageError(format!("Field {} not found in model", field_name)) + })?; match value { PrimitiveType::Object(object) => { - let ty = parse_object_to_ty(field_name.clone(), object)?; - 
ty_struct.children.push(Member { name: field_name.clone(), ty, key }); + parse_object_to_ty(model, object)?; } PrimitiveType::Array(_) => { // tuples not supported yet unimplemented!() } - PrimitiveType::Number(number) => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: match r#type.as_str() { - "u8" => Ty::Primitive(Primitive::U8(Some(number.as_u64().unwrap() as u8))), - "u16" => { - Ty::Primitive(Primitive::U16(Some(number.as_u64().unwrap() as u16))) - } - "u32" => { - Ty::Primitive(Primitive::U32(Some(number.as_u64().unwrap() as u32))) - } - "usize" => { - Ty::Primitive(Primitive::USize(Some(number.as_u64().unwrap() as u32))) - } - "u64" => Ty::Primitive(Primitive::U64(Some(number.as_u64().unwrap()))), - _ => { - return Err(Error::InvalidMessageError( - "Invalid number type".to_string(), - )); - } - }, - key, - }); - } + PrimitiveType::Number(number) => match &mut field.ty { + Ty::Primitive(primitive) => match *primitive { + Primitive::U8(ref mut u8) => { + *u8 = Some(number.as_u64().unwrap() as u8); + } + Primitive::U16(ref mut u16) => { + *u16 = Some(number.as_u64().unwrap() as u16); + } + Primitive::U32(ref mut u32) => { + *u32 = Some(number.as_u64().unwrap() as u32); + } + Primitive::USize(ref mut usize) => { + *usize = Some(number.as_u64().unwrap() as u32); + } + Primitive::U64(ref mut u64) => { + *u64 = Some(number.as_u64().unwrap()); + } + _ => { + return Err(Error::InvalidMessageError("Invalid number type".to_string())); + } + }, + Ty::Enum(enum_) => { + enum_.option = Some(number.as_u64().unwrap() as u8); + } + _ => return Err(Error::InvalidMessageError("Invalid number type".to_string())), + }, PrimitiveType::Bool(boolean) => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::Bool(Some(*boolean))), - key, - }); + field.ty = Ty::Primitive(Primitive::Bool(Some(*boolean))); } - PrimitiveType::String(string) => match r#type.as_str() { - "u8" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::U8(Some(u8::from_str(string).unwrap()))), - key, - }); - } - "u16" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::U16(Some(u16::from_str(string).unwrap()))), - key, - }); - } - "u32" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::U32(Some(u32::from_str(string).unwrap()))), - key, - }); - } - "usize" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::USize(Some(u32::from_str(string).unwrap()))), - key, - }); - } - "u64" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::U64(Some(u64::from_str(string).unwrap()))), - key, - }); - } - "u128" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::U128(Some(u128::from_str(string).unwrap()))), - key, - }); - } - "u256" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::U256(Some(U256::from_be_hex(string)))), - key, - }); - } - "felt" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::Felt252(Some( - FieldElement::from_str(string).unwrap(), - ))), - key, - }); - } - "class_hash" => { - ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::ClassHash(Some( - FieldElement::from_str(string).unwrap(), - ))), - key, - }); - } - "contract_address" => { - 
ty_struct.children.push(Member { - name: field_name.clone(), - ty: Ty::Primitive(Primitive::ContractAddress(Some( - FieldElement::from_str(string).unwrap(), - ))), - key, - }); - } + PrimitiveType::String(string) => match &mut field.ty { + Ty::Primitive(primitive) => match primitive { + Primitive::U8(v) => { + *v = Some(u8::from_str(string).unwrap()); + } + Primitive::U16(v) => { + *v = Some(u16::from_str(string).unwrap()); + } + Primitive::U32(v) => { + *v = Some(u32::from_str(string).unwrap()); + } + Primitive::USize(v) => { + *v = Some(u32::from_str(string).unwrap()); + } + Primitive::U64(v) => { + *v = Some(u64::from_str(string).unwrap()); + } + Primitive::U128(v) => { + *v = Some(u128::from_str(string).unwrap()); + } + Primitive::U256(v) => { + *v = Some(U256::from_be_hex(string)); + } + Primitive::Felt252(v) => { + *v = Some(FieldElement::from_str(string).unwrap()); + } + Primitive::ClassHash(v) => { + *v = Some(FieldElement::from_str(string).unwrap()); + } + Primitive::ContractAddress(v) => { + *v = Some(FieldElement::from_str(string).unwrap()); + } + Primitive::Bool(v) => { + *v = Some(bool::from_str(string).unwrap()); + } + }, _ => { return Err(Error::InvalidMessageError("Invalid string type".to_string())); } @@ -657,12 +612,15 @@ pub fn parse_object_to_ty( } } - Ok(Ty::Struct(ty_struct)) + Ok(()) } // Validates the message model // and returns the identity and signature -fn validate_message(message: &IndexMap) -> Result { +async fn validate_message( + db: &Sql, + message: &IndexMap, +) -> Result { let model_name = if let Some(model_name) = message.get("model") { if let PrimitiveType::String(model_name) = model_name { model_name @@ -672,10 +630,32 @@ fn validate_message(message: &IndexMap) -> Result) -> Result anyhow::Result { if path.exists() { let bytes = fs::read(path)?; - info!(target: "torii::relay::server", path = %path.display(), "Using existing identity"); + info!(target: LOG_TARGET, path = %path.display(), "Using existing identity."); return Ok(identity::Keypair::from_protobuf_encoding(&bytes)?); // This only works for ed25519 but that is what we are using. 
} @@ -699,7 +679,7 @@ fn read_or_create_identity(path: &Path) -> anyhow::Result { fs::write(path, identity.to_protobuf_encoding()?)?; - info!(target: "torii::relay::server", path = %path.display(), "Generated new identity"); + info!(target: LOG_TARGET, path = %path.display(), "Generated new identity."); Ok(identity) } @@ -708,7 +688,7 @@ fn read_or_create_certificate(path: &Path) -> anyhow::Result { if path.exists() { let pem = fs::read_to_string(path)?; - info!(target: "torii::relay::server", path = %path.display(), "Using existing certificate"); + info!(target: LOG_TARGET, path = %path.display(), "Using existing certificate."); return Ok(Certificate::from_pem(&pem)?); } @@ -716,7 +696,7 @@ fn read_or_create_certificate(path: &Path) -> anyhow::Result { let cert = Certificate::generate(&mut rand::thread_rng())?; fs::write(path, cert.serialize_pem().as_bytes())?; - info!(target: "torii::relay::server", path = %path.display(), "Generated new certificate"); + info!(target: LOG_TARGET, path = %path.display(), "Generated new certificate."); Ok(cert) } diff --git a/crates/torii/libp2p/src/tests.rs b/crates/torii/libp2p/src/tests.rs index b19a5be1a4..5220333683 100644 --- a/crates/torii/libp2p/src/tests.rs +++ b/crates/torii/libp2p/src/tests.rs @@ -13,10 +13,15 @@ mod test { #[cfg(not(target_arch = "wasm32"))] #[tokio::test] async fn test_client_messaging() -> Result<(), Box> { + use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, TestSequencer, + }; use dojo_types::schema::{Member, Struct, Ty}; use indexmap::IndexMap; use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; - use starknet_ff::FieldElement; + use starknet::providers::jsonrpc::HttpTransport; + use starknet::providers::JsonRpcClient; + use starknet_crypto::FieldElement; use tokio::time::sleep; use torii_core::sql::Sql; @@ -35,21 +40,25 @@ mod test { let pool = SqlitePoolOptions::new().max_connections(5).connect_with(options).await.unwrap(); sqlx::migrate!("../migrations").run(&pool).await.unwrap(); + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()) + .await; + let provider = JsonRpcClient::new(HttpTransport::new(sequencer.url())); + let db = Sql::new(pool.clone(), FieldElement::from_bytes_be(&[0; 32]).unwrap()).await?; // Initialize the relay server - let mut relay_server: Relay = Relay::new(db, 9900, 9901, None, None)?; + let mut relay_server = Relay::new(db, provider, 9900, 9901, None, None)?; tokio::spawn(async move { relay_server.run().await; }); // Initialize the first client (listener) - let mut client = RelayClient::new("/ip4/127.0.0.1/tcp/9900".to_string())?; + let client = RelayClient::new("/ip4/127.0.0.1/tcp/9900".to_string())?; tokio::spawn(async move { client.event_loop.lock().await.run().await; }); - client.command_sender.wait_for_relay().await?; let mut data = Struct { name: "Message".to_string(), children: vec![] }; data.children.push(Member { diff --git a/crates/torii/libp2p/src/typed_data.rs b/crates/torii/libp2p/src/typed_data.rs index 4dec9753e9..dc752f751b 100644 --- a/crates/torii/libp2p/src/typed_data.rs +++ b/crates/torii/libp2p/src/typed_data.rs @@ -3,11 +3,10 @@ use std::str::FromStr; use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use serde_json::Number; -use starknet_core::utils::{ +use starknet::core::utils::{ cairo_short_string_to_felt, get_selector_from_name, CairoShortStringToFeltError, }; -use starknet_crypto::poseidon_hash_many; -use starknet_ff::FieldElement; +use 
starknet_crypto::{poseidon_hash_many, FieldElement}; use crate::errors::Error; @@ -288,8 +287,6 @@ impl PrimitiveType { ) -> Result { match self { PrimitiveType::Object(obj) => { - println!("r#type: {}", r#type); - ctx.is_preset = preset_types.contains_key(r#type); let mut hashes = Vec::new(); @@ -333,7 +330,6 @@ impl PrimitiveType { let type_hash = encode_type(r#type, if ctx.is_preset { preset_types } else { types })?; - println!("type_hash: {}", type_hash); hashes.push(get_selector_from_name(&type_hash).map_err(|_| { Error::InvalidMessageError(format!("Invalid type {} for selector", r#type)) })?); @@ -495,8 +491,8 @@ impl TypedData { #[cfg(test)] mod tests { - use starknet_core::utils::starknet_keccak; - use starknet_ff::FieldElement; + use starknet::core::utils::starknet_keccak; + use starknet_crypto::FieldElement; use super::*; diff --git a/crates/torii/libp2p/src/types.rs b/crates/torii/libp2p/src/types.rs index a059038d1e..cfad2d5b87 100644 --- a/crates/torii/libp2p/src/types.rs +++ b/crates/torii/libp2p/src/types.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Serialize}; -use starknet_ff::FieldElement; +use starknet_crypto::FieldElement; use crate::typed_data::TypedData; diff --git a/crates/torii/migrations/20240314182410_event_model.sql b/crates/torii/migrations/20240314182410_event_model.sql index 3e2ff3e7d0..dcec9f3bd4 100644 --- a/crates/torii/migrations/20240314182410_event_model.sql +++ b/crates/torii/migrations/20240314182410_event_model.sql @@ -2,7 +2,6 @@ CREATE TABLE event_messages ( id TEXT NOT NULL PRIMARY KEY, keys TEXT, event_id TEXT NOT NULL, - model_names TEXT, executed_at DATETIME NOT NULL, created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP diff --git a/crates/torii/migrations/20240426211245_pending_block.sql b/crates/torii/migrations/20240426211245_pending_block.sql new file mode 100644 index 0000000000..2db2edb6be --- /dev/null +++ b/crates/torii/migrations/20240426211245_pending_block.sql @@ -0,0 +1,2 @@ +-- Add the pending block txn cursor to indexers table +ALTER TABLE indexers ADD COLUMN pending_block_tx TEXT NULL DEFAULT NULL; \ No newline at end of file diff --git a/crates/torii/server/Cargo.toml b/crates/torii/server/Cargo.toml index 314bfc6bfb..6e447748c4 100644 --- a/crates/torii/server/Cargo.toml +++ b/crates/torii/server/Cargo.toml @@ -13,6 +13,7 @@ camino.workspace = true chrono.workspace = true clap.workspace = true ctrlc = { version = "3.4", features = [ "termination" ] } +dojo-metrics.workspace = true dojo-types = { path = "../../dojo-types" } dojo-world = { path = "../../dojo-world" } either = "1.9.0" @@ -23,8 +24,6 @@ hyper-reverse-proxy = { git = "https://github.com/tarrencev/hyper-reverse-proxy" hyper.workspace = true indexmap = "1.9.3" lazy_static.workspace = true -metrics = { path = "../../metrics" } -metrics-process.workspace = true scarb.workspace = true serde.workspace = true serde_json.workspace = true @@ -48,5 +47,5 @@ camino.workspace = true [features] default = [ "jemalloc", "sqlite" ] -jemalloc = [ "metrics/jemalloc" ] +jemalloc = [ "dojo-metrics/jemalloc" ] sqlite = [ "sqlx/sqlite" ] diff --git a/crates/torii/types-test/Scarb.lock b/crates/torii/types-test/Scarb.lock index 69570413ff..112ae6dbe9 100644 --- a/crates/torii/types-test/Scarb.lock +++ b/crates/torii/types-test/Scarb.lock @@ -3,7 +3,7 @@ version = 1 [[package]] name = "dojo" -version = "0.5.1" +version = "0.6.0" dependencies = [ "dojo_plugin", ] @@ -15,7 +15,7 @@ source = 
"git+https://github.com/dojoengine/dojo?tag=v0.3.11#1e651b5d4d3b79b14a7 [[package]] name = "types_test" -version = "0.5.1" +version = "0.6.0" dependencies = [ "dojo", ] diff --git a/crates/torii/types-test/Scarb.toml b/crates/torii/types-test/Scarb.toml index b1cff3572a..0f11558350 100644 --- a/crates/torii/types-test/Scarb.toml +++ b/crates/torii/types-test/Scarb.toml @@ -1,7 +1,7 @@ [package] cairo-version = "2.4.0" name = "types_test" -version = "0.5.1" +version = "0.6.0" [cairo] sierra-replace-ids = true diff --git a/crates/torii/types-test/manifests/base/contracts/records.toml b/crates/torii/types-test/manifests/base/contracts/records.toml deleted file mode 100644 index 1c7550826c..0000000000 --- a/crates/torii/types-test/manifests/base/contracts/records.toml +++ /dev/null @@ -1,7 +0,0 @@ -kind = "DojoContract" -class_hash = "0x5f0a221b80b5667c20574d62953f99eb6ddf7d351f531a9f0c56f96adb0d48b" -abi = "abis/base/contracts/records.json" -reads = [] -writes = [] -computed = [] -name = "types_test::contracts::records" diff --git a/crates/torii/types-test/manifests/base/world.toml b/crates/torii/types-test/manifests/base/world.toml deleted file mode 100644 index 1553652ecc..0000000000 --- a/crates/torii/types-test/manifests/base/world.toml +++ /dev/null @@ -1,3 +0,0 @@ -kind = "Class" -class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" -name = "dojo::world::world" diff --git a/crates/torii/types-test/abis/base/contracts/records.json b/crates/torii/types-test/manifests/dev/abis/base/contracts/types_test_contracts_records.json similarity index 100% rename from crates/torii/types-test/abis/base/contracts/records.json rename to crates/torii/types-test/manifests/dev/abis/base/contracts/types_test_contracts_records.json diff --git a/crates/torii/types-test/manifests/dev/abis/base/dojo_world_world.json b/crates/torii/types-test/manifests/dev/abis/base/dojo_world_world.json new file mode 100644 index 0000000000..abb75db4f5 --- /dev/null +++ b/crates/torii/types-test/manifests/dev/abis/base/dojo_world_world.json @@ -0,0 +1,657 @@ +[ + { + "type": "impl", + "name": "World", + "interface_name": "dojo::world::IWorld" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "dojo::resource_metadata::ResourceMetadata", + "members": [ + { + "name": "resource_id", + "type": "core::felt252" + }, + { + "name": "metadata_uri", + "type": "core::array::Span::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "enum", + "name": "core::bool", + "variants": [ + { + "name": "False", + "type": "()" + }, + { + "name": "True", + "type": "()" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorld", + "items": [ + { + "type": "function", + "name": "metadata", + "inputs": [ + { + "name": "resource_id", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_metadata", + "inputs": [ + { + "name": "metadata", + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "model", + "inputs": [ + { + "name": "name", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": 
"(core::starknet::class_hash::ClassHash, core::starknet::contract_address::ContractAddress)" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "register_model", + "inputs": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "deploy_contract", + "inputs": [ + { + "name": "salt", + "type": "core::felt252" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "upgrade_contract", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "uuid", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "emit", + "inputs": [ + { + "name": "keys", + "type": "core::array::Array::" + }, + { + "name": "values", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "view" + }, + { + "type": "function", + "name": "entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "values", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "base", + "inputs": [], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "delete_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_writer", + "inputs": [ + { + "name": "model", + "type": 
"core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "impl", + "name": "UpgradeableWorld", + "interface_name": "dojo::world::IUpgradeableWorld" + }, + { + "type": "interface", + "name": "dojo::world::IUpgradeableWorld", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "constructor", + "name": "constructor", + "inputs": [ + { + "name": "contract_base", + "type": "core::starknet::class_hash::ClassHash" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldSpawned", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "creator", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractDeployed", + "kind": "struct", + "members": [ + { + "name": "salt", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::MetadataUpdate", + "kind": "struct", + "members": [ + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "uri", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ModelRegistered", + "kind": "struct", + "members": [ + { + "name": "name", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "prev_class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "prev_address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreSetRecord", + 
"kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + }, + { + "name": "values", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreDelRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WriterUpdated", + "kind": "struct", + "members": [ + { + "name": "model", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::OwnerUpdated", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::Event", + "kind": "enum", + "variants": [ + { + "name": "WorldSpawned", + "type": "dojo::world::world::WorldSpawned", + "kind": "nested" + }, + { + "name": "ContractDeployed", + "type": "dojo::world::world::ContractDeployed", + "kind": "nested" + }, + { + "name": "ContractUpgraded", + "type": "dojo::world::world::ContractUpgraded", + "kind": "nested" + }, + { + "name": "WorldUpgraded", + "type": "dojo::world::world::WorldUpgraded", + "kind": "nested" + }, + { + "name": "MetadataUpdate", + "type": "dojo::world::world::MetadataUpdate", + "kind": "nested" + }, + { + "name": "ModelRegistered", + "type": "dojo::world::world::ModelRegistered", + "kind": "nested" + }, + { + "name": "StoreSetRecord", + "type": "dojo::world::world::StoreSetRecord", + "kind": "nested" + }, + { + "name": "StoreDelRecord", + "type": "dojo::world::world::StoreDelRecord", + "kind": "nested" + }, + { + "name": "WriterUpdated", + "type": "dojo::world::world::WriterUpdated", + "kind": "nested" + }, + { + "name": "OwnerUpdated", + "type": "dojo::world::world::OwnerUpdated", + "kind": "nested" + } + ] + } +] \ No newline at end of file diff --git a/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_record.json b/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_record.json new file mode 100644 index 0000000000..e828f0da74 --- /dev/null +++ b/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_record.json @@ -0,0 +1,389 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": 
"children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "recordImpl", + "interface_name": "types_test::models::Irecord" + }, + { + "type": "enum", + "name": "types_test::models::Depth", + "variants": [ + { + "name": "Zero", + "type": "()" + }, + { + "name": "One", + "type": "()" + }, + { + "name": "Two", + "type": "()" + }, + { + "name": "Three", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "core::integer::u256", + "members": [ + { + "name": "low", + "type": "core::integer::u128" + }, + { + "name": "high", + "type": "core::integer::u128" + } + ] + }, + { + "type": "enum", + "name": "core::bool", + "variants": [ + { + "name": "False", + "type": "()" + }, + { + "name": "True", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "types_test::models::NestedMost", + "members": [ + { + "name": "depth", + "type": "types_test::models::Depth" + }, + { + "name": "type_number", + "type": "core::integer::u8" + }, + { + "name": "type_string", + "type": "core::felt252" + } + ] + }, + { + "type": "struct", + "name": "types_test::models::NestedMore", + "members": [ + { + "name": "depth", + "type": "types_test::models::Depth" + }, + { + "name": "type_number", + "type": "core::integer::u8" + }, + { + "name": "type_string", + "type": "core::felt252" + }, + { + "name": "type_nested_most", + "type": "types_test::models::NestedMost" + } + ] + }, + { + "type": "struct", + "name": "types_test::models::Nested", + "members": [ + { + "name": "depth", + "type": "types_test::models::Depth" + }, + { + "name": "type_number", + "type": "core::integer::u8" + }, + { + "name": "type_string", + "type": "core::felt252" + }, + { + "name": "type_nested_more", + "type": 
"types_test::models::NestedMore" + } + ] + }, + { + "type": "struct", + "name": "types_test::models::Record", + "members": [ + { + "name": "record_id", + "type": "core::integer::u32" + }, + { + "name": "depth", + "type": "types_test::models::Depth" + }, + { + "name": "type_u8", + "type": "core::integer::u8" + }, + { + "name": "type_u16", + "type": "core::integer::u16" + }, + { + "name": "type_u32", + "type": "core::integer::u32" + }, + { + "name": "type_u64", + "type": "core::integer::u64" + }, + { + "name": "type_u128", + "type": "core::integer::u128" + }, + { + "name": "type_u256", + "type": "core::integer::u256" + }, + { + "name": "type_bool", + "type": "core::bool" + }, + { + "name": "type_felt", + "type": "core::felt252" + }, + { + "name": "type_class_hash", + "type": "core::starknet::class_hash::ClassHash" + }, + { + "name": "type_contract_address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "type_deeply_nested", + "type": "types_test::models::Nested" + }, + { + "name": "type_nested_one", + "type": "types_test::models::NestedMost" + }, + { + "name": "type_nested_two", + "type": "types_test::models::NestedMost" + }, + { + "name": "random_u8", + "type": "core::integer::u8" + }, + { + "name": "random_u128", + "type": "core::integer::u128" + }, + { + "name": "composite_u256", + "type": "core::integer::u256" + } + ] + }, + { + "type": "interface", + "name": "types_test::models::Irecord", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "types_test::models::Record" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "types_test::models::record::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_record_sibling.json b/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_record_sibling.json new file mode 100644 index 0000000000..f72c295cf8 --- /dev/null +++ b/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_record_sibling.json @@ -0,0 +1,213 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": 
"dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "record_siblingImpl", + "interface_name": "types_test::models::Irecord_sibling" + }, + { + "type": "struct", + "name": "types_test::models::RecordSibling", + "members": [ + { + "name": "record_id", + "type": "core::integer::u32" + }, + { + "name": "random_u8", + "type": "core::integer::u8" + } + ] + }, + { + "type": "interface", + "name": "types_test::models::Irecord_sibling", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "types_test::models::RecordSibling" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "types_test::models::record_sibling::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_subrecord.json b/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_subrecord.json new file mode 100644 index 0000000000..97938e7cc3 --- /dev/null +++ b/crates/torii/types-test/manifests/dev/abis/base/models/types_test_models_subrecord.json @@ -0,0 +1,221 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": 
"core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "subrecordImpl", + "interface_name": "types_test::models::Isubrecord" + }, + { + "type": "struct", + "name": "types_test::models::Subrecord", + "members": [ + { + "name": "record_id", + "type": "core::integer::u32" + }, + { + "name": "subrecord_id", + "type": "core::integer::u32" + }, + { + "name": "type_u8", + "type": "core::integer::u8" + }, + { + "name": "random_u8", + "type": "core::integer::u8" + } + ] + }, + { + "type": "interface", + "name": "types_test::models::Isubrecord", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "types_test::models::Subrecord" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "types_test::models::subrecord::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/crates/torii/types-test/manifests/dev/base/contracts/types_test_contracts_records.toml b/crates/torii/types-test/manifests/dev/base/contracts/types_test_contracts_records.toml new file mode 100644 index 0000000000..27cbb6f796 --- /dev/null +++ b/crates/torii/types-test/manifests/dev/base/contracts/types_test_contracts_records.toml @@ -0,0 +1,9 @@ +kind = "DojoContract" +class_hash = "0x658309df749cea1c32e21920740011e829626ab06c9b4d0c05b75f82a20693b" +original_class_hash = "0x658309df749cea1c32e21920740011e829626ab06c9b4d0c05b75f82a20693b" +base_class_hash = "0x0" +abi = "manifests/dev/abis/base/contracts/types_test_contracts_records.json" +reads = [] +writes = [] +computed = [] +name = "types_test::contracts::records" diff --git a/crates/torii/types-test/manifests/base/base.toml b/crates/torii/types-test/manifests/dev/base/dojo_base_base.toml similarity index 57% rename from crates/torii/types-test/manifests/base/base.toml rename to crates/torii/types-test/manifests/dev/base/dojo_base_base.toml index 05caa48e93..d926bca4d7 100644 --- a/crates/torii/types-test/manifests/base/base.toml +++ b/crates/torii/types-test/manifests/dev/base/dojo_base_base.toml @@ -1,3 +1,4 @@ 
kind = "Class" class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" +original_class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" name = "dojo::base::base" diff --git a/crates/torii/types-test/manifests/dev/base/dojo_world_world.toml b/crates/torii/types-test/manifests/dev/base/dojo_world_world.toml new file mode 100644 index 0000000000..a08f22b905 --- /dev/null +++ b/crates/torii/types-test/manifests/dev/base/dojo_world_world.toml @@ -0,0 +1,5 @@ +kind = "Class" +class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" +original_class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" +abi = "manifests/dev/abis/base/dojo_world_world.json" +name = "dojo::world::world" diff --git a/crates/torii/types-test/manifests/base/models/record.toml b/crates/torii/types-test/manifests/dev/base/models/types_test_models_record.toml similarity index 88% rename from crates/torii/types-test/manifests/base/models/record.toml rename to crates/torii/types-test/manifests/dev/base/models/types_test_models_record.toml index 6c886eeaec..1474e6e48a 100644 --- a/crates/torii/types-test/manifests/base/models/record.toml +++ b/crates/torii/types-test/manifests/dev/base/models/types_test_models_record.toml @@ -1,5 +1,7 @@ kind = "DojoModel" class_hash = "0x134456282bbaf00e0895ff43f286af8d490202baf6279d2b05be9bc0c05f059" +original_class_hash = "0x134456282bbaf00e0895ff43f286af8d490202baf6279d2b05be9bc0c05f059" +abi = "manifests/dev/abis/base/models/types_test_models_record.json" name = "types_test::models::record" [[members]] diff --git a/crates/torii/types-test/manifests/base/models/record_sibling.toml b/crates/torii/types-test/manifests/dev/base/models/types_test_models_record_sibling.toml similarity index 60% rename from crates/torii/types-test/manifests/base/models/record_sibling.toml rename to crates/torii/types-test/manifests/dev/base/models/types_test_models_record_sibling.toml index 9505aa8518..cf795b24b0 100644 --- a/crates/torii/types-test/manifests/base/models/record_sibling.toml +++ b/crates/torii/types-test/manifests/dev/base/models/types_test_models_record_sibling.toml @@ -1,5 +1,7 @@ kind = "DojoModel" class_hash = "0x4e92336e21ac7970b9bd9f4e294705f7864c0b29f53fdbf42ff7a9d7f0a53f3" +original_class_hash = "0x4e92336e21ac7970b9bd9f4e294705f7864c0b29f53fdbf42ff7a9d7f0a53f3" +abi = "manifests/dev/abis/base/models/types_test_models_record_sibling.json" name = "types_test::models::record_sibling" [[members]] diff --git a/crates/torii/types-test/manifests/base/models/subrecord.toml b/crates/torii/types-test/manifests/dev/base/models/types_test_models_subrecord.toml similarity index 69% rename from crates/torii/types-test/manifests/base/models/subrecord.toml rename to crates/torii/types-test/manifests/dev/base/models/types_test_models_subrecord.toml index f529bae93e..36158d7d9f 100644 --- a/crates/torii/types-test/manifests/base/models/subrecord.toml +++ b/crates/torii/types-test/manifests/dev/base/models/types_test_models_subrecord.toml @@ -1,5 +1,7 @@ kind = "DojoModel" class_hash = "0x7a47c3a9c8509a1d4a0379e50799eba7b173db6e41961341fe3f856a51d627" +original_class_hash = "0x7a47c3a9c8509a1d4a0379e50799eba7b173db6e41961341fe3f856a51d627" +abi = "manifests/dev/abis/base/models/types_test_models_subrecord.json" name = "types_test::models::subrecord" [[members]] diff --git a/dojoup/dojoup b/dojoup/dojoup index 9347c5aa52..6ded3265e8 100755 --- a/dojoup/dojoup +++ b/dojoup/dojoup @@ -73,6 +73,9 @@ main() { 
DOJOUP_REPO=${DOJOUP_REPO-dojoengine/dojo} + # Store the user-specified version separately. + DOJOUP_USER_VERSION=${DOJOUP_VERSION} + # Install by downloading binaries if [[ "$DOJOUP_REPO" == "dojoengine/dojo" && -z "$DOJOUP_BRANCH" && -z "$DOJOUP_COMMIT" ]]; then DOJOUP_VERSION=${DOJOUP_VERSION-stable} @@ -133,10 +136,21 @@ main() { RELEASE_URL="https://github.com/${DOJOUP_REPO}/releases/download/${DOJOUP_TAG}/" BIN_ARCHIVE_URL="${RELEASE_URL}dojo_${DOJOUP_VERSION}_${PLATFORM}_${ARCHITECTURE}.$EXT" + # Check if the version specified by the user exists in the Dojo repository. + if ! curl --output /dev/null --silent --head --fail "$BIN_ARCHIVE_URL"; then + say "Version ${DOJOUP_VERSION} does not match any release listed at https://github.com/dojoengine/dojo/releases." + say "Please specify a valid version, or omit -v to install the latest stable version automatically." + err "Aborting installation." + fi + echo $BIN_ARCHIVE_URL + # Display this message only if no version was specified by the user. + if [ ! -n "$DOJOUP_USER_VERSION" ]; then + say "downloading latest dojo" + fi + # Download and extract the binaries archive - say "downloading latest dojo" if [ "$PLATFORM" = "win32" ]; then tmp="$(mktemp -d 2>/dev/null || echo ".")/dojo.zip" ensure download "$BIN_ARCHIVE_URL" "$tmp" @@ -195,9 +209,11 @@ EOF ensure git checkout "$DOJOUP_COMMIT" fi - # Build the repo and install the binaries locally to the .dojo bin directory. - # --root appends /bin to the directory it is given, so we pass DOJO_DIR. - ensure cargo install --path ./dojo --bin dojo --locked --force --root "$DOJO_DIR" + for bin in "${BINS[@]}"; do + # Build the repo and install the binaries locally to the .dojo bin directory. + # --root appends /bin to the directory it is given, so we pass DOJO_DIR. + ensure cargo install --path ./bin/$bin $bin --locked --force --root "$DOJO_DIR" + done say "done" welcome_msg diff --git a/examples/spawn-and-move/Scarb.lock b/examples/spawn-and-move/Scarb.lock index af4e4b139f..9525e9db0e 100644 --- a/examples/spawn-and-move/Scarb.lock +++ b/examples/spawn-and-move/Scarb.lock @@ -3,14 +3,14 @@ version = 1 [[package]] name = "dojo" -version = "0.5.1" +version = "0.6.0" dependencies = [ "dojo_plugin", ] [[package]] name = "dojo_examples" -version = "0.5.1" +version = "0.6.0" dependencies = [ "dojo", ] diff --git a/examples/spawn-and-move/Scarb.toml b/examples/spawn-and-move/Scarb.toml index ac1a76a442..602db7b248 100644 --- a/examples/spawn-and-move/Scarb.toml +++ b/examples/spawn-and-move/Scarb.toml @@ -1,7 +1,7 @@ [package] cairo-version = "2.4.0" name = "dojo_examples" -version = "0.5.1" +version = "0.6.0" # Use the prelude with the less imports as possible # from corelib.
edition = "2023_10" diff --git a/examples/spawn-and-move/manifests/base/contracts/actions.toml b/examples/spawn-and-move/manifests/base/contracts/actions.toml deleted file mode 100644 index c5bed7ab76..0000000000 --- a/examples/spawn-and-move/manifests/base/contracts/actions.toml +++ /dev/null @@ -1,7 +0,0 @@ -kind = "DojoContract" -class_hash = "0x2a3b1c5473dfb9fd1be08b94fae201b30b4e63ed8caed996476cc4ad44cadb2" -abi = "abis/base/contracts/actions.json" -reads = [] -writes = [] -computed = [] -name = "dojo_examples::actions::actions" diff --git a/examples/spawn-and-move/manifests/base/world.toml b/examples/spawn-and-move/manifests/base/world.toml deleted file mode 100644 index 1553652ecc..0000000000 --- a/examples/spawn-and-move/manifests/base/world.toml +++ /dev/null @@ -1,3 +0,0 @@ -kind = "Class" -class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" -name = "dojo::world::world" diff --git a/examples/spawn-and-move/manifests/deployments/KATANA.json b/examples/spawn-and-move/manifests/deployments/KATANA.json deleted file mode 100644 index 844426adde..0000000000 --- a/examples/spawn-and-move/manifests/deployments/KATANA.json +++ /dev/null @@ -1,357 +0,0 @@ -{ - "world": { - "kind": "Contract", - "class_hash": "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd", - "abi": null, - "address": "0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295", - "transaction_hash": "0x6afefdcc49b3563a4f3657900ba71e9f9356861b15b942a73f2018f046a1048", - "block_number": 3, - "seed": "dojo_examples", - "name": "dojo::world::world" - }, - "base": { - "kind": "Class", - "class_hash": "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76", - "abi": null, - "name": "dojo::base::base" - }, - "contracts": [ - { - "kind": "DojoContract", - "address": "0x3539c9b89b08095ba914653fb0f20e55d4b172a415beade611bc260b346d0f7", - "class_hash": "0x2a3b1c5473dfb9fd1be08b94fae201b30b4e63ed8caed996476cc4ad44cadb2", - "abi": [ - { - "type": "impl", - "name": "DojoResourceProviderImpl", - "interface_name": "dojo::world::IDojoResourceProvider" - }, - { - "type": "interface", - "name": "dojo::world::IDojoResourceProvider", - "items": [ - { - "type": "function", - "name": "dojo_resource", - "inputs": [], - "outputs": [ - { - "type": "core::felt252" - } - ], - "state_mutability": "view" - } - ] - }, - { - "type": "impl", - "name": "WorldProviderImpl", - "interface_name": "dojo::world::IWorldProvider" - }, - { - "type": "struct", - "name": "dojo::world::IWorldDispatcher", - "members": [ - { - "name": "contract_address", - "type": "core::starknet::contract_address::ContractAddress" - } - ] - }, - { - "type": "interface", - "name": "dojo::world::IWorldProvider", - "items": [ - { - "type": "function", - "name": "world", - "inputs": [], - "outputs": [ - { - "type": "dojo::world::IWorldDispatcher" - } - ], - "state_mutability": "view" - } - ] - }, - { - "type": "impl", - "name": "ActionsComputedImpl", - "interface_name": "dojo_examples::actions::IActionsComputed" - }, - { - "type": "struct", - "name": "dojo_examples::models::Vec2", - "members": [ - { - "name": "x", - "type": "core::integer::u32" - }, - { - "name": "y", - "type": "core::integer::u32" - } - ] - }, - { - "type": "struct", - "name": "dojo_examples::models::Position", - "members": [ - { - "name": "player", - "type": "core::starknet::contract_address::ContractAddress" - }, - { - "name": "vec", - "type": "dojo_examples::models::Vec2" - } - ] - }, - { - "type": "interface", - "name": 
"dojo_examples::actions::IActionsComputed", - "items": [ - { - "type": "function", - "name": "tile_terrain", - "inputs": [ - { - "name": "vec", - "type": "dojo_examples::models::Vec2" - } - ], - "outputs": [ - { - "type": "core::felt252" - } - ], - "state_mutability": "view" - }, - { - "type": "function", - "name": "quadrant", - "inputs": [ - { - "name": "pos", - "type": "dojo_examples::models::Position" - } - ], - "outputs": [ - { - "type": "core::integer::u8" - } - ], - "state_mutability": "view" - } - ] - }, - { - "type": "impl", - "name": "ActionsImpl", - "interface_name": "dojo_examples::actions::IActions" - }, - { - "type": "enum", - "name": "dojo_examples::models::Direction", - "variants": [ - { - "name": "None", - "type": "()" - }, - { - "name": "Left", - "type": "()" - }, - { - "name": "Right", - "type": "()" - }, - { - "name": "Up", - "type": "()" - }, - { - "name": "Down", - "type": "()" - } - ] - }, - { - "type": "interface", - "name": "dojo_examples::actions::IActions", - "items": [ - { - "type": "function", - "name": "spawn", - "inputs": [], - "outputs": [], - "state_mutability": "view" - }, - { - "type": "function", - "name": "move", - "inputs": [ - { - "name": "direction", - "type": "dojo_examples::models::Direction" - } - ], - "outputs": [], - "state_mutability": "view" - } - ] - }, - { - "type": "impl", - "name": "UpgradableImpl", - "interface_name": "dojo::components::upgradeable::IUpgradeable" - }, - { - "type": "interface", - "name": "dojo::components::upgradeable::IUpgradeable", - "items": [ - { - "type": "function", - "name": "upgrade", - "inputs": [ - { - "name": "new_class_hash", - "type": "core::starknet::class_hash::ClassHash" - } - ], - "outputs": [], - "state_mutability": "external" - } - ] - }, - { - "type": "event", - "name": "dojo::components::upgradeable::upgradeable::Upgraded", - "kind": "struct", - "members": [ - { - "name": "class_hash", - "type": "core::starknet::class_hash::ClassHash", - "kind": "data" - } - ] - }, - { - "type": "event", - "name": "dojo::components::upgradeable::upgradeable::Event", - "kind": "enum", - "variants": [ - { - "name": "Upgraded", - "type": "dojo::components::upgradeable::upgradeable::Upgraded", - "kind": "nested" - } - ] - }, - { - "type": "event", - "name": "dojo_examples::actions::actions::Moved", - "kind": "struct", - "members": [ - { - "name": "player", - "type": "core::starknet::contract_address::ContractAddress", - "kind": "key" - }, - { - "name": "direction", - "type": "dojo_examples::models::Direction", - "kind": "data" - } - ] - }, - { - "type": "event", - "name": "dojo_examples::actions::actions::Event", - "kind": "enum", - "variants": [ - { - "name": "UpgradeableEvent", - "type": "dojo::components::upgradeable::upgradeable::Event", - "kind": "nested" - }, - { - "name": "Moved", - "type": "dojo_examples::actions::actions::Moved", - "kind": "nested" - } - ] - } - ], - "reads": [ - "Moves", - "Position" - ], - "writes": [], - "computed": [], - "name": "dojo_examples::actions::actions" - } - ], - "models": [ - { - "kind": "DojoModel", - "members": [ - { - "name": "player", - "type": "ContractAddress", - "key": true - }, - { - "name": "direction", - "type": "Direction", - "key": false - } - ], - "class_hash": "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911", - "abi": null, - "name": "dojo_examples::actions::actions::moved" - }, - { - "kind": "DojoModel", - "members": [ - { - "name": "player", - "type": "ContractAddress", - "key": true - }, - { - "name": "remaining", - "type": "u8", - "key": false - 
}, - { - "name": "last_direction", - "type": "Direction", - "key": false - } - ], - "class_hash": "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54", - "abi": null, - "name": "dojo_examples::models::moves" - }, - { - "kind": "DojoModel", - "members": [ - { - "name": "player", - "type": "ContractAddress", - "key": true - }, - { - "name": "vec", - "type": "Vec2", - "key": false - } - ], - "class_hash": "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5", - "abi": null, - "name": "dojo_examples::models::position" - } - ] -} \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/deployments/KATANA.toml b/examples/spawn-and-move/manifests/deployments/KATANA.toml deleted file mode 100644 index 72bc65458c..0000000000 --- a/examples/spawn-and-move/manifests/deployments/KATANA.toml +++ /dev/null @@ -1,76 +0,0 @@ -[world] -kind = "Contract" -class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" -address = "0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295" -transaction_hash = "0x6afefdcc49b3563a4f3657900ba71e9f9356861b15b942a73f2018f046a1048" -block_number = 3 -seed = "dojo_examples" -name = "dojo::world::world" - -[base] -kind = "Class" -class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" -name = "dojo::base::base" - -[[contracts]] -kind = "DojoContract" -address = "0x3539c9b89b08095ba914653fb0f20e55d4b172a415beade611bc260b346d0f7" -class_hash = "0x2a3b1c5473dfb9fd1be08b94fae201b30b4e63ed8caed996476cc4ad44cadb2" -abi = "abis/deployments/KATANA/contracts/actions.json" -reads = [ - "Moves", - "Position", -] -writes = [] -computed = [] -name = "dojo_examples::actions::actions" - -[[models]] -kind = "DojoModel" -class_hash = "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911" -name = "dojo_examples::actions::actions::moved" - -[[models.members]] -name = "player" -type = "ContractAddress" -key = true - -[[models.members]] -name = "direction" -type = "Direction" -key = false - -[[models]] -kind = "DojoModel" -class_hash = "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54" -name = "dojo_examples::models::moves" - -[[models.members]] -name = "player" -type = "ContractAddress" -key = true - -[[models.members]] -name = "remaining" -type = "u8" -key = false - -[[models.members]] -name = "last_direction" -type = "Direction" -key = false - -[[models]] -kind = "DojoModel" -class_hash = "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5" -name = "dojo_examples::models::position" - -[[models.members]] -name = "player" -type = "ContractAddress" -key = true - -[[models.members]] -name = "vec" -type = "Vec2" -key = false diff --git a/examples/spawn-and-move/abis/deployments/KATANA/contracts/actions.json b/examples/spawn-and-move/manifests/dev/abis/base/contracts/dojo_examples_actions_actions.json similarity index 90% rename from examples/spawn-and-move/abis/deployments/KATANA/contracts/actions.json rename to examples/spawn-and-move/manifests/dev/abis/base/contracts/dojo_examples_actions_actions.json index f7c8509d5b..279700126b 100644 --- a/examples/spawn-and-move/abis/deployments/KATANA/contracts/actions.json +++ b/examples/spawn-and-move/manifests/dev/abis/base/contracts/dojo_examples_actions_actions.json @@ -227,23 +227,6 @@ } ] }, - { - "type": "event", - "name": "dojo_examples::actions::actions::Moved", - "kind": "struct", - "members": [ - { - "name": "player", - "type": "core::starknet::contract_address::ContractAddress", - "kind": "key" - }, 
- { - "name": "direction", - "type": "dojo_examples::models::Direction", - "kind": "data" - } - ] - }, { "type": "event", "name": "dojo_examples::actions::actions::Event", @@ -253,11 +236,6 @@ "name": "UpgradeableEvent", "type": "dojo::components::upgradeable::upgradeable::Event", "kind": "nested" - }, - { - "name": "Moved", - "type": "dojo_examples::actions::actions::Moved", - "kind": "nested" } ] } diff --git a/examples/spawn-and-move/manifests/dev/abis/base/dojo_world_world.json b/examples/spawn-and-move/manifests/dev/abis/base/dojo_world_world.json new file mode 100644 index 0000000000..abb75db4f5 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/base/dojo_world_world.json @@ -0,0 +1,657 @@ +[ + { + "type": "impl", + "name": "World", + "interface_name": "dojo::world::IWorld" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "dojo::resource_metadata::ResourceMetadata", + "members": [ + { + "name": "resource_id", + "type": "core::felt252" + }, + { + "name": "metadata_uri", + "type": "core::array::Span::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "enum", + "name": "core::bool", + "variants": [ + { + "name": "False", + "type": "()" + }, + { + "name": "True", + "type": "()" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorld", + "items": [ + { + "type": "function", + "name": "metadata", + "inputs": [ + { + "name": "resource_id", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_metadata", + "inputs": [ + { + "name": "metadata", + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "model", + "inputs": [ + { + "name": "name", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "(core::starknet::class_hash::ClassHash, core::starknet::contract_address::ContractAddress)" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "register_model", + "inputs": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "deploy_contract", + "inputs": [ + { + "name": "salt", + "type": "core::felt252" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "upgrade_contract", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "uuid", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "emit", + "inputs": [ + { + "name": "keys", + "type": "core::array::Array::" + }, + { + "name": "values", + "type": "core::array::Span::" + } + ], + "outputs": 
[], + "state_mutability": "view" + }, + { + "type": "function", + "name": "entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "values", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "base", + "inputs": [], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "delete_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "impl", + "name": "UpgradeableWorld", + "interface_name": "dojo::world::IUpgradeableWorld" + }, + { + "type": "interface", + "name": "dojo::world::IUpgradeableWorld", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "constructor", + "name": "constructor", + "inputs": [ + { + "name": "contract_base", + "type": "core::starknet::class_hash::ClassHash" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldSpawned", + "kind": "struct", + 
"members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "creator", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractDeployed", + "kind": "struct", + "members": [ + { + "name": "salt", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::MetadataUpdate", + "kind": "struct", + "members": [ + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "uri", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ModelRegistered", + "kind": "struct", + "members": [ + { + "name": "name", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "prev_class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "prev_address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreSetRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + }, + { + "name": "values", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreDelRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WriterUpdated", + "kind": "struct", + "members": [ + { + "name": "model", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::OwnerUpdated", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::Event", + "kind": "enum", + "variants": [ + { + "name": "WorldSpawned", + "type": 
"dojo::world::world::WorldSpawned", + "kind": "nested" + }, + { + "name": "ContractDeployed", + "type": "dojo::world::world::ContractDeployed", + "kind": "nested" + }, + { + "name": "ContractUpgraded", + "type": "dojo::world::world::ContractUpgraded", + "kind": "nested" + }, + { + "name": "WorldUpgraded", + "type": "dojo::world::world::WorldUpgraded", + "kind": "nested" + }, + { + "name": "MetadataUpdate", + "type": "dojo::world::world::MetadataUpdate", + "kind": "nested" + }, + { + "name": "ModelRegistered", + "type": "dojo::world::world::ModelRegistered", + "kind": "nested" + }, + { + "name": "StoreSetRecord", + "type": "dojo::world::world::StoreSetRecord", + "kind": "nested" + }, + { + "name": "StoreDelRecord", + "type": "dojo::world::world::StoreDelRecord", + "kind": "nested" + }, + { + "name": "WriterUpdated", + "type": "dojo::world::world::WriterUpdated", + "kind": "nested" + }, + { + "name": "OwnerUpdated", + "type": "dojo::world::world::OwnerUpdated", + "kind": "nested" + } + ] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_actions_actions_moved.json b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_actions_actions_moved.json new file mode 100644 index 0000000000..55206aa098 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_actions_actions_moved.json @@ -0,0 +1,239 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + 
"state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "movedImpl", + "interface_name": "dojo_examples::actions::actions::Imoved" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::actions::actions::Moved", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "direction", + "type": "dojo_examples::models::Direction" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::actions::actions::Imoved", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::actions::actions::Moved" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::actions::actions::moved::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_emote_message.json b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_emote_message.json new file mode 100644 index 0000000000..ead98d0724 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_emote_message.json @@ -0,0 +1,239 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + 
"type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "emote_messageImpl", + "interface_name": "dojo_examples::models::Iemote_message" + }, + { + "type": "enum", + "name": "dojo_examples::models::Emote", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Happy", + "type": "()" + }, + { + "name": "Sad", + "type": "()" + }, + { + "name": "Angry", + "type": "()" + }, + { + "name": "Love", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::EmoteMessage", + "members": [ + { + "name": "identity", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "emote", + "type": "dojo_examples::models::Emote" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Iemote_message", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::EmoteMessage" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::emote_message::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_moves.json b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_moves.json new file mode 100644 index 0000000000..169cbc50ed --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_moves.json @@ -0,0 +1,243 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": 
"snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "movesImpl", + "interface_name": "dojo_examples::models::Imoves" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Moves", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "remaining", + "type": "core::integer::u8" + }, + { + "name": "last_direction", + "type": "dojo_examples::models::Direction" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Imoves", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::Moves" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::moves::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_position.json b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_position.json new file mode 100644 index 0000000000..9b6466644a --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/base/models/dojo_examples_models_position.json @@ -0,0 +1,227 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": 
"snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "positionImpl", + "interface_name": "dojo_examples::models::Iposition" + }, + { + "type": "struct", + "name": "dojo_examples::models::Vec2", + "members": [ + { + "name": "x", + "type": "core::integer::u32" + }, + { + "name": "y", + "type": "core::integer::u32" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Position", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "vec", + "type": "dojo_examples::models::Vec2" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Iposition", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::Position" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::position::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/abis/base/contracts/actions.json b/examples/spawn-and-move/manifests/dev/abis/deployments/contracts/dojo_examples_actions_actions.json similarity index 90% rename from examples/spawn-and-move/abis/base/contracts/actions.json rename 
to examples/spawn-and-move/manifests/dev/abis/deployments/contracts/dojo_examples_actions_actions.json index f7c8509d5b..279700126b 100644 --- a/examples/spawn-and-move/abis/base/contracts/actions.json +++ b/examples/spawn-and-move/manifests/dev/abis/deployments/contracts/dojo_examples_actions_actions.json @@ -227,23 +227,6 @@ } ] }, - { - "type": "event", - "name": "dojo_examples::actions::actions::Moved", - "kind": "struct", - "members": [ - { - "name": "player", - "type": "core::starknet::contract_address::ContractAddress", - "kind": "key" - }, - { - "name": "direction", - "type": "dojo_examples::models::Direction", - "kind": "data" - } - ] - }, { "type": "event", "name": "dojo_examples::actions::actions::Event", @@ -253,11 +236,6 @@ "name": "UpgradeableEvent", "type": "dojo::components::upgradeable::upgradeable::Event", "kind": "nested" - }, - { - "name": "Moved", - "type": "dojo_examples::actions::actions::Moved", - "kind": "nested" } ] } diff --git a/examples/spawn-and-move/manifests/dev/abis/deployments/dojo_world_world.json b/examples/spawn-and-move/manifests/dev/abis/deployments/dojo_world_world.json new file mode 100644 index 0000000000..abb75db4f5 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/deployments/dojo_world_world.json @@ -0,0 +1,657 @@ +[ + { + "type": "impl", + "name": "World", + "interface_name": "dojo::world::IWorld" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "dojo::resource_metadata::ResourceMetadata", + "members": [ + { + "name": "resource_id", + "type": "core::felt252" + }, + { + "name": "metadata_uri", + "type": "core::array::Span::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "enum", + "name": "core::bool", + "variants": [ + { + "name": "False", + "type": "()" + }, + { + "name": "True", + "type": "()" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorld", + "items": [ + { + "type": "function", + "name": "metadata", + "inputs": [ + { + "name": "resource_id", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_metadata", + "inputs": [ + { + "name": "metadata", + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "model", + "inputs": [ + { + "name": "name", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "(core::starknet::class_hash::ClassHash, core::starknet::contract_address::ContractAddress)" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "register_model", + "inputs": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "deploy_contract", + "inputs": [ + { + "name": "salt", + "type": "core::felt252" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "upgrade_contract", + "inputs": [ + { + "name": "address", + "type": 
"core::starknet::contract_address::ContractAddress" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "uuid", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "emit", + "inputs": [ + { + "name": "keys", + "type": "core::array::Array::" + }, + { + "name": "values", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "view" + }, + { + "type": "function", + "name": "entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "values", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "base", + "inputs": [], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "delete_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "impl", + "name": 
"UpgradeableWorld", + "interface_name": "dojo::world::IUpgradeableWorld" + }, + { + "type": "interface", + "name": "dojo::world::IUpgradeableWorld", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "constructor", + "name": "constructor", + "inputs": [ + { + "name": "contract_base", + "type": "core::starknet::class_hash::ClassHash" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldSpawned", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "creator", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractDeployed", + "kind": "struct", + "members": [ + { + "name": "salt", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::MetadataUpdate", + "kind": "struct", + "members": [ + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "uri", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ModelRegistered", + "kind": "struct", + "members": [ + { + "name": "name", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "prev_class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "prev_address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreSetRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + }, + { + "name": "values", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreDelRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WriterUpdated", + "kind": "struct", + "members": [ + { + "name": "model", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "system", + "type": 
"core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::OwnerUpdated", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::Event", + "kind": "enum", + "variants": [ + { + "name": "WorldSpawned", + "type": "dojo::world::world::WorldSpawned", + "kind": "nested" + }, + { + "name": "ContractDeployed", + "type": "dojo::world::world::ContractDeployed", + "kind": "nested" + }, + { + "name": "ContractUpgraded", + "type": "dojo::world::world::ContractUpgraded", + "kind": "nested" + }, + { + "name": "WorldUpgraded", + "type": "dojo::world::world::WorldUpgraded", + "kind": "nested" + }, + { + "name": "MetadataUpdate", + "type": "dojo::world::world::MetadataUpdate", + "kind": "nested" + }, + { + "name": "ModelRegistered", + "type": "dojo::world::world::ModelRegistered", + "kind": "nested" + }, + { + "name": "StoreSetRecord", + "type": "dojo::world::world::StoreSetRecord", + "kind": "nested" + }, + { + "name": "StoreDelRecord", + "type": "dojo::world::world::StoreDelRecord", + "kind": "nested" + }, + { + "name": "WriterUpdated", + "type": "dojo::world::world::WriterUpdated", + "kind": "nested" + }, + { + "name": "OwnerUpdated", + "type": "dojo::world::world::OwnerUpdated", + "kind": "nested" + } + ] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_actions_actions_moved.json b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_actions_actions_moved.json new file mode 100644 index 0000000000..55206aa098 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_actions_actions_moved.json @@ -0,0 +1,239 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + 
"type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "movedImpl", + "interface_name": "dojo_examples::actions::actions::Imoved" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::actions::actions::Moved", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "direction", + "type": "dojo_examples::models::Direction" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::actions::actions::Imoved", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::actions::actions::Moved" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::actions::actions::moved::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_emote_message.json b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_emote_message.json new file mode 100644 index 0000000000..ead98d0724 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_emote_message.json @@ -0,0 +1,239 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": 
"struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "emote_messageImpl", + "interface_name": "dojo_examples::models::Iemote_message" + }, + { + "type": "enum", + "name": "dojo_examples::models::Emote", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Happy", + "type": "()" + }, + { + "name": "Sad", + "type": "()" + }, + { + "name": "Angry", + "type": "()" + }, + { + "name": "Love", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::EmoteMessage", + "members": [ + { + "name": "identity", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "emote", + "type": "dojo_examples::models::Emote" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Iemote_message", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::EmoteMessage" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::emote_message::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_moves.json b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_moves.json new file mode 100644 index 0000000000..169cbc50ed --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_moves.json @@ -0,0 +1,243 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] 
+ }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "movesImpl", + "interface_name": "dojo_examples::models::Imoves" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Moves", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "remaining", + "type": "core::integer::u8" + }, + { + "name": "last_direction", + "type": "dojo_examples::models::Direction" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Imoves", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::Moves" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::moves::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git 
a/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_position.json b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_position.json new file mode 100644 index 0000000000..9b6466644a --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/abis/deployments/models/dojo_examples_models_position.json @@ -0,0 +1,227 @@ +[ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "positionImpl", + "interface_name": "dojo_examples::models::Iposition" + }, + { + "type": "struct", + "name": "dojo_examples::models::Vec2", + "members": [ + { + "name": "x", + "type": "core::integer::u32" + }, + { + "name": "y", + "type": "core::integer::u32" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Position", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "vec", + "type": 
"dojo_examples::models::Vec2" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Iposition", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::Position" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::position::Event", + "kind": "enum", + "variants": [] + } +] \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples_actions_actions.toml b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples_actions_actions.toml new file mode 100644 index 0000000000..4047d46932 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples_actions_actions.toml @@ -0,0 +1,9 @@ +kind = "DojoContract" +class_hash = "0x471a1a36581d24f68ae6984c714daa75d424c0a6581872a65e645eb11a8e45" +original_class_hash = "0x471a1a36581d24f68ae6984c714daa75d424c0a6581872a65e645eb11a8e45" +base_class_hash = "0x0" +abi = "manifests/dev/abis/base/contracts/dojo_examples_actions_actions.json" +reads = [] +writes = [] +computed = [] +name = "dojo_examples::actions::actions" diff --git a/examples/spawn-and-move/manifests/base/base.toml b/examples/spawn-and-move/manifests/dev/base/dojo_base_base.toml similarity index 57% rename from examples/spawn-and-move/manifests/base/base.toml rename to examples/spawn-and-move/manifests/dev/base/dojo_base_base.toml index 05caa48e93..d926bca4d7 100644 --- a/examples/spawn-and-move/manifests/base/base.toml +++ b/examples/spawn-and-move/manifests/dev/base/dojo_base_base.toml @@ -1,3 +1,4 @@ kind = "Class" class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" +original_class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" name = "dojo::base::base" diff --git a/examples/spawn-and-move/manifests/dev/base/dojo_world_world.toml b/examples/spawn-and-move/manifests/dev/base/dojo_world_world.toml new file mode 100644 index 0000000000..a08f22b905 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/base/dojo_world_world.toml @@ -0,0 +1,5 @@ +kind = "Class" +class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" +original_class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" +abi = "manifests/dev/abis/base/dojo_world_world.json" +name = "dojo::world::world" diff --git a/examples/spawn-and-move/manifests/base/models/moved.toml b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_actions_actions_moved.toml similarity index 61% rename from examples/spawn-and-move/manifests/base/models/moved.toml rename to examples/spawn-and-move/manifests/dev/base/models/dojo_examples_actions_actions_moved.toml index 71c08c95bb..51827417b0 100644 --- a/examples/spawn-and-move/manifests/base/models/moved.toml +++ b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_actions_actions_moved.toml @@ -1,5 +1,7 @@ kind = "DojoModel" class_hash = "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911" +original_class_hash = "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911" +abi = "manifests/dev/abis/base/models/dojo_examples_actions_actions_moved.json" name = "dojo_examples::actions::actions::moved" [[members]] diff --git a/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_emote_message.toml b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_emote_message.toml new 
file mode 100644 index 0000000000..00aca3ebdc --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_emote_message.toml @@ -0,0 +1,15 @@ +kind = "DojoModel" +class_hash = "0x6d5aef0819f5267c840c57d37ff774b4e185fc7da4a10e58cb9f575aa2ed1c" +original_class_hash = "0x6d5aef0819f5267c840c57d37ff774b4e185fc7da4a10e58cb9f575aa2ed1c" +abi = "manifests/dev/abis/base/models/dojo_examples_models_emote_message.json" +name = "dojo_examples::models::emote_message" + +[[members]] +name = "identity" +type = "ContractAddress" +key = true + +[[members]] +name = "emote" +type = "Emote" +key = false diff --git a/examples/spawn-and-move/manifests/base/models/moves.toml b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_moves.toml similarity index 67% rename from examples/spawn-and-move/manifests/base/models/moves.toml rename to examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_moves.toml index 393330b64b..31bb7ff6cc 100644 --- a/examples/spawn-and-move/manifests/base/models/moves.toml +++ b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_moves.toml @@ -1,5 +1,7 @@ kind = "DojoModel" class_hash = "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54" +original_class_hash = "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54" +abi = "manifests/dev/abis/base/models/dojo_examples_models_moves.json" name = "dojo_examples::models::moves" [[members]] diff --git a/examples/spawn-and-move/manifests/base/models/position.toml b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_position.toml similarity index 61% rename from examples/spawn-and-move/manifests/base/models/position.toml rename to examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_position.toml index 8af1214219..8721663cef 100644 --- a/examples/spawn-and-move/manifests/base/models/position.toml +++ b/examples/spawn-and-move/manifests/dev/base/models/dojo_examples_models_position.toml @@ -1,5 +1,7 @@ kind = "DojoModel" class_hash = "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5" +original_class_hash = "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5" +abi = "manifests/dev/abis/base/models/dojo_examples_models_position.json" name = "dojo_examples::models::position" [[members]] diff --git a/examples/spawn-and-move/manifests/dev/manifest.json b/examples/spawn-and-move/manifests/dev/manifest.json new file mode 100644 index 0000000000..bb9ce0f641 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/manifest.json @@ -0,0 +1,1962 @@ +{ + "world": { + "kind": "WorldContract", + "class_hash": "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd", + "original_class_hash": "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd", + "abi": [ + { + "type": "impl", + "name": "World", + "interface_name": "dojo::world::IWorld" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "dojo::resource_metadata::ResourceMetadata", + "members": [ + { + "name": "resource_id", + "type": "core::felt252" + }, + { + "name": "metadata_uri", + "type": "core::array::Span::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "enum", + "name": "core::bool", + "variants": [ + { + "name": "False", + "type": "()" + }, + 
{ + "name": "True", + "type": "()" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorld", + "items": [ + { + "type": "function", + "name": "metadata", + "inputs": [ + { + "name": "resource_id", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_metadata", + "inputs": [ + { + "name": "metadata", + "type": "dojo::resource_metadata::ResourceMetadata" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "model", + "inputs": [ + { + "name": "name", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "(core::starknet::class_hash::ClassHash, core::starknet::contract_address::ContractAddress)" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "register_model", + "inputs": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "deploy_contract", + "inputs": [ + { + "name": "salt", + "type": "core::felt252" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "upgrade_contract", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "uuid", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "emit", + "inputs": [ + { + "name": "keys", + "type": "core::array::Array::" + }, + { + "name": "values", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "view" + }, + { + "type": "function", + "name": "entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "values", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "base", + "inputs": [], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "delete_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [ + { + 
"type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "impl", + "name": "UpgradeableWorld", + "interface_name": "dojo::world::IUpgradeableWorld" + }, + { + "type": "interface", + "name": "dojo::world::IUpgradeableWorld", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "constructor", + "name": "constructor", + "inputs": [ + { + "name": "contract_base", + "type": "core::starknet::class_hash::ClassHash" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldSpawned", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "creator", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractDeployed", + "kind": "struct", + "members": [ + { + "name": "salt", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::MetadataUpdate", + "kind": "struct", + "members": [ + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "uri", + "type": "core::array::Span::", + "kind": "data" + } 
+ ] + }, + { + "type": "event", + "name": "dojo::world::world::ModelRegistered", + "kind": "struct", + "members": [ + { + "name": "name", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "prev_class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "prev_address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreSetRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + }, + { + "name": "values", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreDelRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WriterUpdated", + "kind": "struct", + "members": [ + { + "name": "model", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::OwnerUpdated", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::Event", + "kind": "enum", + "variants": [ + { + "name": "WorldSpawned", + "type": "dojo::world::world::WorldSpawned", + "kind": "nested" + }, + { + "name": "ContractDeployed", + "type": "dojo::world::world::ContractDeployed", + "kind": "nested" + }, + { + "name": "ContractUpgraded", + "type": "dojo::world::world::ContractUpgraded", + "kind": "nested" + }, + { + "name": "WorldUpgraded", + "type": "dojo::world::world::WorldUpgraded", + "kind": "nested" + }, + { + "name": "MetadataUpdate", + "type": "dojo::world::world::MetadataUpdate", + "kind": "nested" + }, + { + "name": "ModelRegistered", + "type": "dojo::world::world::ModelRegistered", + "kind": "nested" + }, + { + "name": "StoreSetRecord", + "type": "dojo::world::world::StoreSetRecord", + "kind": "nested" + }, + { + "name": "StoreDelRecord", + "type": "dojo::world::world::StoreDelRecord", + "kind": "nested" + }, + { + "name": "WriterUpdated", + "type": "dojo::world::world::WriterUpdated", + "kind": "nested" + }, + { + "name": "OwnerUpdated", + "type": "dojo::world::world::OwnerUpdated", + "kind": "nested" + } + ] + } + ], + "address": "0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295", + "transaction_hash": "0x6afefdcc49b3563a4f3657900ba71e9f9356861b15b942a73f2018f046a1048", + "block_number": 3, + "seed": "dojo_examples", + "metadata": { + "profile_name": "dev", + "rpc_url": "http://localhost:5050/" + }, + "name": "dojo::world::world" + }, + "base": { + "kind": "Class", + "class_hash": 
"0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76", + "original_class_hash": "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76", + "abi": null, + "name": "dojo::base::base" + }, + "contracts": [ + { + "kind": "DojoContract", + "address": "0x3539c9b89b08095ba914653fb0f20e55d4b172a415beade611bc260b346d0f7", + "class_hash": "0x471a1a36581d24f68ae6984c714daa75d424c0a6581872a65e645eb11a8e45", + "original_class_hash": "0x471a1a36581d24f68ae6984c714daa75d424c0a6581872a65e645eb11a8e45", + "base_class_hash": "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76", + "abi": [ + { + "type": "impl", + "name": "DojoResourceProviderImpl", + "interface_name": "dojo::world::IDojoResourceProvider" + }, + { + "type": "interface", + "name": "dojo::world::IDojoResourceProvider", + "items": [ + { + "type": "function", + "name": "dojo_resource", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "WorldProviderImpl", + "interface_name": "dojo::world::IWorldProvider" + }, + { + "type": "struct", + "name": "dojo::world::IWorldDispatcher", + "members": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorldProvider", + "items": [ + { + "type": "function", + "name": "world", + "inputs": [], + "outputs": [ + { + "type": "dojo::world::IWorldDispatcher" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "ActionsComputedImpl", + "interface_name": "dojo_examples::actions::IActionsComputed" + }, + { + "type": "struct", + "name": "dojo_examples::models::Vec2", + "members": [ + { + "name": "x", + "type": "core::integer::u32" + }, + { + "name": "y", + "type": "core::integer::u32" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Position", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "vec", + "type": "dojo_examples::models::Vec2" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::actions::IActionsComputed", + "items": [ + { + "type": "function", + "name": "tile_terrain", + "inputs": [ + { + "name": "vec", + "type": "dojo_examples::models::Vec2" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "quadrant", + "inputs": [ + { + "name": "pos", + "type": "dojo_examples::models::Position" + } + ], + "outputs": [ + { + "type": "core::integer::u8" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "ActionsImpl", + "interface_name": "dojo_examples::actions::IActions" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::actions::IActions", + "items": [ + { + "type": "function", + "name": "spawn", + "inputs": [], + "outputs": [], + "state_mutability": "view" + }, + { + "type": "function", + "name": "move", + "inputs": [ + { + "name": "direction", + "type": "dojo_examples::models::Direction" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "UpgradableImpl", + "interface_name": 
"dojo::components::upgradeable::IUpgradeable" + }, + { + "type": "interface", + "name": "dojo::components::upgradeable::IUpgradeable", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "event", + "name": "dojo::components::upgradeable::upgradeable::Upgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::components::upgradeable::upgradeable::Event", + "kind": "enum", + "variants": [ + { + "name": "Upgraded", + "type": "dojo::components::upgradeable::upgradeable::Upgraded", + "kind": "nested" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::actions::actions::Event", + "kind": "enum", + "variants": [ + { + "name": "UpgradeableEvent", + "type": "dojo::components::upgradeable::upgradeable::Event", + "kind": "nested" + } + ] + } + ], + "reads": [], + "writes": [], + "computed": [], + "name": "dojo_examples::actions::actions" + } + ], + "models": [ + { + "kind": "DojoModel", + "members": [ + { + "name": "player", + "type": "ContractAddress", + "key": true + }, + { + "name": "direction", + "type": "Direction", + "key": false + } + ], + "class_hash": "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911", + "original_class_hash": "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911", + "abi": [ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, 
+ { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "movedImpl", + "interface_name": "dojo_examples::actions::actions::Imoved" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::actions::actions::Moved", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "direction", + "type": "dojo_examples::models::Direction" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::actions::actions::Imoved", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::actions::actions::Moved" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::actions::actions::moved::Event", + "kind": "enum", + "variants": [] + } + ], + "name": "dojo_examples::actions::actions::moved" + }, + { + "kind": "DojoModel", + "members": [ + { + "name": "identity", + "type": "ContractAddress", + "key": true + }, + { + "name": "emote", + "type": "Emote", + "key": false + } + ], + "class_hash": "0x6d5aef0819f5267c840c57d37ff774b4e185fc7da4a10e58cb9f575aa2ed1c", + "original_class_hash": "0x6d5aef0819f5267c840c57d37ff774b4e185fc7da4a10e58cb9f575aa2ed1c", + "abi": [ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": 
"dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "emote_messageImpl", + "interface_name": "dojo_examples::models::Iemote_message" + }, + { + "type": "enum", + "name": "dojo_examples::models::Emote", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Happy", + "type": "()" + }, + { + "name": "Sad", + "type": "()" + }, + { + "name": "Angry", + "type": "()" + }, + { + "name": "Love", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::EmoteMessage", + "members": [ + { + "name": "identity", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "emote", + "type": "dojo_examples::models::Emote" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Iemote_message", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::EmoteMessage" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::emote_message::Event", + "kind": "enum", + "variants": [] + } + ], + "name": "dojo_examples::models::emote_message" + }, + { + "kind": "DojoModel", + "members": [ + { + "name": "player", + "type": "ContractAddress", + "key": true + }, + { + "name": "remaining", + "type": "u8", + "key": false + }, + { + "name": "last_direction", + "type": "Direction", + "key": false + } + ], + "class_hash": "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54", + "original_class_hash": "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54", + "abi": [ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": "dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", 
+ "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "movesImpl", + "interface_name": "dojo_examples::models::Imoves" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Moves", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "remaining", + "type": "core::integer::u8" + }, + { + "name": "last_direction", + "type": "dojo_examples::models::Direction" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Imoves", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::Moves" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::moves::Event", + "kind": "enum", + "variants": [] + } + ], + "name": "dojo_examples::models::moves" + }, + { + "kind": "DojoModel", + "members": [ + { + "name": "player", + "type": "ContractAddress", + "key": true + }, + { + "name": "vec", + "type": "Vec2", + "key": false + } + ], + "class_hash": "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5", + "original_class_hash": "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5", + "abi": [ + { + "type": "impl", + "name": "DojoModelImpl", + "interface_name": 
"dojo::model::IDojoModel" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + }, + { + "name": "Array", + "type": "core::integer::u32" + } + ] + }, + { + "type": "interface", + "name": "dojo::model::IDojoModel", + "items": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "positionImpl", + "interface_name": "dojo_examples::models::Iposition" + }, + { + "type": "struct", + "name": "dojo_examples::models::Vec2", + "members": [ + { + "name": "x", + "type": "core::integer::u32" + }, + { + "name": "y", + "type": "core::integer::u32" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Position", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "vec", + "type": "dojo_examples::models::Vec2" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::models::Iposition", + "items": [ + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::Position" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::models::position::Event", + "kind": "enum", + "variants": [] + } + ], + "name": 
"dojo_examples::models::position" + } + ] +} \ No newline at end of file diff --git a/examples/spawn-and-move/manifests/dev/manifest.toml b/examples/spawn-and-move/manifests/dev/manifest.toml new file mode 100644 index 0000000000..156abd3709 --- /dev/null +++ b/examples/spawn-and-move/manifests/dev/manifest.toml @@ -0,0 +1,105 @@ +[world] +kind = "WorldContract" +class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" +original_class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" +abi = "abis/deployments/dojo_world_world.json" +address = "0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295" +transaction_hash = "0x6afefdcc49b3563a4f3657900ba71e9f9356861b15b942a73f2018f046a1048" +block_number = 3 +seed = "dojo_examples" +name = "dojo::world::world" + +[world.metadata] +profile_name = "dev" +rpc_url = "http://localhost:5050/" + +[base] +kind = "Class" +class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" +original_class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" +name = "dojo::base::base" + +[[contracts]] +kind = "DojoContract" +address = "0x3539c9b89b08095ba914653fb0f20e55d4b172a415beade611bc260b346d0f7" +class_hash = "0x471a1a36581d24f68ae6984c714daa75d424c0a6581872a65e645eb11a8e45" +original_class_hash = "0x471a1a36581d24f68ae6984c714daa75d424c0a6581872a65e645eb11a8e45" +base_class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" +abi = "abis/deployments/contracts/dojo_examples_actions_actions.json" +reads = [] +writes = [] +computed = [] +name = "dojo_examples::actions::actions" + +[[models]] +kind = "DojoModel" +class_hash = "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911" +original_class_hash = "0x52659850f9939482810d9f6b468b91dc99e0b7fa42c2016cf12833ec06ce911" +abi = "abis/deployments/models/dojo_examples_actions_actions_moved.json" +name = "dojo_examples::actions::actions::moved" + +[[models.members]] +name = "player" +type = "ContractAddress" +key = true + +[[models.members]] +name = "direction" +type = "Direction" +key = false + +[[models]] +kind = "DojoModel" +class_hash = "0x6d5aef0819f5267c840c57d37ff774b4e185fc7da4a10e58cb9f575aa2ed1c" +original_class_hash = "0x6d5aef0819f5267c840c57d37ff774b4e185fc7da4a10e58cb9f575aa2ed1c" +abi = "abis/deployments/models/dojo_examples_models_emote_message.json" +name = "dojo_examples::models::emote_message" + +[[models.members]] +name = "identity" +type = "ContractAddress" +key = true + +[[models.members]] +name = "emote" +type = "Emote" +key = false + +[[models]] +kind = "DojoModel" +class_hash = "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54" +original_class_hash = "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54" +abi = "abis/deployments/models/dojo_examples_models_moves.json" +name = "dojo_examples::models::moves" + +[[models.members]] +name = "player" +type = "ContractAddress" +key = true + +[[models.members]] +name = "remaining" +type = "u8" +key = false + +[[models.members]] +name = "last_direction" +type = "Direction" +key = false + +[[models]] +kind = "DojoModel" +class_hash = "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5" +original_class_hash = "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5" +abi = "abis/deployments/models/dojo_examples_models_position.json" +name = "dojo_examples::models::position" + +[[models.members]] +name = "player" +type = "ContractAddress" +key = true + 
+[[models.members]] +name = "vec" +type = "Vec2" +key = false diff --git a/examples/spawn-and-move/manifests/overlays/contracts/actions.toml b/examples/spawn-and-move/manifests/overlays/contracts/actions.toml deleted file mode 100644 index 0eb4bb1a43..0000000000 --- a/examples/spawn-and-move/manifests/overlays/contracts/actions.toml +++ /dev/null @@ -1,2 +0,0 @@ -name = "dojo_examples::actions::actions" -reads = [ "Moves", "Position" ] diff --git a/examples/spawn-and-move/scripts/default_auth.sh b/examples/spawn-and-move/scripts/default_auth.sh new file mode 100755 index 0000000000..2fd5e34e42 --- /dev/null +++ b/examples/spawn-and-move/scripts/default_auth.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# +# Run this script from within `examples/spawn-and-move`: +# `./scripts/default_auth.sh` +# +set -euo pipefail +pushd $(dirname "$0")/.. + +export RPC_URL="http://localhost:5050"; + +export WORLD_ADDRESS=$(cat ./manifests/dev/manifest.json | jq -r '.world.address') + +export ACTIONS_ADDRESS=$(cat ./manifests/dev/manifest.json | jq -r '.contracts[] | select(.kind == "DojoContract" ).address') + +echo "---------------------------------------------------------------------------" +echo world : $WORLD_ADDRESS +echo " " +echo actions : $ACTIONS_ADDRESS +echo "---------------------------------------------------------------------------" + +# List of the models. +MODELS=("Position" "Moves") + +AUTH_MODELS="" +# Grant the actions system write access to all the models. +for component in ${MODELS[@]}; do + AUTH_MODELS+="$component,$ACTIONS_ADDRESS " +done + +sozo auth grant writer $AUTH_MODELS + +echo "Default authorizations have been successfully set." diff --git a/examples/spawn-and-move/src/actions.cairo b/examples/spawn-and-move/src/actions.cairo index 2da040ff48..647109738b 100644 --- a/examples/spawn-and-move/src/actions.cairo +++ b/examples/spawn-and-move/src/actions.cairo @@ -21,13 +21,8 @@ mod actions { use dojo_examples::models::{Position, Moves, Direction, Vec2}; use dojo_examples::utils::next_position; - #[event] - #[derive(Drop, starknet::Event)] - enum Event { - Moved: Moved, - } - - #[derive(starknet::Event, Model, Copy, Drop, Serde)] + #[derive(Model, Copy, Drop, Serde)] + #[dojo::event] struct Moved { #[key] player: ContractAddress, @@ -67,13 +62,12 @@ mod actions { fn spawn(world: IWorldDispatcher) { let player = get_caller_address(); let position = get!(world, player, (Position)); - let moves = get!(world, player, (Moves)); - + set!( world, ( Moves { - player, remaining: moves.remaining + 1, last_direction: Direction::None(()) + player, remaining: 99, last_direction: Direction::None(()) }, Position { player, vec: Vec2 { x: position.vec.x + 10, y: position.vec.y + 10 } @@ -123,12 +117,13 @@ mod tests { // System calls actions_system.spawn(); + let initial_moves = get!(world, caller, Moves); actions_system.move(Direction::Right(())); let moves = get!(world, caller, Moves); let right_dir_felt: felt252 = Direction::Right(()).into(); - assert(moves.remaining == 0, 'moves is wrong'); + assert(moves.remaining == initial_moves.remaining - 1, 'moves is wrong'); assert(moves.last_direction.into() == right_dir_felt, 'last direction is wrong'); let new_position = get!(world, caller, Position); diff --git a/examples/spawn-and-move/src/models.cairo b/examples/spawn-and-move/src/models.cairo index c917342ece..f55e421d10 100644 --- a/examples/spawn-and-move/src/models.cairo +++ b/examples/spawn-and-move/src/models.cairo @@ -21,6 +21,22 @@ impl DirectionIntoFelt252 of Into<Direction, felt252> { } }
+#[derive(Serde, Copy, Drop, Introspect)] +enum Emote { + None, + Happy, + Sad, + Angry, + Love, +} + +#[derive(Model, Copy, Drop, Serde)] +struct EmoteMessage { + #[key] + identity: ContractAddress, + emote: Emote, +} + #[derive(Model, Copy, Drop, Serde)] struct Moves { #[key] diff --git a/monitoring/grafana/katana.json b/monitoring/grafana/katana.json new file mode 100644 index 0000000000..1e85a36de3 --- /dev/null +++ b/monitoring/grafana/katana.json @@ -0,0 +1,1171 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 2, + "links": [], + "liveNow": false, + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 122, + "panels": [], + "title": "Execution", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "The total amount of L1 gas that has been processed", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Total gas", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 121, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "katana_block_producer_l1_gas_processed_total{instance=\"localhost:9100\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "L1 Gas Processed", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "The total amount of Cairo steps that has been processed", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Total steps", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": 
"linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 1 + }, + "id": 123, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "katana_block_producer_cairo_steps_processed_total{instance=\"localhost:9100\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Cairo Steps Processed", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 9 + }, + "id": 108, + "panels": [], + "title": "RPC Server", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "http" + }, + "properties": [ + { + "id": "displayName", + "value": "HTTP" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "ws" + }, + "properties": [ + { + "id": "displayName", + "value": "WebSocket" + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 10 + }, + "id": 109, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "10.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum(katana_rpc_server_connections_connections_opened{instance=~\"$instance\"} - katana_rpc_server_connections_connections_closed{instance=~\"$instance\"}) by (transport)", + "format": "time_series", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "{{transport}}", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Active Connections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "", + "fieldConfig": { + "defaults": { + "custom": { + "hideFrom": { + "legend": false, + 
"tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 10 + }, + "id": 111, + "maxDataPoints": 25, + "options": { + "calculate": false, + "cellGap": 1, + "cellValues": { + "unit": "s" + }, + "color": { + "exponent": 0.2, + "fill": "dark-orange", + "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto", + "value": "Latency time" + }, + "tooltip": { + "mode": "single", + "showColorScale": false, + "yHistogram": false + }, + "yAxis": { + "axisLabel": "Quantile", + "axisPlacement": "left", + "reverse": false, + "unit": "percentunit" + } + }, + "pluginVersion": "10.4.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "code", + "exemplar": false, + "expr": "avg(max_over_time(katana_rpc_server_connections_request_time_seconds{instance=~\"$instance\"}[$__rate_interval]) > 0) by (quantile)", + "format": "time_series", + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "Request Latency time", + "type": "heatmap" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "points", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 120, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.1.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "code", + "expr": "max(max_over_time(katana_rpc_server_calls_time_seconds[$__rate_interval])) by (method) > 0", + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "Maximum call latency per method", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "", + "fieldConfig": { + "defaults": { + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "scaleDistribution": { + "type": "linear" + } + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 18 + }, + "id": 112, + "maxDataPoints": 25, + "options": { + "calculate": false, + "cellGap": 1, + "cellValues": { + "unit": "s" + }, + "color": { + "exponent": 0.2, + "fill": "dark-orange", 
+ "min": 0, + "mode": "opacity", + "reverse": false, + "scale": "exponential", + "scheme": "Oranges", + "steps": 128 + }, + "exemplars": { + "color": "rgba(255,0,255,0.7)" + }, + "filterValues": { + "le": 1e-9 + }, + "legend": { + "show": true + }, + "rowsFrame": { + "layout": "auto", + "value": "Latency time" + }, + "tooltip": { + "mode": "single", + "showColorScale": false, + "yHistogram": false + }, + "yAxis": { + "axisLabel": "Quantile", + "axisPlacement": "left", + "reverse": false, + "unit": "percentunit" + } + }, + "pluginVersion": "10.4.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "code", + "exemplar": false, + "expr": "avg(max_over_time(katana_rpc_server_calls_time_seconds{instance=~\"$instance\"}[$__rate_interval]) > 0) by (quantile)", + "format": "time_series", + "instant": false, + "legendFormat": "{{quantile}}", + "range": true, + "refId": "A" + } + ], + "title": "Call Latency time", + "type": "heatmap" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 26 + }, + "id": 97, + "panels": [], + "title": "Process", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "100% = 1 core", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 27 + }, + "id": 99, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "avg(rate(katana_process_cpu_seconds_total{instance=~\"$instance\"}[1m]))", + "instant": false, + "legendFormat": "Process", + "range": true, + "refId": "A" + } + ], + "title": "CPU", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": 
[], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "decbytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 27 + }, + "id": 101, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "code", + "expr": "katana_process_resident_memory_bytes{instance=~\"$instance\"}", + "instant": false, + "legendFormat": "Resident", + "range": true, + "refId": "A" + } + ], + "title": "Memory", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "decbytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 35 + }, + "id": 98, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "katana_jemalloc_active{instance=~\"$instance\"}", + "instant": false, + "legendFormat": "Active", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "katana_jemalloc_allocated{instance=~\"$instance\"}", + "hide": false, + "instant": false, + "legendFormat": "Allocated", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "katana_jemalloc_mapped{instance=~\"$instance\"}", + "hide": false, + "instant": false, + "legendFormat": "Mapped", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "katana_jemalloc_metadata{instance=~\"$instance\"}", + "hide": false, + "instant": false, + "legendFormat": "Metadata", + "range": true, + "refId": "D" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "katana_jemalloc_resident{instance=~\"$instance\"}", + "hide": false, + "instant": false, + "legendFormat": "Resident", + "range": true, + "refId": "E" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "katana_jemalloc_retained{instance=~\"$instance\"}", + "hide": false, + "instant": 
false, + "legendFormat": "Retained", + "range": true, + "refId": "F" + } + ], + "title": "Jemalloc Memory", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 35 + }, + "id": 100, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "editorMode": "builder", + "expr": "katana_process_open_fds{instance=~\"$instance\"}", + "instant": false, + "legendFormat": "Open", + "range": true, + "refId": "A" + } + ], + "title": "File Descriptors", + "type": "timeseries" + } + ], + "refresh": "5s", + "revision": 1, + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [ + { + "current": { + "selected": false, + "text": "localhost:9100", + "value": "localhost:9100" + }, + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "definition": "query_result(up)", + "hide": 0, + "includeAll": false, + "multi": false, + "name": "instance", + "options": [], + "query": { + "query": "query_result(up)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "/.*instance=\\\"([^\\\"]*).*/", + "skipUrlSync": false, + "sort": 0, + "type": "query" + } + ] + }, + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "katana", + "uid": "2k8BXz24x", + "version": 6, + "weekStart": "" +} diff --git a/monitoring/prometheus/config.yml b/monitoring/prometheus/config.yml new file mode 100644 index 0000000000..4fedc3b0c6 --- /dev/null +++ b/monitoring/prometheus/config.yml @@ -0,0 +1,6 @@ +scrape_configs: + - job_name: katana + metrics_path: "/" + scrape_interval: 5s + static_configs: + - targets: ["localhost:9100"] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 639f4f17d9..624eb0ea63 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "1.74.0" +channel = "1.76.0" diff --git a/scripts/clippy.sh b/scripts/clippy.sh index a3ec765c8c..dc726f10b9 100755 --- a/scripts/clippy.sh +++ b/scripts/clippy.sh @@ -1,5 +1,12 @@ #!/bin/bash +# Tells the shell to exit immediately if a command exits with a non-zero status +set -e +# Enables tracing of the commands as they are executed, showing the commands and their arguments +set -x +# Causes a pipeline to return a failure status if any command in the pipeline fails +set -o pipefail + run_clippy() { cargo clippy 
 --all-targets "$@" -- -D warnings -D future-incompatible -D nonstandard-style -D rust-2018-idioms -D unused
 }
diff --git a/scripts/teardown_test_artifacts.sh b/scripts/teardown_test_artifacts.sh
new file mode 100644
index 0000000000..8dec13caf1
--- /dev/null
+++ b/scripts/teardown_test_artifacts.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# When tests are run, the `build.rs` of `dojo-test-utils` rebuilds the Cairo
+# artifacts ONLY if they don't exist.
+# This script gives an easy way to remove those artifacts.
+
+rm -rf examples/spawn-and-move/target
+rm -rf examples/spawn-and-move/manifests
+
+rm -rf crates/torii/types-test/target
+rm -rf crates/torii/types-test/manifests
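
The new `examples/spawn-and-move/scripts/default_auth.sh` above collects one `Model,ContractAddress` pair per model before handing them to `sozo auth grant writer`. As a minimal sketch, assuming `ACTIONS_ADDRESS` has already been exported from `manifests/dev/manifest.json` exactly as the script does, the final command expands to:

```bash
#!/bin/bash
# Sketch only: ACTIONS_ADDRESS is assumed to be exported from
# manifests/dev/manifest.json, as in default_auth.sh. With
# MODELS=("Position" "Moves"), the loop builds
# AUTH_MODELS="Position,$ACTIONS_ADDRESS Moves,$ACTIONS_ADDRESS ",
# so the grant call is equivalent to:
sozo auth grant writer \
  "Position,$ACTIONS_ADDRESS" \
  "Moves,$ACTIONS_ADDRESS"
```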
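
The new `monitoring/prometheus/config.yml` scrapes a Katana metrics endpoint at `localhost:9100` (path `/`) every 5s, and `monitoring/grafana/katana.json` is a dashboard over those series. Below is a hedged sketch of wiring them up locally; the `--metrics` flag and port are assumptions about the katana CLI (verify with `katana --help`), while the Prometheus flag and scrape target come from the config in this diff:

```bash
#!/bin/bash
# Sketch, not a verified recipe.

# 1. Start katana with its Prometheus exporter on the address the scrape config
#    expects. NOTE: the `--metrics` flag is an assumption about the katana CLI;
#    check `katana --help` for the exact option in your version.
katana --metrics 127.0.0.1:9100 &

# 2. Sanity-check that metrics are served at "/" on localhost:9100, matching
#    monitoring/prometheus/config.yml.
curl -s http://localhost:9100/ | head

# 3. Run Prometheus against the repo config, then import
#    monitoring/grafana/katana.json into a Grafana instance that uses this
#    Prometheus server as its data source.
prometheus --config.file=monitoring/prometheus/config.yml
```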