diff --git a/.circleci/config.yml b/.circleci/config.yml
index b928a91c725..f7e8477ebe7 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -117,7 +117,7 @@ jobs:
PGPASSWORD=codarules psql -h localhost -p 5432 -U admin -d archiver -a -f src/app/archive/create_schema.sql
- run:
name: Archive node unit tests
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env`; dune runtest src/app/archive'
+ command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env`; export PATH="$HOME/.cargo/bin:$PATH" && dune runtest src/app/archive'
environment:
DUNE_PROFILE: test_archive_processor
- run:
@@ -183,7 +183,7 @@ jobs:
no_output_timeout: 1h
- run:
name: Build Archive Process
- command: bash -c 'eval `opam config env` && LIBP2P_NIXLESS=1 make build_archive'
+ command: bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && LIBP2P_NIXLESS=1 make build_archive'
environment:
DUNE_PROFILE: testnet_postake_medium_curves
# NOTE: If we were using a machine executor we would be able to mount the sql file in
@@ -327,7 +327,7 @@ jobs:
- run:
name: Compare test signatures for consensus, nonconsensus code
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && ./scripts/compare_test_signatures.sh'
+ command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && ./scripts/compare_test_signatures.sh'
update-branch-protection:
docker:
- image: python:3
@@ -610,7 +610,7 @@ jobs:
# See https://bkase.dev/posts/ocaml-writer#fn-3 for rationale
- run:
name: Generate PV keys
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && LIBP2P_NIXLESS=1 make build_pv_keys 2>&1 | tee /tmp/artifacts/buildocaml.log'
+ command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && PATH="$HOME/.cargo/bin:$PATH" LIBP2P_NIXLESS=1 make build_pv_keys 2>&1 | tee /tmp/artifacts/buildocaml.log'
environment:
DUNE_PROFILE: testnet_postake_medium_curves
EXTRA_NIX_ARGS: --option sandbox false
@@ -621,8 +621,8 @@ jobs:
- run:
name: Rebuild for pvkey changes
command: |
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && make build 2>&1 | tee /tmp/artifacts/buildocaml2.log'
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && dune build src/app/generate_keypair/generate_keypair.exe'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && make build 2>&1 | tee /tmp/artifacts/buildocaml2.log'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && dune build src/app/generate_keypair/generate_keypair.exe'
environment:
DUNE_PROFILE: testnet_postake_medium_curves
EXTRA_NIX_ARGS: --option sandbox false
@@ -633,11 +633,11 @@ jobs:
- run:
name: Output compiled ledger and genesis proof
command: |
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && dune exec --profile=testnet_postake_medium_curves src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && dune exec --profile=testnet_postake_medium_curves src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe'
- run:
name: Generate runtime ledger with 10k accounts
command: |
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && dune exec --profile=testnet_postake_medium_curves src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --config-file genesis_ledgers/phase_three/config.json'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && dune exec --profile=testnet_postake_medium_curves src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --config-file genesis_ledgers/phase_three/config.json'
no_output_timeout: 20m
- run:
name: Upload genesis data
@@ -1122,48 +1122,6 @@ jobs:
GO: /usr/lib/go/bin/go
- store_artifacts:
path: test_output/artifacts
- test--test_postake:
- resource_class: large
- docker:
- - image: codaprotocol/coda:toolchain-9924f4c56a40d65d36440e8f70b93720f29ba171
- steps:
-
- - run:
- name: Disable LFS checkout
- command: |
- git config --global filter.lfs.smudge "git-lfs smudge --skip %f"
- git config --global lfs.fetchexclude "*"
- - checkout
-
-
- - run:
- name: Update Submodules
- command: git submodule sync && git submodule update --init --recursive
- - run:
- name: Create opam cache signature file including a year/date stamp to ensure occasional rebuilds
- command: |
- cat scripts/setup-opam.sh > opam_ci_cache.sig
- cat src/opam.export >> opam_ci_cache.sig
- date +%Y-%m >> opam_ci_cache.sig
- - restore_cache:
- name: Restore cache - opam
- keys:
- - opam-linux-v1-{{ checksum "opam_ci_cache.sig" }}
- - run:
- name: Install opam dependencies - opam -- LIBP2P_NIXLESS=1 make setup-opam
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'LIBP2P_NIXLESS=1 make setup-opam'
-
- - run:
- name: Build libp2p_helper
- command: LIBP2P_NIXLESS=1 GO=/usr/lib/go/bin/go make libp2p_helper
- - run:
- name: Running test -- test_postake:full-test
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'source ~/.profile && ./scripts/test.py run --non-interactive --collect-artifacts --yes "test_postake:full-test"'
- - run:
- name: Running test -- test_postake:transaction-snark-profiler -k 2
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'source ~/.profile && ./scripts/test.py run --non-interactive --collect-artifacts --yes "test_postake:transaction-snark-profiler -k 2"'
- - store_artifacts:
- path: test_output/artifacts
test--test_postake_catchup:
resource_class: large
docker:
@@ -1558,7 +1516,6 @@ workflows:
- build-artifacts--testnet_postake_medium_curves
- test-unit--dev
- test-unit--nonconsensus_medium_curves
- - test--test_postake
- test--test_postake_catchup
- test--test_postake_five_even_txns
- test--test_postake_snarkless
diff --git a/.circleci/config.yml.jinja b/.circleci/config.yml.jinja
index bf7e169797f..48230d3852a 100644
--- a/.circleci/config.yml.jinja
+++ b/.circleci/config.yml.jinja
@@ -110,7 +110,7 @@ jobs:
PGPASSWORD=codarules psql -h localhost -p 5432 -U admin -d archiver -a -f src/app/archive/create_schema.sql
- run:
name: Archive node unit tests
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env`; dune runtest src/app/archive'
+ command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env`; export PATH="$HOME/.cargo/bin:$PATH" && dune runtest src/app/archive'
environment:
DUNE_PROFILE: test_archive_processor
- run:
@@ -152,7 +152,7 @@ jobs:
no_output_timeout: 1h
- run:
name: Build Archive Process
- command: bash -c 'eval `opam config env` && LIBP2P_NIXLESS=1 make build_archive'
+ command: bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && LIBP2P_NIXLESS=1 make build_archive'
environment:
DUNE_PROFILE: testnet_postake_medium_curves
# NOTE: If we were using a machine executor we would be able to mount the sql file in
@@ -224,7 +224,7 @@ jobs:
{{ opam_init_linux }}
- run:
name: Compare test signatures for consensus, nonconsensus code
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && ./scripts/compare_test_signatures.sh'
+ command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && ./scripts/compare_test_signatures.sh'
update-branch-protection:
docker:
- image: python:3
@@ -440,7 +440,7 @@ jobs:
# See https://bkase.dev/posts/ocaml-writer#fn-3 for rationale
- run:
name: Generate PV keys
- command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && LIBP2P_NIXLESS=1 make build_pv_keys 2>&1 | tee /tmp/artifacts/buildocaml.log'
+ command: ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && PATH="$HOME/.cargo/bin:$PATH" LIBP2P_NIXLESS=1 make build_pv_keys 2>&1 | tee /tmp/artifacts/buildocaml.log'
environment:
DUNE_PROFILE: {{profile}}
EXTRA_NIX_ARGS: --option sandbox false
@@ -451,8 +451,8 @@ jobs:
- run:
name: Rebuild for pvkey changes
command: |
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && make build 2>&1 | tee /tmp/artifacts/buildocaml2.log'
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && dune build src/app/generate_keypair/generate_keypair.exe'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'set -o pipefail; eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && make build 2>&1 | tee /tmp/artifacts/buildocaml2.log'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && dune build src/app/generate_keypair/generate_keypair.exe'
environment:
DUNE_PROFILE: {{profile}}
EXTRA_NIX_ARGS: --option sandbox false
@@ -463,11 +463,11 @@ jobs:
- run:
name: Output compiled ledger and genesis proof
command: |
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && dune exec --profile={{profile}} src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && dune exec --profile={{profile}} src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe'
- run:
name: Generate runtime ledger with 10k accounts
command: |
- ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && dune exec --profile={{profile}} src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --config-file genesis_ledgers/phase_three/config.json'
+ ./scripts/skip_if_only_frontend_or_rfcs.sh bash -c 'eval `opam config env` && export PATH="$HOME/.cargo/bin:$PATH" && dune exec --profile={{profile}} src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --config-file genesis_ledgers/phase_three/config.json'
no_output_timeout: 20m
- run:
name: Upload genesis data
diff --git a/.mergify.yml b/.mergify.yml
index 19f36b663be..606ab2dcbf1 100644
--- a/.mergify.yml
+++ b/.mergify.yml
@@ -10,7 +10,6 @@ pull_request_rules:
- "status-success=ci/circleci: test--dev--coda-bootstrap-test"
- "status-success=ci/circleci: test--dev--coda-delegation-test"
- "status-success=ci/circleci: test--dev--coda-shared-state-test"
- - "status-success=ci/circleci: test--test_postake"
- "status-success=ci/circleci: test--test_postake_five_even_txns"
- "status-success=ci/circleci: test--test_postake_snarkless"
- "status-success=ci/circleci: test--test_postake_split"
@@ -35,7 +34,6 @@ pull_request_rules:
- "status-success=ci/circleci: test--dev--coda-bootstrap-test"
- "status-success=ci/circleci: test--dev--coda-delegation-test"
- "status-success=ci/circleci: test--dev--coda-shared-state-test"
- - "status-success=ci/circleci: test--test_postake"
- "status-success=ci/circleci: test--test_postake_five_even_txns"
- "status-success=ci/circleci: test--test_postake_snarkless"
- "status-success=ci/circleci: test--test_postake_split"
diff --git a/CODEOWNERS b/CODEOWNERS
index 313025af160..9ee805f76d4 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -34,7 +34,6 @@
/src/lib/crypto_params/ @CodaProtocol/crypto-eng-reviewers
/src/lib/dummy_values/ @CodaProtocol/crypto-eng-reviewers
/src/lib/hash_prefixes/ @CodaProtocol/crypto-eng-reviewers
-/src/lib/keys_lib/ @CodaProtocol/crypto-eng-reviewers
/src/lib/non_zero_curve_point/ @CodaProtocol/crypto-eng-reviewers
/src/lib/outside_hash_image/ @CodaProtocol/crypto-eng-reviewers
/src/lib/pokolog/ @CodaProtocol/crypto-eng-reviewers
diff --git a/buildkite/src/Command/WithCargo.dhall b/buildkite/src/Command/WithCargo.dhall
new file mode 100644
index 00000000000..3476e4515c4
--- /dev/null
+++ b/buildkite/src/Command/WithCargo.dhall
@@ -0,0 +1,5 @@
+let withCargo
+ : Text -> Text
+ = \(str : Text) -> "export PATH=\"\$HOME/.cargo/bin:\$PATH\" && " ++ str
+
+in { withCargo = withCargo }
diff --git a/buildkite/src/Jobs/ArchiveNode.dhall b/buildkite/src/Jobs/ArchiveNode.dhall
index df178289848..890adc9a1ea 100644
--- a/buildkite/src/Jobs/ArchiveNode.dhall
+++ b/buildkite/src/Jobs/ArchiveNode.dhall
@@ -5,6 +5,7 @@ let Pipeline = ../Pipeline/Dsl.dhall
let JobSpec = ../Pipeline/JobSpec.dhall
let Command = ../Command/Base.dhall
let OpamInit = ../Command/OpamInit.dhall
+let WithCargo = ../Command/WithCargo.dhall
let Docker = ../Command/Docker/Type.dhall
let Size = ../Command/Size.dhall
in
@@ -40,7 +41,7 @@ Pipeline.build
(Prelude.Text.concatSep " && "
[ "bash buildkite/scripts/setup-database-for-archive-node.sh ${user} ${password} ${db}"
, "PGPASSWORD=${password} psql -h localhost -p 5432 -U ${user} -d ${db} -a -f src/app/archive/create_schema.sql"
- , "dune runtest src/app/archive"
+ , WithCargo.withCargo "dune runtest src/app/archive"
, "PGPASSWORD=codarules psql -h localhost -p 5432 -U admin -d archiver -a -f src/app/archive/drop_tables.sql"
, "PGPASSWORD=${password} psql -h localhost -p 5432 -U ${user} -d ${db} -a -f src/app/archive/create_schema.sql"
, "LIBP2P_NIXLESS=1 GO=/usr/lib/go/bin/go make libp2p_helper"
diff --git a/buildkite/src/Jobs/CompareSignatures.dhall b/buildkite/src/Jobs/CompareSignatures.dhall
index 62928547690..8f3e8edd9c0 100644
--- a/buildkite/src/Jobs/CompareSignatures.dhall
+++ b/buildkite/src/Jobs/CompareSignatures.dhall
@@ -4,6 +4,7 @@ let Cmd = ../Lib/Cmds.dhall
let Pipeline = ../Pipeline/Dsl.dhall
let Command = ../Command/Base.dhall
let OpamInit = ../Command/OpamInit.dhall
+let WithCargo = ../Command/WithCargo.dhall
let Docker = ../Command/Docker/Type.dhall
let Size = ../Command/Size.dhall
let JobSpec = ../Pipeline/JobSpec.dhall
@@ -22,7 +23,7 @@ Pipeline.build
, steps =
[ Command.build
Command.Config::
- { commands = OpamInit.andThenRunInDocker ([] : List Text) "./scripts/compare_test_signatures.sh"
+ { commands = OpamInit.andThenRunInDocker ([] : List Text) (WithCargo.withCargo "./scripts/compare_test_signatures.sh")
, label = "Compare test signatures"
, key = "compare-test-signatures"
, target = Size.Large
diff --git a/dockerfiles/Dockerfile-coda-demo b/dockerfiles/Dockerfile-coda-demo
index 7fdd34b74bd..6d172f42d2b 100644
--- a/dockerfiles/Dockerfile-coda-demo
+++ b/dockerfiles/Dockerfile-coda-demo
@@ -29,13 +29,13 @@ RUN echo "deb [trusted=yes] http://packages.o1test.net $deb_repo main" > /etc/ap
&& apt-get install --force-yes coda-testnet-postake-medium-curves=$coda_version -y \
&& mkdir /root/keys \
&& chmod go-rwx /root/keys \
- && echo '{"box_primitive":"xsalsa20poly1305","pw_primitive":"argon2i","nonce":"7S1YA5PinXhnLgLJ3xemVnVPWdJdhKZ9RSNQbns","pwsalt":"AzDoECCYyJL8KuoB2vrsVc9Wg3xJ","pwdiff":[134217728,6],"ciphertext":"5UQuiQVbXPmR63ikri792dWR6Dz5dYZm8dLzwDyqWovdP5CzrLY6Fjw3QTHXA9J3PDkPZpvhrQfGkgU81kr9184dfoJDhn5EXxJMCAM44SZdmBYVszEQaSQnyy4BwsbRXmfjBMSW9ooGu2a5dFi5KHX5na6fr62VUB"}' > /root/keys/demo-block-producer \
+ && echo '{"box_primitive":"xsalsa20poly1305","pw_primitive":"argon2i","nonce":"8jGuTAxw3zxtWasVqcD1H6rEojHLS1yJmG3aHHd","pwsalt":"AiUCrMJ6243h3TBmZ2rqt3Voim1Y","pwdiff":[134217728,6],"ciphertext":"DbAy736GqEKWe9NQWT4yaejiZUo9dJ6rsK7cpS43APuEf5AH1Qw6xb1s35z8D2akyLJBrUr6m"}' > /root/keys/demo-block-producer \
&& chmod go-rwx /root/keys/demo-block-producer \
&& mkdir -p ~/.coda-config/wallets/store \
- && echo '4vsRCVMNTrCx4NpN6kKTkFKLcFN4vXUP5RB9PqSZe1qsyDs4AW5XeNgAf16WUPRBCakaPiXcxjp6JUpGNQ6fdU977x5LntvxrSg11xrmK6ZDaGSMEGj12dkeEpyKcEpkzcKwYWZ2Yf2vpwQP' > ~/.coda-config/wallets/store/4vsRCVMNTrCx4NpN6kKTkFKLcFN4vXUP5RB9PqSZe1qsyDs4AW5XeNgAf16WUPRBCakaPiXcxjp6JUpGNQ6fdU977x5LntvxrSg11xrmK6ZDaGSMEGj12dkeEpyKcEpkzcKwYWZ2Yf2vpwQP.pub \
- && cp /root/keys/demo-block-producer ~/.coda-config/wallets/store/4vsRCVMNTrCx4NpN6kKTkFKLcFN4vXUP5RB9PqSZe1qsyDs4AW5XeNgAf16WUPRBCakaPiXcxjp6JUpGNQ6fdU977x5LntvxrSg11xrmK6ZDaGSMEGj12dkeEpyKcEpkzcKwYWZ2Yf2vpwQP \
+ && echo 'B62qrPN5Y5yq8kGE3FbVKbGTdTAJNdtNtB5sNVpxyRwWGcDEhpMzc8g' > ~/.coda-config/wallets/store/B62qrPN5Y5yq8kGE3FbVKbGTdTAJNdtNtB5sNVpxyRwWGcDEhpMzc8g.pub \
+ && cp /root/keys/demo-block-producer ~/.coda-config/wallets/store/B62qrPN5Y5yq8kGE3FbVKbGTdTAJNdtNtB5sNVpxyRwWGcDEhpMzc8g \
&& rm /var/lib/coda/genesis* \
- && echo '{"ledger":{"accounts":[{"pk":"4vsRCVMNTrCx4NpN6kKTkFKLcFN4vXUP5RB9PqSZe1qsyDs4AW5XeNgAf16WUPRBCakaPiXcxjp6JUpGNQ6fdU977x5LntvxrSg11xrmK6ZDaGSMEGj12dkeEpyKcEpkzcKwYWZ2Yf2vpwQP","balance":"66000","sk":null,"delegate":null}]}}' > /config.json \
+ && echo '{"ledger":{"accounts":[{"pk":"B62qrPN5Y5yq8kGE3FbVKbGTdTAJNdtNtB5sNVpxyRwWGcDEhpMzc8g","balance":"66000","sk":null,"delegate":null}]}}' > /config.json \
&& coda-create-genesis --genesis-dir /var/lib/coda --config-file /config.json
diff --git a/docs/demo.md b/docs/demo.md
index 6118817987f..40a0ea974c9 100644
--- a/docs/demo.md
+++ b/docs/demo.md
@@ -5,10 +5,10 @@ If all you need is a running daemon and some blocks, the
configuration as the testnet, but instead of the community participants ledger
it uses a simple ledger with a single demo account.
-The public key of the demo account is `4vsRCVMNTrCx4NpN6kKTkFKLcFN4vXUP5RB9PqSZe1qsyDs4AW5XeNgAf16WUPRBCakaPiXcxjp6JUpGNQ6fdU977x5LntvxrSg11xrmK6ZDaGSMEGj12dkeEpyKcEpkzcKwYWZ2Yf2vpwQP`, with the following private key file (the password is the empty string):
+The public key of the demo account is `B62qrPN5Y5yq8kGE3FbVKbGTdTAJNdtNtB5sNVpxyRwWGcDEhpMzc8g`, with the following private key file (the password is the empty string):
```
-{"box_primitive":"xsalsa20poly1305","pw_primitive":"argon2i","nonce":"7S1YA5PinXhnLgLJ3xemVnVPWdJdhKZ9RSNQbns","pwsalt":"AzDoECCYyJL8KuoB2vrsVc9Wg3xJ","pwdiff":[134217728,6],"ciphertext":"5UQuiQVbXPmR63ikri792dWR6Dz5dYZm8dLzwDyqWovdP5CzrLY6Fjw3QTHXA9J3PDkPZpvhrQfGkgU81kr9184dfoJDhn5EXxJMCAM44SZdmBYVszEQaSQnyy4BwsbRXmfjBMSW9ooGu2a5dFi5KHX5na6fr62VUB"}
+{"box_primitive":"xsalsa20poly1305","pw_primitive":"argon2i","nonce":"8jGuTAxw3zxtWasVqcD1H6rEojHLS1yJmG3aHHd","pwsalt":"AiUCrMJ6243h3TBmZ2rqt3Voim1Y","pwdiff":[134217728,6],"ciphertext":"DbAy736GqEKWe9NQWT4yaejiZUo9dJ6rsK7cpS43APuEf5AH1Qw6xb1s35z8D2akyLJBrUr6m"}
```
This account has 100% of the stake.
diff --git a/frontend/client_sdk/package.json b/frontend/client_sdk/package.json
index 49279b6b256..78577258b37 100644
--- a/frontend/client_sdk/package.json
+++ b/frontend/client_sdk/package.json
@@ -6,7 +6,7 @@
"build": "bsb -make-world && tsc src/SDKWrapper.ts -d",
"start": "bsb -make-world -w",
"clean": "bsb -clean-world",
- "make-jsoo": "make -C ../.. client_sdk && cp ../../_build/default/src/app/client_sdk/client_sdk.bc.js src",
+ "make-jsoo": "make -C ../.. client_sdk && cp ../../_build/default/src/app/client_sdk/client_sdk.bc.js src && chmod 0666 src/client_sdk.bc.js",
"prepublishOnly": "yarn make-jsoo && yarn build && yarn test",
"test": "tsc test/Test.ts && node test/Test.js"
},
diff --git a/frontend/wallet/src/render/views/sidebar/UnlockModal.re b/frontend/wallet/src/render/views/sidebar/UnlockModal.re
index a99abecc3e7..c005f3111a3 100644
--- a/frontend/wallet/src/render/views/sidebar/UnlockModal.re
+++ b/frontend/wallet/src/render/views/sidebar/UnlockModal.re
@@ -82,7 +82,7 @@ let make = (~account, ~onClose, ~onSuccess) => {
{React.string(" ")}
{React.string(". ")}
- {if (PublicKey.toString(account) == "4vsRCVMNTrCx4NpN6kKTkFKLcFN4vXUP5RB9PqSZe1qsyDs4AW5XeNgAf16WUPRBCakaPiXcxjp6JUpGNQ6fdU977x5LntvxrSg11xrmK6ZDaGSMEGj12dkeEpyKcEpkzcKwYWZ2Yf2vpwQP") {
+ {if (PublicKey.toString(account) == "B62qrPN5Y5yq8kGE3FbVKbGTdTAJNdtNtB5sNVpxyRwWGcDEhpMzc8g") {
@@ -956,6 +957,9 @@ let ensure_testnet_id_still_good _ = Deferred.unit
[%%endif]
let snark_hashes =
+ let module Hashes = struct
+ type t = string list [@@deriving to_yojson]
+ end in
let open Command.Let_syntax in
Command.basic ~summary:"List hashes of proving and verification keys"
[%map_open
@@ -965,8 +969,8 @@ let snark_hashes =
if json then
print
(Yojson.Safe.to_string
- (Snark_keys.key_hashes_to_yojson Snark_keys.key_hashes))
- else List.iter Snark_keys.key_hashes ~f:print]
+ (Hashes.to_yojson Precomputed_values.key_hashes))
+ else List.iter Precomputed_values.key_hashes ~f:print]
let internal_commands =
[ (Snark_worker.Intf.command_name, Snark_worker.command)
@@ -1032,7 +1036,6 @@ let coda_commands logger =
; ("advanced", Client.advanced)
; ("internal", Command.group ~summary:"Internal commands" internal_commands)
; (Parallel.worker_command_name, Parallel.worker_command)
- ; (Snark_flame_graphs.name, Snark_flame_graphs.command)
; ("transaction-snark-profiler", Transaction_snark_profiler.command) ]
[%%if
@@ -1067,7 +1070,6 @@ let coda_commands logger =
; (module Coda_change_snark_worker_test)
; (module Full_test)
; (module Transaction_snark_profiler)
- ; (module Snark_flame_graphs)
; (module Coda_archive_node_test)
; (module Coda_archive_processor_test) ]
: (module Integration_test) list )
diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml
index 60499b514df..f22ba6481d9 100644
--- a/src/app/cli/src/init/client.ml
+++ b/src/app/cli/src/init/client.ml
@@ -686,7 +686,7 @@ let constraint_system_digests =
(Command.Param.return (fun () ->
let all =
Transaction_snark.constraint_system_digests ()
- @ Blockchain_snark.Blockchain_transition.constraint_system_digests
+ @ Blockchain_snark.Blockchain_snark_state.constraint_system_digests
()
in
let all =
diff --git a/src/app/cli/src/init/snark_flame_graphs.ml b/src/app/cli/src/init/snark_flame_graphs.ml
deleted file mode 100644
index a50d17d0612..00000000000
--- a/src/app/cli/src/init/snark_flame_graphs.ml
+++ /dev/null
@@ -1,34 +0,0 @@
-open Core
-open Snark_params
-
-let name = "snark-flame-graphs"
-
-let main () =
- let open Blockchain_snark.Blockchain_transition in
- let module M = Make (Transaction_snark.Verification.Make (struct
- let keys = Transaction_snark.Keys.Verification.dummy
- end)) in
- let module W = M.Wrap_base (struct
- let verification_key = Keys.Verification.dummy.step
- end) in
- let cwd = Sys.getcwd () in
- let module L_Tick = Snarky_log.Constraints (Snark_params.Tick) in
- let module L_Tock = Snarky_log.Constraints (Snark_params.Tock) in
- let logger = Logger.create () in
- let log main typ = Snarky.Checked.(exists typ >>= main) in
- let logs =
- [ ( "step"
- , L_Tick.log
- (log
- (M.Step_base.main ~logger ~proof_level:Full
- ~constraint_constants:
- Genesis_constants.Constraint_constants.compiled)
- Tick.Field.typ) )
- ; ("wrap", L_Tock.log (log W.main Crypto_params.Wrap_input.typ)) ]
- in
- List.iter logs ~f:(fun (name, log) ->
- Snarky_log.to_file (cwd ^/ name ^ ".flame-graph") log )
-
-let command =
- let open Command.Let_syntax in
- Command.basic ~summary:"transaction snark profiler" (return main)
diff --git a/src/app/cli/src/init/transaction_snark_profiler.ml b/src/app/cli/src/init/transaction_snark_profiler.ml
index 64c8fa5209e..94fae28fa10 100644
--- a/src/app/cli/src/init/transaction_snark_profiler.ml
+++ b/src/app/cli/src/init/transaction_snark_profiler.ml
@@ -145,7 +145,7 @@ let pending_coinbase_stack_target (t : Transaction.t) stack =
(* This gives the "wall-clock time" to snarkify the given list of transactions, assuming
unbounded parallelism. *)
let profile (module T : Transaction_snark.S) sparse_ledger0
- (transitions : Transaction.t list) preeval =
+ (transitions : Transaction.t list) _ =
let constraint_constants = Genesis_constants.Constraint_constants.compiled in
let txn_global_slot = Lazy.force curr_global_slot in
let (base_proof_time, _, _), base_proofs =
@@ -167,8 +167,7 @@ let profile (module T : Transaction_snark.S) sparse_ledger0
in
let span, proof =
time (fun () ->
- T.of_transaction ?preeval ~constraint_constants
- ~sok_digest:Sok_message.Digest.default
+ T.of_transaction ~sok_digest:Sok_message.Digest.default
~source:(Sparse_ledger.merkle_root sparse_ledger)
~target:(Sparse_ledger.merkle_root sparse_ledger')
~init_stack:coinbase_stack_source ~next_available_token_before
@@ -311,10 +310,7 @@ let main num_transactions repeats preeval () =
Snarky.Libsnark.set_no_profiling false ;
Snarky.Libsnark.set_printing_off () ;
Test_util.with_randomness 123456789 (fun () ->
- let keys = Transaction_snark.Keys.create () in
- let module T = Transaction_snark.Make (struct
- let keys = keys
- end) in
+ let module T = Transaction_snark.Make () in
run (profile (module T)) num_transactions repeats preeval )
let dry num_transactions repeats preeval () =
diff --git a/src/app/cli/src/tests/coda_processes.ml b/src/app/cli/src/tests/coda_processes.ml
index abbe980c203..dd2c6057099 100644
--- a/src/app/cli/src/tests/coda_processes.ml
+++ b/src/app/cli/src/tests/coda_processes.ml
@@ -92,7 +92,7 @@ let local_configs ?block_production_interval
in
configs
-let stabalize_and_start_or_timeout ?(timeout_ms = 20000.) nodes =
+let stabalize_and_start_or_timeout ?(timeout_ms = 60000.) nodes =
let ready () =
let check_ready node =
let%map peers = Coda_process.peers_exn node in
diff --git a/src/app/cli/src/tests/full_test.ml b/src/app/cli/src/tests/full_test.ml
index f40b339efe3..9e0e346213b 100644
--- a/src/app/cli/src/tests/full_test.ml
+++ b/src/app/cli/src/tests/full_test.ml
@@ -38,7 +38,7 @@ let with_check = false
[%%endif]
[%%if
-curve_size = 753]
+curve_size = 255]
let medium_curves = true
diff --git a/src/app/client_sdk/string_sign.ml b/src/app/client_sdk/string_sign.ml
index 3de1cf83639..55d40fed3b0 100644
--- a/src/app/client_sdk/string_sign.ml
+++ b/src/app/client_sdk/string_sign.ml
@@ -63,6 +63,7 @@ module Message = struct
; string_bits t ]
|> Array.of_list |> Blake2.bits_to_string |> Blake2.digest_string
|> Blake2.to_raw_string |> Blake2.string_to_bits |> Array.to_list
+ |> Base.(Fn.flip List.take (Int.min 256 (Tock.Field.size_in_bits - 1)))
|> Tock.Field.project
let hash t ~public_key ~r =
diff --git a/src/app/client_sdk/tests/test_signatures.js b/src/app/client_sdk/tests/test_signatures.js
index dbe73475ea7..60d85f7ee1f 100644
--- a/src/app/client_sdk/tests/test_signatures.js
+++ b/src/app/client_sdk/tests/test_signatures.js
@@ -2,16 +2,16 @@ var coda = require("../../../../_build/default/src/app/client_sdk/client_sdk.bc.
var keypair = {
privateKey:
- "6BnSDyt3FKhJSt5oDk1HHeM5J8uKSnp7eaSYndj53y7g7oYzUEhHFrkpk6po4XfNFyjtoJK4ovVHvmCgdUqXVEfTXoAC1CNpaGLAKtu7ah9i4dTi3FtcoKpZhtiTGrRQkEN6Q95cb39Kp",
+ "EKFdTXQKPsEi2JUSE3JkmKtKFu8uDcgc5MmR5zj6nz5FUWPVuK6c",
publicKey:
- "4vsRCVnc5xmYJhaVbUgkg6po6nR3Mu7KEFunP3uQL67qZmPNnJKev57TRvMfuJ15XDP8MjaLSh7THG7CpTiTkfgRcQAKGmFo1XGMStCucmWAxBUiXjycDbx7hbVCqkDYiezM8Lvr1NMdTEGU",
+ "B62qkef7po74VEvJYcLYsdZ83FuKidgNZ8Xiaitzo8gKJXaxLwxgG7T",
};
var receiver =
- "4vsRCVHzeYYbneMkHR3u445f8zYwo6nhx3UHKZQH7B2txTV5Shz66Ds9PdxoRKCiALWtuwPQDwpm2Kj22QPcZpKCLr6rnHmUMztKpWxL9meCPQcTkKhmK5HyM4Y9dMnTKrEjD1MX71kLTUaP";
+ "B62qnekV6LVbEttV7j3cxJmjSbxDWuXa5h3KeVEXHPGKTzthQaBufrY";
var newDelegate =
- "4vsRCVQNkGihARy4Jg9FsJ6NFtnwDsRnTqi2gQnPAoCNUoyLveY6FEnicGMmwEumPx3GjLxAb5fAivVSLnYRPPMfb5HdkhLdjHunjgqp6g7gYi8cWy4avdmHMRomaKkWyWeWn91w7baaFnUk";
+ "B62qoW9n8n54FqHV8dPp7eCtpAZS1Jw9zsK7AHHiYmUzi6Wvms8reqt";
var payments = [
{
diff --git a/src/app/client_sdk/tests/test_signatures.ml b/src/app/client_sdk/tests/test_signatures.ml
index 54cd703eefe..98bcc8ac61e 100644
--- a/src/app/client_sdk/tests/test_signatures.ml
+++ b/src/app/client_sdk/tests/test_signatures.ml
@@ -27,13 +27,13 @@ open Signature_lib
let signer_pk =
Public_key.Compressed.of_base58_check_exn
- "4vsRCVnc5xmYJhaVbUgkg6po6nR3Mu7KEFunP3uQL67qZmPNnJKev57TRvMfuJ15XDP8MjaLSh7THG7CpTiTkfgRcQAKGmFo1XGMStCucmWAxBUiXjycDbx7hbVCqkDYiezM8Lvr1NMdTEGU"
+ "B62qkef7po74VEvJYcLYsdZ83FuKidgNZ8Xiaitzo8gKJXaxLwxgG7T"
(* signer *)
let keypair =
let private_key =
Private_key.of_base58_check_exn
- "6BnSDyt3FKhJSt5oDk1HHeM5J8uKSnp7eaSYndj53y7g7oYzUEhHFrkpk6po4XfNFyjtoJK4ovVHvmCgdUqXVEfTXoAC1CNpaGLAKtu7ah9i4dTi3FtcoKpZhtiTGrRQkEN6Q95cb39Kp"
+ "EKFdTXQKPsEi2JUSE3JkmKtKFu8uDcgc5MmR5zj6nz5FUWPVuK6c"
in
let public_key = Public_key.decompress_exn signer_pk in
Keypair.{public_key; private_key}
@@ -41,12 +41,12 @@ let keypair =
(* payment receiver *)
let receiver =
Public_key.Compressed.of_base58_check_exn
- "4vsRCVHzeYYbneMkHR3u445f8zYwo6nhx3UHKZQH7B2txTV5Shz66Ds9PdxoRKCiALWtuwPQDwpm2Kj22QPcZpKCLr6rnHmUMztKpWxL9meCPQcTkKhmK5HyM4Y9dMnTKrEjD1MX71kLTUaP"
+ "B62qnekV6LVbEttV7j3cxJmjSbxDWuXa5h3KeVEXHPGKTzthQaBufrY"
(* delegatee *)
let new_delegate =
Public_key.Compressed.of_base58_check_exn
- "4vsRCVQNkGihARy4Jg9FsJ6NFtnwDsRnTqi2gQnPAoCNUoyLveY6FEnicGMmwEumPx3GjLxAb5fAivVSLnYRPPMfb5HdkhLdjHunjgqp6g7gYi8cWy4avdmHMRomaKkWyWeWn91w7baaFnUk"
+ "B62qoW9n8n54FqHV8dPp7eCtpAZS1Jw9zsK7AHHiYmUzi6Wvms8reqt"
let make_common ~fee ~fee_payer_pk ~nonce ~valid_until memo =
let fee = Currency.Fee.of_int fee in
diff --git a/src/app/runtime_genesis_ledger/dune b/src/app/runtime_genesis_ledger/dune
index 644fbd9549a..24db46ea394 100644
--- a/src/app/runtime_genesis_ledger/dune
+++ b/src/app/runtime_genesis_ledger/dune
@@ -11,7 +11,6 @@
coda_genesis_proof
coda_state
genesis_ledger_helper
- keys_lib
coda_runtime_config
snark_keys
snark_params
diff --git a/src/app/test_executive/test_executive.ml b/src/app/test_executive/test_executive.ml
index 011bbef5cf3..6c7e28b0da5 100644
--- a/src/app/test_executive/test_executive.ml
+++ b/src/app/test_executive/test_executive.ml
@@ -23,8 +23,8 @@ let network_config_from_inputs {coda_image; _} =
; num_snark_workers= 4
; snark_worker_fee= "0.025"
; snark_worker_public_key=
- "4vsRCVQZ41uqXfVVfkBNUuNNS7PgSJGdMDNAyKGDdU1WkdxxyxQ7oMdFcjDRf45fiGKkdYKkLPBrE1KnxmyBuvaTW97A5C8XjNSiJmvo9oHa4AwyVsZ3ACaspgQ3EyxQXk6uujaxzvQhbLDx"
- }
+ (* NB: This is currently a dummy key (the key from the demo node) *)
+ "B62qmV8K4tpFaDhda9RvnG4H8H4z4cxUxGRsifwtbLT7jt1JpXbz6Hw" }
let main inputs () =
don't_wait_for
diff --git a/src/config/curve/medium.mlh b/src/config/curve/medium.mlh
index baa195501fd..69648940713 100644
--- a/src/config/curve/medium.mlh
+++ b/src/config/curve/medium.mlh
@@ -1 +1 @@
-[%%define curve_size 753]
+[%%define curve_size 255]
diff --git a/src/config/curve/small.mlh b/src/config/curve/small.mlh
deleted file mode 100644
index b2c0c949dd6..00000000000
--- a/src/config/curve/small.mlh
+++ /dev/null
@@ -1 +0,0 @@
-[%%define curve_size 298]
diff --git a/src/config/debug.mlh b/src/config/debug.mlh
index 1012e274a8b..e60e53f6e92 100644
--- a/src/config/debug.mlh
+++ b/src/config/debug.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/standard.mlh"]
diff --git a/src/config/dev.mlh b/src/config/dev.mlh
index e7e8e21f982..9130b1b5bc8 100644
--- a/src/config/dev.mlh
+++ b/src/config/dev.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/small.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/dev_frontend.mlh b/src/config/dev_frontend.mlh
index 68cea1c70ca..4790d5f0a04 100644
--- a/src/config/dev_frontend.mlh
+++ b/src/config/dev_frontend.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/standard.mlh"]
diff --git a/src/config/dev_snark.mlh b/src/config/dev_snark.mlh
index e916f804e91..29ef4b9b450 100644
--- a/src/config/dev_snark.mlh
+++ b/src/config/dev_snark.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/standard.mlh"]
diff --git a/src/config/fake_hash.mlh b/src/config/fake_hash.mlh
index c1e765f15fd..d5d7acfb7a8 100644
--- a/src/config/fake_hash.mlh
+++ b/src/config/fake_hash.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/tiny.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/fuzz_small.mlh b/src/config/fuzz_small.mlh
index 404ba803759..0255cdab324 100644
--- a/src/config/fuzz_small.mlh
+++ b/src/config/fuzz_small.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/tiny.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/print_versioned_types.mlh b/src/config/print_versioned_types.mlh
index 710e0b08076..a48df07ffc8 100644
--- a/src/config/print_versioned_types.mlh
+++ b/src/config/print_versioned_types.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/small.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/test_archive_processor.mlh b/src/config/test_archive_processor.mlh
index c1e765f15fd..d5d7acfb7a8 100644
--- a/src/config/test_archive_processor.mlh
+++ b/src/config/test_archive_processor.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/tiny.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/test_postake.mlh b/src/config/test_postake.mlh
index 1b5c6674931..40b332e904b 100644
--- a/src/config/test_postake.mlh
+++ b/src/config/test_postake.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/tiny.mlh"]
diff --git a/src/config/test_postake_catchup.mlh b/src/config/test_postake_catchup.mlh
index 38e21f55908..dafe0ba6380 100644
--- a/src/config/test_postake_catchup.mlh
+++ b/src/config/test_postake_catchup.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/tiny.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/test_postake_five_even_txns.mlh b/src/config/test_postake_five_even_txns.mlh
index 8d4d31dea87..41db5e9aefd 100644
--- a/src/config/test_postake_five_even_txns.mlh
+++ b/src/config/test_postake_five_even_txns.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/tiny.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/test_postake_full_epoch.mlh b/src/config/test_postake_full_epoch.mlh
index e927dcd382d..c5e770d533a 100644
--- a/src/config/test_postake_full_epoch.mlh
+++ b/src/config/test_postake_full_epoch.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_nano.mlh"]
[%%import "/src/config/scan_state/tiny.mlh"]
diff --git a/src/config/test_postake_holy_grail.mlh b/src/config/test_postake_holy_grail.mlh
index 3ddccde7abb..b1321305bc1 100644
--- a/src/config/test_postake_holy_grail.mlh
+++ b/src/config/test_postake_holy_grail.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/tiny.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/test_postake_snarkless.mlh b/src/config/test_postake_snarkless.mlh
index 7058f671ba0..57036ea2e59 100644
--- a/src/config/test_postake_snarkless.mlh
+++ b/src/config/test_postake_snarkless.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/small.mlh"]
diff --git a/src/config/test_postake_split.mlh b/src/config/test_postake_split.mlh
index 7691d50fac3..9fbcfe60f97 100644
--- a/src/config/test_postake_split.mlh
+++ b/src/config/test_postake_split.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/small.mlh"]
diff --git a/src/config/test_postake_three_producers.mlh b/src/config/test_postake_three_producers.mlh
index a810ae927ea..f078ced73df 100644
--- a/src/config/test_postake_three_producers.mlh
+++ b/src/config/test_postake_three_producers.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/tiny.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/testnet_postake.mlh b/src/config/testnet_postake.mlh
index 60c86a423f3..49bca0b7c12 100644
--- a/src/config/testnet_postake.mlh
+++ b/src/config/testnet_postake.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/small.mlh"]
diff --git a/src/config/testnet_postake_many_producers.mlh b/src/config/testnet_postake_many_producers.mlh
index 87259f80d75..7fcaafc2186 100644
--- a/src/config/testnet_postake_many_producers.mlh
+++ b/src/config/testnet_postake_many_producers.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/small.mlh"]
diff --git a/src/config/testnet_postake_medium_curves.mlh b/src/config/testnet_postake_medium_curves.mlh
index e3dbed09172..be8b3aace66 100644
--- a/src/config/testnet_postake_medium_curves.mlh
+++ b/src/config/testnet_postake_medium_curves.mlh
@@ -23,7 +23,7 @@
[%%define genesis_ledger "testnet_postake"]
-[%%define genesis_state_timestamp "2020-05-12 10:00:00-07:00"]
+[%%define genesis_state_timestamp "2020-07-24 12:40:00-07:00"]
[%%define block_window_duration 180000]
[%%define integration_tests false]
diff --git a/src/config/testnet_postake_snarkless.mlh b/src/config/testnet_postake_snarkless.mlh
index ac40c5990ed..2ff819fe79b 100644
--- a/src/config/testnet_postake_snarkless.mlh
+++ b/src/config/testnet_postake_snarkless.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/small.mlh"]
diff --git a/src/config/testnet_postake_snarkless_fake_hash.mlh b/src/config/testnet_postake_snarkless_fake_hash.mlh
index 925eff6dab1..2268277fbd0 100644
--- a/src/config/testnet_postake_snarkless_fake_hash.mlh
+++ b/src/config/testnet_postake_snarkless_fake_hash.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_tiny.mlh"]
[%%import "/src/config/scan_state/medium.mlh"]
diff --git a/src/config/testnet_public.mlh b/src/config/testnet_public.mlh
index 1e6baf0312a..ccd7e54d4b7 100644
--- a/src/config/testnet_public.mlh
+++ b/src/config/testnet_public.mlh
@@ -1,5 +1,5 @@
[%%import "/src/config/ledger_depth/full.mlh"]
-[%%import "/src/config/curve/small.mlh"]
+[%%import "/src/config/curve/medium.mlh"]
[%%import "/src/config/coinbase/standard.mlh"]
[%%import "/src/config/consensus/postake_short.mlh"]
[%%import "/src/config/scan_state/standard.mlh"]
diff --git a/src/curve_choice.opam b/src/curve_choice.opam
deleted file mode 100644
index 3f309a68f5b..00000000000
--- a/src/curve_choice.opam
+++ /dev/null
@@ -1,6 +0,0 @@
-opam-version: "1.2"
-version: "0.1"
-build: [
- ["dune" "build" "--only" "src" "--root" "." "-j" jobs "@install"]
-]
-
diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml
index c16d701d359..c60860fa4ce 100644
--- a/src/lib/block_producer/block_producer.ml
+++ b/src/lib/block_producer/block_producer.ml
@@ -234,17 +234,6 @@ let generate_next_state ~constraint_constants ~previous_protocol_state
measure "making Snark and Internal transitions" (fun () ->
let snark_transition =
Snark_transition.create_value
- ?sok_digest:
- (Option.map ledger_proof_opt ~f:(fun (proof, _) ->
- Ledger_proof.sok_digest proof ))
- ?ledger_proof:
- (Option.map ledger_proof_opt ~f:(fun (proof, _) ->
- Ledger_proof.underlying_proof proof ))
- ~supply_increase:
- (Option.value_map ~default:Currency.Amount.zero
- ~f:(fun (proof, _) ->
- (Ledger_proof.statement proof).supply_increase )
- ledger_proof_opt)
~blockchain_state:
(Protocol_state.blockchain_state protocol_state)
~consensus_transition:consensus_transition_data
@@ -261,6 +250,8 @@ let generate_next_state ~constraint_constants ~previous_protocol_state
~prover_state:
(Consensus.Data.Block_data.prover_state block_data)
~staged_ledger_diff:(Staged_ledger_diff.forget diff)
+ ~ledger_proof:
+ (Option.map ledger_proof_opt ~f:(fun (proof, _) -> proof))
in
let witness =
{ Pending_coinbase_witness.pending_coinbases=
diff --git a/src/lib/blockchain_snark/blockchain_snark_state.ml b/src/lib/blockchain_snark/blockchain_snark_state.ml
index 6456610daa3..0302a0104f5 100644
--- a/src/lib/blockchain_snark/blockchain_snark_state.ml
+++ b/src/lib/blockchain_snark/blockchain_snark_state.ml
@@ -1,200 +1,390 @@
-(* TODO: rename *)
-
open Core_kernel
open Snark_params
open Tick
open Coda_base
open Coda_state
+open Pickles_types
+
+include struct
+ open Snarky.Request
+
+ type _ t +=
+ | Prev_state : Protocol_state.Value.t t
+ | Transition : Snark_transition.Value.t t
+end
-module type Update_intf = sig
- module Checked : sig
- val update :
- logger:Logger.t
- -> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> State_hash.var * State_body_hash.var * Protocol_state.var
- -> Snark_transition.var
- -> ( State_hash.var * Protocol_state.var * [`Success of Boolean.var]
- , _ )
- Checked.t
- end
+module Witness = struct
+ type t =
+ {prev_state: Protocol_state.Value.t; transition: Snark_transition.Value.t}
end
-module Make_update (T : Transaction_snark.Verification.S) = struct
- module Checked = struct
- (* Blockchain_snark ~old ~nonce ~ledger_snark ~ledger_hash ~timestamp ~new_hash
- Input:
- old : Blockchain.t
- old_snark : proof
- nonce : int
- work_snark : proof
- ledger_hash : Ledger_hash.t
- timestamp : Time.t
- new_hash : State_hash.t
- Witness:
- transition : Transition.t
- such that
- the old_snark verifies against old
- new = update_with_asserts(old, nonce, timestamp, ledger_hash)
- hash(new) = new_hash
- the work_snark verifies against the old.ledger_hash and new_ledger_hash
- new.timestamp > old.timestamp
- transition consensus data is valid
- new consensus state is a function of the old consensus state
- *)
- let verify_complete_merge ~proof_level =
- match proof_level with
- | Genesis_constants.Proof_level.Full ->
- T.verify_complete_merge
- | _ ->
- fun _ _ _ _ _ _ _ _ _ -> Checked.return Boolean.true_
-
- let%snarkydef update ~(logger : Logger.t) ~proof_level
- ~constraint_constants
- ((previous_state_hash, previous_state_body_hash, previous_state) :
- State_hash.var * State_body_hash.var * Protocol_state.var)
- (transition : Snark_transition.var) :
- ( State_hash.var * Protocol_state.var * [`Success of Boolean.var]
- , _ )
- Tick.Checked.t =
- let supply_increase = Snark_transition.supply_increase transition in
- let%bind `Success updated_consensus_state, consensus_state =
+let blockchain_handler on_unhandled {Witness.prev_state; transition} =
+ let open Snarky.Request in
+ fun (With {request; respond} as r) ->
+ let k x = respond (Provide x) in
+ match request with
+ | Prev_state ->
+ k prev_state
+ | Transition ->
+ k transition
+ | _ ->
+ on_unhandled r
+
+let wrap_handler h w =
+ match h with
+ | None ->
+ blockchain_handler
+ (fun (Snarky.Request.With {respond; _}) -> respond Unhandled)
+ w
+ | Some h ->
+ (* TODO: Clean up the handler composition interface. *)
+ fun r -> blockchain_handler h w r
+
+let with_handler k w ?handler =
+ let h = wrap_handler handler w in
+ k ?handler:(Some h)
+
+(* Blockchain_snark ~old ~nonce ~ledger_snark ~ledger_hash ~timestamp ~new_hash
+ Input:
+ old : Blockchain.t
+ old_snark : proof
+ nonce : int
+ work_snark : proof
+ ledger_hash : Ledger_hash.t
+ timestamp : Time.t
+ new_hash : State_hash.t
+ Witness:
+ transition : Transition.t
+ such that
+ the old_snark verifies against old
+ new = update_with_asserts(old, nonce, timestamp, ledger_hash)
+ hash(new) = new_hash
+ the work_snark verifies against the old.ledger_hash and new_ledger_hash
+ new.timestamp > old.timestamp
+ transition consensus data is valid
+ new consensus state is a function of the old consensus state
+*)
+let%snarkydef step ~(logger : Logger.t)
+ ~(proof_level : Genesis_constants.Proof_level.t)
+ ~(constraint_constants : Genesis_constants.Constraint_constants.t)
+ Hlist.HlistId.
+ [ previous_state_hash
+ ; (txn_snark : Transaction_snark.Statement.With_sok.Checked.t) ]
+ new_state_hash : (_, _) Tick.Checked.t =
+ let%bind transition =
+ with_label __LOC__
+ (exists Snark_transition.typ ~request:(As_prover.return Transition))
+ in
+ let%bind previous_state, previous_state_body_hash =
+ let%bind t =
+ with_label __LOC__
+ (exists
+ (Protocol_state.typ ~constraint_constants)
+ ~request:(As_prover.return Prev_state))
+ in
+ let%bind h, body = Protocol_state.hash_checked t in
+ let%map () =
+ with_label __LOC__ (State_hash.assert_equal h previous_state_hash)
+ in
+ (t, body)
+ in
+ let%bind `Success updated_consensus_state, consensus_state =
+ with_label __LOC__
+ (Consensus_state_hooks.next_state_checked ~constraint_constants
+ ~prev_state:previous_state ~prev_state_hash:previous_state_hash
+ transition txn_snark.supply_increase)
+ in
+ let prev_pending_coinbase_root =
+ previous_state |> Protocol_state.blockchain_state
+ |> Blockchain_state.staged_ledger_hash
+ |> Staged_ledger_hash.pending_coinbase_hash_var
+ in
+ let%bind genesis_state_hash =
+ (*get the genesis state hash from previous state unless previous state is the genesis state itslef*)
+ Protocol_state.genesis_state_hash_checked ~state_hash:previous_state_hash
+ previous_state
+ in
+ let%bind new_state =
+ let t =
+ Protocol_state.create_var ~previous_state_hash ~genesis_state_hash
+ ~blockchain_state:(Snark_transition.blockchain_state transition)
+ ~consensus_state
+ ~constants:(Protocol_state.constants previous_state)
+ in
+ let%map () =
+ let%bind h, _ = Protocol_state.hash_checked t in
+ with_label __LOC__ (State_hash.assert_equal h new_state_hash)
+ in
+ t
+ in
+ let%bind txn_snark_should_verify, success =
+ let%bind ledger_hash_didn't_change =
+ Frozen_ledger_hash.equal_var
+ ( previous_state |> Protocol_state.blockchain_state
+ |> Blockchain_state.snarked_ledger_hash )
+ txn_snark.target
+ and supply_increase_is_zero =
+ Currency.Amount.(equal_var txn_snark.supply_increase (var_of_t zero))
+ in
+ let%bind new_pending_coinbase_hash, deleted_stack, no_coinbases_popped =
+ let%bind root_after_delete, deleted_stack =
+ Pending_coinbase.Checked.pop_coinbases ~constraint_constants
+ prev_pending_coinbase_root
+ ~proof_emitted:(Boolean.not ledger_hash_didn't_change)
+ in
+ (*If snarked ledger hash did not change (no new ledger proof) then pop_coinbases should be a no-op*)
+ let%bind no_coinbases_popped =
+ Pending_coinbase.Hash.equal_var root_after_delete
+ prev_pending_coinbase_root
+ in
+ (*new stack or update one*)
+ let%map new_root =
with_label __LOC__
- (Consensus_state_hooks.next_state_checked ~constraint_constants
- ~prev_state:previous_state ~prev_state_hash:previous_state_hash
- transition supply_increase)
+ (Pending_coinbase.Checked.add_coinbase ~constraint_constants
+ root_after_delete
+ ( Snark_transition.pending_coinbase_action transition
+ , ( Snark_transition.coinbase_receiver transition
+ , Snark_transition.coinbase_amount transition )
+ , previous_state_body_hash ))
in
- let prev_pending_coinbase_root =
- previous_state |> Protocol_state.blockchain_state
- |> Blockchain_state.staged_ledger_hash
- |> Staged_ledger_hash.pending_coinbase_hash_var
+ (new_root, deleted_stack, no_coinbases_popped)
+ in
+ let pending_coinbase_source_stack =
+ Pending_coinbase.Stack.Checked.create_with deleted_stack
+ in
+ let%bind txn_snark_input_correct =
+ let lh t =
+ Protocol_state.blockchain_state t
+ |> Blockchain_state.snarked_ledger_hash
+ in
+ let open Checked in
+ let%bind () =
+ Fee_excess.(assert_equal_checked (var_of_t zero) txn_snark.fee_excess)
in
- let%bind success =
- let%bind ledger_hash_didn't_change =
- Frozen_ledger_hash.equal_var
+ all
+ [ Frozen_ledger_hash.equal_var txn_snark.source (lh previous_state)
+ ; Frozen_ledger_hash.equal_var txn_snark.target (lh new_state)
+ ; Pending_coinbase.Stack.equal_var
+ txn_snark.pending_coinbase_stack_state.source
+ pending_coinbase_source_stack
+ ; Pending_coinbase.Stack.equal_var
+ txn_snark.pending_coinbase_stack_state.target deleted_stack
+ ; Token_id.Checked.equal txn_snark.next_available_token_before
( previous_state |> Protocol_state.blockchain_state
- |> Blockchain_state.snarked_ledger_hash )
+ |> Blockchain_state.snarked_next_available_token )
+ ; Token_id.Checked.equal txn_snark.next_available_token_after
( transition |> Snark_transition.blockchain_state
- |> Blockchain_state.snarked_ledger_hash )
- and supply_increase_is_zero =
- Currency.Amount.(equal_var supply_increase (var_of_t zero))
- in
- let%bind new_pending_coinbase_hash, deleted_stack, no_coinbases_popped
- =
- let%bind root_after_delete, deleted_stack =
- Pending_coinbase.Checked.pop_coinbases ~constraint_constants
- prev_pending_coinbase_root
- ~proof_emitted:(Boolean.not ledger_hash_didn't_change)
- in
- (*If snarked ledger hash did not change (no new ledger proof) then pop_coinbases should be a no-op*)
- let%bind no_coinbases_popped =
- Pending_coinbase.Hash.equal_var root_after_delete
- prev_pending_coinbase_root
- in
- (*new stack or update one*)
- let%map new_root =
- with_label __LOC__
- (Pending_coinbase.Checked.add_coinbase ~constraint_constants
- root_after_delete
- ( Snark_transition.pending_coinbase_action transition
- , ( Snark_transition.coinbase_receiver transition
- , Snark_transition.coinbase_amount transition )
- , previous_state_body_hash ))
- in
- (new_root, deleted_stack, no_coinbases_popped)
- in
- let%bind nothing_changed =
- Boolean.all
- [ ledger_hash_didn't_change
- ; supply_increase_is_zero
- ; no_coinbases_popped ]
- in
- let%bind correct_coinbase_status =
- let new_root =
- transition |> Snark_transition.blockchain_state
- |> Blockchain_state.staged_ledger_hash
- |> Staged_ledger_hash.pending_coinbase_hash_var
- in
- Pending_coinbase.Hash.equal_var new_pending_coinbase_hash new_root
- in
- let pending_coinbase_source_stack =
- Pending_coinbase.Stack.Checked.create_with deleted_stack
- in
- let%bind correct_transaction_snark =
- with_label __LOC__
- (verify_complete_merge ~proof_level
- (Snark_transition.sok_digest transition)
- ( previous_state |> Protocol_state.blockchain_state
- |> Blockchain_state.snarked_ledger_hash )
- ( transition |> Snark_transition.blockchain_state
- |> Blockchain_state.snarked_ledger_hash )
- pending_coinbase_source_stack deleted_stack supply_increase
- ( previous_state |> Protocol_state.blockchain_state
- |> Blockchain_state.snarked_next_available_token )
- ( transition |> Snark_transition.blockchain_state
- |> Blockchain_state.snarked_next_available_token )
- (As_prover.return
- (Option.value ~default:Tock.Proof.dummy
- (Snark_transition.ledger_proof transition))))
- in
- let%bind correct_snark =
- Boolean.(correct_transaction_snark || nothing_changed)
+ |> Blockchain_state.snarked_next_available_token ) ]
+ >>= Boolean.all
+ in
+ let%bind nothing_changed =
+ let%bind next_available_token_didn't_change =
+ Token_id.Checked.equal txn_snark.next_available_token_after
+ txn_snark.next_available_token_before
+ in
+ Boolean.all
+ [ ledger_hash_didn't_change
+ ; supply_increase_is_zero
+ ; no_coinbases_popped
+ ; next_available_token_didn't_change ]
+ in
+ let%bind correct_coinbase_status =
+ let new_root =
+ transition |> Snark_transition.blockchain_state
+ |> Blockchain_state.staged_ledger_hash
+ |> Staged_ledger_hash.pending_coinbase_hash_var
+ in
+ Pending_coinbase.Hash.equal_var new_pending_coinbase_hash new_root
+ in
+ let%bind () =
+ Boolean.Assert.any [txn_snark_input_correct; nothing_changed]
+ in
+ let transaction_snark_should_verifiy = Boolean.not nothing_changed in
+ let%bind result =
+ Boolean.all [updated_consensus_state; correct_coinbase_status]
+ in
+ let%map () =
+ as_prover
+ As_prover.(
+ Let_syntax.(
+ let%map txn_snark_input_correct =
+ read Boolean.typ txn_snark_input_correct
+ and nothing_changed = read Boolean.typ nothing_changed
+ and no_coinbases_popped = read Boolean.typ no_coinbases_popped
+ and updated_consensus_state =
+ read Boolean.typ updated_consensus_state
+ and correct_coinbase_status =
+ read Boolean.typ correct_coinbase_status
+ and result = read Boolean.typ result in
+ [%log trace]
+ "blockchain snark update success: $result = \
+ (transaction_snark_input_correct=$transaction_snark_input_correct \
+ ∨ nothing_changed \
+ (no_coinbases_popped=$no_coinbases_popped)=$nothing_changed) \
+ ∧ updated_consensus_state=$updated_consensus_state ∧ \
+ correct_coinbase_status=$correct_coinbase_status"
+ ~metadata:
+ [ ( "transaction_snark_input_correct"
+ , `Bool txn_snark_input_correct )
+ ; ("nothing_changed", `Bool nothing_changed)
+ ; ("updated_consensus_state", `Bool updated_consensus_state)
+ ; ("correct_coinbase_status", `Bool correct_coinbase_status)
+ ; ("result", `Bool result)
+ ; ("no_coinbases_popped", `Bool no_coinbases_popped) ]))
+ in
+ (transaction_snark_should_verifiy, result)
+ in
+ let txn_snark_should_verify =
+ match proof_level with
+ | Check | None ->
+ Boolean.false_
+ | Full ->
+ txn_snark_should_verify
+ in
+ let%bind is_base_case =
+ Protocol_state.consensus_state new_state
+ |> Consensus.Data.Consensus_state.is_genesis_state_var
+ in
+ let prev_should_verify =
+ match proof_level with
+ | Check | None ->
+ Boolean.false_
+ | Full ->
+ Boolean.not is_base_case
+ in
+ let%map () = Boolean.Assert.any [is_base_case; success] in
+ (prev_should_verify, txn_snark_should_verify)
+
+let check w ?handler ~proof_level ~constraint_constants txn_snark
+ new_state_hash : unit Or_error.t =
+ let open Tick in
+ check
+ (Fn.flip handle (wrap_handler handler w)
+ (let%bind prev =
+ exists State_hash.typ
+ ~compute:(As_prover.return (Protocol_state.hash w.prev_state))
+ and curr =
+ exists State_hash.typ ~compute:(As_prover.return new_state_hash)
+ and txn_snark =
+ exists Transaction_snark.Statement.With_sok.typ
+ ~compute:(As_prover.return txn_snark)
in
- let%bind result =
- Boolean.all
- [correct_snark; updated_consensus_state; correct_coinbase_status]
+ step ~proof_level ~constraint_constants ~logger:(Logger.create ())
+ [prev; txn_snark] curr))
+ ()
+
+let rule ~proof_level ~constraint_constants transaction_snark self :
+ _ Pickles.Inductive_rule.t =
+ { prevs= [self; transaction_snark]
+ ; main=
+ (fun [x1; x2] x ->
+ let b1, b2 =
+ Run.run_checked
+ (step ~proof_level ~constraint_constants ~logger:(Logger.create ())
+ [x1; x2] x)
in
- let%map () =
- as_prover
- As_prover.(
- Let_syntax.(
- let%map correct_transaction_snark =
- read Boolean.typ correct_transaction_snark
- and nothing_changed = read Boolean.typ nothing_changed
- and no_coinbases_popped = read Boolean.typ no_coinbases_popped
- and updated_consensus_state =
- read Boolean.typ updated_consensus_state
- and correct_coinbase_status =
- read Boolean.typ correct_coinbase_status
- and result = read Boolean.typ result in
- [%log trace]
- "blockchain snark update success: $result = \
- (correct_transaction_snark=$correct_transaction_snark ∨ \
- nothing_changed \
- (no_coinbases_popped=$no_coinbases_popped)=$nothing_changed) \
- ∧ updated_consensus_state=$updated_consensus_state ∧ \
- correct_coinbase_status=$correct_coinbase_status"
- ~metadata:
- [ ( "correct_transaction_snark"
- , `Bool correct_transaction_snark )
- ; ("nothing_changed", `Bool nothing_changed)
- ; ("updated_consensus_state", `Bool updated_consensus_state)
- ; ("correct_coinbase_status", `Bool correct_coinbase_status)
- ; ("result", `Bool result)
- ; ("no_coinbases_popped", `Bool no_coinbases_popped) ]))
+ [b1; b2] )
+ ; main_value=
+ (fun [prev; (txn : Transaction_snark.Statement.With_sok.t)] curr ->
+ let lh t =
+ Protocol_state.blockchain_state t
+ |> Blockchain_state.snarked_ledger_hash
in
- result
- in
- let%bind genesis_state_hash =
- (*get the genesis state hash from previous state unless previous state is the genesis state itslef*)
- Protocol_state.genesis_state_hash_checked
- ~state_hash:previous_state_hash previous_state
- in
- let new_state =
- Protocol_state.create_var ~previous_state_hash ~genesis_state_hash
- ~blockchain_state:(Snark_transition.blockchain_state transition)
- ~consensus_state
- ~constants:(Protocol_state.constants previous_state)
- in
- let%map state_hash, _ = Protocol_state.hash_checked new_state in
- (state_hash, new_state, `Success success)
- end
+ [ not
+ (Consensus.Data.Consensus_state.is_genesis_state
+ (Protocol_state.consensus_state curr))
+ ; List.for_all ~f:Fn.id
+ [ Frozen_ledger_hash.equal (lh prev) (lh curr)
+ ; Currency.Amount.(equal zero)
+ txn.Transaction_snark.Statement.supply_increase
+ ; Pending_coinbase.Stack.equal
+ txn.pending_coinbase_stack_state.source
+ txn.pending_coinbase_stack_state.target ]
+ |> not ] ) }
+
+module Statement = struct
+ type t = Protocol_state.Value.t
+
+ let to_field_elements (t : t) : Tick.Field.t array = [|Protocol_state.hash t|]
end
-module Checked = struct
- let%snarkydef is_base_case state =
- Protocol_state.consensus_state state
- |> Consensus.Data.Consensus_state.is_genesis_state_var
+module Statement_var = struct
+ type t = State_hash.var
- let%snarkydef hash (t : Protocol_state.var) = Protocol_state.hash_checked t
+ let to_field_elements (t : t) = [|State_hash.var_to_hash_packed t|]
end
+
+let typ =
+ Typ.transport State_hash.typ ~there:Protocol_state.hash ~back:(fun _ ->
+ failwith "cannot unhash" )
+
+type tag =
+ (State_hash.var, Protocol_state.value, Nat.N2.n, Nat.N1.n) Pickles.Tag.t
+
+module type S = sig
+ module Proof :
+ Pickles.Proof_intf
+ with type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t
+ and type statement = Protocol_state.Value.t
+
+ val tag : tag
+
+ val cache_handle : Pickles.Cache_handle.t
+
+ open Nat
+
+ val step :
+ Witness.t
+ -> ( Protocol_state.Value.t
+ * (Transaction_snark.Statement.With_sok.t * unit)
+ , N2.n * (N2.n * unit)
+ , N1.n * (N2.n * unit)
+ , Protocol_state.Value.t
+ , Proof.t )
+ Pickles.Prover.t
+end
+
+let verify state proof ~key =
+ Pickles.verify (module Nat.N2) (module Statement) key [(state, proof)]
+
+module Make (T : sig
+ val tag : Transaction_snark.tag
+end) =
+struct
+ let proof_level = Genesis_constants.Proof_level.compiled
+
+ let constraint_constants = Genesis_constants.Constraint_constants.compiled
+
+ let tag, cache_handle, p, Pickles.Provers.[step] =
+ Pickles.compile ~cache:Cache_dir.cache
+ (module Statement_var)
+ (module Statement)
+ ~typ
+ ~branches:(module Nat.N1)
+ ~max_branching:(module Nat.N2)
+ ~name:"blockchain-snark"
+ ~choices:(fun ~self ->
+ [rule ~proof_level ~constraint_constants T.tag self] )
+
+ let step = with_handler step
+
+ module Proof = (val p)
+end
+
+let constraint_system_digests () =
+ let digest = Tick.R1CS_constraint_system.digest in
+ [ ( "blockchain-step"
+ , digest
+ (let main x =
+ let open Tick in
+ let%bind x1 = exists Coda_base.State_hash.typ in
+ let%bind x2 = exists Transaction_snark.Statement.With_sok.typ in
+ let%map _ =
+ step ~proof_level:Genesis_constants.Proof_level.compiled
+ ~constraint_constants:
+ Genesis_constants.Constraint_constants.compiled
+ ~logger:(Logger.create ()) [x1; x2] x
+ in
+ ()
+ in
+ Tick.constraint_system ~exposing:[Coda_base.State_hash.typ] main) ) ]
diff --git a/src/lib/blockchain_snark/blockchain_snark_state.mli b/src/lib/blockchain_snark/blockchain_snark_state.mli
index 63f4bac8a3f..58bccad4944 100644
--- a/src/lib/blockchain_snark/blockchain_snark_state.mli
+++ b/src/lib/blockchain_snark/blockchain_snark_state.mli
@@ -1,26 +1,53 @@
open Coda_base
open Coda_state
-open Snark_params.Tick
-
-module type Update_intf = sig
- module Checked : sig
- val update :
- logger:Logger.t
- -> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> State_hash.var * State_body_hash.var * Protocol_state.var
- -> Snark_transition.var
- -> ( State_hash.var * Protocol_state.var * [`Success of Boolean.var]
- , _ )
- Checked.t
- end
+open Core_kernel
+open Pickles_types
+
+module Witness : sig
+ type t =
+ {prev_state: Protocol_state.Value.t; transition: Snark_transition.Value.t}
end
-module Make_update (T : Transaction_snark.Verification.S) : Update_intf
+type tag =
+ (State_hash.var, Protocol_state.value, Nat.N2.n, Nat.N1.n) Pickles.Tag.t
+
+val verify :
+ Protocol_state.Value.t -> Proof.t -> key:Pickles.Verification_key.t -> bool
+
+val check :
+ Witness.t
+ -> ?handler:(Snarky.Request.request -> Snarky.Request.response)
+ -> proof_level:Genesis_constants.Proof_level.t
+ -> constraint_constants:Genesis_constants.Constraint_constants.t
+ -> Transaction_snark.Statement.With_sok.t
+ -> State_hash.t
+ -> unit Or_error.t
+
+module type S = sig
+ module Proof :
+ Pickles.Proof_intf
+ with type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t
+ and type statement = Protocol_state.Value.t
-module Checked : sig
- val hash :
- Protocol_state.var -> (State_hash.var * State_body_hash.var, _) Checked.t
+ val tag : tag
- val is_base_case : Protocol_state.var -> (Boolean.var, _) Checked.t
+ val cache_handle : Pickles.Cache_handle.t
+
+ open Nat
+
+ val step :
+ Witness.t
+ -> ( Protocol_state.Value.t
+ * (Transaction_snark.Statement.With_sok.t * unit)
+ , N2.n * (N2.n * unit)
+ , N1.n * (N2.n * unit)
+ , Protocol_state.Value.t
+ , Proof.t )
+ Pickles.Prover.t
end
+
+module Make (T : sig
+ val tag : Transaction_snark.tag
+end) : S
+
+val constraint_system_digests : unit -> (string * Md5.t) list
diff --git a/src/lib/blockchain_snark/blockchain_transition.ml b/src/lib/blockchain_snark/blockchain_transition.ml
deleted file mode 100644
index 901b57f7440..00000000000
--- a/src/lib/blockchain_snark/blockchain_transition.ml
+++ /dev/null
@@ -1,333 +0,0 @@
-open Core_kernel
-open Async_kernel
-open Snark_params
-open Snark_bits
-open Coda_state
-open Fold_lib
-module Digest = Random_oracle.Digest
-module Storage = Storage.List.Make (Storage.Disk)
-
-module Keys = struct
- module Per_curve_location = struct
- module T = struct
- type t = {step: Storage.location; wrap: Storage.location}
- [@@deriving sexp]
- end
-
- include T
- include Sexpable.To_stringable (T)
- end
-
- module Proving = struct
- module Location = Per_curve_location
-
- let checksum ~step ~wrap =
- Md5.digest_string
- ("Blockchain_transition_proving" ^ Md5.to_hex step ^ Md5.to_hex wrap)
-
- type t = {step: Tick.Proving_key.t; wrap: Tock.Proving_key.t}
-
- let dummy =
- { step= Dummy_values.Tick.Groth16.proving_key
- ; wrap= Dummy_values.Tock.Bowe_gabizon18.proving_key }
-
- let load ({step; wrap} : Location.t) =
- let open Storage in
- let logger = Logger.create () in
- let tick_controller =
- Controller.create ~logger (module Tick.Proving_key)
- in
- let tock_controller =
- Controller.create ~logger (module Tock.Proving_key)
- in
- let open Async in
- let load c p =
- match%map load_with_checksum c p with
- | Ok x ->
- x
- | Error e ->
- failwithf
- !"Blockchain_snark: load failed on %{sexp:Storage.location}: \
- %{sexp:[`Checksum_no_match|`No_exist|`IO_error of Error.t]}"
- p e ()
- in
- let%map step = load tick_controller step
- and wrap = load tock_controller wrap in
- let t = {step= step.data; wrap= wrap.data} in
- (t, checksum ~step:step.checksum ~wrap:wrap.checksum)
- end
-
- module Verification = struct
- module Location = Per_curve_location
-
- let checksum ~step ~wrap =
- Md5.digest_string
- ( "Blockchain_transition_verification" ^ Md5.to_hex step
- ^ Md5.to_hex wrap )
-
- type t = {step: Tick.Verification_key.t; wrap: Tock.Verification_key.t}
-
- let dummy =
- { step=
- Tick_backend.Verification_key.get_dummy
- ~input_size:Coda_base.Transition_system.step_input_size
- ; wrap=
- Tock_backend.Bowe_gabizon.Verification_key.get_dummy
- ~input_size:Wrap_input.size }
-
- let load ({step; wrap} : Location.t) =
- let open Storage in
- let logger = Logger.create () in
- let tick_controller =
- Controller.create ~logger (module Tick.Verification_key)
- in
- let tock_controller =
- Controller.create ~logger (module Tock.Verification_key)
- in
- let open Async in
- let load c p =
- match%map load_with_checksum c p with
- | Ok x ->
- x
- | Error _e ->
- failwithf
- !"Blockchain_snark: load failed on %{sexp:Storage.location}"
- p ()
- in
- let%map step = load tick_controller step
- and wrap = load tock_controller wrap in
- let t = {step= step.data; wrap= wrap.data} in
- (t, checksum ~step:step.checksum ~wrap:wrap.checksum)
- end
-
- type t = {proving: Proving.t; verification: Verification.t}
-
- let dummy = {proving= Proving.dummy; verification= Verification.dummy}
-
- module Checksum = struct
- type t = {proving: Md5.t; verification: Md5.t}
- end
-
- module Location = struct
- module T = struct
- type t =
- {proving: Proving.Location.t; verification: Verification.Location.t}
- [@@deriving sexp]
- end
-
- include T
- include Sexpable.To_stringable (T)
- end
-
- let load ({proving; verification} : Location.t) =
- let%map proving, proving_checksum = Proving.load proving
- and verification, verification_checksum = Verification.load verification in
- ( {proving; verification}
- , {Checksum.proving= proving_checksum; verification= verification_checksum}
- )
-end
-
-module Make (T : Transaction_snark.Verification.S) = struct
- module System = struct
- module U = Blockchain_snark_state.Make_update (T)
- module Update = Snark_transition
-
- module State = struct
- include Protocol_state
-
- include (
- Blockchain_snark_state :
- module type of Blockchain_snark_state
- with module Checked := Blockchain_snark_state.Checked )
-
- include (U : module type of U with module Checked := U.Checked)
-
- module Hash = struct
- include Coda_base.State_hash
-
- let var_to_field = var_to_hash_packed
- end
-
- module Body_hash = struct
- include Coda_base.State_body_hash
-
- let var_to_field = var_to_hash_packed
- end
-
- module Checked = struct
- include Blockchain_snark_state.Checked
- include U.Checked
- end
- end
- end
-
- open Coda_base
-
- include Transition_system.Make (struct
- module Tick = struct
- let size_in_bits = Tick.Field.size_in_bits
-
- module Packed = struct
- type value = Random_oracle.Digest.t
-
- type var = Random_oracle.Checked.Digest.t
-
- let typ = Tick.Field.typ
-
- let size_in_bits = size_in_bits
- end
-
- module Unpacked = struct
- type value = bool list
-
- type var = Tick.Boolean.var list
-
- let typ : (var, value) Tick.Typ.t =
- Tick.Typ.list ~length:size_in_bits Tick.Boolean.typ
-
- let var_to_bits (x : var) =
- Bitstring_lib.Bitstring.Lsb_first.of_list
- (x :> Tick.Boolean.var list)
-
- let var_of_bits = Bitstring_lib.Bitstring.Lsb_first.to_list
-
- let var_to_triples xs =
- let open Fold in
- to_list
- (group3 ~default:Tick.Boolean.false_
- (of_list (var_to_bits xs :> Tick.Boolean.var list)))
-
- let var_of_value = List.map ~f:Tick.Boolean.var_of_value
-
- let size_in_bits = size_in_bits
- end
-
- let project_value = Tick.Field.project
-
- let project_var = Tick.Field.Var.project
-
- let unpack_value = Tick.Field.unpack
-
- let choose_preimage_var =
- Tick.Field.Checked.choose_preimage_var ~length:size_in_bits
- end
-
- module Tock = Bits.Snarkable.Field (Tock)
- end)
- (System)
-
- module Keys = struct
- include Keys
-
- let step_cached =
- let load =
- let open Tick in
- let open Cached.Let_syntax in
- let%map verification =
- Cached.component ~label:"step_verification" ~f:Keypair.vk
- (module Verification_key)
- and proving =
- Cached.component ~label:"step_proving" ~f:Keypair.pk
- (module Proving_key)
- in
- (verification, {proving with value= ()})
- in
- Cached.Spec.create ~load ~name:"blockchain-snark step keys"
- ~autogen_path:Cache_dir.autogen_path
- ~manual_install_path:Cache_dir.manual_install_path
- ~brew_install_path:Cache_dir.brew_install_path
- ~s3_install_path:Cache_dir.s3_install_path
- ~digest_input:
- (Fn.compose Md5.to_hex Tick.R1CS_constraint_system.digest)
- ~create_env:Tick.Keypair.generate
- ~input:
- (Tick.constraint_system ~exposing:(Step_base.input ())
- (Step_base.main ~logger:(Logger.null ())
- ~proof_level:Genesis_constants.Proof_level.compiled
- ~constraint_constants:
- Genesis_constants.Constraint_constants.compiled))
-
- let cached () =
- let open Cached.Deferred_with_track_generated.Let_syntax in
- let paths = Fn.compose Cache_dir.possible_paths Filename.basename in
- let%bind step_vk, step_pk = Cached.run step_cached in
- let module Wrap = Wrap_base (struct
- let verification_key = step_vk.value
- end) in
- let wrap_cached =
- let load =
- let open Tock in
- let open Cached.Let_syntax in
- let%map verification =
- Cached.component ~label:"wrap_verification" ~f:Keypair.vk
- (module Verification_key)
- and proving =
- Cached.component ~label:"wrap_proving" ~f:Keypair.pk
- (module Proving_key)
- in
- (verification, {proving with value= ()})
- in
- Cached.Spec.create ~load ~name:"blockchain-snark wrap keys"
- ~autogen_path:Cache_dir.autogen_path
- ~manual_install_path:Cache_dir.manual_install_path
- ~brew_install_path:Cache_dir.brew_install_path
- ~s3_install_path:Cache_dir.s3_install_path
- ~digest_input:(fun x ->
- Md5.to_hex (Tock.R1CS_constraint_system.digest (Lazy.force x)) )
- ~input:(lazy (Tock.constraint_system ~exposing:Wrap.input Wrap.main))
- ~create_env:(fun x -> Tock.Keypair.generate (Lazy.force x))
- in
- let%map wrap_vk, wrap_pk = Cached.run wrap_cached in
- let location : Location.t =
- { proving= {step= paths step_pk.path; wrap= paths wrap_pk.path}
- ; verification= {step= paths step_vk.path; wrap= paths wrap_vk.path} }
- in
- let checksum : Checksum.t =
- { proving=
- Proving.checksum ~step:step_pk.checksum ~wrap:wrap_pk.checksum
- ; verification=
- Verification.checksum ~step:step_vk.checksum ~wrap:wrap_vk.checksum
- }
- in
- let t : Verification.t = {step= step_vk.value; wrap= wrap_vk.value} in
- (location, t, checksum)
- end
-end
-
-let instance_hash wrap_vk =
- let open Coda_base in
- let init =
- Random_oracle.update ~state:Hash_prefix.transition_system_snark
- Snark_params.Tick.Verifier.(
- let vk = vk_of_backend_vk wrap_vk in
- let g1 = Tick.Inner_curve.to_affine_exn in
- let g2 = Tick.Pairing.G2.Unchecked.to_affine_exn in
- Verification_key.to_field_elements
- { vk with
- query_base= g1 vk.query_base
- ; query= List.map ~f:g1 vk.query
- ; delta= g2 vk.delta })
- in
- stage (fun state ->
- Random_oracle.hash ~init [|(Protocol_state.hash state :> Tick.Field.t)|]
- )
-
-let constraint_system_digests () =
- let module M = Make (Transaction_snark.Verification.Make (struct
- let keys = Transaction_snark.Keys.Verification.dummy
- end)) in
- let module W = M.Wrap_base (struct
- let verification_key = Keys.Verification.dummy.step
- end) in
- let digest = Tick.R1CS_constraint_system.digest in
- let digest' = Tock.R1CS_constraint_system.digest in
- [ ( "blockchain-step"
- , digest
- M.Step_base.(
- Tick.constraint_system ~exposing:(input ())
- (main ~logger:(Logger.null ())
- ~proof_level:Genesis_constants.Proof_level.compiled
- ~constraint_constants:
- Genesis_constants.Constraint_constants.compiled)) )
- ; ("blockchain-wrap", digest' W.(Tock.constraint_system ~exposing:input main))
- ]
diff --git a/src/lib/blockchain_snark/dune b/src/lib/blockchain_snark/dune
index b728fecbd70..d1e2e77a6ce 100644
--- a/src/lib/blockchain_snark/dune
+++ b/src/lib/blockchain_snark/dune
@@ -2,7 +2,7 @@
(name blockchain_snark)
(public_name blockchain_snark)
(library_flags -linkall)
- (libraries core cached cache_dir snarky snark_params coda_base coda_state
+ (libraries core cache_dir snarky snark_params coda_base coda_state
transaction_snark bignum_bigint consensus)
(inline_tests)
(preprocess
diff --git a/src/lib/cache_dir/cache_dir.ml b/src/lib/cache_dir/cache_dir.ml
index d0720e7102b..69931b8c739 100644
--- a/src/lib/cache_dir/cache_dir.ml
+++ b/src/lib/cache_dir/cache_dir.ml
@@ -5,6 +5,9 @@ let autogen_path = Filename.temp_dir_name ^/ "coda_cache_dir"
let s3_install_path = "/tmp/s3_cache_dir"
+let s3_keys_bucket_prefix =
+ "https://s3-us-west-2.amazonaws.com/snark-keys.o1test.net"
+
let manual_install_path = "/var/lib/coda"
let brew_install_path =
@@ -18,6 +21,15 @@ let brew_install_path =
| _ ->
"/usr/local/var/coda"
+let cache =
+ let dir d w = Key_cache.Spec.On_disk {directory= d; should_write= w} in
+ [ dir manual_install_path false
+ ; dir brew_install_path false
+ ; dir s3_install_path false
+ ; dir autogen_path true
+ ; Key_cache.Spec.S3
+ {bucket_prefix= s3_keys_bucket_prefix; install_path= s3_install_path} ]
+
let env_path =
match Sys.getenv "CODA_KEYS_PATH" with
| Some path ->
diff --git a/src/lib/cache_dir/dune b/src/lib/cache_dir/dune
index 4bc58e8c7be..22ef2c74592 100644
--- a/src/lib/cache_dir/dune
+++ b/src/lib/cache_dir/dune
@@ -6,7 +6,7 @@
async
core
; libs
+ key_cache
blake2
- coda_compile_config
logger)
(preprocess (pps ppx_coda ppx_version ppx_let ppx_custom_printf)))
diff --git a/src/lib/cached/cached.ml b/src/lib/cached/cached.ml
deleted file mode 100644
index a9a2a0084f1..00000000000
--- a/src/lib/cached/cached.ml
+++ /dev/null
@@ -1,299 +0,0 @@
-open Core
-open Async
-
-type 'a value = {path: string; value: 'a; checksum: Md5.t}
-
-let try_load bin path =
- let logger = Logger.create () in
- let controller = Storage.Disk.Controller.create ~logger bin in
- match%map Storage.Disk.load_with_checksum controller path with
- | Ok {Storage.Checked_data.data; checksum} ->
- [%log trace]
- ~metadata:[("path", `String path)]
- "Loaded value successfully from $path" ;
- Ok {path; value= data; checksum}
- | Error `Checksum_no_match ->
- Or_error.error_string "Checksum failure"
- | Error ((`IO_error _ | `No_exist) as err) -> (
- match err with
- | `IO_error e ->
- Or_error.errorf "Could not load value. The error was: %s"
- (Error.to_string_hum e)
- | `No_exist ->
- Or_error.error_string "Cached value not found in default location" )
-
-module Component = struct
- type (_, 'env) t =
- | Load :
- { label: string
- ; f: 'env -> 'a
- ; bin: 'a Binable.m }
- -> ('a value, 'env) t
-
- let path (Load {label; f= _; bin= _}) ~base_path = base_path ^ "_" ^ label
-
- let load (Load {label= _; f= _; bin} as l) ~base_path =
- try_load bin (path ~base_path l)
-
- let store (Load {label= _; f; bin} as l) ~base_path ~env =
- let path = path ~base_path l in
- let logger = Logger.create () in
- let controller = Storage.Disk.Controller.create ~logger bin in
- let value = f env in
- let%map checksum =
- Storage.Disk.store_with_checksum controller path value
- in
- {path; value; checksum}
-end
-
-module With_components = struct
- module T = struct
- type ('a, 'env) t =
- | Pure : 'a -> ('a, 'env) t
- | Ap : ('a, 'env) Component.t * ('a -> 'b, 'env) t -> ('b, 'env) t
-
- let return x = Pure x
-
- let rec map : type a b e. (a, e) t -> f:(a -> b) -> (b, e) t =
- fun t ~f ->
- match t with
- | Pure x ->
- Pure (f x)
- | Ap (c, t1) ->
- Ap (c, map t1 ~f:(fun g x -> f (g x)))
-
- let rec apply : type a b e. (a -> b, e) t -> (a, e) t -> (b, e) t =
- fun t1 t2 ->
- match (t1, t2) with
- | Pure f, y ->
- map ~f y
- | Ap (x, y), z ->
- Ap (x, apply (map y ~f:Fn.flip) z)
-
- let map = `Define_using_apply
- end
-
- include T
- include Applicative.Make2 (T)
-
- let rec load : type a e.
- (a, e) t -> base_path:string -> a Deferred.Or_error.t =
- let open Deferred.Or_error.Let_syntax in
- fun t ~base_path ->
- match t with
- | Pure x ->
- return x
- | Ap ((Load _ as c), tf) ->
- let%map x = Component.load c ~base_path and f = load tf ~base_path in
- f x
-
- let rec path : type a e. (a, e) t -> base_path:string -> string list =
- fun t ~base_path ->
- match t with
- | Pure _ ->
- []
- | Ap ((Load _ as c), tf) ->
- Component.path c ~base_path :: path tf ~base_path
-
- let rec store : type a e.
- (a, e) t -> base_path:string -> env:e -> a Deferred.t =
- let open Deferred.Let_syntax in
- fun t ~base_path ~env ->
- match t with
- | Pure x ->
- return x
- | Ap ((Load _ as c), tf) ->
- let%bind x = Component.store c ~base_path ~env in
- let%map f = store tf ~base_path ~env in
- f x
-
- module Let_syntax = struct
- let return = return
-
- module Let_syntax = struct
- let return = return
-
- let map = map
-
- let both t1 t2 = apply (map t1 ~f:(fun x y -> (x, y))) t2
-
- module Open_on_rhs = struct end
- end
- end
-end
-
-include With_components
-
-type ('a, 'e) cached = ('a, 'e) t
-
-let component ~label ~f bin = Ap (Component.Load {label; f; bin}, Pure Fn.id)
-
-module Spec = struct
- type 'a t =
- | T :
- { load: ('a, 'env) With_components.t
- ; name: string
- ; autogen_path: string
- ; manual_install_path: string
- ; brew_install_path: string
- ; s3_install_path: string
- ; digest_input: 'input -> string
- ; create_env: 'input -> 'env
- ; input: 'input }
- -> 'a t
-
- let create ~load ~name ~autogen_path ~manual_install_path ~brew_install_path
- ~s3_install_path ~digest_input ~create_env ~input =
- T
- { load
- ; name
- ; autogen_path
- ; manual_install_path
- ; brew_install_path
- ; s3_install_path
- ; digest_input
- ; create_env
- ; input }
-end
-
-module Track_generated = struct
- type t = [`Generated_something | `Locally_generated | `Cache_hit]
-
- let empty = `Cache_hit
-
- let ( + ) x y =
- match (x, y) with
- | `Generated_something, _ | _, `Generated_something ->
- `Generated_something
- | `Locally_generated, _ | _, `Locally_generated ->
- `Locally_generated
- | `Cache_hit, `Cache_hit ->
- `Cache_hit
-end
-
-module With_track_generated = struct
- type 'a t = {data: 'a; dirty: Track_generated.t}
-end
-
-(* This is just the writer monad in a deferred *)
-module Deferred_with_track_generated = struct
- type 'a t = 'a With_track_generated.t Deferred.t
-
- include Monad.Make2 (struct
- type nonrec ('a, 'm) t = 'a t
-
- let return x =
- Deferred.return
- {With_track_generated.data= x; dirty= Track_generated.empty}
-
- let map = `Define_using_bind
-
- let bind t ~f =
- let open Deferred.Let_syntax in
- let%bind {With_track_generated.data; dirty= dirty1} = t in
- let%map {With_track_generated.data= output; dirty= dirty2} = f data in
- { With_track_generated.data= output
- ; dirty= Track_generated.(dirty1 + dirty2) }
- end)
-end
-
-let run
- (Spec.T
- { load
- ; name
- ; autogen_path
- ; manual_install_path
- ; brew_install_path
- ; s3_install_path
- ; digest_input
- ; create_env
- ; input }) =
- let open Deferred.Let_syntax in
- let hash = digest_input input in
- let s3_bucket_prefix =
- "https://s3-us-west-2.amazonaws.com/snark-keys.o1test.net"
- in
- let base_path directory = directory ^/ hash in
- let full_paths directory =
- With_components.path load ~base_path:(base_path directory)
- in
- match%bind
- Deferred.List.fold
- [manual_install_path; brew_install_path; s3_install_path] ~init:None
- ~f:(fun acc path ->
- if is_some acc then return acc
- else
- match%map With_components.load load ~base_path:(base_path path) with
- | Ok x ->
- Core_kernel.printf
- !"Loaded %s from the following paths %{sexp: string list}\n"
- name (full_paths path) ;
- Some x
- | Error e ->
- Core_kernel.printf
- !"Error loading from (name %s) (base_path %s) (full paths \
- %{sexp: string list}: %s\n"
- name (base_path path) (full_paths path) (Error.to_string_hum e) ;
- None )
- with
- | Some data ->
- return {With_track_generated.data; dirty= `Cache_hit}
- | None -> (
- Core_kernel.printf
- !"Could not load %s from the following paths:\n\
- \ \n\
- %{sexp: string list}\n\
- %{sexp: string list}\n\
- %{sexp: string list}\n\
- \ \n\
- \ Trying s3 http:\n\
- \ %{sexp: string list}...\n"
- name
- (full_paths manual_install_path)
- (full_paths brew_install_path)
- (full_paths s3_install_path)
- (full_paths s3_bucket_prefix) ;
- (* Attempt load from s3 *)
- let open Deferred.Let_syntax in
- let%bind () = Async.Unix.mkdir ~p:() s3_install_path in
- let%bind () = Async.Unix.mkdir ~p:() autogen_path in
- match%bind
- let open Deferred.Result.Let_syntax in
- let%bind () =
- Cache_dir.load_from_s3
- (full_paths s3_bucket_prefix)
- (full_paths s3_install_path)
- ~logger:(Logger.create ())
- in
- With_components.load load ~base_path:(base_path s3_install_path)
- with
- | Ok data ->
- Core_kernel.printf
- !"Successfully loaded keys from s3 and placed them in %{sexp: \
- string list}\n"
- (full_paths s3_install_path) ;
- return {With_track_generated.data; dirty= `Cache_hit}
- | Error e -> (
- Core_kernel.printf "Failed to load keys from s3: %s, looking at %s\n"
- (Error.to_string_hum e) autogen_path ;
- match%bind
- With_components.load load ~base_path:(base_path autogen_path)
- with
- | Ok data ->
- Core_kernel.printf
- !"Loaded %s from autogen path %{sexp: string list}\n"
- name (full_paths autogen_path) ;
- (* We consider this a "cache miss" for the purposes of tracking
- * that we need to push to s3 *)
- return {With_track_generated.data; dirty= `Locally_generated}
- | Error _e ->
- Core_kernel.printf
- !"Could not load %s from autogen path %{sexp: string list}. \
- Autogenerating...\n"
- name (full_paths autogen_path) ;
- let%bind () = Unix.mkdir ~p:() autogen_path in
- let%map data =
- With_components.store load ~base_path:(base_path autogen_path)
- ~env:(create_env input)
- in
- {With_track_generated.data; dirty= `Generated_something} ) )
diff --git a/src/lib/cached/cached.mli b/src/lib/cached/cached.mli
deleted file mode 100644
index bef512383ff..00000000000
--- a/src/lib/cached/cached.mli
+++ /dev/null
@@ -1,61 +0,0 @@
-open Core
-open Async
-
-type 'a value = {path: string; value: 'a; checksum: Md5.t}
-
-include Applicative.S2
-
-module Let_syntax : sig
- val return : 'a -> ('a, 'e) t
-
- module Let_syntax : sig
- val return : 'a -> ('a, 'e) t
-
- val map : ('a, 'e) t -> f:('a -> 'b) -> ('b, 'e) t
-
- val both : ('a, 'e) t -> ('b, 'e) t -> ('a * 'b, 'e) t
-
- module Open_on_rhs : sig end
- end
-end
-
-val component :
- label:string -> f:('e -> 'a) -> 'a Binable.m -> ('a value, 'e) t
-
-type ('a, 'e) cached = ('a, 'e) t
-
-module Spec : sig
- type 'a t
-
- val create :
- load:('a, 'env) cached
- -> name:string
- -> autogen_path:string
- -> manual_install_path:string
- -> brew_install_path:string
- -> s3_install_path:string
- -> digest_input:('input -> string)
- -> create_env:('input -> 'env)
- -> input:'input
- -> 'a t
-end
-
-(** A monoid for tracking the "dirty bit" of whether or not we've generated
- * something or only received cache hits *)
-module Track_generated : sig
- type t = [`Generated_something | `Locally_generated | `Cache_hit]
-
- val empty : t
-
- (** Generated_something overrides caches hits *)
- val ( + ) : t -> t -> t
-end
-
-module With_track_generated : sig
- type 'a t = {data: 'a; dirty: Track_generated.t}
-end
-
-module Deferred_with_track_generated :
- Monad.S with type 'a t = 'a With_track_generated.t Deferred.t
-
-val run : 'a Spec.t -> 'a Deferred_with_track_generated.t
diff --git a/src/lib/cached/dune b/src/lib/cached/dune
deleted file mode 100644
index 48b8f627659..00000000000
--- a/src/lib/cached/dune
+++ /dev/null
@@ -1,9 +0,0 @@
-(library
- (name cached)
- (public_name cached)
- (library_flags -linkall)
- (inline_tests)
- (libraries core storage async cache_dir)
- (preprocess
- (pps ppx_coda ppx_version ppx_jane bisect_ppx -- -conditional))
- (synopsis "Cache computation results on disk"))
diff --git a/src/lib/coda_base/data_hash.ml b/src/lib/coda_base/data_hash.ml
index 2b1fd21c436..df3a70d054d 100644
--- a/src/lib/coda_base/data_hash.ml
+++ b/src/lib/coda_base/data_hash.ml
@@ -103,56 +103,24 @@ struct
let equal_var x y = Field.Checked.equal x.digest y.digest
let typ : (var, t) Typ.t =
- let store (t : t) =
- let open Typ.Store.Let_syntax in
- let n = Bigint.of_field t in
- let rec go i acc =
- if Int.(i < 0) then return (Bitstring.Lsb_first.of_list acc)
- else
- let%bind b = Boolean.typ.store (Bigint.test_bit n i) in
- go Int.(i - 1) (b :: acc)
- in
- let%map bits = go (Field.size_in_bits - 1) [] in
- {bits= Some bits; digest= Field.Var.project (bits :> Boolean.var list)}
- in
- let read (t : var) = Field.typ.read t.digest in
- let alloc =
- let open Typ.Alloc.Let_syntax in
- let rec go i acc =
- if Int.(i < 0) then return (Bitstring.Lsb_first.of_list acc)
- else
- let%bind b = Boolean.typ.alloc in
- go Int.(i - 1) (b :: acc)
- in
- let%map bits = go (Field.size_in_bits - 1) [] in
- {bits= Some bits; digest= Field.Var.project (bits :> Boolean.var list)}
- in
- let check {bits; _} =
- Checked.List.iter
- (Option.value_exn bits :> Boolean.var list)
- ~f:Boolean.typ.check
- in
- {store; read; alloc; check}
+ Typ.transport_var Typ.field
+ ~there:(fun {digest; bits= _} -> digest)
+ ~back:(fun digest -> {digest; bits= None})
[%%endif]
end
module T0 = struct
- [%%versioned_binable
+ [%%versioned_asserted
module Stable = struct
module V1 = struct
- type t = Field.t [@@deriving sexp, compare, hash]
+ type t = Field.t
+ [@@deriving sexp, compare, hash, version {asserted}, bin_io]
let to_latest = Fn.id
-
- module Arg = struct
- type nonrec t = t
-
- [%%define_locally Field.(to_string, of_string)]
- end
-
- include Binable.Of_stringable (Arg)
end
+
+ module Tests = struct end
end]
module Tests = struct
@@ -166,19 +134,10 @@ module T0 = struct
Field.gen
[%%if
- curve_size = 298]
-
- let%test "Binable from stringable V1" =
- let known_good_digest = "66e2f2648cf3d2c39465ddbe4f05202a" in
- Ppx_version_runtime.Serialization.check_serialization
- (module Stable.V1)
- field known_good_digest
-
- [%%elif
- curve_size = 753]
+ curve_size = 255]
let%test "Binable from stringable V1" =
- let known_good_digest = "0e586911e7deaf7e5b49c801bf248c92" in
+ let known_good_digest = "8fffa8b873e2f0600ad8327fa5423859" in
Ppx_version_runtime.Serialization.check_serialization
(module Stable.V1)
field known_good_digest
diff --git a/src/lib/coda_base/fee_excess.ml b/src/lib/coda_base/fee_excess.ml
index b569a4145e5..5b2efd50d9a 100644
--- a/src/lib/coda_base/fee_excess.ml
+++ b/src/lib/coda_base/fee_excess.ml
@@ -178,6 +178,13 @@ let to_input_checked {fee_token_l; fee_excess_l; fee_token_r; fee_excess_r} =
; fee_token_r
; Fee.Signed.Checked.to_input fee_excess_r ]
+let assert_equal_checked (t1 : var) (t2 : var) =
+ Checked.all_unit
+ [ Token_id.Checked.Assert.equal t1.fee_token_l t2.fee_token_l
+ ; Fee.Signed.Checked.assert_equal t1.fee_excess_l t2.fee_excess_l
+ ; Token_id.Checked.Assert.equal t1.fee_token_r t2.fee_token_r
+ ; Fee.Signed.Checked.assert_equal t1.fee_excess_r t2.fee_excess_r ]
+
[%%endif]
(** Eliminate a fee excess, either by combining it with one to the left/right,
diff --git a/src/lib/coda_base/gen/gen.ml b/src/lib/coda_base/gen/gen.ml
index 0fe6501a03b..1c46865277f 100644
--- a/src/lib/coda_base/gen/gen.ml
+++ b/src/lib/coda_base/gen/gen.ml
@@ -20,7 +20,7 @@ let keypairs =
(* This key is also at the start of all the release ledgers. It's needed to generate a valid genesis transition *)
(Keypair.of_private_key_exn
(Private_key.of_base58_check_exn
- "6BnSKU5GQjgvEPbM45Qzazsf6M8eCrQdpL7x4jAvA4sr8Ga3FAx8AxdgWcqN7uNGu1SthMgDeMSUvEbkY9a56UxwmJpTzhzVUjfgfFsjJSVp9H1yWHt6H5couPNpF7L7e5u7NBGYnDMhx"))
+ "EKEjf4cZcaUScpV3iAE8r9PaEj4dbPbyUhzWryhhxQqjTTSCfyo8"))
generated_keypairs
let expr ~loc =
diff --git a/src/lib/coda_base/proof.ml b/src/lib/coda_base/proof.ml
index d391ca36597..a48e54019a4 100644
--- a/src/lib/coda_base/proof.ml
+++ b/src/lib/coda_base/proof.ml
@@ -2,56 +2,24 @@
"/src/config.mlh"]
open Core_kernel
-open Snark_params
+open Pickles_types
+
+let blockchain_dummy = Dummy_values.blockchain_proof
+
+let transaction_dummy = Dummy_values.transaction_proof
+
+module T = Pickles.Proof.Make (Nat.N2) (Nat.N2)
[%%versioned_binable
module Stable = struct
module V1 = struct
- type t = Tock.Proof.t [@@deriving version {asserted}]
+ type t = T.t [@@deriving sexp, bin_io, version {asserted}, yojson, compare]
let to_latest = Fn.id
-
- module T = struct
- type nonrec t = t
-
- let to_string = Binable.to_string (module Tock_backend.Proof)
-
- let of_string = Binable.of_string (module Tock_backend.Proof)
- end
-
- (* TODO: Figure out what the right thing to do is for conversion failures *)
- include Binable.Of_stringable (T)
- include Sexpable.Of_stringable (T)
-
- let compare a b = String.compare (T.to_string a) (T.to_string b)
-
- module Base58_check = Base58_check.Make (struct
- let version_byte = Base58_check.Version_bytes.proof
-
- let description = "Tock proof"
- end)
-
- let to_yojson s = `String (Base58_check.encode (T.to_string s))
-
- let of_yojson = function
- | `String s -> (
- match Base58_check.decode s with
- | Ok decoded ->
- Ok (T.of_string decoded)
- | Error e ->
- Error
- (sprintf "Proof.of_yojson, bad Base58Check: %s"
- (Error.to_string_hum e)) )
- | _ ->
- Error "Proof.of_yojson expected `String"
end
end]
-type t = Stable.Latest.t
-
-let dummy = Tock.Proof.dummy
-
-include Sexpable.Of_stringable (Stable.Latest.T)
+type t = Stable.Latest.t [@@deriving sexp, yojson, compare]
[%%define_locally
Stable.Latest.(to_yojson, of_yojson)]
@@ -63,21 +31,11 @@ let%test_module "proof-tests" =
*)
[%%if
- curve_size = 298]
-
- let%test "proof serialization v1" =
- let proof = Tock_backend.Proof.get_dummy () in
- let known_good_digest = "7b2f3495a9b190a72e134bc5a5c7d53f" in
- Ppx_version_runtime.Serialization.check_serialization
- (module Stable.V1)
- proof known_good_digest
-
- [%%elif
- curve_size = 753]
+ curve_size = 255]
let%test "proof serialization v1" =
- let proof = Tock_backend.Proof.get_dummy () in
- let known_good_digest = "4e54b20026fe9e66fcb432ff6772bd7c" in
+ let proof = blockchain_dummy in
+ let known_good_digest = "a02b4f4ad38bc5c0a51da3a39c3673b4" in
Ppx_version_runtime.Serialization.check_serialization
(module Stable.V1)
proof known_good_digest
diff --git a/src/lib/coda_base/proof.mli b/src/lib/coda_base/proof.mli
index df7f2124623..2d5225edc74 100644
--- a/src/lib/coda_base/proof.mli
+++ b/src/lib/coda_base/proof.mli
@@ -1,8 +1,10 @@
-open Snark_params
+open Pickles_types
-type t = Tock.Proof.t [@@deriving sexp, yojson]
+type t = (Nat.N2.n, Nat.N2.n) Pickles.Proof.t [@@deriving sexp, yojson]
-val dummy : Tock.Proof.t
+val blockchain_dummy : t
+
+val transaction_dummy : t
[%%versioned:
module Stable : sig
diff --git a/src/lib/coda_base/signature.ml b/src/lib/coda_base/signature.ml
index ccee14d5555..bac80b81fbb 100644
--- a/src/lib/coda_base/signature.ml
+++ b/src/lib/coda_base/signature.ml
@@ -40,27 +40,14 @@ module Stable = struct
module Tests = struct
[%%if
- curve_size = 298]
+ curve_size = 255]
- let%test "signature serialization v1 (curve_size=298)" =
+ let%test "signature serialization v1 (curve_size=255)" =
let signature =
Quickcheck.random_value
~seed:(`Deterministic "signature serialization") V1.gen
in
- let known_good_digest = "5581d593702a09f4418fe46bde1ca116" in
- Ppx_version_runtime.Serialization.check_serialization
- (module V1)
- signature known_good_digest
-
- [%%elif
- curve_size = 753]
-
- let%test "signature serialization v1 (curve_size=753)" =
- let signature =
- Quickcheck.random_value
- ~seed:(`Deterministic "signature serialization") V1.gen
- in
- let known_good_digest = "7cc56fd93cef313e1eef9fc83f55aedb" in
+ let known_good_digest = "b991865dd2ff76596c470a72a4282cbd" in
Ppx_version_runtime.Serialization.check_serialization
(module V1)
signature known_good_digest
diff --git a/src/lib/coda_base/transition_system.ml b/src/lib/coda_base/transition_system.ml
deleted file mode 100644
index 2e14ac59658..00000000000
--- a/src/lib/coda_base/transition_system.ml
+++ /dev/null
@@ -1,320 +0,0 @@
-open Core_kernel
-open Util
-open Snark_params
-
-module type S = sig
- open Tick
-
- module Update : Snarkable.S
-
- module State : sig
- module Hash : sig
- type t [@@deriving sexp]
-
- type var
-
- val typ : (var, t) Typ.t
-
- val var_to_field : var -> Field.Var.t
-
- val equal_var : var -> var -> (Boolean.var, _) Checked.t
- end
-
- module Body_hash : sig
- type t [@@deriving sexp]
-
- type var
-
- val typ : (var, t) Typ.t
-
- val var_to_field : var -> Field.Var.t
- end
-
- type var
-
- type value [@@deriving sexp]
-
- val typ :
- constraint_constants:Genesis_constants.Constraint_constants.t
- -> (var, value) Typ.t
-
- module Checked : sig
- val hash : var -> (Hash.var * Body_hash.var, _) Checked.t
-
- val is_base_case : var -> (Boolean.var, _) Checked.t
-
- val update :
- logger:Logger.t
- -> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> Hash.var * Body_hash.var * var
- (*Previous state hash, previous state body hash, previous state*)
- -> Update.var
- -> (Hash.var * var * [`Success of Boolean.var], _) Checked.t
- end
- end
-end
-
-module type Tick_keypair_intf = sig
- val keys : Tick.Keypair.t
-end
-
-module type Tock_keypair_intf = sig
- val keys : Tock.Keypair.t
-end
-
-let step_input () = Tick.Data_spec.[Tick.Field.typ]
-
-let step_input_size = Tick.Data_spec.size (step_input ())
-
-(* Someday:
- Tighten this up. Doing this with all these equalities is kind of a hack, but
- doing it right required an annoying change to the bits intf. *)
-module Make (Digest : sig
- module Tick :
- Tick.Snarkable.Bits.Lossy
- with type Packed.var = Tick.Field.Var.t
- and type Packed.value = Random_oracle.Digest.t
-end)
-(System : S) =
-struct
- module Step_base = struct
- open System
-
- module Prover_state = struct
- type t =
- { wrap_vk: Tock.Verification_key.t
- ; prev_proof: Tock.Proof.t
- ; prev_state: State.value
- ; genesis_state_hash: State.Hash.t
- ; expected_next_state: State.value option
- ; update: Update.value }
- [@@deriving fields]
- end
-
- open Tick
- open Let_syntax
-
- let input = step_input
-
- let wrap_vk_length = 11324
-
- let wrap_vk_typ = Typ.list ~length:wrap_vk_length Boolean.typ
-
- module Verifier = Tick.Verifier
-
- let wrap_input_size = Tock.Data_spec.size [Wrap_input.typ]
-
- let wrap_vk_triple_length =
- Verifier.Verification_key.summary_length_in_bits
- ~twist_extension_degree:3 ~input_size:wrap_input_size
- |> bit_length_to_triple_length
-
- let hash_vk vk =
- make_checked (fun () ->
- Random_oracle.Checked.update
- ~state:
- (Random_oracle.State.map Hash_prefix.transition_system_snark
- ~f:Snark_params.Tick.Field.Var.constant)
- (Verifier.Verification_key.to_field_elements vk) )
-
- let compute_top_hash wrap_vk_state state_hash =
- make_checked (fun () ->
- Random_oracle.Checked.(
- update ~state:wrap_vk_state [|State.Hash.var_to_field state_hash|]
- |> digest) )
-
- let%snarkydef prev_state_valid ~proof_level wrap_vk_section wrap_vk
- prev_state_hash =
- match proof_level with
- | Genesis_constants.Proof_level.Full ->
- (* TODO: Should build compositionally on the prev_state hash (instead of converting to bits) *)
- let%bind prev_top_hash =
- compute_top_hash wrap_vk_section prev_state_hash
- >>= Wrap_input.Checked.tick_field_to_scalars
- in
- let%bind precomp =
- Verifier.Verification_key.Precomputation.create wrap_vk
- in
- let%bind proof =
- exists Verifier.Proof.typ
- ~compute:
- As_prover.(
- map get_state
- ~f:
- (Fn.compose Verifier.proof_of_backend_proof
- Prover_state.prev_proof))
- in
- (* true if not with_snark *)
- Verifier.verify wrap_vk precomp prev_top_hash proof
- | Check | None ->
- return Boolean.true_
-
- let exists' typ ~f = exists typ ~compute:As_prover.(map get_state ~f)
-
- let%snarkydef main ~constraint_constants ~(logger : Logger.t) ~proof_level
- (top_hash : Digest.Tick.Packed.var) =
- let%bind prev_state =
- exists' (State.typ ~constraint_constants) ~f:Prover_state.prev_state
- and update = exists' Update.typ ~f:Prover_state.update in
- let%bind prev_state_hash, prev_state_body_hash =
- State.Checked.hash prev_state
- in
- let%bind next_state_hash, next_state, `Success success =
- with_label __LOC__
- (State.Checked.update ~logger ~proof_level ~constraint_constants
- (prev_state_hash, prev_state_body_hash, prev_state)
- update)
- in
- let%bind wrap_vk =
- exists' (Verifier.Verification_key.typ ~input_size:wrap_input_size)
- ~f:(fun {Prover_state.wrap_vk; _} ->
- Verifier.vk_of_backend_vk wrap_vk )
- in
- let%bind wrap_vk_section = hash_vk wrap_vk in
- let%bind next_top_hash =
- with_label __LOC__
- ((* We could be reusing the intermediate state of the hash on sh here instead of
- hashing anew *)
- compute_top_hash wrap_vk_section next_state_hash)
- in
- let%bind () =
- [%with_label "Check top hashes match as prover"]
- (as_prover
- As_prover.(
- Let_syntax.(
- let%bind prover_state = get_state in
- match Prover_state.expected_next_state prover_state with
- | Some expected_next_state ->
- let%bind in_snark_next_state =
- read (State.typ ~constraint_constants) next_state
- in
- let%bind next_top_hash = read Field.typ next_top_hash in
- let%bind top_hash = read Field.typ top_hash in
- let updated = State.sexp_of_value in_snark_next_state in
- let original = State.sexp_of_value expected_next_state in
- if Field.equal next_top_hash top_hash then return ()
- else
- let diff =
- Sexp_diff_kernel.Algo.diff ~original ~updated ()
- in
- [%log fatal]
- "Out-of-SNARK and in-SNARK calculations of the next \
- top hash differ"
- ~metadata:
- [ ( "state_sexp_diff"
- , `String
- (Sexp_diff_kernel.Display
- .display_as_plain_string diff) ) ] ;
- (* Fail here: the error raised in the snark below is
- strictly less informative, displaying only the hashes
- and their representation as constraint system
- variables.
- *)
- failwithf
- !"Out-of-SNARK and in-SNARK calculations of the next \
- top hash differ:\n\
- out of SNARK: %{sexp: Field.t}\n\
- in SNARK: %{sexp: Field.t}\n\n\
- (Caution: in the below, the fields of the \
- non-SNARK part of the staged ledger hash -- namely \
- ledger_hash, aux_hash, and pending_coinbase_aux -- \
- are replaced with dummy values in the snark, and \
- will necessarily differ.)\n\
- Out of SNARK state:\n\
- %{sexp: State.value}\n\n\
- In SNARK state:\n\
- %{sexp: State.value}"
- top_hash next_top_hash expected_next_state
- in_snark_next_state ()
- | None ->
- [%log error]
- "From the current prover state, got None for the \
- expected next state, which should be true only when \
- calculating precomputed values" ;
- return ())))
- in
- let%bind () =
- with_label __LOC__ Field.Checked.Assert.(equal next_top_hash top_hash)
- in
- let%bind prev_state_valid =
- prev_state_valid ~proof_level wrap_vk_section wrap_vk prev_state_hash
- in
- let%bind inductive_case_passed =
- with_label __LOC__ Boolean.(prev_state_valid && success)
- in
- let%bind is_base_case = State.Checked.is_base_case next_state in
- let%bind () =
- as_prover
- As_prover.(
- Let_syntax.(
- let%map prev_valid = read Boolean.typ prev_state_valid
- and success = read Boolean.typ success
- and is_base_case = read Boolean.typ is_base_case in
- let result = (prev_valid && success) || is_base_case in
- [%log trace]
- "transition system debug state: (previous valid=$prev_valid \
- ∧ update success=$success) ∨ base case=$is_base_case = \
- $result"
- ~metadata:
- [ ("prev_valid", `Bool prev_valid)
- ; ("success", `Bool success)
- ; ("is_base_case", `Bool is_base_case)
- ; ("result", `Bool result) ]))
- in
- with_label __LOC__
- (Boolean.Assert.any [is_base_case; inductive_case_passed])
- end
-
- module Step (Tick_keypair : Tick_keypair_intf) = struct
- include Step_base
- include Tick_keypair
- end
-
- module type Step_vk_intf = sig
- val verification_key : Tick.Verification_key.t
- end
-
- module Wrap_base (Step_vk : Step_vk_intf) = struct
- open Tock
-
- let input = Tock.Data_spec.[Wrap_input.typ]
-
- module Verifier = Tock.Groth_verifier
-
- module Prover_state = struct
- type t = {proof: Tick.Proof.t} [@@deriving fields]
- end
-
- let step_vk = Verifier.vk_of_backend_vk Step_vk.verification_key
-
- let step_vk_precomp =
- Verifier.Verification_key.Precomputation.create_constant step_vk
-
- let step_vk_constant = Verifier.constant_vk step_vk
-
- let%snarkydef main (input : Wrap_input.var) =
- let%bind result =
- (* The use of choose_preimage here is justified since we feed it to the verifier, which doesn't
- depend on which unpacking is provided. *)
- let%bind input = Wrap_input.Checked.to_scalar input in
- let%bind proof =
- exists Verifier.Proof.typ
- ~compute:
- As_prover.(
- map get_state
- ~f:
- (Fn.compose Verifier.proof_of_backend_proof
- Prover_state.proof))
- in
- Verifier.verify step_vk_constant step_vk_precomp [input] proof
- in
- with_label __LOC__ (Boolean.Assert.is_true result)
- end
-
- module Wrap (Step_vk : Step_vk_intf) (Tock_keypair : Tock_keypair_intf) =
- struct
- include Wrap_base (Step_vk)
- include Tock_keypair
- end
-end
diff --git a/src/lib/coda_base/user_command_memo.ml b/src/lib/coda_base/user_command_memo.ml
index 7a168c489f7..114c829b7dc 100644
--- a/src/lib/coda_base/user_command_memo.ml
+++ b/src/lib/coda_base/user_command_memo.ml
@@ -163,8 +163,8 @@ let to_bits t = Fold_lib.Fold.to_list (fold_bits t)
[%%ifdef
consensus_mechanism]
-module Boolean = Tick0.Boolean
-module Typ = Tick0.Typ
+module Boolean = Tick.Boolean
+module Typ = Tick.Typ
(* the code below is much the same as in Random_oracle.Digest; tag and length bytes
make it a little different
diff --git a/src/lib/coda_base/user_command_memo.mli b/src/lib/coda_base/user_command_memo.mli
index 93a8a11a16b..1b6b6625510 100644
--- a/src/lib/coda_base/user_command_memo.mli
+++ b/src/lib/coda_base/user_command_memo.mli
@@ -36,7 +36,7 @@ module Checked : sig
end
(** typ representation *)
-val typ : (Checked.t, t) Curve_choice.Tick0.Typ.t
+val typ : (Checked.t, t) Typ.t
[%%endif]
diff --git a/src/lib/coda_graphql/coda_graphql.ml b/src/lib/coda_graphql/coda_graphql.ml
index 79cbb53c26d..446ab8d92f6 100644
--- a/src/lib/coda_graphql/coda_graphql.ml
+++ b/src/lib/coda_graphql/coda_graphql.ml
@@ -319,11 +319,11 @@ module Types = struct
obj "SignedFee" ~doc:"Signed fee" ~fields:(fun _ ->
[ field "sign" ~typ:(non_null sign) ~doc:"+/-"
~args:Arg.[]
- ~resolve:(fun _ fee -> Currency.Fee.Signed.sgn fee)
+ ~resolve:(fun _ fee -> Currency.Amount.Signed.sgn fee)
; field "feeMagnitude" ~typ:(non_null uint64) ~doc:"Fee"
~args:Arg.[]
~resolve:(fun _ fee ->
- Currency.Fee.(to_uint64 (Signed.magnitude fee)) ) ] )
+ Currency.Amount.(to_uint64 (Signed.magnitude fee)) ) ] )
let work_statement =
obj "WorkDescription"
@@ -350,11 +350,13 @@ module Types = struct
({fee_excess= {fee_excess_l; _}; _} :
Transaction_snark.Statement.t) ->
(* TODO: Expose full fee excess data. *)
- fee_excess_l )
+ { fee_excess_l with
+ magnitude= Currency.Amount.of_fee fee_excess_l.magnitude } )
; field "supplyIncrease" ~typ:(non_null uint64)
~doc:"Increase in total coinbase reward "
~args:Arg.[]
- ~resolve:(fun _ {Transaction_snark.Statement.supply_increase; _} ->
+ ~resolve:
+ (fun _ ({supply_increase; _} : Transaction_snark.Statement.t) ->
Currency.Amount.to_uint64 supply_increase )
; field "workId" ~doc:"Unique identifier for a snark work"
~typ:(non_null int)
@@ -1131,41 +1133,8 @@ module Types = struct
coinbase_receiver ) ] )
let protocol_state_proof : (Coda_lib.t, Proof.t option) typ =
- let display_g1_elem (g1 : Crypto_params.Tick_backend.Inner_curve.t) =
- let x, y = Crypto_params.Tick_backend.Inner_curve.to_affine_exn g1 in
- List.map [x; y] ~f:Crypto_params.Tick0.Field.to_string
- in
- let display_g2_elem (g2 : Curve_choice.Tock_full.G2.t) =
- let open Curve_choice.Tock_full in
- let x, y = G2.to_affine_exn g2 in
- let to_string (fqe : Fqe.t) =
- let vector = Fqe.to_vector fqe in
- List.init (Fq.Vector.length vector) ~f:(fun i ->
- let fq = Fq.Vector.get vector i in
- Crypto_params.Tick0.Field.to_string fq )
- in
- List.map [x; y] ~f:to_string
- in
- let string_list_field ~resolve =
- field
- ~typ:(non_null @@ list (non_null string))
- ~args:Arg.[]
- ~resolve:(fun _ (proof : Proof.t) -> display_g1_elem (resolve proof))
- in
- let string_list_list_field ~resolve =
- field
- ~typ:(non_null @@ list (non_null @@ list @@ non_null string))
- ~args:Arg.[]
- ~resolve:(fun _ (proof : Proof.t) -> display_g2_elem (resolve proof))
- in
- obj "protocolStateProof" ~fields:(fun _ ->
- [ string_list_field "a" ~resolve:(fun (proof : Proof.t) -> proof.a)
- ; string_list_list_field "b" ~resolve:(fun (proof : Proof.t) -> proof.b)
- ; string_list_field "c" ~resolve:(fun (proof : Proof.t) -> proof.c)
- ; string_list_list_field "delta_prime"
- ~resolve:(fun (proof : Proof.t) -> proof.delta_prime)
- ; string_list_field "z" ~resolve:(fun (proof : Proof.t) -> proof.z) ]
- )
+ (* TODO *)
+ obj "protocolStateProof" ~fields:(fun _ -> [])
let block :
( Coda_lib.t
diff --git a/src/lib/coda_lib/coda_lib.ml b/src/lib/coda_lib/coda_lib.ml
index e56337db534..c87c7465e51 100644
--- a/src/lib/coda_lib/coda_lib.ml
+++ b/src/lib/coda_lib/coda_lib.ml
@@ -137,7 +137,7 @@ module Snark_worker = struct
let run_process ~logger ~proof_level client_port kill_ivar num_threads =
let env =
Option.map
- ~f:(fun num -> `Extend [("OMP_NUM_THREADS", string_of_int num)])
+ ~f:(fun num -> `Extend [("RAYON_NUM_THREADS", string_of_int num)])
num_threads
in
let%map snark_worker_process =
diff --git a/src/lib/coda_state/dune b/src/lib/coda_state/dune
index bec68070b72..5c5d2e9f641 100644
--- a/src/lib/coda_state/dune
+++ b/src/lib/coda_state/dune
@@ -3,5 +3,5 @@
(public_name coda_state)
(inline_tests)
(preprocessor_deps ../../config.mlh)
- (preprocess (pps ppx_coda ppx_version ppx_snarky ppx_optcomp ppx_bin_prot ppx_compare ppx_sexp_conv ppx_hash ppx_fields_conv ppx_let ppx_inline_test ppx_assert ppx_deriving.std ppx_deriving_yojson h_list.ppx))
+ (preprocess (pps ppx_custom_printf ppx_coda ppx_version ppx_snarky ppx_optcomp ppx_bin_prot ppx_compare ppx_sexp_conv ppx_hash ppx_fields_conv ppx_let ppx_inline_test ppx_assert ppx_deriving.std ppx_deriving_yojson h_list.ppx))
(libraries core_kernel coda_base snark_params consensus test_genesis_ledger bitstring_lib fold_lib tuple_lib with_hash))
diff --git a/src/lib/coda_state/snark_transition.ml b/src/lib/coda_state/snark_transition.ml
index 08355c3cd80..1b7ef3f7de7 100644
--- a/src/lib/coda_state/snark_transition.ml
+++ b/src/lib/coda_state/snark_transition.ml
@@ -7,16 +7,12 @@ module Poly = struct
module V1 = struct
type ( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'producer_pk
, 'pending_coinbase_action )
t =
{ blockchain_state: 'blockchain_state
; consensus_transition: 'consensus_transition
- ; sok_digest: 'sok_digest
- ; supply_increase: 'amount
- ; ledger_proof: Proof.Stable.V1.t option
; coinbase_receiver: 'producer_pk
; coinbase_amount: 'amount
; pending_coinbase_action: 'pending_coinbase_action }
@@ -26,23 +22,18 @@ module Poly = struct
type ( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'producer_pk
, 'pending_coinbase_action )
t =
( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'producer_pk
, 'pending_coinbase_action )
Stable.Latest.t =
{ blockchain_state: 'blockchain_state
; consensus_transition: 'consensus_transition
- ; sok_digest: 'sok_digest
- ; supply_increase: 'amount
- ; ledger_proof: Proof.t option
; coinbase_receiver: 'producer_pk
; coinbase_amount: 'amount
; pending_coinbase_action: 'pending_coinbase_action }
@@ -56,7 +47,6 @@ module Value = struct
type t =
( Blockchain_state.Value.Stable.V1.t
, Consensus.Data.Consensus_transition.Value.Stable.V1.t
- , Sok_message.Digest.Stable.V1.t
, Currency.Amount.Stable.V1.t
, Signature_lib.Public_key.Compressed.Stable.V1.t
, Pending_coinbase.Update.Action.Stable.V1.t )
@@ -74,9 +64,6 @@ end
Poly.
( blockchain_state
, consensus_transition
- , ledger_proof
- , sok_digest
- , supply_increase
, coinbase_receiver
, coinbase_amount
, pending_coinbase_action
@@ -88,20 +75,15 @@ type value = Value.t
type var =
( Blockchain_state.var
, Consensus.Data.Consensus_transition.var
- , Sok_message.Digest.Checked.t
, Currency.Amount.var
, Signature_lib.Public_key.Compressed.var
, Pending_coinbase.Update.Action.var )
Poly.t
-let create_value ?(sok_digest = Sok_message.Digest.default) ?ledger_proof
- ~supply_increase ~blockchain_state ~consensus_transition ~coinbase_receiver
+let create_value ~blockchain_state ~consensus_transition ~coinbase_receiver
~coinbase_amount ~pending_coinbase_action () : Value.t =
{ blockchain_state
; consensus_transition
- ; ledger_proof
- ; sok_digest
- ; supply_increase
; coinbase_receiver
; coinbase_amount
; pending_coinbase_action }
@@ -114,32 +96,16 @@ let genesis ~constraint_constants ~genesis_ledger : value =
~snarked_next_available_token:
(Ledger.next_available_token genesis_ledger)
; consensus_transition= Consensus.Data.Consensus_transition.genesis
- ; supply_increase= Currency.Amount.zero
- ; sok_digest=
- Sok_message.digest
- { fee= Currency.Fee.zero
- ; prover=
- Account.public_key (List.hd_exn (Ledger.to_list genesis_ledger)) }
- ; ledger_proof= None
; coinbase_receiver= Signature_lib.Public_key.Compressed.empty
; coinbase_amount= Currency.Amount.zero
; pending_coinbase_action= Pending_coinbase.Update.Action.Update_none }
let typ =
let open Snark_params.Tick.Typ in
- let ledger_proof =
- { store= Store.return
- ; read= Read.return
- ; check= (fun _ -> Snark_params.Tick.Checked.return ())
- ; alloc= Alloc.return None }
- in
of_hlistable ~var_to_hlist:to_hlist ~var_of_hlist:of_hlist
~value_to_hlist:to_hlist ~value_of_hlist:of_hlist
[ Blockchain_state.typ
; Consensus.Data.Consensus_transition.typ
- ; Sok_message.Digest.typ
- ; Currency.Amount.typ
- ; ledger_proof
; Signature_lib.Public_key.Compressed.typ
; Currency.Amount.typ
; Pending_coinbase.Update.Action.typ ]
diff --git a/src/lib/coda_state/snark_transition.mli b/src/lib/coda_state/snark_transition.mli
index 257c5487e52..a34638ef97e 100644
--- a/src/lib/coda_state/snark_transition.mli
+++ b/src/lib/coda_state/snark_transition.mli
@@ -4,16 +4,12 @@ open Coda_base
module Poly : sig
type ( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'producer_pk
, 'pending_coinbase_action )
t =
{ blockchain_state: 'blockchain_state
; consensus_transition: 'consensus_transition
- ; sok_digest: 'sok_digest
- ; supply_increase: 'amount
- ; ledger_proof: Proof.Stable.V1.t option
; coinbase_receiver: 'producer_pk
; coinbase_amount: 'amount
; pending_coinbase_action: 'pending_coinbase_action }
@@ -24,7 +20,6 @@ module Poly : sig
module V1 : sig
type ( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'producer_pk
, 'pending_coinbase_action )
@@ -36,14 +31,12 @@ module Poly : sig
end
with type ( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'producer_pk
, 'pending_coinbase_action )
V1.t =
( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'producer_pk
, 'pending_coinbase_action )
@@ -56,7 +49,6 @@ module Value : sig
type t =
( Blockchain_state.Value.Stable.V1.t
, Consensus.Data.Consensus_transition.Value.Stable.V1.t
- , Sok_message.Digest.Stable.V1.t
, Currency.Amount.Stable.V1.t
, Signature_lib.Public_key.Compressed.Stable.V1.t
, Pending_coinbase.Update.Action.Stable.V1.t )
@@ -75,7 +67,6 @@ type value = Value.t
type var =
( Blockchain_state.var
, Consensus.Data.Consensus_transition.var
- , Sok_message.Digest.Checked.t
, Currency.Amount.var
, Signature_lib.Public_key.Compressed.var
, Pending_coinbase.Update.Action.var )
@@ -85,10 +76,7 @@ include
Snark_params.Tick.Snarkable.S with type value := Value.t and type var := var
val create_value :
- ?sok_digest:Sok_message.Digest.t
- -> ?ledger_proof:Proof.t
- -> supply_increase:Currency.Amount.t
- -> blockchain_state:Blockchain_state.Value.t
+ blockchain_state:Blockchain_state.Value.t
-> consensus_transition:Consensus.Data.Consensus_transition.Value.Stable.V1.t
-> coinbase_receiver:Signature_lib.Public_key.Compressed.t
-> coinbase_amount:Currency.Amount.t
@@ -101,21 +89,15 @@ val genesis :
-> genesis_ledger:Ledger.t Lazy.t
-> Value.t
-val blockchain_state :
- ('blockchain_state, _, _, _, _, _) Poly.t -> 'blockchain_state
-
val consensus_transition :
- (_, 'consensus_transition, _, _, _, _) Poly.t -> 'consensus_transition
-
-val sok_digest : (_, _, 'sok_digest, _, _, _) Poly.t -> 'sok_digest
-
-val supply_increase : (_, _, _, 'amount, _, _) Poly.t -> 'amount
+ (_, 'consensus_transition, _, _, _) Poly.t -> 'consensus_transition
-val coinbase_amount : (_, _, _, 'amount, _, _) Poly.t -> 'amount
+val coinbase_amount : (_, _, 'amount, _, _) Poly.t -> 'amount
-val ledger_proof : _ Poly.t -> Proof.t option
-
-val coinbase_receiver : (_, _, _, _, 'producer_pk, _) Poly.t -> 'producer_pk
+val coinbase_receiver : (_, _, _, 'producer_pk, _) Poly.t -> 'producer_pk
val pending_coinbase_action :
- (_, _, _, _, _, 'pending_coinbase_action) Poly.t -> 'pending_coinbase_action
+ (_, _, _, _, 'pending_coinbase_action) Poly.t -> 'pending_coinbase_action
+
+val blockchain_state :
+ ('blockchain_state, _, _, _, _) Poly.t -> 'blockchain_state
diff --git a/src/lib/coda_transition/internal_transition.ml b/src/lib/coda_transition/internal_transition.ml
index bb86e443e8b..2fbc70269d9 100644
--- a/src/lib/coda_transition/internal_transition.ml
+++ b/src/lib/coda_transition/internal_transition.ml
@@ -16,12 +16,15 @@ module type S = sig
val create :
snark_transition:Snark_transition.Value.t
+ -> ledger_proof:Ledger_proof.t option
-> prover_state:Consensus.Data.Prover_state.t
-> staged_ledger_diff:Staged_ledger_diff.t
-> t
val snark_transition : t -> Snark_transition.Value.t
+ val ledger_proof : t -> Ledger_proof.t option
+
val prover_state : t -> Consensus.Data.Prover_state.t
val staged_ledger_diff : t -> Staged_ledger_diff.t
@@ -32,6 +35,7 @@ module Stable = struct
module V1 = struct
type t =
{ snark_transition: Snark_transition.Value.Stable.V1.t
+ ; ledger_proof: Ledger_proof.Stable.V1.t option
; prover_state: Consensus.Data.Prover_state.Stable.V1.t
; staged_ledger_diff: Staged_ledger_diff.Stable.V1.t }
@@ -42,9 +46,13 @@ end]
(* bin_io, version omitted *)
type t = Stable.Latest.t =
{ snark_transition: Snark_transition.Value.t
+ ; ledger_proof: Ledger_proof.t option
; prover_state: Consensus.Data.Prover_state.t
; staged_ledger_diff: Staged_ledger_diff.t }
[@@deriving sexp, fields, to_yojson]
-let create ~snark_transition ~prover_state ~staged_ledger_diff =
- {Stable.Latest.snark_transition; staged_ledger_diff; prover_state}
+let create ~snark_transition ~ledger_proof ~prover_state ~staged_ledger_diff =
+ { Stable.Latest.snark_transition
+ ; ledger_proof
+ ; staged_ledger_diff
+ ; prover_state }
diff --git a/src/lib/codable/codable.ml b/src/lib/codable/codable.ml
index 15486e455a0..1138e38795b 100644
--- a/src/lib/codable/codable.ml
+++ b/src/lib/codable/codable.ml
@@ -92,7 +92,7 @@ struct
let of_base58_check s =
let open Or_error.Let_syntax in
let%bind decoded = Base58_check.decode s in
- Ok (Binable.of_string (module T) decoded)
+ Or_error.try_with (fun () -> Binable.of_string (module T) decoded)
let of_base58_check_exn s = of_base58_check s |> Or_error.ok_exn
diff --git a/src/lib/consensus/intf.ml b/src/lib/consensus/intf.ml
index ff5d026cfdb..a70db63e73c 100644
--- a/src/lib/consensus/intf.ml
+++ b/src/lib/consensus/intf.ml
@@ -198,7 +198,6 @@ module type Snark_transition = sig
module Poly : sig
type ( 'blockchain_state
, 'consensus_transition
- , 'sok_digest
, 'amount
, 'public_key
, 'pending_coinbase_action )
@@ -213,14 +212,16 @@ module type Snark_transition = sig
type var =
( blockchain_state_var
, consensus_transition_var
- , Sok_message.Digest.Checked.t
, Amount.var
, Public_key.Compressed.var
, Pending_coinbase.Update.Action.var )
Poly.t
val consensus_transition :
- (_, 'consensus_transition, _, _, _, _) Poly.t -> 'consensus_transition
+ (_, 'consensus_transition, _, _, _) Poly.t -> 'consensus_transition
+
+ val blockchain_state :
+ ('blockchain_state, _, _, _, _) Poly.t -> 'blockchain_state
end
module type State_hooks = sig
diff --git a/src/lib/consensus/proof_of_stake.ml b/src/lib/consensus/proof_of_stake.ml
index 76e43f472e6..2ea9c1362d6 100644
--- a/src/lib/consensus/proof_of_stake.ml
+++ b/src/lib/consensus/proof_of_stake.ml
@@ -71,7 +71,7 @@ let compute_delegatee_table_sparse_ledger keys ledger =
module Segment_id = Coda_numbers.Nat.Make32 ()
-module Typ = Crypto_params.Tick0.Typ
+module Typ = Snark_params.Tick.Typ
module Configuration = struct
[%%versioned
@@ -490,9 +490,10 @@ module Data = struct
type ('global_slot, 'epoch_seed, 'delegator) t =
{global_slot: 'global_slot; seed: 'epoch_seed; delegator: 'delegator}
- [@@deriving hlist]
+ [@@deriving sexp, hlist]
type value = (Global_slot.t, Epoch_seed.t, Coda_base.Account.Index.t) t
+ [@@deriving sexp]
type var =
( Global_slot.Checked.t
@@ -525,10 +526,9 @@ module Data = struct
~value_to_hlist:to_hlist ~value_of_hlist:of_hlist
let hash_to_group ~constraint_constants msg =
- Group_map.to_group
- (Random_oracle.hash ~init:Coda_base.Hash_prefix.vrf_message
- (Random_oracle.pack_input (to_input ~constraint_constants msg)))
- |> Tick.Inner_curve.of_affine
+ Random_oracle.hash ~init:Coda_base.Hash_prefix.vrf_message
+ (Random_oracle.pack_input (to_input ~constraint_constants msg))
+ |> Group_map.to_group |> Tick.Inner_curve.of_affine
module Checked = struct
open Tick
@@ -544,10 +544,10 @@ module Data = struct
let hash_to_group msg =
let%bind input = to_input msg in
Tick.make_checked (fun () ->
- Group_map.Checked.to_group
- (Random_oracle.Checked.hash
- ~init:Coda_base.Hash_prefix.vrf_message
- (Random_oracle.Checked.pack_input input)) )
+ Random_oracle.Checked.hash
+ ~init:Coda_base.Hash_prefix.vrf_message
+ (Random_oracle.Checked.pack_input input)
+ |> Group_map.Checked.to_group )
end
let gen
@@ -583,22 +583,24 @@ module Data = struct
let description = "Vrf Truncated Output"
end)
- let length_in_bytes = 32
-
- let length_in_bits = 8 * length_in_bytes
-
open Tick
+ let length_in_bits = Int.min 256 (Field.size_in_bits - 2)
+
type var = Boolean.var array
let typ : (var, t) Typ.t =
Typ.array ~length:length_in_bits Boolean.typ
- |> Typ.transport ~there:Blake2.string_to_bits
+ |> Typ.transport
+ ~there:(fun s ->
+ Array.sub (Blake2.string_to_bits s) ~pos:0 ~len:length_in_bits
+ )
~back:Blake2.bits_to_string
- let dummy = String.init length_in_bytes ~f:(fun _ -> '\000')
+ let dummy = String.init 32 ~f:(fun _ -> '\000')
- let to_bits t = Fold.(to_list (string_bits t))
+ let to_bits t =
+ Fold.(to_list (string_bits t)) |> Fn.flip List.take length_in_bits
end
open Tick
@@ -746,7 +748,7 @@ module Data = struct
end
module T =
- Vrf_lib.Integrated.Make (Snark_params.Tick) (Scalar) (Group) (Message)
+ Vrf_lib.Integrated.Make (Tick) (Scalar) (Group) (Message)
(struct
type value = Snark_params.Tick.Field.t
diff --git a/src/lib/crypto_params/crypto_params.ml b/src/lib/crypto_params/crypto_params.ml
index 6e01c7967b9..dbdb2b1ee83 100644
--- a/src/lib/crypto_params/crypto_params.ml
+++ b/src/lib/crypto_params/crypto_params.ml
@@ -1,175 +1,30 @@
-open Core
-include Curve_choice
+let () = Pickles.Backend.Tock.Keypair.set_urs_info Cache_dir.cache
-module Tock_backend = struct
- module Full = Cycle.Mnt6
+let () = Pickles.Backend.Tick.Keypair.set_urs_info Cache_dir.cache
- module Bowe_gabizon = struct
- let bg_salt =
- lazy (Random_oracle.salt (Hash_prefixes.bowe_gabizon_hash :> string))
+module Tock = struct
+ module Full = Pickles.Impls.Wrap
+ module Run = Pickles.Impls.Wrap
- let bg_params () = Lazy.force Group_map_params.params
+ let group_map_params () = Lazy.force Group_map_params.params
- include Snarky.Libsnark.Make_bowe_gabizon
- (Full)
- (Bowe_gabizon_hash.Make (struct
- module Field = Tick0.Field
-
- module Fqe = struct
- type t = Full.Fqe.t
-
- let to_list x =
- let v = Full.Fqe.to_vector x in
- List.init (Field.Vector.length v) ~f:(Field.Vector.get v)
- end
-
- module G1 = Full.G1
- module G2 = Full.G2
-
- let group_map x =
- Group_map.to_group (module Field) ~params:(bg_params ()) x
-
- let hash xs = Random_oracle.hash ~init:(Lazy.force bg_salt) xs
- end))
-
- module Field = Full.Field
- module Bigint = Full.Bigint
- module Var = Full.Var
-
- module R1CS_constraint_system = struct
- include Full.R1CS_constraint_system
-
- let finalize = swap_AB_if_beneficial
- end
-
- let field_size = Full.field_size
- end
-
- include Bowe_gabizon
- module Inner_curve = Cycle.Mnt4.G1
- module Inner_twisted_curve = Cycle.Mnt4.G2
+ include Full.Internal_Basic
+ module Number = Snarky.Number.Make (Full.Internal_Basic)
+ module Enumerable = Snarky.Enumerable.Make (Full.Internal_Basic)
+ module Inner_curve = Pickles.Backend.Tock.Inner_curve
end
-module Tock0 = Snarky.Snark.Make (Tock_backend)
-
-module Wrap_input = struct
- (*
- The input to a Tick snark is always a Tick.field element which is a pedersen hash.
-
- If Tock.field is bigger,
- we have the input to wrapping SNARKs be a single Tock.field element
- (since it just needs to faithfully represent 1 Tick element)
-
- If Tock.field is smaller,
- we have the input to wrapping SNARKs be two field elements
- one of which will be (n - 1) bits and one of which will be 1 bit.
- This should basically cost the same as the above.
-*)
-
- open Bitstring_lib
-
- module type S = sig
- open Tock0
-
- type t
-
- type var
-
- val of_tick_field : Tick0.Field.t -> t
-
- val typ : (var, t) Typ.t
-
- module Checked : sig
- val tick_field_to_scalars :
- Tick0.Field.Var.t
- -> (Tick0.Boolean.var Bitstring.Lsb_first.t list, _) Tick0.Checked.t
-
- val to_scalar : var -> (Boolean.var Bitstring.Lsb_first.t, _) Checked.t
- end
- end
-
- module Tock_field_larger : S = struct
- open Tock0
-
- type var = Field.Var.t
-
- type t = Field.t
-
- let typ = Field.typ
-
- let of_tick_field (x : Tick0.Field.t) : t =
- Tock0.Field.project (Tick0.Field.unpack x)
-
- module Checked = struct
- let tick_field_to_scalars x =
- let open Tick0 in
- let open Let_syntax in
- Field.Checked.choose_preimage_var x ~length:Field.size_in_bits
- >>| fun x -> [Bitstring.Lsb_first.of_list x]
-
- let to_scalar x =
- let open Let_syntax in
- Field.Checked.choose_preimage_var ~length:Tick0.Field.size_in_bits x
- >>| Bitstring.Lsb_first.of_list
- end
- end
-
- module Tock_field_smaller : S = struct
- open Tock0
-
- type var = {low_bits: Field.Var.t; high_bit: Boolean.var}
- [@@deriving hlist]
-
- type t = Tick0.Field.t
-
- let spec = Data_spec.[Field.typ; Boolean.typ]
-
- (* This is duplicated. Where to put utility functions? *)
- let split_last_exn =
- let rec go acc x xs =
- match xs with
- | [] ->
- (List.rev acc, x)
- | x' :: xs ->
- go (x :: acc) x' xs
- in
- function
- | [] -> failwith "split_last: Empty list" | x :: xs -> go [] x xs
-
- let of_tick_field (x : Tick0.Field.t) : t = x
-
- let typ : (var, t) Typ.t =
- Typ.of_hlistable spec ~var_to_hlist ~var_of_hlist
- ~value_to_hlist:(fun (x : Tick0.Field.t) ->
- let low_bits, high_bit = split_last_exn (Tick0.Field.unpack x) in
- [Tock0.Field.project low_bits; high_bit] )
- ~value_of_hlist:(fun Snarky.H_list.[low_bits; high_bit] ->
- Tick0.Field.project (Tock0.Field.unpack low_bits @ [high_bit]) )
-
- module Checked = struct
- let tick_field_to_scalars x =
- let open Tick0 in
- let open Let_syntax in
- Field.Checked.choose_preimage_var ~length:Field.size_in_bits x
- >>| fun x ->
- let low_bits, high_bit = split_last_exn x in
- [ Bitstring.Lsb_first.of_list low_bits
- ; Bitstring.Lsb_first.of_list [high_bit] ]
-
- let to_scalar {low_bits; high_bit} =
- let%map low_bits =
- Field.Checked.unpack ~length:(Tick0.Field.size_in_bits - 1) low_bits
- in
- Bitstring.Lsb_first.of_list (low_bits @ [high_bit])
- end
- end
-
- let m =
- if Bigint.(Tock0.Field.size < Tick0.Field.size) then
- (module Tock_field_smaller : S )
- else (module Tock_field_larger : S)
+module Tick = struct
+ module Full = Pickles.Impls.Step
+ module Run = Pickles.Impls.Step
- include (val m)
+ let group_map_params =
+ Group_map.Params.create
+ (module Pickles.Backend.Tock.Field)
+ Pickles.Backend.Tock.Inner_curve.Params.{a; b}
- let size = Tock0.Data_spec.size [typ]
+ include Full.Internal_Basic
+ module Number = Snarky.Number.Make (Full.Internal_Basic)
+ module Enumerable = Snarky.Enumerable.Make (Full.Internal_Basic)
+ module Inner_curve = Pickles.Backend.Tick.Inner_curve
end
diff --git a/src/lib/crypto_params/dune b/src/lib/crypto_params/dune
index c36f23af3cf..7a5ee5573be 100644
--- a/src/lib/crypto_params/dune
+++ b/src/lib/crypto_params/dune
@@ -5,11 +5,12 @@
(library_flags -linkall)
(inline_tests)
(libraries
+ cache_dir
hash_prefixes
group_map
snarky
+ pickles
bignum_bigint
- curve_choice
bowe_gabizon_hash
random_oracle
tuple_lib)
diff --git a/src/lib/crypto_params/gen/dune b/src/lib/crypto_params/gen/dune
index 3afa16c08ec..a90f2ca141d 100644
--- a/src/lib/crypto_params/gen/dune
+++ b/src/lib/crypto_params/gen/dune
@@ -1,7 +1,6 @@
(executable
(name gen)
- (libraries async core crs digestif fold_lib snarky tuple_lib ppxlib group_map
- curve_choice)
+ (libraries pickles async core crs digestif fold_lib snarky tuple_lib ppxlib group_map)
(preprocessor_deps ../../../config.mlh)
(preprocess
(pps ppx_version ppx_bin_prot ppx_optcomp ppxlib.metaquot))
diff --git a/src/lib/crypto_params/gen/gen.ml b/src/lib/crypto_params/gen/gen.ml
index 48f19df0486..5c5bdedac10 100644
--- a/src/lib/crypto_params/gen/gen.ml
+++ b/src/lib/crypto_params/gen/gen.ml
@@ -6,17 +6,17 @@ open Asttypes
open Parsetree
open Longident
open Core
-module Impl = Curve_choice.Tick0
-module Group = Curve_choice.Tick_backend.Inner_curve
+module Impl = Pickles.Impls.Step.Internal_Basic
+module Group = Pickles.Backend.Tick.Inner_curve
let group_map_params =
Group_map.Params.create
- (module Curve_choice.Tick0.Field)
- Curve_choice.Tick_backend.Inner_curve.Coefficients.{a; b}
+ (module Pickles.Backend.Tick.Field)
+ Group.Params.{a; b}
let group_map_params_structure ~loc =
let module T = struct
- type t = Curve_choice.Tick_backend.Field.t Group_map.Params.t
+ type t = Pickles.Backend.Tick.Field.Stable.Latest.t Group_map.Params.t
[@@deriving bin_io_unversioned]
end in
let module E = Ppxlib.Ast_builder.Make (struct
@@ -27,7 +27,8 @@ let group_map_params_structure ~loc =
let params =
lazy
(let module T = struct
- type t = Curve_choice.Tick_backend.Field.t Group_map.Params.t
+ type t =
+ Pickles.Backend.Tick.Field.Stable.Latest.t Group_map.Params.t
[@@deriving bin_io_unversioned]
end in
Core.Binable.of_string
diff --git a/src/lib/crypto_params/init.ml b/src/lib/crypto_params/init.ml
deleted file mode 100644
index eb020ed52de..00000000000
--- a/src/lib/crypto_params/init.ml
+++ /dev/null
@@ -1 +0,0 @@
-include Curve_choice
diff --git a/src/lib/curve_choice/curve_choice.ml b/src/lib/curve_choice/curve_choice.ml
deleted file mode 100644
index 112900d678a..00000000000
--- a/src/lib/curve_choice/curve_choice.ml
+++ /dev/null
@@ -1,69 +0,0 @@
-[%%import
-"/src/config.mlh"]
-
-module Intf = Intf
-
-[%%if
-curve_size = 298]
-
-module Cycle = Snarky.Libsnark.Mnt298
-module Snarkette_tick = Snarkette.Mnt6_80
-module Snarkette_tock = Snarkette.Mnt4_80
-
-[%%elif
-curve_size = 753]
-
-module Cycle = Snarky.Libsnark.Mnt753
-module Snarkette_tick = Snarkette.Mnt6753
-module Snarkette_tock = Snarkette.Mnt4753
-
-[%%else]
-
-[%%show
-curve_size]
-
-[%%error
-"invalid value for \"curve_size\""]
-
-[%%endif]
-
-module Tick_full = Cycle.Mnt4
-module Tock_full = Cycle.Mnt6
-
-module Tick_backend = struct
- module Full = Tick_full
- include Full.Default
-
- module Inner_curve = struct
- include Tock_full.G1
-
- let find_y x =
- let ( + ) = Field.add in
- let ( * ) = Field.mul in
- let y2 = (x * Field.square x) + (Coefficients.a * x) + Coefficients.b in
- if Field.is_square y2 then Some (Field.sqrt y2) else None
-
- let point_near_x x =
- let rec go x = function
- | Some y ->
- of_affine (x, y)
- | None ->
- let x' = Field.(add one x) in
- go x' (find_y x')
- in
- go x (find_y x)
- end
-
- module Inner_twisted_curve = Tock_full.G2
-end
-
-module Tick0 = Snarky.Snark.Make (Tick_backend)
-
-module Runners = struct
- module Tick =
- Snarky.Snark.Run.Make
- (Tick_backend)
- (struct
- type t = unit
- end)
-end
diff --git a/src/lib/curve_choice/curve_choice.mli b/src/lib/curve_choice/curve_choice.mli
deleted file mode 100644
index 05bad53e87f..00000000000
--- a/src/lib/curve_choice/curve_choice.mli
+++ /dev/null
@@ -1,71 +0,0 @@
-module Intf = Intf
-
-module Cycle : sig
- module Mnt4 : Intf.Backend_intf
-
- module Mnt6 :
- Intf.Backend_intf
- with module Common.Field = Mnt4.Fq
- with module Fq = Mnt4.Common.Field
-end
-
-module Snarkette_tick : Intf.Snarkette_tick_intf
-
-module Snarkette_tock : Intf.Snarkette_tock_intf
-
-module Tick_full = Cycle.Mnt4
-module Tock_full = Cycle.Mnt6
-
-module Tick_backend : sig
- module Full = Tick_full
-
- include
- module type of Full.Default
- with module Field = Full.Default.Field
- with module Bigint = Full.Default.Bigint
- with module Proving_key = Full.Default.Proving_key
- with module Verification_key = Full.Default.Verification_key
- with module Keypair = Full.Default.Keypair
- with module Proof = Full.Default.Proof
-
- module Inner_curve : sig
- include
- module type of Tock_full.G1
- with type t = Tock_full.G1.t
- and type Affine.t = Tock_full.G1.Affine.t
- and type Vector.t = Tock_full.G1.Vector.t
-
- val find_y : Field.t -> Field.t option
-
- val point_near_x : Field.t -> t
- end
-
- module Inner_twisted_curve = Tock_full.G2
-end
-
-module Tick0 :
- Snarky.Snark_intf.S
- with type field = Tick_backend.Field.t
- and type Bigint.t = Tick_backend.Bigint.R.t
- and type R1CS_constraint_system.t = Tick_backend.R1CS_constraint_system.t
- and type Var.t = Tick_backend.Var.t
- and type Field.Vector.t = Tick_backend.Field.Vector.t
- and type Verification_key.t = Tick_backend.Verification_key.t
- and type Proving_key.t = Tick_backend.Proving_key.t
- and type Proof.t = Tick_backend.Proof.t
- and type Proof.message = Tick_backend.Proof.message
-
-module Runners : sig
- module Tick :
- Snarky.Snark_intf.Run
- with type field = Tick_backend.Field.t
- and type prover_state = unit
- and type Bigint.t = Tick_backend.Bigint.R.t
- and type R1CS_constraint_system.t = Tick_backend.R1CS_constraint_system.t
- and type Var.t = Tick_backend.Var.t
- and type Field.Constant.Vector.t = Tick_backend.Field.Vector.t
- and type Verification_key.t = Tick_backend.Verification_key.t
- and type Proving_key.t = Tick_backend.Proving_key.t
- and type Proof.t = Tick_backend.Proof.t
- and type Proof.message = Tick_backend.Proof.message
-end
diff --git a/src/lib/curve_choice/dune b/src/lib/curve_choice/dune
deleted file mode 100644
index 0e211d802f0..00000000000
--- a/src/lib/curve_choice/dune
+++ /dev/null
@@ -1,7 +0,0 @@
-(library
- (name curve_choice)
- (public_name curve_choice)
- (libraries snarkette snarky)
- (preprocessor_deps ../../config.mlh)
- (preprocess
- (pps ppx_version ppx_optcomp ppx_sexp_conv ppx_deriving_yojson)))
diff --git a/src/lib/curve_choice/intf.ml b/src/lib/curve_choice/intf.ml
deleted file mode 100644
index daba3d9ad5c..00000000000
--- a/src/lib/curve_choice/intf.ml
+++ /dev/null
@@ -1,799 +0,0 @@
-open Core_kernel
-
-module type Field_intf = sig
- type t
-
- module Vector : Snarky.Vector.S_binable_sexpable with type elt = t
-
- include Snarky.Field_intf.S with type t := t with module Vector := Vector
-
- val typ : t Ctypes.typ
-
- val delete : t -> unit
-end
-
-module type Bigint_intf = sig
- type field
-
- module R : sig
- type t [@@deriving bin_io]
-
- val typ : t Ctypes.typ
-
- val of_decimal_string : string -> t
-
- val of_numeral : string -> base:int -> t
-
- val of_field : field -> t
-
- val of_data : Core.Bigstring.t -> bitcount:int -> t
-
- val length_in_bytes : int
-
- val div : t -> t -> t
-
- val to_field : t -> field
-
- val to_bigstring : t -> Core.Bigstring.t
-
- val compare : t -> t -> int
-
- val test_bit : t -> int -> bool
-
- val find_wnaf : Unsigned.Size_t.t -> t -> Snarky.Long_vector.t
- end
-
- module Q : sig
- type t
-
- val typ : t Ctypes.typ
-
- val test_bit : t -> int -> bool
-
- val find_wnaf : Unsigned.Size_t.t -> t -> Snarky.Long_vector.t
- end
-end
-
-module type Common_intf = sig
- val prefix : string
-
- module Field : Field_intf
-
- module Bigint : Bigint_intf with type field := Field.t
-
- module Var : sig
- type t = Field.t Snarky.Backend_types.Var.t
-
- val typ : t Ctypes_static.typ
-
- val index : t -> int
-
- val create : int -> t
- end
-
- module R1CS_constraint_system : sig
- type t = Field.t Snarky.Backend_types.R1CS_constraint_system.t
-
- val typ : t Ctypes_static.typ
-
- val create : unit -> t
-
- val clear : t -> unit
-
- val finalize : t -> unit
-
- val add_constraint :
- ?label:string
- -> t
- -> Field.t Snarky.Cvar.t Snarky.Constraint.basic
- -> unit
-
- val digest : t -> Core_kernel.Md5.t
-
- val set_primary_input_size : t -> int -> unit
-
- val set_auxiliary_input_size : t -> int -> unit
-
- val get_primary_input_size : t -> int
-
- val get_auxiliary_input_size : t -> int
-
- val to_json :
- t
- -> ([> `Assoc of (string * 'a) list
- | `List of 'a list
- | `String of string ]
- as
- 'a)
-
- val swap_AB_if_beneficial : t -> unit
- end
-
- val field_size : Bigint.R.t
-end
-
-module type Proof_system_intf = sig
- type field_vector
-
- type r1cs_constraint_system
-
- module Proving_key : sig
- type t [@@deriving bin_io]
-
- val func_name : string -> string
-
- val typ : t Ctypes.typ
-
- val is_initialized : t -> [`No of r1cs_constraint_system | `Yes]
-
- val delete : t -> unit
-
- val to_string : t -> string
-
- val of_string : string -> t
-
- val to_bigstring : t -> Bigstring.t
-
- val of_bigstring : Bigstring.t -> t
-
- val set_constraint_system : t -> r1cs_constraint_system -> unit
- end
-
- module Verification_key : sig
- type t
-
- val typ : t Ctypes.typ
-
- val delete : t -> unit
-
- val to_string : t -> string
-
- val of_string : string -> t
-
- val to_bigstring : t -> Bigstring.t
-
- val of_bigstring : Bigstring.t -> t
-
- val size_in_bits : t -> int
-
- val get_dummy : input_size:int -> t
- end
-
- module Keypair : sig
- type t
-
- val typ : t Ctypes.typ
-
- val delete : t -> unit
-
- val pk : t -> Proving_key.t
-
- val vk : t -> Verification_key.t
-
- val create : r1cs_constraint_system -> t
- end
-
- module Proof : sig
- type message = unit
-
- type t
-
- val typ : t Ctypes.typ
-
- val delete : t -> unit
-
- val create :
- ?message:message
- -> Proving_key.t
- -> primary:field_vector
- -> auxiliary:field_vector
- -> t
-
- val verify :
- ?message:message -> t -> Verification_key.t -> field_vector -> bool
-
- val get_dummy : unit -> t
-
- include Binable.S with type t := t
- end
-end
-
-module type Field_extension = sig
- type field_vector
-
- type t [@@deriving bin_io, sexp]
-
- val typ : t Ctypes_static.typ
-
- val delete : t -> unit
-
- val print : t -> unit
-
- val random : unit -> t
-
- val square : t -> t
-
- val sqrt : t -> t
-
- val create_zero : unit -> t
-
- val ( + ) : t -> t -> t
-
- val inv : t -> t
-
- val ( * ) : t -> t -> t
-
- val sub : t -> t -> t
-
- val equal : t -> t -> bool
-
- val to_vector : t -> field_vector
-
- val of_vector : field_vector -> t
-
- val schedule_delete : t -> unit
-end
-
-module type Group_intf = sig
- type fp
-
- type fq
-
- type bigint
-
- module Coefficients : sig
- val a : fq
-
- val b : fq
- end
-
- type t [@@deriving bin_io]
-
- val typ : t Ctypes_static.typ
-
- val add : t -> t -> t
-
- val ( + ) : t -> t -> t
-
- val negate : t -> t
-
- val double : t -> t
-
- val scale : t -> bigint -> t
-
- val scale_field : t -> fp -> t
-
- val zero : t
-
- val one : t
-
- module Affine : sig
- type t = fq * fq [@@deriving bin_io]
- end
-
- val to_affine_exn : t -> Affine.t
-
- val to_affine : t -> Affine.t option
-
- val of_affine : Affine.t -> t
-
- val equal : t -> t -> bool
-
- val random : unit -> t
-
- val delete : t -> unit
-
- val print : t -> unit
-
- val subgroup_check : t -> unit
-
- module Vector : Snarky.Vector.S_binable with type elt := t
-end
-
-module type Backend_intf = sig
- module Common : Common_intf
-
- module Default : sig
- module R1CS_constraint_system : sig
- include module type of Common.R1CS_constraint_system
-
- val finalize : t -> unit
- end
-
- include
- Common_intf
- with module Field = Common.Field
- with module R1CS_constraint_system := R1CS_constraint_system
-
- include
- Proof_system_intf
- with type field_vector := Common.Field.Vector.t
- and type r1cs_constraint_system := R1CS_constraint_system.t
- end
-
- module GM : sig
- module R1CS_constraint_system : sig
- include module type of Common.R1CS_constraint_system
-
- val finalize : t -> unit
- end
-
- include
- Common_intf
- with module Field = Common.Field
- with module R1CS_constraint_system := R1CS_constraint_system
-
- include
- Proof_system_intf
- with type field_vector := Common.Field.Vector.t
- and type r1cs_constraint_system := R1CS_constraint_system.t
- end
-
- include
- Common_intf
- with module Field = Common.Field
- with module Bigint = Common.Bigint
-
- val field_size : Bigint.R.t
-
- module Fq : Field_intf
-
- module Fqk : sig
- type t
-
- val typ : t Ctypes.typ
-
- val delete :
- (t -> unit Snarky.Ctypes_foreign.return) Snarky.Ctypes_foreign.result
-
- val one : t
-
- val to_elts : t -> Fq.Vector.t
- end
-
- module Fqe : Field_extension with type field_vector := Fq.Vector.t
-
- module G1 :
- Group_intf
- with type fp := Field.t
- and type fq := Fq.t
- and type bigint := Bigint.R.t
-
- module G2 :
- Group_intf
- with type fp := Field.t
- and type fq := Fqe.t
- and type bigint := Bigint.R.t
-
- module GM_proof_accessors : sig
- val a : GM.Proof.t -> G1.t
-
- val b : GM.Proof.t -> G2.t
-
- val c : GM.Proof.t -> G1.t
- end
-
- module GM_verification_key_accessors : sig
- val h : GM.Verification_key.t -> G2.t
-
- val g_alpha : GM.Verification_key.t -> G1.t
-
- val h_beta : GM.Verification_key.t -> G2.t
-
- val g_gamma : GM.Verification_key.t -> G1.t
-
- val h_gamma : GM.Verification_key.t -> G2.t
-
- val query : GM.Verification_key.t -> G1.Vector.t
-
- val g_alpha_h_beta : GM.Verification_key.t -> Fqk.t
- end
-
- module Groth16_proof_accessors : sig
- val a : Default.Proof.t -> G1.t
-
- val b : Default.Proof.t -> G2.t
-
- val c : Default.Proof.t -> G1.t
- end
-
- module Groth16 : sig
- module R1CS_constraint_system : sig
- include module type of Common.R1CS_constraint_system
-
- val finalize : t -> unit
- end
-
- include
- Common_intf
- with module Field = Common.Field
- with module R1CS_constraint_system := R1CS_constraint_system
-
- module Verification_key : sig
- type t = Default.Verification_key.t
-
- include module type of Default.Verification_key with type t := t
-
- val delta : t -> G2.t
-
- val query : t -> G1.Vector.t
-
- val alpha_beta : t -> Fqk.t
- end
-
- module Proving_key : sig
- type t = Default.Proving_key.t
-
- include module type of Default.Proving_key with type t := t
- end
-
- module Keypair : sig
- type t = Default.Keypair.t
-
- include module type of Default.Keypair with type t := t
- end
- end
-end
-
-module type Snarkette_elliptic_curve = sig
- module N = Snarkette.Nat
-
- type fq
-
- type t = {x: fq; y: fq; z: fq} [@@deriving bin_io, sexp, yojson]
-
- val zero : t
-
- module Coefficients : sig
- val a : fq
-
- val b : fq
- end
-
- module Affine : sig
- type t = fq * fq
- end
-
- val of_affine : Affine.t -> t
-
- val is_zero : t -> bool
-
- val to_affine_exn : t -> Affine.t
-
- val to_affine : t -> Affine.t option
-
- val is_well_formed : t -> bool
-
- val ( + ) : t -> t -> t
-
- val scale : t -> N.t -> t
-
- val ( * ) : N.t -> t -> t
-
- val negate : t -> t
-
- val ( - ) : t -> t -> t
-
- val one : t
-end
-
-module type Snarkette_GM_processed_verification_key = sig
- type g1
-
- type g2
-
- type fqe
-
- type g1_precomp
-
- type g2_precomp
-
- type verification_key
-
- type t =
- { g_alpha: g1
- ; h_beta: g2
- ; g_alpha_h_beta: fqe
- ; g_gamma_pc: g1_precomp
- ; h_gamma_pc: g2_precomp
- ; h_pc: g2_precomp
- ; query: g1 array }
- [@@deriving bin_io, sexp]
-
- val create : verification_key -> t
-end
-
-module type Snarkette_BG_processed_verification_key = sig
- type g1
-
- type fqe
-
- type g2_precomp
-
- type verification_key
-
- type t = {alpha_beta: fqe * fqe; delta_pc: g2_precomp; query: g1 array}
- [@@deriving bin_io, sexp]
-
- val create : verification_key -> t
-end
-
-module type Snarkette_tick_intf = sig
- module N = Snarkette.Nat
-
- module Fq :
- Snarkette.Fields.Fp_intf with module Nat = N and type t = private N.t
-
- val non_residue : Fq.t
-
- module Fq3 : sig
- include
- Snarkette.Fields.Degree_3_extension_intf
- with module Nat = N
- with type base = Fq.t
-
- val non_residue : Fq.t
-
- val frobenius : t -> int -> t
-
- module Params : sig
- val frobenius_coeffs_c1 : Fq.t array
-
- val frobenius_coeffs_c2 : Fq.t array
- end
- end
-
- module Fq2 :
- Snarkette.Fields.Degree_2_extension_intf
- with module Nat = N
- with type base = Fq.t
-
- module Fq6 : sig
- include
- Snarkette.Fields.Degree_2_extension_intf
- with module Nat = N
- with type base = Fq3.t
-
- val mul_by_2345 : t -> t -> t
-
- val frobenius : t -> int -> t
-
- val cyclotomic_exp : t -> N.t -> t
-
- val unitary_inverse : t -> t
-
- module Params : sig
- val non_residue : Fq.t
-
- val frobenius_coeffs_c1 : Fq.t array
- end
- end
-
- module G1 : Snarkette_elliptic_curve with module N := N with type fq := Fq.t
-
- module G2 : Snarkette_elliptic_curve with module N := N with type fq := Fq3.t
-
- module Pairing_info : sig
- val twist : Fq3.t
-
- val loop_count : N.t
-
- val is_loop_count_neg : bool
-
- val final_exponent : N.t
-
- val final_exponent_last_chunk_abs_of_w0 : N.t
-
- val final_exponent_last_chunk_is_w0_neg : bool
-
- val final_exponent_last_chunk_w1 : N.t
- end
-
- module Pairing : sig
- module G1_precomputation : sig
- type t [@@deriving bin_io, sexp]
-
- val create : G1.t -> t
- end
-
- module G2_precomputation : sig
- type t [@@deriving bin_io, sexp]
-
- val create : G2.t -> t
- end
-
- val final_exponentiation : Fq6.t -> Fq6.t
-
- val miller_loop : G1_precomputation.t -> G2_precomputation.t -> Fq6.t
-
- val unreduced_pairing : G1.t -> G2.t -> Fq6.t
-
- val reduced_pairing : G1.t -> G2.t -> Fq6.t
- end
-
- module Inputs : sig
- module N = N
- module G1 = G1
- module G2 = G2
- module Fq = Fq
- module Fqe = Fq3
- module Fq_target = Fq6
- module Pairing = Pairing
- end
-
- module Groth_maller : sig
- module Verification_key : sig
- type t =
- { h: G2.t
- ; g_alpha: G1.t
- ; h_beta: G2.t
- ; g_alpha_h_beta: Fq6.t
- ; g_gamma: G1.t
- ; h_gamma: G2.t
- ; query: G1.t array }
- [@@deriving bin_io, sexp]
-
- val map_to_two :
- 'a sexp_list -> f:('a -> 'b * 'c) -> 'b sexp_list * 'c sexp_list
-
- val fold_bits : t -> bool Fold_lib.Fold.t
-
- val fold : t -> (bool * bool * bool) Fold_lib.Fold.t
-
- module Processed :
- Snarkette_GM_processed_verification_key
- with type g1 := G1.t
- and type g2 := G2.t
- and type fqe := Fq6.t
- and type g1_precomp := Pairing.G1_precomputation.t
- and type g2_precomp := Pairing.G2_precomputation.t
- and type verification_key := t
- end
-
- val check : bool -> string -> (unit, Error.t) Result.t
-
- module Proof : sig
- type t = {a: G1.t; b: G2.t; c: G1.t} [@@deriving bin_io, sexp]
-
- val is_well_formed : t -> unit Or_error.t
- end
-
- val verify :
- Verification_key.Processed.t -> N.t List.t -> Proof.t -> unit Or_error.t
- end
-
- module Groth16 : sig
- module Verification_key : sig
- type t = {query: G1.t array; delta: G2.t; alpha_beta: Fq6.t}
- [@@deriving bin_io, sexp]
-
- type vk = t
-
- module Processed : sig
- type t =
- { query: G1.t array
- ; alpha_beta: Fq6.t
- ; delta: Pairing.G2_precomputation.t }
- [@@deriving bin_io, sexp]
-
- val create : vk -> t
- end
- end
-
- val check : bool -> string -> (unit, Error.t) Base.Result.t
-
- module Proof : sig
- type t = {a: G1.t; b: G2.t; c: G1.t} [@@deriving bin_io, sexp]
-
- val is_well_formed : t -> unit Or_error.t
- end
-
- val one_pc : Pairing.G2_precomputation.t lazy_t
-
- val verify :
- Verification_key.Processed.t
- -> N.t sexp_list
- -> Proof.t
- -> unit Or_error.t
- end
-
- module Make_bowe_gabizon (M : sig
- val hash :
- ?message:Fq.t array
- -> a:G1.t
- -> b:G2.t
- -> c:G1.t
- -> delta_prime:G2.t
- -> G1.t
- end) : sig
- module Verification_key : sig
- type t = {alpha_beta: Fq3.t * Fq3.t; delta: G2.t; query: G1.t array}
- [@@deriving bin_io, sexp]
-
- val map_to_two :
- 'a sexp_list -> f:('a -> 'b * 'c) -> 'b sexp_list * 'c sexp_list
-
- val fold_bits : t -> bool Fold_lib.Fold.t
-
- val fold : t -> (bool * bool * bool) Fold_lib.Fold.t
-
- module Processed :
- Snarkette_BG_processed_verification_key
- with type g1 := G1.t
- and type fqe := Fq3.t
- and type g2_precomp := Pairing.G2_precomputation.t
- and type verification_key := t
- end
-
- val check : bool -> string -> (unit, Error.t) Pervasives.result
-
- module Proof : sig
- type t = {a: G1.t; b: G2.t; c: G1.t; delta_prime: G2.t; z: G1.t}
- [@@deriving bin_io, sexp]
-
- val is_well_formed : t -> unit Or_error.t
- end
-
- val one_pc : Pairing.G2_precomputation.t lazy_t
-
- val verify :
- ?message:Fq.t array
- -> Verification_key.Processed.t
- -> N.t sexp_list
- -> Proof.t
- -> unit Or_error.t
- end
-end
-
-module type Snarkette_tock_intf = sig
- module N = Snarkette.Nat
-
- module Fq :
- Snarkette.Fields.Fp_intf with module Nat = N and type t = private N.t
-
- val non_residue : Fq.t
-
- module Fq2 : sig
- include
- Snarkette.Fields.Degree_2_extension_intf
- with module Nat = N
- with type base = Fq.t
-
- module Params : sig
- val non_residue : Fq.t
- end
- end
-
- module Fq4 : sig
- include
- Snarkette.Fields.Degree_2_extension_intf
- with module Nat = N
- with type base = Fq2.t
-
- module Params : sig
- val frobenius_coeffs_c1 : Fq.t array
-
- val non_residue : Fq2.t
- end
- end
-
- module G1 : Snarkette_elliptic_curve with module N := N with type fq := Fq.t
-
- module G2 : Snarkette_elliptic_curve with module N := N with type fq := Fq2.t
-
- module Pairing_info : sig
- val twist : Fq.t * Fq.t
-
- val loop_count : N.t
-
- val is_loop_count_neg : bool
-
- val final_exponent : N.t
-
- val final_exponent_last_chunk_abs_of_w0 : N.t
-
- val final_exponent_last_chunk_is_w0_neg : bool
-
- val final_exponent_last_chunk_w1 : N.t
- end
-end
diff --git a/src/lib/dummy_values/gen_values/dune b/src/lib/dummy_values/gen_values/dune
index f2990080cba..f372e7a3d52 100644
--- a/src/lib/dummy_values/gen_values/dune
+++ b/src/lib/dummy_values/gen_values/dune
@@ -1,6 +1,6 @@
(executable
(name gen_values)
- (libraries async core crypto_params snarky ppxlib)
+ (libraries async core crypto_params pickles ppxlib)
(preprocess
(pps ppx_version ppx_jane ppxlib.metaquot))
(modes native))
diff --git a/src/lib/dummy_values/gen_values/gen_values.ml b/src/lib/dummy_values/gen_values/gen_values.ml
index 8e3df0965af..84242384ebf 100644
--- a/src/lib/dummy_values/gen_values/gen_values.ml
+++ b/src/lib/dummy_values/gen_values/gen_values.ml
@@ -4,164 +4,39 @@ open Parsetree
open Longident
open Core
open Async
-
-module Curve_name = struct
- type t = Tick | Tock
-
- let to_string = function Tick -> "Tick" | Tock -> "Tock"
-end
-
-module Proof_system_name = struct
- type t = Groth16 | Bowe_gabizon18
-
- let to_string = function
- | Groth16 ->
- "Groth16"
- | Bowe_gabizon18 ->
- "Bowe_gabizon18"
-end
-
-module Make
- (B : Snarky.Backend_intf.S) (M : sig
- val curve : Curve_name.t
-
- val proof_system : Proof_system_name.t
- end) =
-struct
- open M
- module Impl = Snarky.Snark.Make (B)
-
- let proof_string =
- let open Impl in
- let exposing = Data_spec.[Typ.field] in
- let main x =
- let%bind _z = Field.Checked.mul x x in
- Field.Checked.Assert.equal x x
- in
- let keypair = generate_keypair main ~exposing in
- let proof = prove (Keypair.pk keypair) exposing () main Field.one in
- assert (verify proof (Keypair.vk keypair) exposing Field.one) ;
- Binable.to_string (module Proof) proof
-
- let vk_string, pk_string =
- let open Impl in
- let kp =
- match M.curve with
- | Tick ->
- generate_keypair
- ~exposing:Data_spec.[Boolean.typ]
- (fun b -> Boolean.Assert.is_true b)
- | Tock -> (
- (* Hack *)
- let n =
- Crypto_params.Tock0.Data_spec.(size [Crypto_params.Wrap_input.typ])
- in
- match n with
- | 1 ->
- generate_keypair
- ~exposing:Data_spec.[Boolean.typ]
- (fun b1 -> Boolean.Assert.is_true b1)
- | 2 ->
- generate_keypair
- ~exposing:Data_spec.[Boolean.typ; Boolean.typ]
- (fun b1 _b2 -> Boolean.Assert.is_true b1)
- | _ ->
- assert false )
- in
- ( Verification_key.to_string (Keypair.vk kp)
- , Proving_key.to_string (Keypair.pk kp) )
-
- let structure ~loc =
- let ident str = Loc.make ~loc (Longident.parse str) in
- let ( ^. ) x y = x ^ "." ^ y in
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- let curve_name = Curve_name.to_string curve in
- let curve_module_name =
- match proof_system with
- | Bowe_gabizon18 ->
- sprintf "Crypto_params.%s_backend" curve_name
- | Groth16 ->
- sprintf "Crypto_params.%s_backend.Full.Default" curve_name
- in
- let of_string_expr submodule_name str =
- [%expr
- [%e
- pexp_ident
- (ident (curve_module_name ^. submodule_name ^. "of_string"))]
- [%e estring str]]
- in
- let proof_stri =
- [%stri
- let proof =
- Core_kernel.Binable.of_string
- [%e pexp_pack (pmod_ident (ident (curve_module_name ^. "Proof")))]
- [%e estring proof_string]]
- in
- let vk_stri =
- [%stri
- let verification_key = [%e of_string_expr "Verification_key" vk_string]]
- in
- let pk_stri =
- [%stri let proving_key = [%e of_string_expr "Proving_key" pk_string]]
- in
- pstr_module
- (module_binding
- ~name:(Loc.make ~loc (Proof_system_name.to_string proof_system))
- ~expr:(pmod_structure [proof_stri; vk_stri; pk_stri]))
-end
-
-type spec = Curve_name.t * Proof_system_name.t
-
-let proof_system_of_curve : Curve_name.t -> Proof_system_name.t = function
- | Tick ->
- Groth16
- | Tock ->
- Bowe_gabizon18
-
-let backend_of_curve (s : Curve_name.t) =
- match s with
- | Tick ->
- assert (proof_system_of_curve Tick = Groth16) ;
- (module Crypto_params.Tick_backend : Snarky.Backend_intf.S)
- | Tock ->
- assert (proof_system_of_curve Tock = Bowe_gabizon18) ;
- (module Crypto_params.Tock_backend : Snarky.Backend_intf.S)
-
-let structure_item_of_spec ((curve, proof_system) : spec) =
- let module N = struct
- let curve = curve
-
- let proof_system = proof_system
- end in
- let module B = (val backend_of_curve curve) in
- let module M = Make (B) (N) in
- M.structure ~loc:Ppxlib.Location.none
-
-let curves = [Curve_name.Tick; Curve_name.Tock]
+open Pickles_types
+
+let proof_string prev_width =
+ let module Proof = Pickles.Proof.Make (Nat.N2) (Nat.N2) in
+ let dummy = Pickles.Proof.dummy Nat.N2.n Nat.N2.n prev_width in
+ Binable.to_string (module Proof) dummy
+
+let blockchain_proof_string = proof_string Nat.N2.n
+
+let transaction_proof_string = proof_string Nat.N0.n
+
+let str ~loc =
+ let module E = Ppxlib.Ast_builder.Make (struct
+ let loc = loc
+ end) in
+ let open E in
+ [%str
+ let blockchain_proof, transaction_proof =
+ let open Pickles_types in
+ let module Proof = Pickles.Proof.Make (Nat.N2) (Nat.N2) in
+ ( Core_kernel.Binable.of_string
+ (module Proof)
+ [%e estring blockchain_proof_string]
+ , Core_kernel.Binable.of_string
+ (module Proof)
+ [%e estring transaction_proof_string] )]
let main () =
let fmt =
Format.formatter_of_out_channel (Out_channel.create "dummy_values.ml")
in
- let structure =
- let loc = Ppxlib.Location.none in
- List.map curves ~f:(fun curve ->
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- pstr_module
- (module_binding
- ~name:(Loc.make ~loc (Curve_name.to_string curve))
- ~expr:
- (pmod_structure
- [structure_item_of_spec (curve, proof_system_of_curve curve)]))
- )
- in
- Pprintast.top_phrase fmt (Ptop_def structure) ;
+ let loc = Ppxlib.Location.none in
+ Pprintast.top_phrase fmt (Ptop_def (str ~loc)) ;
exit 0
let () =
diff --git a/src/lib/genesis_ledger/genesis_ledger.ml b/src/lib/genesis_ledger/genesis_ledger.ml
index 585ed18b8f7..2a1df7a053c 100644
--- a/src/lib/genesis_ledger/genesis_ledger.ml
+++ b/src/lib/genesis_ledger/genesis_ledger.ml
@@ -222,7 +222,7 @@ module Register (Accounts : Intf.Named_accounts_intf) :
include Accounts
end
-module Testnet_postake = Register (Public_accounts (Testnet_postake_ledger))
+module Testnet_postake = Register (Balances (Testnet_postake_ledger))
module Testnet_postake_many_producers = Register (Balances (struct
let name = "testnet_postake_many_producers"
diff --git a/src/lib/genesis_ledger/testnet_postake_ledger.ml b/src/lib/genesis_ledger/testnet_postake_ledger.ml
index 199dcfb7bd0..ebe6c6f00f1 100644
--- a/src/lib/genesis_ledger/testnet_postake_ledger.ml
+++ b/src/lib/genesis_ledger/testnet_postake_ledger.ml
@@ -1,370 +1,3 @@
-open Intf.Public_accounts
-module Public_key = Signature_lib.Public_key
-
let name = "testnet_postake"
-let accounts =
- lazy
- [ { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVw9P7XQFJWS4FsAsiXRSJyEKiVLCnVw2mRreE7iWout75RvZnm9q46sed2GvBF9Rh972AJrnuhrpPfCDGyhgsJm6kxZGhP5x9CTdty4cpFA8FmxNL8gB2UPTweGnQ1svjTVgUAbb8qB"
- ; balance= 0
- ; delegate= None }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVW8do8EqsgJFy8B38k1qdoQ1MW6UuZ33qk8eXTHu8qqoTMZzHPF5r6haHZbvcRgXdYoYJsWAKZwCRqiXYrHhVUPQyqThsdmfkMxjKKhPZrkN23YcAyLMpj1iSAdRjHk8KnSz5vkLBmT"
- ; balance= 1600000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w"
- ; balance= 0
- ; delegate= None }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVxE2UzLuGa1MfNtZqnJzDeR4vqX4qgn6BQ1qAAub7UWL3g6RpcTrzK5ZPZWqfsR8rZ7wzzVqnQ2mRXDLX24f4yLKSsN29dazXfcuCokipYbQCui1Ce5waTdV6sGBCarFcnw2mrMPMvG"
- ; balance= 1600000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631"
- ; balance= 0
- ; delegate= None }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVveUThKmUYTfdpo41KZDyn3p8CrLYnM4sZ9q9wJtrkW4tfdGCq2CFcrw2GnQyBYCdqFSDuasj2NSmSbwryhF8McGj4JDYFyzN8SckcK5AreiiF4impwiimipEkA7ovhT47FxyPzkzUQ"
- ; balance= 1600000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ"
- ; balance= 0
- ; delegate= None }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVKwNDotyLx7YE7tM4ApVHFSzb6a3r93c8cV9M9aBud3apY2fbbHUm12c9vviqiZdRmVDyV6onNQvRdMsieSHtPFA5EsnaRyjnTMsWsK3VKY8KkheCoN17SM6gaks57WZajMTP4AVh6V"
- ; balance= 1600000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66"
- ; balance= 0
- ; delegate= None }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVYDaPGUK9VzzxfSm1xy3hqp9AVJvNaBKi33fdXvS4nat1Aw3s6V3dCz53x6REB6UJxiYZYbHYb47Frn7eKzazVsD5vnDJZkpsxRi3aenw6tJfdgzPFoSmmg7FFN5UNEigzYUrbRhCzF"
- ; balance= 1600000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ"
- ; balance= 0
- ; delegate= None }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVG8eGaiychTTqHuqSiPtuUBD91WaTCbLv8UgGRMVn8oiDcE3w1fvpzhryydSh1aX647ZCTD9SNP8913MH8YGxowbFz6F7b34vpkCYLmVTsrF4pUgve94hqfeoPrATbRVeQt5gMHPMWc"
- ; balance= 100000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVQHBmqHwgmu4PDym91FLemj7qXuuL5cNe6YM4QAs68QhH3hQo5GZrvg4k9F9hGij3djbATFDztAmyWcBs3sthhyNZ3q1W1db2g1wMHimMUzZhchnvPVVGiPXQ4j5y1kmyp86BRKLzkU"
- ; balance= 10000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVNxyqjmkwqhJSeCsPY113XHqjZ8vyUkye2JSaD5rC1DEvUnUxzej1cAP5xBz6xnxYBW6bvsaiKZmtQuuLkqrMN74Cqfo5fRYrxZP5ywy2qfC6AzwPA1AsDD2i9Jr7SZ1WjFkTquKvjG"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVyzW4UP9qLTtEf5Y7TDAsFfoA9kZDZJbu5NwhDGxdPHQriJq6fqLiE4DHxDjQCUNywhLqbHSsH8m9pUN1epaoC6Ld7GEP98fJTZR94o2YzLeGcFKjUqM9BPYsSjU9hxqs9iXTFN6Ydu"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVutRdXhTQeNbexNs6xoXFzBGjLnxPxLt6By9TbbhqJ93rJxcuMu1NHYYGWSbb75PqwiCUiL6ZqfrfwjQ6GuR9ZVWdmr1xi2KxpKoELCtC6SAa8TK74unSkuxEf49bjESqfZhqHyVsKa"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVbKw5LWM95jRPEjwccYwcjPCTdiG2A9VYdiz3r9Dqk47LmvPVnZzF6Nm1xu7H89Thu9ZR1Y9ZmmFZNn5R4vUjm6H5yguABryYDGodAoeDApsKu4RaAXuuAaHACaqnS3du7Y3XLQWxcM"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVM3h5M1M7xzwXQoBf4qvtLfZ3jQ9UNeJNFG5qjFHhMZHSBzPrzPDvotyvauH9G6Wh18UG9uxV4k7t3SXDFdxN7yca6crqqM6GBFfGi6KpHZVK7xTNCkWPMJmQWFTE1z4wbPTbx2LC1o"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVV9KykMe7WP5bDQnKCFFj2QgoVT6Y4oikqEpKHFwKd5mjZh4Wh9qXTcfEYzKfqm5p2SwhyXFVuRXpsfPo3F4gjwkFZdHjRDi9V44hwXDsQ4bXW93s2dJWaScyaeJYeft646WpaGYLW1"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVJ55uvEd9U66mXLyfzDeKRg2tnTD5agA6zrzW1JxTBfrHzdYLRf1WtGnX2PcMmMUfNbGnyWg82rQyRpxQF4YVA8grrFf9NsHKkgXtcAbnb1PAtQFtKiEmYCuSfyYAwpHEG21oxtD2yX"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVx3vaPinGPHMVBgCAPBUmaqU27tjiUcwPM4A5BBkZx7igjPVHcLfhaPwCP9u1f6gyDqBJdP8WdQQLBNoZzWP38nxn3Aj8s9VDCLySvTGkk88zePsZCjCipk2MNii7cSNyvnJG29GCst"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVyqS1rbjrWefJ9fMYG4BnfVAw9yyG87JJaau7WWXAiKA95QchBQukueJ4ik9nb95k458SZ8CRqanPXZ9Hfeo86ZWLpDDgqiGLdMMB9YDwsUjNzuxtdmFfqUD1LBFKj6oNbAt7DP3pt9"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVjUwuSa3hBsnU3F23jp6DSUzF8MLLfZKY9GCkB4LYGDj7rNmaCp3UN6kRvnPRFcwGseQpaEG3rNySsk9AQFTe59z7TQL67GVpFZkf5CD7hzPHLgXzoJYpenDN8U7FYAAZDCkKtSUTDb"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVuCRX5kUfGq7Cy8Ldpf1YM7eZTzqXtxt1u5vgPoneCsurWCfzJ9TVK9BQbSyKrbmhgviVUbYCxDYwaBvK2nHke7L3fMMwUw135NLW8vjvSbcYUTjP7pU5LXSsVz1smU7QVd4aWGQVeh"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVtmTqMuPkZb3JhzDoi733Q6rYrCUZNv7oSdgtoYcD9wz8WCRBkYzKJVibUMriyMeXbDt3Q9oeoNeAmjx5W1opdrMg1y53nGq5Bjpkf6Bmt3wjcBrifo6KaVAKA49JvNnqysEHkPND3q"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVwuHSaYaSxy6dTCD6PmnEk2tnUGGhostBYHSG3ybFe5ansY8pJjQbytKWBgCi1iMNCiTyP2V4udvcjWhhWTa2E39ntuwhSKhgUvGiH3dPwCvMzN6vSBKwesKXd812zWziSa9seJhNdc"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVcxFRWyj1V9UAhrZQvdUhxmGEdncoRbr212vrdzXpeZ4Fjcd87sQ5qupWXuxYXjcTcWgtjvku2xnVX2h4mUNtxeUgsimQDpJgP8SJfTxNTHpjkqdpdcAZnpJSFBQiFevHJ4MufGD1xJ"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVLedVp1StPQzRqw7FGe6MsFckHN7BJMTmvgCtLYJpg3UQs8nK1hwA9V6XVyzKqyui531V1L5t7EZ5KFDEe8Rxam4DnTCpYZ9k2KPyNWQ1AzoJjjsRzT7noxakjW9FKnMKN2fJJa16xF"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVkQoWsTiQAUXFrLdxnoQWyUqJHozRG1C8DRhmz2s7BHApmDk3KveLu2nvGLDuK5FzyigA2PFAnSk9MmAVqXDs5HaBpqXbEkiisstSMoFoVeZdjPWN1Bs73nZ89UjKiciJr4qFVub6K5"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVXdPRHgjWxwPUSRDaFgK6xt9gjB5JFZLy9iYdV5wrr5gwzMepxQ2Vzap2vBg2FLtuTFUPuRJjD58gB1qbm1Qk7TC1ozrNpeE5HbyeXvvP2mtwwrK45kJ6cJ2DnHx8wvDSyWAtiANndZ"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVnTXETtBZMCW5qren2nV8hKPA7XZfarDaWMV24DmFCnnAevHNmmqJDThcS4yAPihyARHj116kxHfoPvWxbSFFA35EFEt6qTnFHpYrNNj5NygM8Lf2HQ7k9nUSJxPCJjJ7KAxQe46Czo"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVX66RtkYoTtM4f3HU5GbxKn6KeHg4nGyB2KjtRyxKfFokKNQ4Et6crhNXsFNrMwjVyYxFRnELvKD9mWhK6UScXcURUum5fqfkKGBowSr4EowQWSfdjDpsVbXY6igf6TX8RpffiFjnuk"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVFsCeqLtM5p5nrEQikzb2bkfi13LJusQzvibzpoJLYtd3aMU2MBykzsBhsCwSKJYzuEWC8eFf9kxmo3QhhasaRKzqrzSLTYvuqz7j6RTdfSPDkfhC8M21JW7Xntaajf6pmBUGjPYYcx"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVnNbFM4CbwpEgEDbm2huDWE8uMNijByxstY3yCi7kZ6kBkV8562JckrHm42xoJsPVH8yYua4peJ5KsN1oK2CxhUAegtU5BAFc5zxADdA3SaVWHAbJaUgGFiuFcToC6V52k74fJu9Zog"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVNU9yU47PdtSFUBp3jEqYWRPejQSiPjgVN46ruz4mJq19sCiVPkSh6rqoww1zFnZAuw7gRAowzxjG7N5vrozBCCfKXaMpxBpsnhhKBiF1Z5cBHH1zECKbJ2HK51C96JVbuQfa6zTcFZ"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVjXg8PCG1gajD6eqHP58YBuHNzrcpBWqDT31YyJP17aEUFo2j3xf21vhHPPGV3EAnn1euauyJkEm7UTrWfn8ZevJaDJzaDnXAz8t1ammZZ7sSff83Ypkmv9ZcyY1iCLnhyRPH9Aqccq"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVaQMpthEeHCc1eEtTyxCdX8xchkv5spJLyFh7PJB8Ue3gMuE4L4sjbnugRGGbKutAJh2N9omrUtMcvUYzxKFqdSPWY5juaBxYQiur86wd1PWZqmd2gpRcR81qu5RjRiA6Leeaf5JA1L"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVh4LdeQz614EKeNKUjLanszf5mNmBFVh6M1VnZsfCMdjAqfFPk9yRw8L2peZEwk5oZ2qJtDcVasEQHDLJjgFS6vU6wYXC8VMPmuGAhVSd5p6QwJsqbv2oawrNvQBjkNCrwtAp8WuKX7"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVTbpAZpfoG2GmHAYatSsgkfywH2cC3WEYSVxLUYRezAqVkiGx6ghk4v1ss4EzFmAHoEHhQhdknLWTyqwBBP5pveSfHwvB3s1XMqmkavWH6Y7nowb1Z9JHnyc9UKrbjtgSKq6WQj4LKs"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVFRYDicwWE2yf9xiwgmsZD47EaZFNUikDQcwYevG9vNMyzZSgnN38yfBGU78PVP4ssHQmpZi744tPemMptXCqdNpQwWfsCC3228onQcxxW4q473fVPqoj19vRp61JUeTCLVHZqQ5xWZ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVNo6wbxac5fy3rgWYS1HKbyC8hb8W14WRr3cPViXRXjErivDp7VYTUhkgbQdKGXGptxZBey91jWEYCNVmmNAU9QrtCKabERrofZeBE8cfKA4c6VBE8as6MaWuSGaFuTipZiqUCsCP5f"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVsnxSQW4QYWwewWqdZ3VMcoC1kVtirphNK3zEEx3UcnwpABSj13c2iyL9w5bsL9KgFBXoF8ixtE1MPAyt9k26VPHRCYHyNtK38sPq5kB218S1w7TWk8mAc36GEdje5c3BpG7LLcAG66")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVzk82Kp5t2RU6tjr6GWE5tZ2x8mBEZNWYqQJixod1TTw7tTbs7FFce9XCsPTGuc3QLuR372pQwZTxw4wjcz1K7ppk8aX7ZWxHYcyJfrAr7yDvDbuiKosDTfGxeN1fcy3uM7cYNSVwtF"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVadXwWMSGA9q81reJRX3BZ5ZKRtgZU7PtGsNq11w2V9tUNf4urZAGncZLUiP4SfWqur7AZsyhJKD41Ke7rJJ8yDibL41ePBeATLUnwNtMTojPDeiBfvTfgHzbAVFktD65vzxMNCvvAJ")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVtkNvH1oSLm8h9XKrf8XFcepwEJCTX4M7tppHjNkkSB2FZ7mmrriaFkTVY2TB3aQduDLXhCwpRCCsmpf8s37bcm9kzfXcMEUBqD35mHBY8FgB6mPLuBJaj68V3SQPWKXQohdUzFJWZd"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVciL8orxBmoa6DeCwLFSuJTdWz9iMzHtCcuiezRwMtWbJDj32nP3JPHrrkwpidTgx8WiF7zuPoqkFmRXBYHXcBow9fB6gGrafiSQZCrjjnvA7E1635kko9RTML8cZAiG2XcFs9xbj6w")
- }
- ; { pk=
- Public_key.Compressed.of_base58_check_exn
- "4vsRCVZR9FPpVMZCksNNu22yd1raLKgvBEHQmx98evVbFMS1mJ4pnJ2jXaBWiFK7KNTkU6CV6VPXELPmmm5YxBuRDySCw9qWUvKkpBMSqYKGox6SUDR3E2tnuqpyWDLRPJiuxvC8jL8jWgSz"
- ; balance= 63000
- ; delegate=
- Some
- (Public_key.Compressed.of_base58_check_exn
- "4vsRCVWRSCQNrGSNoojhp258eeKCeXL8JVefQ8KpZ9DJhFE7AkcFm3czHcddUUbkmpavwuKW4o2QsexWzHnjwuD3ejGkGjqha3n2omrt1fCHN9NWN24jfrqrDTkoZDhm4RNKpRMX4jixX631")
- } ]
+let balances = Test_ledger.balances
diff --git a/src/lib/genesis_ledger_helper/dune b/src/lib/genesis_ledger_helper/dune
index d2e702700ad..24b701e92cc 100644
--- a/src/lib/genesis_ledger_helper/dune
+++ b/src/lib/genesis_ledger_helper/dune
@@ -1,6 +1,6 @@
(library
(public_name genesis_ledger_helper)
(name genesis_ledger_helper)
- (libraries core_kernel coda_base signature_lib genesis_constants runtime_config cache_dir coda_lib)
+ (libraries snark_keys core_kernel coda_base signature_lib genesis_constants runtime_config cache_dir coda_lib)
(preprocessor_deps "../../config.mlh")
- (preprocess (pps ppx_coda ppx_version ppx_optcomp ppx_let ppx_deriving_yojson ppx_custom_printf)))
+ (preprocess (pps ppx_coda ppx_version ppx_optcomp ppx_let ppx_deriving.std ppx_deriving_yojson ppx_custom_printf)))
diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
index b8906b1c089..f9d1145559f 100644
--- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
+++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
@@ -446,15 +446,35 @@ module Ledger = struct
return (Error err) ) ) )
end
+(* This hash encodes the data that determines a genesis proof:
+ 1. The blockchain snark constraint system
+ 2. The genesis protocol state (including the genesis ledger)
+
+ It is used to determine whether we should make a new genesis proof, or use the
+ one generated at compile-time.
+*)
+module Base_hash : sig
+ type t [@@deriving eq, yojson]
+
+ val create : id:Pickles.Verification_key.Id.t -> state_hash:State_hash.t -> t
+
+ val to_string : t -> string
+end = struct
+ type t = string [@@deriving eq, yojson]
+
+ let to_string = Fn.id
+
+ let create ~id ~state_hash =
+ Pickles.Verification_key.Id.to_string id
+ |> ( ^ ) (State_hash.to_string state_hash)
+ |> Blake2.digest_string |> Blake2.to_hex
+end
+
module Genesis_proof = struct
- let filename ~base_hash =
- let hash =
- Ledger_hash.to_string base_hash |> Blake2.digest_string |> Blake2.to_hex
- in
- "genesis_proof_" ^ hash
+ let filename ~base_hash = "genesis_proof_" ^ Base_hash.to_string base_hash
- let find_file ~logger ~base_hash =
- let search_paths = Cache_dir.possible_paths "" in
+ let find_file ~logger ~base_hash ~genesis_dir =
+ let search_paths = genesis_dir :: Cache_dir.possible_paths "" in
let file_exists filename path =
let filename = path ^/ filename in
if%map file_exists ~follow_symlinks:true filename then (
@@ -497,38 +517,43 @@ module Genesis_proof = struct
~genesis_ledger:(Genesis_ledger.Packed.t ledger)
~constraint_constants ~consensus_constants
in
- let%map base_hash =
- match proof_level with
- | Genesis_constants.Proof_level.Full ->
- Keys_lib.Keys.step_instance_hash protocol_state_with_hash.data
- | _ ->
- return Snark_params.Tick.Field.zero
- in
{ Genesis_proof.Inputs.runtime_config
; constraint_constants
; proof_level
+ ; blockchain_proof_system_id= Snark_keys.blockchain_verification_key_id ()
; genesis_ledger= ledger
; consensus_constants
; protocol_state_with_hash
- ; base_hash
; genesis_constants }
- let generate inputs =
- match inputs.Genesis_proof.Inputs.proof_level with
+ let generate (inputs : Genesis_proof.Inputs.t) =
+ match inputs.proof_level with
| Genesis_constants.Proof_level.Full ->
- let%map ((module Keys) as keys) = Keys_lib.Keys.create () in
- Genesis_proof.create_values ~keys inputs
+ let module B =
+ Blockchain_snark.Blockchain_snark_state.Make
+ (Transaction_snark.Make ()) in
+ let computed_values =
+ Genesis_proof.create_values
+ (module B)
+ { genesis_ledger= inputs.genesis_ledger
+ ; runtime_config= inputs.runtime_config
+ ; proof_level= inputs.proof_level
+ ; blockchain_proof_system_id= Lazy.force B.Proof.id
+ ; protocol_state_with_hash= inputs.protocol_state_with_hash
+ ; genesis_constants= inputs.genesis_constants
+ ; consensus_constants= inputs.consensus_constants
+ ; constraint_constants= inputs.constraint_constants }
+ in
+ computed_values
| _ ->
- return
- { Genesis_proof.runtime_config= inputs.runtime_config
- ; constraint_constants= inputs.constraint_constants
- ; proof_level= inputs.proof_level
- ; genesis_constants= inputs.genesis_constants
- ; genesis_ledger= inputs.genesis_ledger
- ; consensus_constants= inputs.consensus_constants
- ; protocol_state_with_hash= inputs.protocol_state_with_hash
- ; base_hash= inputs.base_hash
- ; genesis_proof= Dummy_values.Tock.Bowe_gabizon18.proof }
+ { Genesis_proof.runtime_config= inputs.runtime_config
+ ; constraint_constants= inputs.constraint_constants
+ ; proof_level= inputs.proof_level
+ ; genesis_constants= inputs.genesis_constants
+ ; genesis_ledger= inputs.genesis_ledger
+ ; consensus_constants= inputs.consensus_constants
+ ; protocol_state_with_hash= inputs.protocol_state_with_hash
+ ; genesis_proof= Coda_base.Proof.blockchain_dummy }
let store ~filename proof =
(* TODO: Use [Writer.write_bin_prot]. *)
@@ -543,10 +568,25 @@ module Genesis_proof = struct
Reader.file_contents filename
>>| Sexp.of_string >>| Proof.Stable.V1.t_of_sexp )
+ let id_to_json x =
+ `String (Sexp.to_string (Pickles.Verification_key.Id.sexp_of_t x))
+
let load_or_generate ~genesis_dir ~logger ~may_generate
(inputs : Genesis_proof.Inputs.t) =
+ let proof_needed =
+ match inputs.proof_level with Full -> true | _ -> false
+ in
let compiled = Precomputed_values.compiled in
- match%bind find_file ~logger ~base_hash:inputs.base_hash with
+ let base_hash =
+ Base_hash.create ~id:inputs.blockchain_proof_system_id
+ ~state_hash:inputs.protocol_state_with_hash.hash
+ in
+ let compiled_base_hash =
+ Base_hash.create
+ ~id:(Precomputed_values.blockchain_proof_system_id ())
+ ~state_hash:(Lazy.force compiled).protocol_state_with_hash.hash
+ in
+ match%bind find_file ~logger ~base_hash ~genesis_dir with
| Some file -> (
match%map load file with
| Ok genesis_proof ->
@@ -558,7 +598,6 @@ module Genesis_proof = struct
; genesis_ledger= inputs.genesis_ledger
; consensus_constants= inputs.consensus_constants
; protocol_state_with_hash= inputs.protocol_state_with_hash
- ; base_hash= inputs.base_hash
; genesis_proof }
, file )
| Error err ->
@@ -568,16 +607,15 @@ module Genesis_proof = struct
; ("error", `String (Error.to_string_hum err)) ] ;
Error err )
| None
- when Ledger_hash.equal inputs.base_hash (Lazy.force compiled).base_hash
- ->
+ when Base_hash.equal base_hash compiled_base_hash || not proof_needed ->
let compiled = Lazy.force compiled in
[%log info]
"Base hash $computed_hash matches compile-time $compiled_hash, \
using precomputed genesis proof"
~metadata:
- [ ("computed_hash", Ledger_hash.to_yojson inputs.base_hash)
- ; ("compiled_hash", Ledger_hash.to_yojson compiled.base_hash) ] ;
- let filename = genesis_dir ^/ filename ~base_hash:inputs.base_hash in
+ [ ("computed_hash", Base_hash.to_yojson base_hash)
+ ; ("compiled_hash", Base_hash.to_yojson compiled_base_hash) ] ;
+ let filename = genesis_dir ^/ filename ~base_hash in
let values =
{ Genesis_proof.runtime_config= inputs.runtime_config
; constraint_constants= inputs.constraint_constants
@@ -586,7 +624,6 @@ module Genesis_proof = struct
; genesis_ledger= inputs.genesis_ledger
; consensus_constants= inputs.consensus_constants
; protocol_state_with_hash= inputs.protocol_state_with_hash
- ; base_hash= inputs.base_hash
; genesis_proof= compiled.genesis_proof }
in
let%map () =
@@ -607,9 +644,9 @@ module Genesis_proof = struct
[%log info]
"No genesis proof file was found for $base_hash, generating a new \
genesis proof"
- ~metadata:[("base_hash", Ledger_hash.to_yojson inputs.base_hash)] ;
- let%bind values = generate inputs in
- let filename = genesis_dir ^/ filename ~base_hash:inputs.base_hash in
+ ~metadata:[("base_hash", Base_hash.to_yojson base_hash)] ;
+ let values = generate inputs in
+ let filename = genesis_dir ^/ filename ~base_hash in
let%map () =
match%map store ~filename values.genesis_proof with
| Ok () ->
@@ -626,7 +663,7 @@ module Genesis_proof = struct
[%log error]
"No genesis proof file was found for $base_hash and not allowed to \
generate a new genesis proof"
- ~metadata:[("base_hash", Ledger_hash.to_yojson inputs.base_hash)] ;
+ ~metadata:[("base_hash", Base_hash.to_yojson base_hash)] ;
Deferred.Or_error.errorf
"No genesis proof file was found and not allowed to generate a new \
genesis proof"
@@ -827,8 +864,7 @@ let init_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger
Deferred.return
@@ make_genesis_constants ~logger ~default:genesis_constants config
in
- let open Deferred.Let_syntax in
- let%bind proof_inputs =
+ let proof_inputs =
Genesis_proof.generate_inputs ~runtime_config:config ~proof_level
~ledger:genesis_ledger ~constraint_constants ~genesis_constants
in
diff --git a/src/lib/genesis_proof/dune b/src/lib/genesis_proof/dune
index ca69c29605a..9284a59cc80 100644
--- a/src/lib/genesis_proof/dune
+++ b/src/lib/genesis_proof/dune
@@ -4,8 +4,7 @@
(libraries
coda_base
coda_state
- consensus
- keys_lib
+ blockchain_snark
genesis_constants
runtime_config
with_hash)
diff --git a/src/lib/genesis_proof/genesis_proof.ml b/src/lib/genesis_proof/genesis_proof.ml
index add2527328d..fb690431b5b 100644
--- a/src/lib/genesis_proof/genesis_proof.ml
+++ b/src/lib/genesis_proof/genesis_proof.ml
@@ -11,7 +11,7 @@ module Inputs = struct
; consensus_constants: Consensus.Constants.t
; protocol_state_with_hash:
(Protocol_state.value, State_hash.t) With_hash.t
- ; base_hash: State_hash.t }
+ ; blockchain_proof_system_id: Pickles.Verification_key.Id.t }
end
module T = struct
@@ -24,7 +24,6 @@ module T = struct
; consensus_constants: Consensus.Constants.t
; protocol_state_with_hash:
(Protocol_state.value, State_hash.t) With_hash.t
- ; base_hash: State_hash.t
; genesis_proof: Proof.t }
let runtime_config {runtime_config; _} = runtime_config
@@ -77,53 +76,45 @@ end
include T
-let wrap ~keys:(module Keys : Keys_lib.Keys.S) hash proof =
- let open Snark_params in
- let module Wrap = Keys.Wrap in
- let input = Wrap_input.of_tick_field hash in
- let proof =
- Tock.prove
- (Tock.Keypair.pk Wrap.keys)
- Wrap.input {Wrap.Prover_state.proof} Wrap.main input
- in
- assert (Tock.verify proof (Tock.Keypair.vk Wrap.keys) Wrap.input input) ;
- proof
-
-let base_proof ?(logger = Logger.create ())
- ~keys:((module Keys : Keys_lib.Keys.S) as keys) (t : Inputs.t) =
+let base_proof (module B : Blockchain_snark.Blockchain_snark_state.S)
+ (t : Inputs.t) =
let genesis_ledger = Genesis_ledger.Packed.t t.genesis_ledger in
let constraint_constants = t.constraint_constants in
let consensus_constants = t.consensus_constants in
- let proof_level = t.proof_level in
- let open Snark_params in
- let prover_state =
- { Keys.Step.Prover_state.prev_proof= Tock.Proof.dummy
- ; wrap_vk= Tock.Keypair.vk Keys.Wrap.keys
- ; prev_state=
- Protocol_state.negative_one ~genesis_ledger ~constraint_constants
- ~consensus_constants
- ; genesis_state_hash= t.protocol_state_with_hash.hash
- ; expected_next_state= None
- ; update= Snark_transition.genesis ~constraint_constants ~genesis_ledger }
+ let prev_state =
+ Protocol_state.negative_one ~genesis_ledger ~constraint_constants
+ ~consensus_constants
+ in
+ let curr = t.protocol_state_with_hash.data in
+ let dummy_txn_stmt : Transaction_snark.Statement.With_sok.t =
+ { sok_digest= Coda_base.Sok_message.Digest.default
+ ; source=
+ Blockchain_state.snarked_ledger_hash
+ (Protocol_state.blockchain_state prev_state)
+ ; target=
+ Blockchain_state.snarked_ledger_hash
+ (Protocol_state.blockchain_state curr)
+ ; supply_increase= Currency.Amount.zero
+ ; fee_excess= Fee_excess.zero
+ ; next_available_token_before= Token_id.(next default)
+ ; next_available_token_after= Token_id.(next default)
+ ; pending_coinbase_stack_state=
+ { source= Coda_base.Pending_coinbase.Stack.empty
+ ; target= Coda_base.Pending_coinbase.Stack.empty } }
in
- let main x =
- Tick.handle
- (Keys.Step.main ~logger ~proof_level ~constraint_constants x)
+ let open Pickles_types in
+ let blockchain_dummy = Pickles.Proof.dummy Nat.N2.n Nat.N2.n Nat.N2.n in
+ let txn_dummy = Pickles.Proof.dummy Nat.N2.n Nat.N2.n Nat.N0.n in
+ B.step
+ ~handler:
(Consensus.Data.Prover_state.precomputed_handler ~constraint_constants
~genesis_ledger)
- in
- let tick =
- Tick.prove
- (Tick.Keypair.pk Keys.Step.keys)
- (Keys.Step.input ()) prover_state main t.base_hash
- in
- assert (
- Tick.verify tick
- (Tick.Keypair.vk Keys.Step.keys)
- (Keys.Step.input ()) t.base_hash ) ;
- wrap ~keys t.base_hash tick
+ { transition= Snark_transition.genesis ~constraint_constants ~genesis_ledger
+ ; prev_state }
+ [(prev_state, blockchain_dummy); (dummy_txn_stmt, txn_dummy)]
+ t.protocol_state_with_hash.data
-let create_values ?logger ~keys (t : Inputs.t) =
+let create_values b (t : Inputs.t) =
{ runtime_config= t.runtime_config
; constraint_constants= t.constraint_constants
; proof_level= t.proof_level
@@ -131,5 +122,4 @@ let create_values ?logger ~keys (t : Inputs.t) =
; genesis_ledger= t.genesis_ledger
; consensus_constants= t.consensus_constants
; protocol_state_with_hash= t.protocol_state_with_hash
- ; base_hash= t.base_hash
- ; genesis_proof= base_proof ?logger ~keys t }
+ ; genesis_proof= base_proof b t }
diff --git a/src/lib/key_cache/key_cache.ml b/src/lib/key_cache/key_cache.ml
index eee6ba26e0d..9b92902c9ad 100644
--- a/src/lib/key_cache/key_cache.ml
+++ b/src/lib/key_cache/key_cache.ml
@@ -126,28 +126,18 @@ module Sync : S with module M := Or_error = struct
end
let read spec {Disk_storable.to_string; read= r; write= w} k =
- let errs = ref [] in
- match
- List.find_mapi spec ~f:(fun i s ->
- let res =
- match s with
- | Spec.On_disk {directory; _} ->
- (on_disk to_string r w directory).read k
- | S3 {bucket_prefix; install_path} ->
- Unix.mkdir_p install_path ;
- (s3 to_string r ~bucket_prefix ~install_path).read k
- in
- match res with
- | Error e ->
- errs := e :: !errs ;
- None
- | Ok x ->
- Some (i, x) )
- with
- | Some (i, x) ->
- Ok (x, if i = 0 then `Cache_hit else `Locally_generated)
- | None ->
- Error (Error.of_list !errs)
+ Or_error.find_map_ok spec ~f:(fun s ->
+ let res, cache_hit =
+ match s with
+ | Spec.On_disk {directory; should_write} ->
+ ( (on_disk to_string r w directory).read k
+ , if should_write then `Locally_generated else `Cache_hit )
+ | S3 {bucket_prefix; install_path} ->
+ Unix.mkdir_p install_path ;
+ ((s3 to_string r ~bucket_prefix ~install_path).read k, `Cache_hit)
+ in
+ let%map.Or_error.Let_syntax res = res in
+ (res, cache_hit) )
let write spec {Disk_storable.to_string; read= r; write= w} k v =
let errs =
@@ -236,27 +226,18 @@ module Async : S with module M := Async.Deferred.Or_error = struct
end
let read spec {Disk_storable.to_string; read= r; write= w} k =
- let errs = ref [] in
- match%map
- Deferred.List.find_mapi spec ~f:(fun i s ->
- let res =
- match s with
- | Spec.On_disk {directory; _} ->
- (on_disk to_string r w directory).read k
- | S3 {bucket_prefix; install_path} ->
- (s3 to_string r ~bucket_prefix ~install_path).read k
- in
- match%map res with
- | Error e ->
- errs := e :: !errs ;
- None
- | Ok x ->
- Some (i, x) )
- with
- | Some (i, x) ->
- Ok (x, if i = 0 then `Cache_hit else `Locally_generated)
- | None ->
- Error (Error.of_list !errs)
+ Deferred.Or_error.find_map_ok spec ~f:(fun s ->
+ let open Deferred.Or_error.Let_syntax in
+ match s with
+ | Spec.On_disk {directory; should_write} ->
+ let%map res = (on_disk to_string r w directory).read k in
+ (res, if should_write then `Locally_generated else `Cache_hit)
+ | S3 {bucket_prefix; install_path} ->
+ let%bind.Async () = Unix.mkdir ~p:() install_path in
+ let%map res =
+ (s3 to_string r ~bucket_prefix ~install_path).read k
+ in
+ (res, `Cache_hit) )
let write spec {Disk_storable.to_string; read= r; write= w} k v =
let%map errs =
diff --git a/src/lib/keys_lib/dune b/src/lib/keys_lib/dune
deleted file mode 100644
index 158090fd8c2..00000000000
--- a/src/lib/keys_lib/dune
+++ /dev/null
@@ -1,10 +0,0 @@
-(library
- (name keys_lib)
- (public_name keys_lib)
- (library_flags -linkall)
- (inline_tests)
- (libraries core async coda_base snark_keys blockchain_snark
- transaction_snark)
- (preprocess
- (pps ppx_version ppx_jane bisect_ppx -- -conditional))
- (synopsis "SNARK keys"))
diff --git a/src/lib/keys_lib/keys.ml b/src/lib/keys_lib/keys.ml
deleted file mode 100644
index 6eb2c5bda47..00000000000
--- a/src/lib/keys_lib/keys.ml
+++ /dev/null
@@ -1,169 +0,0 @@
-open Core
-open Snark_params
-open Coda_state
-
-module Step_prover_state = struct
- type t =
- { wrap_vk: Tock.Verification_key.t
- ; prev_proof: Tock.Proof.t
- ; prev_state: Protocol_state.value
- ; genesis_state_hash: Coda_base.State_hash.t
- ; expected_next_state: Protocol_state.value option
- ; update: Snark_transition.value }
-end
-
-module Wrap_prover_state = struct
- type t = {proof: Tick.Proof.t}
-end
-
-module type S = sig
- val transaction_snark_keys : Transaction_snark.Keys.Verification.t
-
- module Step : sig
- val keys : Tick.Keypair.t
-
- val input :
- unit
- -> ('a, 'b, Tick.Field.Var.t -> 'a, Tick.Field.t -> 'b) Tick.Data_spec.t
-
- module Verification_key : sig
- val to_bool_list : Tock.Verification_key.t -> bool list
- end
-
- module Prover_state : module type of Step_prover_state
-
- val instance_hash : Protocol_state.value -> Tick.Field.t
-
- val main :
- logger:Logger.t
- -> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> Tick.Field.Var.t
- -> (unit, Prover_state.t) Tick.Checked.t
- end
-
- module Wrap : sig
- val keys : Tock.Keypair.t
-
- val input :
- ('a, 'b, Wrap_input.var -> 'a, Wrap_input.t -> 'b) Tock.Data_spec.t
-
- module Prover_state : module type of Wrap_prover_state
-
- val main : Wrap_input.var -> (unit, Prover_state.t) Tock.Checked.t
- end
-end
-
-let tx_vk = lazy (Snark_keys.transaction_verification ())
-
-let bc_pk = lazy (Snark_keys.blockchain_proving ())
-
-let bc_vk = lazy (Snark_keys.blockchain_verification ())
-
-let step_instance_hash protocol_state =
- let open Async in
- let%map bc_vk = Lazy.force bc_vk in
- unstage
- (Blockchain_snark.Blockchain_transition.instance_hash bc_vk.wrap)
- protocol_state
-
-let keys = Set_once.create ()
-
-let create () : (module S) Async.Deferred.t =
- match Set_once.get keys with
- | Some x ->
- Async.Deferred.return x
- | None ->
- let open Async in
- let%map tx_vk = Lazy.force tx_vk
- and bc_pk = Lazy.force bc_pk
- and bc_vk = Lazy.force bc_vk in
- let module T = Transaction_snark.Verification.Make (struct
- let keys = tx_vk
- end) in
- let module B = Blockchain_snark.Blockchain_transition.Make (T) in
- let module Step = B.Step (struct
- let keys = Tick.Keypair.create ~pk:bc_pk.step ~vk:bc_vk.step
- end) in
- let module Wrap =
- B.Wrap (struct
- let verification_key = bc_vk.step
- end)
- (struct
- let keys = Tock.Keypair.create ~pk:bc_pk.wrap ~vk:bc_vk.wrap
- end)
- in
- let module M = struct
- let transaction_snark_keys = tx_vk
-
- module Step = struct
- include (
- Step :
- module type of Step with module Prover_state := Step.Prover_state )
-
- module Prover_state = Step_prover_state
-
- module Verification_key = struct
- let to_bool_list = Snark_params.tock_vk_to_bool_list
- end
-
- let instance_hash =
- unstage
- (Blockchain_snark.Blockchain_transition.instance_hash
- (Tock.Keypair.vk Wrap.keys))
-
- let main ~logger ~proof_level ~constraint_constants x =
- let there
- { Prover_state.wrap_vk
- ; prev_proof
- ; prev_state
- ; genesis_state_hash
- ; update
- ; expected_next_state } =
- { Step.Prover_state.wrap_vk
- ; prev_proof
- ; prev_state
- ; genesis_state_hash
- ; update
- ; expected_next_state }
- in
- let back
- { Step.Prover_state.wrap_vk
- ; prev_proof
- ; prev_state
- ; genesis_state_hash
- ; update
- ; expected_next_state } =
- { Prover_state.wrap_vk
- ; prev_proof
- ; prev_state
- ; genesis_state_hash
- ; update
- ; expected_next_state }
- in
- let open Tick in
- with_state
- ~and_then:(fun s -> As_prover.set_state (back s))
- As_prover.(map get_state ~f:there)
- (main ~logger ~proof_level ~constraint_constants x)
- end
-
- module Wrap = struct
- include (
- Wrap :
- module type of Wrap with module Prover_state := Wrap.Prover_state )
-
- module Prover_state = Wrap_prover_state
-
- let main x =
- let there {Prover_state.proof} = {Wrap.Prover_state.proof} in
- let back {Wrap.Prover_state.proof} = {Prover_state.proof} in
- let open Tock in
- with_state
- ~and_then:(fun s -> As_prover.set_state (back s))
- As_prover.(map get_state ~f:there)
- (main x)
- end
- end in
- Set_once.set_exn keys Lexing.dummy_pos (module M : S) ;
- (module M : S)
diff --git a/src/lib/keys_lib/keys.mli b/src/lib/keys_lib/keys.mli
deleted file mode 100644
index 91a016f2852..00000000000
--- a/src/lib/keys_lib/keys.mli
+++ /dev/null
@@ -1,58 +0,0 @@
-open Snark_params
-open Coda_state
-
-module Step_prover_state : sig
- type t =
- { wrap_vk: Tock.Verification_key.t
- ; prev_proof: Tock.Proof.t
- ; prev_state: Protocol_state.value
- ; genesis_state_hash: Coda_base.State_hash.t
- ; expected_next_state: Protocol_state.value option
- ; update: Snark_transition.value }
-end
-
-module Wrap_prover_state : sig
- type t = {proof: Tick.Proof.t}
-end
-
-module type S = sig
- val transaction_snark_keys : Transaction_snark.Keys.Verification.t
-
- module Step : sig
- val keys : Tick.Keypair.t
-
- val input :
- unit
- -> ('a, 'b, Tick.Field.Var.t -> 'a, Tick.Field.t -> 'b) Tick.Data_spec.t
-
- module Verification_key : sig
- val to_bool_list : Tock.Verification_key.t -> bool list
- end
-
- module Prover_state : module type of Step_prover_state
-
- val instance_hash : Protocol_state.value -> Tick.Field.t
-
- val main :
- logger:Logger.t
- -> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> Tick.Field.Var.t
- -> (unit, Prover_state.t) Tick.Checked.t
- end
-
- module Wrap : sig
- val keys : Tock.Keypair.t
-
- val input :
- ('a, 'b, Wrap_input.var -> 'a, Wrap_input.t -> 'b) Tock.Data_spec.t
-
- module Prover_state : module type of Wrap_prover_state
-
- val main : Wrap_input.var -> (unit, Prover_state.t) Tock.Checked.t
- end
-end
-
-val step_instance_hash : Protocol_state.value -> Tick.Field.t Async.Deferred.t
-
-val create : unit -> (module S) Async.Deferred.t
diff --git a/src/lib/ledger_proof/ledger_proof.ml b/src/lib/ledger_proof/ledger_proof.ml
index d40713af752..28f03c331af 100644
--- a/src/lib/ledger_proof/ledger_proof.ml
+++ b/src/lib/ledger_proof/ledger_proof.ml
@@ -37,11 +37,10 @@ module Prod : Ledger_proof_intf.S with type t = Transaction_snark.t = struct
; next_available_token_before
; next_available_token_after
; pending_coinbase_stack_state
- ; proof_type
; sok_digest= () } ~sok_digest ~proof =
Transaction_snark.create ~source ~target ~pending_coinbase_stack_state
~supply_increase ~fee_excess ~next_available_token_before
- ~next_available_token_after ~sok_digest ~proof ~proof_type
+ ~next_available_token_after ~sok_digest ~proof
end
module Debug :
@@ -66,7 +65,7 @@ struct
let statement ((t, _) : t) : Transaction_snark.Statement.t = t
- let underlying_proof (_ : t) = Proof.dummy
+ let underlying_proof (_ : t) = Proof.transaction_dummy
let statement_target (t : Transaction_snark.Statement.t) = t.target
@@ -96,5 +95,6 @@ type with_witness = With_witness : 't * 't type_witness -> with_witness
module For_tests = struct
let mk_dummy_proof statement =
- create ~statement ~sok_digest:Sok_message.Digest.default ~proof:Proof.dummy
+ create ~statement ~sok_digest:Sok_message.Digest.default
+ ~proof:Proof.transaction_dummy
end
diff --git a/src/lib/ledger_proof/ledger_proof.mli b/src/lib/ledger_proof/ledger_proof.mli
index ff4e4ac018e..0c81d91798e 100644
--- a/src/lib/ledger_proof/ledger_proof.mli
+++ b/src/lib/ledger_proof/ledger_proof.mli
@@ -1,13 +1,12 @@
[%%import "../../config.mlh"]
-open Coda_base
-
module type S = Ledger_proof_intf.S
module Prod : S with type t = Transaction_snark.t
module Debug :
- S with type t = Transaction_snark.Statement.t * Sok_message.Digest.t
+ S
+ with type t = Transaction_snark.Statement.t * Coda_base.Sok_message.Digest.t
[%%if proof_level = "full"]
diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml
index 5ca4763ed57..d672a95738d 100644
--- a/src/lib/network_pool/snark_pool.ml
+++ b/src/lib/network_pool/snark_pool.ml
@@ -587,7 +587,8 @@ let%test_module "random set test" =
( work
, One_or_two.map work ~f:(fun statement ->
Ledger_proof.create ~statement
- ~sok_digest:invalid_sok_digest ~proof:Proof.dummy )
+ ~sok_digest:invalid_sok_digest
+ ~proof:Proof.transaction_dummy )
, fee
, some_other_pk )
:: acc )
diff --git a/src/lib/non_zero_curve_point/dune b/src/lib/non_zero_curve_point/dune
index d990a59254c..ca171c13d87 100644
--- a/src/lib/non_zero_curve_point/dune
+++ b/src/lib/non_zero_curve_point/dune
@@ -7,4 +7,4 @@
(libraries core_kernel snark_params fold_lib codable ppx_version.runtime)
(preprocessor_deps ../../config.mlh)
(preprocess
- (pps ppx_snarky ppx_coda ppx_version ppx_optcomp ppx_let ppx_hash ppx_compare ppx_sexp_conv ppx_bin_prot ppx_inline_test ppx_deriving_yojson ppx_deriving.eq h_list.ppx)))
+ (pps ppx_snarky ppx_coda ppx_version ppx_optcomp ppx_let ppx_hash ppx_compare ppx_sexp_conv ppx_bin_prot ppx_inline_test ppx_deriving_yojson ppx_deriving.eq h_list.ppx ppx_custom_printf)))
diff --git a/src/lib/non_zero_curve_point/non_zero_curve_point.ml b/src/lib/non_zero_curve_point/non_zero_curve_point.ml
index fabfb5a595c..42076a7b024 100644
--- a/src/lib/non_zero_curve_point/non_zero_curve_point.ml
+++ b/src/lib/non_zero_curve_point/non_zero_curve_point.ml
@@ -84,27 +84,14 @@ module Compressed = struct
*)
[%%if
- curve_size = 298]
+ curve_size = 255]
let%test "nonzero_curve_point_compressed v1" =
let point =
Quickcheck.random_value
~seed:(`Deterministic "nonzero_curve_point_compressed-seed") V1.gen
in
- let known_good_digest = "437f5bc6710b6a8fda8f9e8cf697fc2c" in
- Ppx_version_runtime.Serialization.check_serialization
- (module V1)
- point known_good_digest
-
- [%%elif
- curve_size = 753]
-
- let%test "nonzero_curve_point_compressed v1" =
- let point =
- Quickcheck.random_value
- ~seed:(`Deterministic "nonzero_curve_point_compressed-seed") V1.gen
- in
- let known_good_digest = "067f8be67e5cc31f5c5ac4be91d5f6db" in
+ let known_good_digest = "951b667e8f1216097665190fc0a7b78a" in
Ppx_version_runtime.Serialization.check_serialization
(module V1)
point known_good_digest
diff --git a/src/lib/pickles/step.ml b/src/lib/pickles/step.ml
index 4bf792f82a3..88ae5abadf3 100644
--- a/src/lib/pickles/step.ml
+++ b/src/lib/pickles/step.ml
@@ -449,12 +449,6 @@ struct
let module V = H3.To_vector (Tick.Curve.Affine) in
V.f prev_values_length (M.f prev_with_proofs)
in
- let pass_through =
- (* TODO: Use the same pad_pass_through function as in wrap *)
- pad
- (Vector.map statements_with_hashes ~f:(fun s -> s.proof_state.me_only))
- Maxes.maxes Maxes.length
- in
ksprintf Common.time "step-prover %d (%d, %d, %d)"
(Index.to_int branch_data.index)
(Domain.size h) (Domain.size k) (Domain.size x) (fun () ->
diff --git a/src/lib/precomputed_values/dune b/src/lib/precomputed_values/dune
index efa93f74658..e0596b3c53d 100644
--- a/src/lib/precomputed_values/dune
+++ b/src/lib/precomputed_values/dune
@@ -1,5 +1,6 @@
(library
(name precomputed_values)
+ (flags -warn-error -22)
(public_name precomputed_values)
(libraries
coda_base
diff --git a/src/lib/precomputed_values/gen_values/dune b/src/lib/precomputed_values/gen_values/dune
index 1aca6e6e142..393841af41c 100644
--- a/src/lib/precomputed_values/gen_values/dune
+++ b/src/lib/precomputed_values/gen_values/dune
@@ -1,5 +1,6 @@
(executable
(name gen_values)
+ (flags -warn-error -22)
(libraries
;; opam deps
async
@@ -9,13 +10,11 @@
coda_base
genesis_proof
global_signer_private_key
- keys_lib
ppxlib
ppx_util
snark_keys
snark_params
- snarky
- staged_ledger)
+ snarky )
(preprocessor_deps ../../../config.mlh)
(preprocess
(pps ppx_version ppx_optcomp ppx_let ppxlib.metaquot))
diff --git a/src/lib/precomputed_values/gen_values/gen_values.ml b/src/lib/precomputed_values/gen_values/gen_values.ml
index 819082504e3..c0579d93ef7 100644
--- a/src/lib/precomputed_values/gen_values/gen_values.ml
+++ b/src/lib/precomputed_values/gen_values/gen_values.ml
@@ -21,22 +21,61 @@ let use_dummy_values = true
[%%endif]
module type S = sig
- val base_hash_expr : Parsetree.expression
+ val blockchain_proof_system_id : Parsetree.expression
val base_proof_expr : Parsetree.expression
+
+ val transaction_verification : Parsetree.expression
+
+ val blockchain_verification : Parsetree.expression
+
+ val key_hashes : Parsetree.expression
end
+let hashes =
+ let module E = Ppxlib.Ast_builder.Make (struct
+ let loc = Location.none
+ end) in
+ let open E in
+ let f (_, x) = estring (Core.Md5.to_hex x) in
+ let ts = Transaction_snark.constraint_system_digests () in
+ let bs =
+ Blockchain_snark.Blockchain_snark_state.constraint_system_digests ()
+ in
+ elist (List.map ts ~f @ List.map bs ~f)
+
module Dummy = struct
let loc = Ppxlib.Location.none
- let base_hash_expr = [%expr Snark_params.Tick.Field.zero]
+ let base_proof_expr = [%expr Coda_base.Proof.blockchain_dummy]
+
+ let blockchain_proof_system_id =
+ [%expr fun () -> Pickles.Verification_key.Id.dummy ()]
- let base_proof_expr = [%expr Dummy_values.Tock.Bowe_gabizon18.proof]
+ let transaction_verification =
+ [%expr fun () -> Pickles.Verification_key.dummy]
+
+ let blockchain_verification =
+ [%expr fun () -> Pickles.Verification_key.dummy]
+
+ let key_hashes = hashes
end
-module Make_real (Keys : Keys_lib.Keys.S) = struct
+module Make_real () = struct
let loc = Ppxlib.Location.none
+ module E = Ppxlib.Ast_builder.Make (struct
+ let loc = loc
+ end)
+
+ open E
+
+ module T = Transaction_snark.Make ()
+
+ module B = Blockchain_snark.Blockchain_snark_state.Make (T)
+
+ let key_hashes = hashes
+
let constraint_constants = Genesis_constants.Constraint_constants.compiled
let genesis_constants = Genesis_constants.compiled
@@ -49,11 +88,9 @@ module Make_real (Keys : Keys_lib.Keys.S) = struct
Genesis_protocol_state.t ~genesis_ledger:Test_genesis_ledger.t
~constraint_constants ~consensus_constants
- let base_hash = Keys.Step.instance_hash protocol_state_with_hash.data
-
let compiled_values =
Genesis_proof.create_values
- ~keys:(module Keys : Keys_lib.Keys.S)
+ (module B)
{ runtime_config= Runtime_config.default
; constraint_constants
; proof_level= Full
@@ -61,21 +98,59 @@ module Make_real (Keys : Keys_lib.Keys.S) = struct
; genesis_ledger= (module Test_genesis_ledger)
; consensus_constants
; protocol_state_with_hash
- ; base_hash }
+ ; blockchain_proof_system_id= Lazy.force B.Proof.id }
- let base_hash_expr =
+ let blockchain_proof_system_id =
[%expr
- Snark_params.Tick.Field.t_of_sexp
- [%e
- Ppx_util.expr_of_sexp ~loc
- (Snark_params.Tick.Field.sexp_of_t base_hash)]]
+ let t =
+ lazy
+ (Core.Sexp.of_string_conv_exn
+ [%e
+ estring
+ (Core.Sexp.to_string
+ (Pickles.Verification_key.Id.sexp_of_t
+ (Lazy.force B.Proof.id)))]
+ Pickles.Verification_key.Id.t_of_sexp)
+ in
+ fun () -> Lazy.force t]
+
+ let transaction_verification =
+ [%expr
+ let t =
+ lazy
+ (Core.Binable.of_string
+ (module Pickles.Verification_key)
+ [%e
+ estring
+ (Binable.to_string
+ (module Pickles.Verification_key)
+ (Lazy.force T.verification_key))])
+ in
+ fun () -> Lazy.force t]
+
+ let blockchain_verification =
+ [%expr
+ let t =
+ lazy
+ (Core.Binable.of_string
+ (module Pickles.Verification_key)
+ [%e
+ estring
+ (Binable.to_string
+ (module Pickles.Verification_key)
+ (Lazy.force B.Proof.verification_key))])
+ in
+ fun () -> Lazy.force t]
let base_proof_expr =
[%expr
- Coda_base.Proof.Stable.V1.t_of_sexp
+ Core.Binable.of_string
+ (module Coda_base.Proof.Stable.V1)
[%e
- Ppx_util.expr_of_sexp ~loc
- (Coda_base.Proof.Stable.V1.sexp_of_t compiled_values.genesis_proof)]]
+ estring
+ (Binable.to_string
+ (module Coda_base.Proof.Stable.V1)
+ compiled_values.genesis_proof)]]
end
open Async
@@ -84,20 +159,17 @@ let main () =
let target = Sys.argv.(1) in
let fmt = Format.formatter_of_out_channel (Out_channel.create target) in
let loc = Ppxlib.Location.none in
- let%bind (module M) =
- if use_dummy_values then return (module Dummy : S)
- else
- let%map (module K) = Keys_lib.Keys.create () in
- (module Make_real (K) : S)
+ let (module M) =
+ if use_dummy_values then (module Dummy : S) else (module Make_real () : S)
in
let structure =
[%str
module T = Genesis_proof.T
include T
- let unit_test_base_hash = Snark_params.Tick.Field.zero
+ let blockchain_proof_system_id = [%e M.blockchain_proof_system_id]
- let unit_test_base_proof = Dummy_values.Tock.Bowe_gabizon18.proof
+ let compiled_base_proof = [%e M.base_proof_expr]
let for_unit_tests =
lazy
@@ -117,12 +189,13 @@ let main () =
; genesis_ledger= Genesis_ledger.for_unit_tests
; consensus_constants= Lazy.force Consensus.Constants.for_unit_tests
; protocol_state_with_hash
- ; base_hash= unit_test_base_hash
- ; genesis_proof= unit_test_base_proof })
+ ; genesis_proof= Coda_base.Proof.blockchain_dummy })
- let compiled_base_hash = [%e M.base_hash_expr]
+ let key_hashes = [%e M.key_hashes]
- let compiled_base_proof = [%e M.base_proof_expr]
+ let blockchain_verification = [%e M.blockchain_verification]
+
+ let transaction_verification = [%e M.transaction_verification]
let compiled =
lazy
@@ -146,7 +219,6 @@ let main () =
; genesis_ledger= (module Test_genesis_ledger)
; consensus_constants
; protocol_state_with_hash
- ; base_hash= compiled_base_hash
; genesis_proof= compiled_base_proof })]
in
Pprintast.top_phrase fmt (Ptop_def structure) ;
diff --git a/src/lib/prover/dune b/src/lib/prover/dune
index 7323a85fe9a..db187d520c7 100644
--- a/src/lib/prover/dune
+++ b/src/lib/prover/dune
@@ -1,6 +1,6 @@
(library
(name prover)
(public_name prover)
- (libraries async core rpc_parallel coda_base coda_state coda_transition blockchain_snark keys_lib memory_stats precomputed_values child_processes)
+ (libraries async core rpc_parallel coda_base coda_state coda_transition blockchain_snark memory_stats precomputed_values child_processes)
(preprocessor_deps "../../config.mlh")
(preprocess (pps ppx_coda ppx_version ppx_jane)))
diff --git a/src/lib/prover/intf.ml b/src/lib/prover/intf.ml
index dd93fdbdac7..58112a15788 100644
--- a/src/lib/prover/intf.ml
+++ b/src/lib/prover/intf.ml
@@ -30,6 +30,7 @@ module type S = sig
-> Blockchain.t
-> Protocol_state.Value.t
-> Snark_transition.value
+ -> Ledger_proof.t option
-> Consensus.Data.Prover_state.t
-> Pending_coinbase_witness.t
-> Blockchain.t Deferred.Or_error.t
diff --git a/src/lib/prover/prover.ml b/src/lib/prover/prover.ml
index 454a1a6e0c2..031a71aa17b 100644
--- a/src/lib/prover/prover.ml
+++ b/src/lib/prover/prover.ml
@@ -15,6 +15,7 @@ module Extend_blockchain_input = struct
{ chain: Blockchain.Stable.V1.t
; next_state: Protocol_state.Value.Stable.V1.t
; block: Snark_transition.Value.Stable.V1.t
+ ; ledger_proof: Ledger_proof.Stable.V1.t option
; prover_state: Consensus.Data.Prover_state.Stable.V1.t
; pending_coinbase: Pending_coinbase_witness.Stable.V1.t }
@@ -26,6 +27,7 @@ module Extend_blockchain_input = struct
{ chain: Blockchain.t
; next_state: Protocol_state.Value.t
; block: Snark_transition.Value.t
+ ; ledger_proof: Ledger_proof.t option
; prover_state: Consensus.Data.Prover_state.t
; pending_coinbase: Pending_coinbase_witness.t }
[@@deriving sexp]
@@ -36,12 +38,11 @@ module Blockchain = Blockchain
module Worker_state = struct
module type S = sig
- module Transaction_snark : Transaction_snark.Verification.S
-
val extend_blockchain :
Blockchain.t
-> Protocol_state.Value.t
-> Snark_transition.value
+ -> Ledger_proof.t option
-> Consensus.Data.Prover_state.t
-> Pending_coinbase_witness.t
-> Blockchain.t Or_error.t
@@ -58,63 +59,58 @@ module Worker_state = struct
Genesis_constants.Constraint_constants.Stable.Latest.t }
[@@deriving bin_io_unversioned]
- type t = (module S) Deferred.t
+ type t = (module S)
+
+ let ledger_proof_opt (chain : Blockchain.t) next_state = function
+ | Some t ->
+ Ledger_proof.
+ ({(statement t) with sok_digest= sok_digest t}, underlying_proof t)
+ | None ->
+ let bs = Protocol_state.blockchain_state in
+ let lh x = Blockchain_state.snarked_ledger_hash (bs x) in
+ let tok x = Blockchain_state.snarked_next_available_token (bs x) in
+ ( { source= lh chain.state
+ ; target= lh next_state
+ ; supply_increase= Currency.Amount.zero
+ ; fee_excess= Fee_excess.zero
+ ; sok_digest= Sok_message.Digest.default
+ ; next_available_token_before= tok chain.state
+ ; next_available_token_after= tok next_state
+ ; pending_coinbase_stack_state=
+ { source= Pending_coinbase.Stack.empty
+ ; target= Pending_coinbase.Stack.empty } }
+ , Proof.transaction_dummy )
let create {logger; proof_level; constraint_constants; _} : t Deferred.t =
Deferred.return
- (let%map (module Keys) = Keys_lib.Keys.create () in
- let module Transaction_snark =
- Transaction_snark.Verification.Make (struct
- let keys = Keys.transaction_snark_keys
- end) in
- let m =
+ (let m =
match proof_level with
| Genesis_constants.Proof_level.Full ->
( module struct
- open Snark_params
- open Keys
- module Transaction_snark = Transaction_snark
+ module T = Transaction_snark.Make ()
+
+ module B = Blockchain_snark.Blockchain_snark_state.Make (T)
- let wrap hash proof =
- let module Wrap = Keys.Wrap in
- Tock.prove
- (Tock.Keypair.pk Wrap.keys)
- Wrap.input {Wrap.Prover_state.proof} Wrap.main
- (Wrap_input.of_tick_field hash)
+ let _ = Pickles.Cache_handle.generate_or_load B.cache_handle
let extend_blockchain (chain : Blockchain.t)
(next_state : Protocol_state.Value.t)
- (block : Snark_transition.value) state_for_handler
- pending_coinbase =
- let next_state_top_hash =
- Keys.Step.instance_hash next_state
- in
- let prover_state =
- { Keys.Step.Prover_state.prev_proof= chain.proof
- ; wrap_vk= Tock.Keypair.vk Keys.Wrap.keys
- ; prev_state= chain.state
- ; genesis_state_hash=
- Coda_state.Protocol_state.genesis_state_hash chain.state
- ; expected_next_state= Some next_state
- ; update= block }
- in
- let main x =
- Tick.handle
- (Keys.Step.main ~logger ~proof_level ~constraint_constants
- x)
- (Consensus.Data.Prover_state.handler ~constraint_constants
- state_for_handler ~pending_coinbase)
- in
+ (block : Snark_transition.value) (t : Ledger_proof.t option)
+ state_for_handler pending_coinbase =
let res =
Or_error.try_with (fun () ->
- let prev_proof =
- Tick.prove
- (Tick.Keypair.pk Keys.Step.keys)
- (Keys.Step.input ()) prover_state main
- next_state_top_hash
+ let t = ledger_proof_opt chain next_state t in
+ let proof =
+ B.step
+ ~handler:
+ (Consensus.Data.Prover_state.handler
+ ~constraint_constants state_for_handler
+ ~pending_coinbase)
+ {transition= block; prev_state= chain.state}
+ [(chain.state, chain.proof); t]
+ next_state
in
- { Blockchain.state= next_state
- ; proof= wrap next_state_top_hash prev_proof } )
+ {Blockchain.state= next_state; proof} )
in
Or_error.iter_error res ~f:(fun e ->
[%log error]
@@ -122,49 +118,30 @@ module Worker_state = struct
"Prover threw an error while extending block: $error" ) ;
res
- let verify state proof =
- Tock.verify proof
- (Tock.Keypair.vk Wrap.keys)
- Wrap.input
- (Wrap_input.of_tick_field (Keys.Step.instance_hash state))
+ let verify state proof = B.Proof.verify [(state, proof)]
end
: S )
| Check ->
( module struct
- open Snark_params
module Transaction_snark = Transaction_snark
let extend_blockchain (chain : Blockchain.t)
(next_state : Protocol_state.Value.t)
- (block : Snark_transition.value) state_for_handler
- pending_coinbase =
- let next_state_top_hash =
- Keys.Step.instance_hash next_state
- in
- let prover_state =
- { Keys.Step.Prover_state.prev_proof= chain.proof
- ; wrap_vk= Tock.Keypair.vk Keys.Wrap.keys
- ; prev_state= chain.state
- ; genesis_state_hash=
- Coda_state.Protocol_state.genesis_state_hash chain.state
- ; expected_next_state= Some next_state
- ; update= block }
- in
- let main x =
- Tick.handle
- (Keys.Step.main ~logger ~proof_level ~constraint_constants
- x)
- (Consensus.Data.Prover_state.handler ~constraint_constants
- state_for_handler ~pending_coinbase)
- in
+ (block : Snark_transition.value) (t : Ledger_proof.t option)
+ state_for_handler pending_coinbase =
+ let t, _proof = ledger_proof_opt chain next_state t in
let res =
- Or_error.map
- (Tick.check
- (main @@ Tick.Field.Var.constant next_state_top_hash)
- prover_state)
- ~f:(fun () ->
- { Blockchain.state= next_state
- ; proof= Dummy_values.Tock.Bowe_gabizon18.proof } )
+ Blockchain_snark.Blockchain_snark_state.check ~proof_level
+ ~constraint_constants
+ {transition= block; prev_state= chain.state}
+ ~handler:
+ (Consensus.Data.Prover_state.handler state_for_handler
+ ~constraint_constants ~pending_coinbase)
+ t
+ (Protocol_state.hash next_state)
+ |> Or_error.map ~f:(fun () ->
+ { Blockchain.state= next_state
+ ; proof= Precomputed_values.compiled_base_proof } )
in
Or_error.iter_error res ~f:(fun e ->
[%log error]
@@ -179,10 +156,10 @@ module Worker_state = struct
( module struct
module Transaction_snark = Transaction_snark
- let extend_blockchain _chain next_state _block
+ let extend_blockchain _chain next_state _block _ledger_proof
_state_for_handler _pending_coinbase =
Ok
- { Blockchain.proof= Dummy_values.Tock.Bowe_gabizon18.proof
+ { Blockchain.proof= Coda_base.Proof.blockchain_dummy
; state= next_state }
let verify _ _ = true
@@ -205,22 +182,25 @@ module Functions = struct
let initialized =
create bin_unit [%bin_type_class: [`Initialized]] (fun w () ->
- let%map (module W) = Worker_state.get w in
- `Initialized )
+ let (module W) = Worker_state.get w in
+ Deferred.return `Initialized )
let extend_blockchain =
create Extend_blockchain_input.Stable.Latest.bin_t
[%bin_type_class: Blockchain.Stable.Latest.t Or_error.t]
- (fun w {chain; next_state; block; prover_state; pending_coinbase} ->
- let%map (module W) = Worker_state.get w in
- W.extend_blockchain chain next_state block prover_state
- pending_coinbase )
+ (fun w
+ {chain; next_state; ledger_proof; block; prover_state; pending_coinbase}
+ ->
+ let (module W) = Worker_state.get w in
+ W.extend_blockchain chain next_state block ledger_proof prover_state
+ pending_coinbase
+ |> Deferred.return )
let verify_blockchain =
create Blockchain.Stable.Latest.bin_t bin_bool
(fun w {Blockchain.state; proof} ->
- let%map (module W) = Worker_state.get w in
- W.verify state proof )
+ let (module W) = Worker_state.get w in
+ W.verify state proof |> Deferred.return )
end
module Worker = struct
@@ -335,11 +315,12 @@ let prove_from_input_sexp {connection; logger; _} sexp =
false
let extend_blockchain {connection; logger; _} chain next_state block
- prover_state pending_coinbase =
+ ledger_proof prover_state pending_coinbase =
let input =
{ Extend_blockchain_input.chain
; next_state
; block
+ ; ledger_proof
; prover_state
; pending_coinbase }
in
@@ -358,9 +339,10 @@ let extend_blockchain {connection; logger; _} chain next_state block
(Sexp.to_string (Extend_blockchain_input.sexp_of_t input)) )
; ( "input-bin-io"
, `String
- (Binable.to_string
- (module Extend_blockchain_input.Stable.Latest)
- input) )
+ (Base64.encode_exn
+ (Binable.to_string
+ (module Extend_blockchain_input.Stable.Latest)
+ input)) )
; ("error", `String (Error.to_string_hum e)) ]
"Prover failed: $error" ;
Error e
@@ -374,6 +356,7 @@ let prove t ~prev_state ~prev_state_proof ~next_state
(Blockchain.create ~proof:prev_state_proof ~state:prev_state)
next_state
(Internal_transition.snark_transition transition)
+ (Internal_transition.ledger_proof transition)
(Internal_transition.prover_state transition)
pending_coinbase
in
diff --git a/src/lib/random_oracle/dune b/src/lib/random_oracle/dune
index 24713cde581..60c4543ea18 100644
--- a/src/lib/random_oracle/dune
+++ b/src/lib/random_oracle/dune
@@ -6,9 +6,8 @@
(inline_tests)
(libraries
core_kernel
- curve_choice
o1trace
+ pickles
random_oracle_input
snarky
- sponge
- sponge_params ))
+ sponge ))
diff --git a/src/lib/random_oracle/random_oracle.ml b/src/lib/random_oracle/random_oracle.ml
index c2853ee49f5..fbf8358624a 100644
--- a/src/lib/random_oracle/random_oracle.ml
+++ b/src/lib/random_oracle/random_oracle.ml
@@ -6,13 +6,11 @@ open Core_kernel
[%%ifdef
consensus_mechanism]
-open Curve_choice
-module Field = Tick0.Field
+open Pickles.Impls.Step.Internal_Basic
[%%else]
open Snark_params_nonconsensus
-module Sponge_params = Sponge_params_nonconsensus.Sponge_params
[%%endif]
@@ -24,36 +22,37 @@ end
module Input = Random_oracle_input
-let params : _ Sponge.Params.t =
- let open Sponge_params in
- {mds; round_constants}
+let params : Field.t Sponge.Params.t =
+ Sponge.Params.(map tweedle_q ~f:Field.of_string)
+(* TODO: Unify with Bn382_inputs in the sponge lib *)
module Inputs = struct
module Field = Field
let rounds_full = 8
- let rounds_partial = 33
+ let rounds_partial = 30
[%%ifdef
consensus_mechanism]
+ (* Computes x^17 *)
let to_the_alpha x =
let open Field in
- let res = x + zero in
- res *= res ;
- (* x^2 *)
- res *= res ;
+ let res = square x in
+ let open Pickles.Backend.Tick.Field in
+ Mutable.square res ;
(* x^4 *)
- res *= x ;
- (* x^5 *)
- res *= res ;
- (* x^10 *)
+ Mutable.square res ;
+ (* x^8 *)
+ Mutable.square res ;
+ (* x^16 *)
res *= x ;
res
[%%else]
+ (* Computes x^17 *)
let to_the_alpha x =
let open Field in
let res = x in
@@ -61,10 +60,10 @@ module Inputs = struct
(* x^2 *)
let res = res * res in
(* x^4 *)
- let res = res * x in
- (* x^5 *)
let res = res * res in
- (* x^10 *)
+ (* x^8 *)
+ let res = res * res in
+ (* x^16 *)
res * x
[%%endif]
@@ -75,13 +74,11 @@ module Inputs = struct
let add_assign ~state i x = Field.(state.(i) += x)
- let apply_affine_map (rows, c) v =
- Array.mapi rows ~f:(fun j row ->
- let open Field in
- let res = zero + zero in
- Array.iteri row ~f:(fun i r -> res += (r * v.(i))) ;
- res += c.(j) ;
- res )
+ let apply_affine_map (_rows, c) v =
+ let open Field in
+ let res = [|v.(0) + v.(2); v.(0) + v.(1); v.(1) + v.(2)|] in
+ Array.iteri res ~f:(fun i ri -> ri += c.(i)) ;
+ res
(* TODO: Have an explicit function for making a copy of a field element. *)
let copy a = Array.map a ~f:(fun x -> Field.(x + zero))
@@ -90,51 +87,18 @@ module Inputs = struct
let add_assign ~state i x = Field.(state.(i) <- state.(i) + x)
- let apply_affine_map (rows, c) v =
- Array.mapi rows ~f:(fun j row ->
- let res = ref Field.zero in
- Array.iteri row ~f:(fun i r -> res := Field.(!res + (r * v.(i)))) ;
- (res := Field.(!res + c.(j))) ;
- !res )
+ (* TODO: Clean this up to use the near mds matrix properly *)
+ let apply_affine_map (_matrix, constants) v =
+ let open Field in
+ let near_mds_matrix_v =
+ [|v.(0) + v.(2); v.(0) + v.(1); v.(1) + v.(2)|]
+ in
+ Array.mapi near_mds_matrix_v ~f:(fun i x -> constants.(i) + x)
let copy a = Array.map a ~f:Fn.id
[%%endif]
end
-
- let _alphath_root =
- let inv_alpha =
- Bigint.of_string Sponge_params.inv_alpha |> Bigint.to_zarith_bigint
- in
- let k = 4 in
- let chunks = (Field.size_in_bits + (k - 1)) / k in
- let inv_alpha =
- let chunk i =
- let b j = Z.testbit inv_alpha ((k * i) + j) in
- Sequence.fold ~init:0
- (Sequence.range ~start:`inclusive ~stop:`exclusive 0 k)
- ~f:(fun acc i -> acc + ((1 lsl i) * Bool.to_int (b i)))
- in
- (* High bits first *)
- Array.init chunks ~f:(fun i -> chunk (chunks - 1 - i))
- in
- let lookup_table x =
- let n = 1 lsl k in
- let arr = Array.init (1 lsl k) ~f:(fun _ -> Field.one) in
- for i = 1 to n - 1 do
- arr.(i) <- Field.( * ) x arr.(i - 1)
- done ;
- arr
- in
- fun x ->
- let tbl = lookup_table x in
- Array.fold inv_alpha ~init:Field.one ~f:(fun acc chunk ->
- Field.( * ) (Fn.apply_n_times ~n:k Field.square acc) tbl.(chunk) )
-
- let%test_unit "alpha_root" =
- let x = Field.random () in
- let root = _alphath_root x in
- [%test_eq: Field.t] (to_the_alpha root) x
end
module Digest = struct
@@ -160,86 +124,14 @@ let hash ?init = hash ?init params
consensus_mechanism]
module Checked = struct
- module Inputs = struct
- module Field = struct
- open Tick0
-
- (* The linear combinations involved in computing Poseidon do not involve very many
- variables, but if they are represented as arithmetic expressions (that is, "Cvars"
- which is what Field.t is under the hood) the expressions grow exponentially in
- in the number of rounds. Thus, we compute with Field elements represented by
- a "reduced" linear combination. That is, a coefficient for each variable and an
- constant term.
-*)
- type t = Field.t Int.Map.t * Field.t
-
- let to_cvar ((m, c) : t) : Field.Var.t =
- Map.fold m ~init:(Field.Var.constant c) ~f:(fun ~key ~data acc ->
- let x =
- let v = Snarky.Cvar.Var key in
- if Field.equal data Field.one then v else Scale (data, v)
- in
- match acc with
- | Constant c when Field.equal Field.zero c ->
- x
- | _ ->
- Add (x, acc) )
-
- let constant c = (Int.Map.empty, c)
-
- let of_cvar (x : Field.Var.t) =
- match x with
- | Constant c ->
- constant c
- | Var v ->
- (Int.Map.singleton v Field.one, Field.zero)
- | x ->
- let c, ts = Field.Var.to_constant_and_terms x in
- ( Int.Map.of_alist_reduce
- (List.map ts ~f:(fun (f, v) -> (Tick_backend.Var.index v, f)))
- ~f:Field.add
- , Option.value ~default:Field.zero c )
-
- let ( + ) (t1, c1) (t2, c2) =
- ( Map.merge t1 t2 ~f:(fun ~key:_ t ->
- match t with
- | `Left x ->
- Some x
- | `Right y ->
- Some y
- | `Both (x, y) ->
- Some Field.(x + y) )
- , Field.add c1 c2 )
-
- let ( * ) (t1, c1) (t2, c2) =
- assert (Int.Map.is_empty t1) ;
- (Map.map t2 ~f:(Field.mul c1), Field.mul c1 c2)
-
- let zero = constant Field.zero
- end
-
- let rounds_full = 8
-
- let rounds_partial = 33
-
- let to_the_alpha x =
- let open Runners.Tick.Field in
- let zero = square in
- let one a = square a * x in
- let one' = x in
- one' |> zero |> one |> one
-
- let to_the_alpha x = Field.of_cvar (to_the_alpha (Field.to_cvar x))
-
- module Operations = Sponge.Make_operations (Field)
- end
+ module Inputs = Pickles.Sponge_inputs.Make (Pickles.Impls.Step)
module Digest = struct
- open Runners.Tick.Field
+ open Pickles.Impls.Step.Field
type nonrec t = t
- let to_bits ?(length = Field.size_in_bits) x =
+ let to_bits ?(length = Field.size_in_bits) (x : t) =
List.take (choose_preimage_var ~length:Field.size_in_bits x) length
end
@@ -249,22 +141,15 @@ module Checked = struct
open Inputs.Field
- let update ~state xs =
- let f = Array.map ~f:of_cvar in
- update params ~state:(f state) (f xs) |> Array.map ~f:to_cvar
+ let update ~state xs = update params ~state xs
let hash ?init xs =
O1trace.measure "Random_oracle.hash" (fun () ->
- hash
- ?init:(Option.map init ~f:(State.map ~f:constant))
- params (Array.map xs ~f:of_cvar)
- |> to_cvar )
+ hash ?init:(Option.map init ~f:(State.map ~f:constant)) params xs )
let pack_input =
Input.pack_to_fields ~size_in_bits:Field.size_in_bits ~pack:Field.Var.pack
- let initial_state = Array.map initial_state ~f:to_cvar
-
let digest xs = xs.(0)
end
@@ -295,13 +180,14 @@ let%test_unit "iterativeness" =
consensus_mechanism]
let%test_unit "sponge checked-unchecked" =
- let module T = Tick0 in
+ let open Pickles.Impls.Step in
+ let module T = Internal_Basic in
let x = T.Field.random () in
let y = T.Field.random () in
T.Test.test_equal ~equal:T.Field.equal ~sexp_of_t:T.Field.sexp_of_t
T.Typ.(field * field)
T.Typ.field
- (fun (x, y) -> Runners.Tick.make_checked (fun () -> Checked.hash [|x; y|]))
+ (fun (x, y) -> make_checked (fun () -> Checked.hash [|x; y|]))
(fun (x, y) -> hash [|x; y|])
(x, y)
diff --git a/src/lib/random_oracle/random_oracle.mli b/src/lib/random_oracle/random_oracle.mli
index da73cc57045..13ca3959b26 100644
--- a/src/lib/random_oracle/random_oracle.mli
+++ b/src/lib/random_oracle/random_oracle.mli
@@ -2,7 +2,7 @@
[%%ifdef consensus_mechanism]
-open Curve_choice.Tick0
+open Pickles.Impls.Step.Internal_Basic
[%%else]
diff --git a/src/lib/signature_lib/private_key.ml b/src/lib/signature_lib/private_key.ml
index 48b7c80fd48..75c7a39e633 100644
--- a/src/lib/signature_lib/private_key.ml
+++ b/src/lib/signature_lib/private_key.ml
@@ -2,7 +2,6 @@
"/src/config.mlh"]
open Core_kernel
-open Async_kernel
[%%ifdef
consensus_mechanism]
@@ -32,7 +31,7 @@ module Stable = struct
[%%else]
- let gen = Inner_curve.Scalar.(gen_uniform_incl one (size - one))
+ let gen = Inner_curve.Scalar.(gen_uniform_incl one (zero - one))
[%%endif]
end
@@ -44,27 +43,14 @@ module Stable = struct
*)
[%%if
- curve_size = 298]
+ curve_size = 255]
let%test "private key serialization v1" =
let pk =
Quickcheck.random_value ~seed:(`Deterministic "private key seed v1")
V1.gen
in
- let known_good_digest = "4bbc6cd7832cfc67f0fe3abcd7f765df" in
- Ppx_version_runtime.Serialization.check_serialization
- (module V1)
- pk known_good_digest
-
- [%%elif
- curve_size = 753]
-
- let%test "private key serialization v1" =
- let pk =
- Quickcheck.random_value ~seed:(`Deterministic "private key seed v1")
- V1.gen
- in
- let known_good_digest = "65c75a7d10b6ce193f0c0e296611a935" in
+ let known_good_digest = "5f4d5a6fee5d45e13ff0ca5c648fe6f1" in
Ppx_version_runtime.Serialization.check_serialization
(module V1)
pk known_good_digest
diff --git a/src/lib/signature_lib/public_key.ml b/src/lib/signature_lib/public_key.ml
index be61137c7ce..d6b93f96f4b 100644
--- a/src/lib/signature_lib/public_key.ml
+++ b/src/lib/signature_lib/public_key.ml
@@ -9,15 +9,13 @@ consensus_mechanism]
include Non_zero_curve_point
module Inner_curve = Snark_params.Tick.Inner_curve
-let of_private_key_exn p = of_inner_curve_exn Inner_curve.(scale_field one p)
+let of_private_key_exn p = of_inner_curve_exn Inner_curve.(scale one p)
[%%else]
include Non_zero_curve_point_nonconsensus.Non_zero_curve_point
module Inner_curve = Snark_params_nonconsensus.Inner_curve
-let of_private_key_exn p =
- of_inner_curve_exn
- Inner_curve.(scale_field one (p : Private_key.t :> Snarkette.Nat.t))
+let of_private_key_exn p = of_inner_curve_exn Inner_curve.(scale one p)
[%%endif]
diff --git a/src/lib/signature_lib/schnorr.ml b/src/lib/signature_lib/schnorr.ml
index c5e1e01d28f..992fae1517d 100644
--- a/src/lib/signature_lib/schnorr.ml
+++ b/src/lib/signature_lib/schnorr.ml
@@ -160,8 +160,6 @@ module Make
val scale : t -> Scalar.t -> t
val to_affine_exn : t -> Field.t * Field.t
-
- val to_affine : t -> (Field.t * Field.t) option
end)
(Message : Message_intf
with type boolean_var := Impl.Boolean.var
@@ -224,11 +222,11 @@ module Make
let verify ((r, s) : Signature.t) (pk : Public_key.t) (m : Message.t) =
let e = Message.hash ~public_key:pk ~r m in
let r_pt = Curve.(scale one s + negate (scale pk e)) in
- match Curve.to_affine r_pt with
- | None ->
- false
- | Some (rx, ry) ->
+ match Curve.to_affine_exn r_pt with
+ | rx, ry ->
is_even ry && Field.equal rx r
+ | exception _ ->
+ false
[%%if
call_logger]
@@ -354,8 +352,6 @@ module Make
val scale : t -> Scalar.t -> t
val to_affine_exn : t -> Field.t * Field.t
-
- val to_affine : t -> (Field.t * Field.t) option
end)
(Message : Message_intf
with type curve := Curve.t
@@ -400,11 +396,11 @@ module Make
let verify ((r, s) : Signature.t) (pk : Public_key.t) (m : Message.t) =
let e = Message.hash ~public_key:pk ~r m in
let r_pt = Curve.(scale one s + negate (scale pk e)) in
- match Curve.to_affine r_pt with
- | None ->
- false
- | Some (rx, ry) ->
+ match Curve.to_affine_exn r_pt with
+ | rx, ry ->
is_even ry && Impl.Field.(equal rx r)
+ | exception _ ->
+ false
end
module Tick = Snark_params_nonconsensus
@@ -416,7 +412,7 @@ open Hash_prefix_states_nonconsensus
module Message = struct
open Tick
- type t = (Field.t, bool) Random_oracle.Input.t
+ type t = (Field.t, bool) Random_oracle.Input.t [@@deriving sexp]
let challenge_length = 128
@@ -430,6 +426,7 @@ module Message = struct
Random_oracle.Input.to_bits ~unpack:Field.unpack input
|> Array.of_list |> Blake2.bits_to_string |> Blake2.digest_string
|> Blake2.to_raw_string |> Blake2.string_to_bits |> Array.to_list
+ |> Fn.flip List.take (Int.min 256 (Tock.Field.size_in_bits - 1))
|> Tock.Field.project
let hash t ~public_key ~r =
diff --git a/src/lib/snark_keys/dune b/src/lib/snark_keys/dune
index f234ef7cd86..982fd91408b 100644
--- a/src/lib/snark_keys/dune
+++ b/src/lib/snark_keys/dune
@@ -1,7 +1,7 @@
(library
(name snark_keys)
(public_name snark_keys)
- (libraries async transaction_snark dummy_values blockchain_snark core)
+ (libraries async transaction_snark blockchain_snark core)
(preprocess
(pps ppx_version ppx_jane ppx_deriving_yojson)))
diff --git a/src/lib/snark_keys/gen_keys/dune b/src/lib/snark_keys/gen_keys/dune
index dc52d790bfe..85d2d6b20f4 100644
--- a/src/lib/snark_keys/gen_keys/dune
+++ b/src/lib/snark_keys/gen_keys/dune
@@ -1,6 +1,7 @@
(executable
(name gen_keys)
- (libraries ppxlib cache_dir remove_snark_keys_trigger cached coda_base
+ (flags -g)
+ (libraries ppxlib cache_dir remove_snark_keys_trigger coda_base
transaction_snark blockchain_snark async core)
(preprocessor_deps ../../../config.mlh)
(preprocess
diff --git a/src/lib/snark_keys/gen_keys/gen_keys.ml b/src/lib/snark_keys/gen_keys/gen_keys.ml
index ed3c27a2d56..682a55f5ae8 100644
--- a/src/lib/snark_keys/gen_keys/gen_keys.ml
+++ b/src/lib/snark_keys/gen_keys/gen_keys.ml
@@ -7,149 +7,60 @@ open Parsetree
open Longident
open Core
-module Blockchain_snark_keys = struct
- module Proving = struct
- let key_location ~loc bc_location =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- estring
- (Blockchain_snark.Blockchain_transition.Keys.Proving.Location.to_string
- bc_location)
+let hashes ~loc =
+ let module E = Ppxlib.Ast_builder.Make (struct
+ let loc = loc
+ end) in
+ let open E in
+ let f (_, x) = estring (Core.Md5.to_hex x) in
+ let ts = Transaction_snark.constraint_system_digests () in
+ let bs =
+ Blockchain_snark.Blockchain_snark_state.constraint_system_digests ()
+ in
+ elist (List.map ts ~f @ List.map bs ~f)
- let load_expr ~loc bc_location bc_checksum =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- [%expr
- let open Async.Deferred in
- Blockchain_snark.Blockchain_transition.Keys.Proving.load
- (Blockchain_snark.Blockchain_transition.Keys.Proving.Location
- .of_string
- [%e key_location ~loc bc_location])
- >>| fun (keys, checksum) ->
- assert (
- String.equal (Md5_lib.to_hex checksum)
- [%e estring (Md5_lib.to_hex bc_checksum)] ) ;
- keys]
- end
+let from_disk_expr ~loc id =
+ let module E = Ppxlib.Ast_builder.Make (struct
+ let loc = loc
+ end) in
+ let open E in
+ [%expr
+ (* TODO: Not sure what to do with cache hit/generated something *)
+ let open Async in
+ let%map t, _ =
+ Pickles.Verification_key.load ~cache:Cache_dir.cache
+ (Sexp.of_string_conv_exn
+ [%e
+ estring
+ (Pickles.Verification_key.Id.sexp_of_t id |> Sexp.to_string)]
+ Pickles.Verification_key.Id.t_of_sexp)
+ >>| Or_error.ok_exn
+ in
+ t]
- module Verification = struct
- let key_location ~loc bc_location =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- estring
- (Blockchain_snark.Blockchain_transition.Keys.Verification.Location
- .to_string bc_location)
+let str ~loc ~blockchain_verification_key_id ~transaction_snark
+ ~blockchain_snark =
+ let module E = Ppxlib.Ast_builder.Make (struct
+ let loc = loc
+ end) in
+ let open E in
+ let hashes = hashes ~loc in
+ [%str
+ open! Core_kernel
- let load_expr ~loc bc_location bc_checksum =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- [%expr
- let open Async.Deferred in
- Blockchain_snark.Blockchain_transition.Keys.Verification.load
- (Blockchain_snark.Blockchain_transition.Keys.Verification.Location
- .of_string
- [%e key_location ~loc bc_location])
- >>| fun (keys, checksum) ->
- assert (
- String.equal (Md5_lib.to_hex checksum)
- [%e estring (Md5_lib.to_hex bc_checksum)] ) ;
- keys]
- end
-end
+ let blockchain_verification_key_id = [%e blockchain_verification_key_id]
-module Transaction_snark_keys = struct
- module Proving = struct
- let key_location ~loc t_location =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- estring (Transaction_snark.Keys.Proving.Location.to_string t_location)
+ let transaction_verification () = [%e transaction_snark]
- let load_expr ~loc t_location t_checksum =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- [%expr
- let open Async.Deferred in
- Transaction_snark.Keys.Proving.load
- (Transaction_snark.Keys.Proving.Location.of_string
- [%e key_location ~loc t_location])
- >>| fun (keys, checksum) ->
- assert (
- String.equal (Md5_lib.to_hex checksum)
- [%e estring (Md5_lib.to_hex t_checksum)] ) ;
- keys]
- end
+ let blockchain_verification () = [%e blockchain_snark]
- module Verification = struct
- let key_location ~loc t_location =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- estring
- (Transaction_snark.Keys.Verification.Location.to_string t_location)
+ type key_hashes = string list [@@deriving to_yojson]
- let load_expr ~loc t_location t_checksum =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- [%expr
- let open Async.Deferred in
- Transaction_snark.Keys.Verification.load
- (Transaction_snark.Keys.Verification.Location.of_string
- [%e key_location ~loc t_location])
- >>| fun (keys, checksum) ->
- assert (
- String.equal (Md5_lib.to_hex checksum)
- [%e estring (Md5_lib.to_hex t_checksum)] ) ;
- keys]
- end
-end
+ let key_hashes : key_hashes = [%e hashes]]
let ok_or_fail_expr ~loc =
[%expr function Ok x -> x | Error _ -> failwith "Gen_keys error"]
-module Dummy = struct
- module Transaction_keys = struct
- module Proving = struct
- let expr ~loc = [%expr Async.return Transaction_snark.Keys.Proving.dummy]
- end
-
- module Verification = struct
- let expr ~loc =
- [%expr Async.return Transaction_snark.Keys.Verification.dummy]
- end
- end
-
- module Blockchain_keys = struct
- module Proving = struct
- let expr ~loc =
- [%expr
- Async.return
- Blockchain_snark.Blockchain_transition.Keys.Proving.dummy]
- end
-
- module Verification = struct
- let expr ~loc =
- [%expr
- Async.return
- Blockchain_snark.Blockchain_transition.Keys.Verification.dummy]
- end
- end
-end
-
open Async
let loc = Ppxlib.Location.none
@@ -157,50 +68,7 @@ let loc = Ppxlib.Location.none
[%%if
proof_level = "full"]
-let location_expr key_location =
- let module E = Ppxlib.Ast_builder.Make (struct
- let loc = loc
- end) in
- let open E in
- [%expr
- let open Async.Deferred in
- Transaction_snark.Keys.Verification.load
- (Transaction_snark.Keys.Verification.Location.of_string
- [%e
- estring
- (Transaction_snark.Keys.Verification.Location.to_string
- key_location)])]
-
-let gen_keys () =
- let open Async_kernel in
- let%bind {Cached.With_track_generated.data= acc; dirty} =
- let open Cached.Deferred_with_track_generated.Let_syntax in
- let%bind tx_keys_location, tx_keys, tx_keys_checksum =
- Transaction_snark.Keys.cached ()
- in
- let module M =
- (* TODO make toplevel library to encapsulate consensus params *)
- Blockchain_snark.Blockchain_transition.Make (Transaction_snark.Verification
- .Make
- (struct
- let keys = tx_keys
- end)) in
- let%map bc_keys_location, _bc_keys, bc_keys_checksum = M.Keys.cached () in
- ( Blockchain_snark_keys.Proving.load_expr ~loc bc_keys_location.proving
- bc_keys_checksum.proving
- , Blockchain_snark_keys.Proving.key_location ~loc bc_keys_location.proving
- , Blockchain_snark_keys.Verification.load_expr ~loc
- bc_keys_location.verification bc_keys_checksum.verification
- , Blockchain_snark_keys.Verification.key_location ~loc
- bc_keys_location.verification
- , Transaction_snark_keys.Proving.load_expr ~loc tx_keys_location.proving
- tx_keys_checksum.proving
- , Transaction_snark_keys.Proving.key_location ~loc tx_keys_location.proving
- , Transaction_snark_keys.Verification.load_expr ~loc
- tx_keys_location.verification tx_keys_checksum.verification
- , Transaction_snark_keys.Verification.key_location ~loc
- tx_keys_location.verification )
- in
+let handle_dirty dirty =
if Array.mem ~equal:String.equal Sys.argv "--generate-keys-only" then
Stdlib.exit 0 ;
match dirty with
@@ -243,8 +111,9 @@ let gen_keys () =
pull-request is from an external contributor.@ Using the local \
keys@."
| `Cache_hit ->
- (* Excluded above. *) assert false ) ;
- return acc
+ (* Excluded above. *)
+ assert false ) ;
+ Deferred.unit
| Some _, Some profile
when String.is_substring ~substring:"testnet" profile ->
(* We are intentionally aborting the build here with a special error code
@@ -253,91 +122,60 @@ let gen_keys () =
* Exit code is 0xc1 for "CI" *)
exit 0xc1
| Some _, Some _ | _, None | None, _ ->
- return acc )
+ Deferred.unit )
| `Cache_hit ->
- return acc
+ Deferred.unit
+
+let str ~loc =
+ let module T = Transaction_snark.Make () in
+ let module B = Blockchain_snark.Blockchain_snark_state.Make (T) in
+ let%map () =
+ handle_dirty
+ Pickles.(
+ List.map
+ [T.cache_handle; B.cache_handle]
+ ~f:Cache_handle.generate_or_load
+ |> List.reduce_exn ~f:Dirty.( + ))
+ in
+ let module E = Ppxlib.Ast_builder.Make (struct
+ let loc = loc
+ end) in
+ let open E in
+ str ~loc
+ ~blockchain_verification_key_id:
+ [%expr
+ let t =
+ lazy
+ (Sexp.of_string_conv_exn
+ [%e
+ estring
+ ( Pickles.Verification_key.Id.sexp_of_t
+ (Lazy.force B.Proof.id)
+ |> Sexp.to_string )]
+ Pickles.Verification_key.Id.t_of_sexp)
+ in
+ fun () -> Lazy.force t]
+ ~transaction_snark:(from_disk_expr ~loc (Lazy.force T.id))
+ ~blockchain_snark:(from_disk_expr ~loc (Lazy.force B.Proof.id))
[%%else]
-let gen_keys () =
- let dummy_loc = [%expr "dummy-location"] in
+let str ~loc =
+ let e = [%expr Async.Deferred.return Pickles.Verification_key.dummy] in
return
- ( Dummy.Blockchain_keys.Proving.expr ~loc
- , dummy_loc
- , Dummy.Blockchain_keys.Verification.expr ~loc
- , dummy_loc
- , Dummy.Transaction_keys.Proving.expr ~loc
- , dummy_loc
- , Dummy.Transaction_keys.Verification.expr ~loc
- , dummy_loc )
+ (str ~loc
+ ~blockchain_verification_key_id:
+ [%expr Pickles.Verification_key.Id.dummy] ~transaction_snark:e
+ ~blockchain_snark:e)
[%%endif]
let main () =
- (* let%bind blockchain_expr, transaction_expr = *)
- let%bind ( bc_proving
- , bc_proving_loc
- , bc_verification
- , bc_verification_loc
- , tx_proving
- , tx_proving_loc
- , tx_verification
- , tx_verification_loc ) =
- gen_keys ()
- in
let fmt =
Format.formatter_of_out_channel (Out_channel.create "snark_keys.ml")
in
- Pprintast.top_phrase fmt
- (Ptop_def
- [%str
- open Core_kernel
-
- let blockchain_proving () = [%e bc_proving]
-
- let blockchain_verification () = [%e bc_verification]
-
- let transaction_proving () = [%e tx_proving]
-
- let transaction_verification () = [%e tx_verification]
-
- type key_hashes = string list [@@deriving to_yojson]
-
- let key_locations =
- [ ("blockchain_proving", [%e bc_proving_loc])
- ; ("blockchain_verification", [%e bc_verification_loc])
- ; ("transaction_proving", [%e tx_proving_loc])
- ; ("transaction_verification", [%e tx_verification_loc]) ]
-
- let rec location_sexp_to_hashes = function
- | Sexp.Atom s
- when List.mem
- ["base"; "merge"; "step"; "wrap"]
- s ~equal:String.equal ->
- []
- | Sexp.Atom s -> (
- let fn = Filename.basename s in
- match String.split fn ~on:'_' with
- | hash :: _ ->
- [hash]
- | _ ->
- failwith "location_sexp_to_hashes: unexpected filename" )
- | Sexp.List sexps ->
- List.(concat (map sexps ~f:location_sexp_to_hashes))
-
- let location_to_hashes (loc : string) =
- match Sexp.parse loc with
- | Done (sexp, _) ->
- location_sexp_to_hashes sexp
- | _ ->
- []
-
- let key_hashes =
- let locations =
- List.map key_locations ~f:(fun (_name, loc) -> loc)
- in
- let hashes = List.(concat (map locations ~f:location_to_hashes)) in
- List.dedup_and_sort hashes ~compare:String.compare]) ;
+ let%bind str = str ~loc:Location.none in
+ Pprintast.top_phrase fmt (Ptop_def str) ;
exit 0
let () =
diff --git a/src/lib/snark_params/dune b/src/lib/snark_params/dune
index 6f05b0d4348..047836d8129 100644
--- a/src/lib/snark_params/dune
+++ b/src/lib/snark_params/dune
@@ -4,8 +4,8 @@
(library_flags -linkall)
(inline_tests)
(libraries sponge group_map fold_lib o1trace digestif tuple_lib bitstring_lib
- snarky_group_map core_kernel snarky snarky_verifier snarky_field_extensions snarky_curves
- snarky_pairing snark_bits dummy_values crypto_params)
+ snarky_group_map core_kernel snarky snarky_curves
+ snark_bits pickles crypto_params)
(preprocess
(pps ppx_version ppx_assert ppx_base ppx_bench ppx_let ppx_deriving.std ppx_deriving_yojson ppx_sexp_conv ppx_bin_prot
ppx_custom_printf ppx_inline_test ppx_optcomp ppx_snarky bisect_ppx --
diff --git a/src/lib/snark_params/snark_params.ml b/src/lib/snark_params/snark_params.ml
index 571ee2c1e17..d6df7c5abee 100644
--- a/src/lib/snark_params/snark_params.ml
+++ b/src/lib/snark_params/snark_params.ml
@@ -1,10 +1,6 @@
open Core_kernel
open Bitstring_lib
open Snark_bits
-module Tick_backend = Crypto_params.Tick_backend
-module Tock_backend = Crypto_params.Tock_backend
-module Snarkette_tick = Crypto_params.Snarkette_tick
-module Snarkette_tock = Crypto_params.Snarkette_tock
module Make_snarkable (Impl : Snarky.Snark_intf.S) = struct
open Impl
@@ -41,22 +37,18 @@ module Make_snarkable (Impl : Snarky.Snark_intf.S) = struct
end
module Tock0 = struct
- include Crypto_params.Tock0
- module Snarkable = Make_snarkable (Crypto_params.Tock0)
+ include Crypto_params.Tock
+ module Snarkable = Make_snarkable (Crypto_params.Tock)
end
module Tick0 = struct
- include Crypto_params.Tick0
- module Snarkable = Make_snarkable (Crypto_params.Tick0)
+ include Crypto_params.Tick
+ module Snarkable = Make_snarkable (Crypto_params.Tick)
end
let%test_unit "group-map test" =
- let params =
- Group_map.Params.create
- (module Tick0.Field)
- Tick_backend.Inner_curve.Coefficients.{a; b}
- in
- let module M = Snarky.Snark.Run.Make (Tick_backend) (Unit) in
+ let params = Crypto_params.Tock.group_map_params () in
+ let module M = Crypto_params.Tick.Run in
Quickcheck.test ~trials:3 Tick0.Field.gen ~f:(fun t ->
let (), checked_output =
M.run_and_check
@@ -70,10 +62,16 @@ let%test_unit "group-map test" =
()
|> Or_error.ok_exn
in
- [%test_eq: Tick0.Field.t * Tick0.Field.t] checked_output
- (Group_map.to_group (module Tick0.Field) ~params t) )
-
-module Wrap_input = Crypto_params.Wrap_input
+ let ((x, y) as actual) =
+ Group_map.to_group (module Tick0.Field) ~params t
+ in
+ [%test_eq: Tick0.Field.t]
+ Tick0.Field.(
+ (x * x * x)
+ + (Tick0.Inner_curve.Params.a * x)
+ + Tick0.Inner_curve.Params.b)
+ Tick0.Field.(y * y) ;
+ [%test_eq: Tick0.Field.t * Tick0.Field.t] checked_output actual )
module Make_inner_curve_scalar
(Impl : Snark_intf.S)
@@ -136,12 +134,13 @@ struct
end
module Tock = struct
- include (Tock0 : module type of Tock0 with module Proof := Tock0.Proof)
+ include (
+ Tock0 : module type of Tock0 with module Inner_curve := Tock0.Inner_curve )
module Fq = Snarky_field_extensions.Field_extensions.F (Tock0)
module Inner_curve = struct
- include Tock_backend.Inner_curve
+ include Tock0.Inner_curve
include Sexpable.Of_sexpable (struct
type t = Field.t * Field.t [@@deriving sexp]
@@ -156,189 +155,26 @@ module Tock = struct
include Make_inner_curve_aux (Tock0) (Tick0)
- let ctypes_typ = typ
-
- let scale = scale_field
-
module Checked = struct
include Snarky_curves.Make_weierstrass_checked (Fq) (Scalar)
(struct
- include Tock_backend.Inner_curve
-
- let scale = scale_field
+ include Tock0.Inner_curve
end)
- (Coefficients)
+ (Params)
let add_known_unsafe t x = add_unsafe t (constant x)
end
let typ = Checked.typ
end
-
- module Pairing = struct
- module T = struct
- let conv_field =
- Fn.compose Tock0.Field.of_string Snarkette_tock.Fq.to_string
-
- module Impl = Tock0
- open Snarky_field_extensions.Field_extensions
- module Fq = Fq
-
- let non_residue = conv_field Snarkette_tock.non_residue
-
- module Fqe = struct
- module Params = struct
- let non_residue = non_residue
-
- let mul_by_non_residue x = Fq.scale x non_residue
- end
-
- include E2 (Fq) (Params)
-
- let conv = A.map ~f:conv_field
-
- let real_part (x, _) = x
- end
-
- module G1 = struct
- module Unchecked = Inner_curve
-
- let one : Unchecked.t = Inner_curve.one
-
- include Inner_curve.Checked
- end
-
- module G2 = struct
- module Coefficients = struct
- let a = Fq.Unchecked.(Inner_curve.Coefficients.a * non_residue, zero)
-
- let b = Fq.Unchecked.(zero, Inner_curve.Coefficients.b * non_residue)
- end
-
- module Unchecked = struct
- include Snarkette.Elliptic_curve.Make (struct
- include Inner_curve.Scalar
-
- let num_bits _ = Field.size_in_bits
- end)
- (Fqe.Unchecked)
- (Coefficients)
-
- let one =
- let x, y = Snarkette_tock.G2.(to_affine_exn one) in
- {x= Fqe.conv x; y= Fqe.conv y; z= Fqe.Unchecked.one}
- end
-
- include Snarky_curves.Make_weierstrass_checked
- (Fqe)
- (Inner_curve.Scalar)
- (struct
- include Unchecked
-
- let double x = x + x
-
- let random () = scale one (Tick0.Field.random ())
- end)
- (Unchecked.Coefficients)
- end
-
- module Fqk = struct
- module Params = struct
- let non_residue = Fq.Unchecked.(zero, one)
-
- let mul_by_non_residue = Fqe.mul_by_primitive_element
-
- let frobenius_coeffs_c1 =
- Array.map ~f:conv_field
- Snarkette_tock.Fq4.Params.frobenius_coeffs_c1
- end
-
- include F4 (Fqe) (Params)
- end
-
- module G1_precomputation =
- Snarky_pairing.G1_precomputation.Make (Tock0) (Fqe)
- (struct
- let twist = Fq.Unchecked.(zero, one)
- end)
-
- module N = Snarkette.Mnt6_80.N
-
- module Params = struct
- include Snarkette_tock.Pairing_info
-
- let loop_count_is_neg = Snarkette_tock.Pairing_info.is_loop_count_neg
- end
-
- module G2_precomputation = struct
- include Snarky_pairing.G2_precomputation.Make (Fqe) (N)
- (struct
- include Params
-
- let coeff_a = G2.Coefficients.a
- end)
-
- let create_constant =
- Fn.compose create_constant G2.Unchecked.to_affine_exn
- end
- end
-
- include T
- include Snarky_pairing.Miller_loop.Make (T)
- module FE = Snarky_pairing.Final_exponentiation.Make (T)
-
- let final_exponentiation = FE.final_exponentiation4
- end
-
- module Proof = struct
- include Tock0.Proof
-
- let dummy = Dummy_values.Tock.Bowe_gabizon18.proof
- end
-
- module Groth_verifier = struct
- include Snarky_verifier.Groth.Make (Pairing)
-
- let conv_fqe v =
- let v = Tick_backend.Full.Fqe.to_vector v in
- Field.Vector.(get v 0, get v 1)
-
- let conv_g2 p =
- let x, y = Tock_backend.Inner_twisted_curve.to_affine_exn p in
- Pairing.G2.Unchecked.of_affine (conv_fqe x, conv_fqe y)
-
- let conv_fqk p =
- let v = Tick_backend.Full.Fqk.to_elts p in
- let f i =
- let x j = Tock0.Field.Vector.get v ((2 * i) + j) in
- (x 0, x 1)
- in
- (f 0, f 1)
-
- let proof_of_backend_proof p =
- let open Tick_backend.Full.Groth16_proof_accessors in
- {Proof.a= a p; b= conv_g2 (b p); c= c p}
-
- let vk_of_backend_vk vk =
- let open Tick_backend.Full.Groth16.Verification_key in
- let open Inner_curve.Vector in
- let q = query vk in
- { Verification_key.query_base= get q 0
- ; query= List.init (length q - 1) ~f:(fun i -> get q (i + 1))
- ; delta= conv_g2 (delta vk)
- ; alpha_beta= conv_fqk (alpha_beta vk) }
-
- let constant_vk vk =
- let open Verification_key in
- { query_base= Inner_curve.Checked.constant vk.query_base
- ; query= List.map ~f:Inner_curve.Checked.constant vk.query
- ; delta= Pairing.G2.constant vk.delta
- ; alpha_beta= Pairing.Fqk.constant vk.alpha_beta }
- end
end
module Tick = struct
- include (Tick0 : module type of Tick0 with module Field := Tick0.Field)
+ include (
+ Tick0 :
+ module type of Tick0
+ with module Field := Tick0.Field
+ and module Inner_curve := Tick0.Inner_curve )
module Field = struct
include Tick0.Field
@@ -351,7 +187,7 @@ module Tick = struct
module Fq = Snarky_field_extensions.Field_extensions.F (Tick0)
module Inner_curve = struct
- include Crypto_params.Tick_backend.Inner_curve
+ include Crypto_params.Tick.Inner_curve
include Sexpable.Of_sexpable (struct
type t = Field.t * Field.t [@@deriving sexp]
@@ -366,18 +202,10 @@ module Tick = struct
include Make_inner_curve_aux (Tick0) (Tock0)
- let ctypes_typ = typ
-
- let scale = scale_field
-
module Checked = struct
include Snarky_curves.Make_weierstrass_checked (Fq) (Scalar)
- (struct
- include Crypto_params.Tick_backend.Inner_curve
-
- let scale = scale_field
- end)
- (Coefficients)
+ (Crypto_params.Tick.Inner_curve)
+ (Params)
let add_known_unsafe t x = add_unsafe t (constant x)
end
@@ -387,242 +215,11 @@ module Tick = struct
module Util = Snark_util.Make (Tick0)
- module Pairing = struct
- module T = struct
- module Impl = Tick0
- open Snarky_field_extensions.Field_extensions
- module Fq = Fq
-
- let conv_field =
- Fn.compose Tick0.Field.of_string Snarkette_tick.Fq.to_string
-
- let non_residue = conv_field Snarkette_tick.non_residue
-
- module Fqe = struct
- module Params = struct
- let non_residue = non_residue
-
- let mul_by_non_residue x = Fq.scale x non_residue
-
- let frobenius_coeffs_c1 =
- Array.map ~f:conv_field
- Snarkette_tick.Fq3.Params.frobenius_coeffs_c1
-
- let frobenius_coeffs_c2 =
- Array.map ~f:conv_field
- Snarkette_tick.Fq3.Params.frobenius_coeffs_c2
- end
-
- include F3 (Fq) (Params)
-
- let conv = A.map ~f:conv_field
-
- let real_part (x, _, _) = x
- end
-
- module G1 = struct
- module Unchecked = Inner_curve
-
- let one : Unchecked.t = Inner_curve.one
-
- include Inner_curve.Checked
- end
-
- module G2 = struct
- module Unchecked = struct
- include Snarkette.Elliptic_curve.Make (struct
- include Inner_curve.Scalar
-
- let num_bits _ = Field.size_in_bits
- end)
- (Fqe.Unchecked)
- (struct
- let a =
- Tick0.Field.(zero, zero, Inner_curve.Coefficients.a)
-
- let b =
- Fq.Unchecked.
- ( Inner_curve.Coefficients.b * Fqe.Params.non_residue
- , zero
- , zero )
- end)
-
- let one =
- let x, y = Snarkette_tick.G2.(to_affine_exn one) in
- {z= Fqe.Unchecked.one; x= Fqe.conv x; y= Fqe.conv y}
- end
-
- include Snarky_curves.Make_weierstrass_checked
- (Fqe)
- (Inner_curve.Scalar)
- (struct
- include Unchecked
-
- let double x = x + x
-
- let random () = scale one (Tock.Field.random ())
- end)
- (Unchecked.Coefficients)
- end
-
- module Fqk = struct
- module Params = struct
- let frobenius_coeffs_c1 =
- Array.map ~f:conv_field
- Snarkette_tick.Fq6.Params.frobenius_coeffs_c1
- end
-
- module Fq2 =
- E2
- (Fq)
- (struct
- let non_residue = non_residue
-
- let mul_by_non_residue x = Fq.scale x non_residue
- end)
-
- include F6 (Fq) (Fq2) (Fqe) (Params)
- end
-
- module G1_precomputation =
- Snarky_pairing.G1_precomputation.Make (Tick0) (Fqe)
- (struct
- let twist = Fq.Unchecked.(zero, one, zero)
- end)
-
- module N = Snarkette_tick.N
-
- module Params = struct
- include Snarkette_tick.Pairing_info
-
- let loop_count_is_neg = Snarkette_tick.Pairing_info.is_loop_count_neg
- end
-
- module G2_precomputation = struct
- include Snarky_pairing.G2_precomputation.Make (Fqe) (N)
- (struct
- include Params
-
- let coeff_a =
- Tick0.Field.(zero, zero, G1.Unchecked.Coefficients.a)
- end)
-
- let create_constant =
- Fn.compose create_constant G2.Unchecked.to_affine_exn
- end
- end
-
- include T
- include Snarky_pairing.Miller_loop.Make (T)
- module FE = Snarky_pairing.Final_exponentiation.Make (T)
-
- let final_exponentiation = FE.final_exponentiation6
- end
-
- module Run = Crypto_params.Runners.Tick
-
let m : Run.field Snarky.Snark.m = (module Run)
let make_checked c = with_state (As_prover.return ()) (Run.make_checked c)
-
- module Verifier = struct
- include Snarky_verifier.Bowe_gabizon.Make (struct
- include Pairing
-
- module H = Bowe_gabizon_hash.Make (struct
- open Run
- module Field = Field
- module Fqe = Pairing.Fqe
-
- module G1 = struct
- type t = Field.t * Field.t
-
- let to_affine_exn = Fn.id
-
- let of_affine = Fn.id
- end
-
- module G2 = struct
- type t = Fqe.t * Fqe.t
-
- let to_affine_exn = Fn.id
- end
-
- let hash xs =
- Random_oracle.Checked.hash ~init:(Lazy.force Tock_backend.bg_salt) xs
-
- let group_map x =
- Snarky_group_map.Checked.to_group
- (module Run)
- ~params:(Tock_backend.bg_params ())
- x
- end)
-
- let hash ?message ~a ~b ~c ~delta_prime =
- make_checked (fun () -> H.hash ?message ~a ~b ~c ~delta_prime)
- end)
-
- let conv_fqe v =
- let v = Tock_backend.Full.Fqe.to_vector v in
- Field.Vector.(get v 0, get v 1, get v 2)
-
- let conv_g2 p =
- let x, y = Tick_backend.Inner_twisted_curve.to_affine_exn p in
- Pairing.G2.Unchecked.of_affine (conv_fqe x, conv_fqe y)
-
- let conv_fqk (p : Tock_backend.Full.Fqk.t) =
- let v = Tock_backend.Full.Fqk.to_elts p in
- let f i =
- let x j = Tick0.Field.Vector.get v ((3 * i) + j) in
- (x 0, x 1, x 2)
- in
- (f 0, f 1)
-
- let proof_of_backend_proof
- ({a; b; c; delta_prime; z} : Tock_backend.Proof.t) =
- {Proof.a; b= conv_g2 b; c; delta_prime= conv_g2 delta_prime; z}
-
- let vk_of_backend_vk (vk : Tock_backend.Verification_key.t) =
- let open Tock_backend.Verification_key in
- let open Inner_curve.Vector in
- let q = query vk in
- { Verification_key.query_base= get q 0
- ; query= List.init (length q - 1) ~f:(fun i -> get q (i + 1))
- ; delta= conv_g2 (delta vk)
- ; alpha_beta= conv_fqk (alpha_beta vk) }
-
- let constant_vk vk =
- let open Verification_key in
- { query_base= Inner_curve.Checked.constant vk.query_base
- ; query= List.map ~f:Inner_curve.Checked.constant vk.query
- ; delta= Pairing.G2.constant vk.delta
- ; alpha_beta= Pairing.Fqk.constant vk.alpha_beta }
- end
end
-let tock_vk_to_bool_list vk =
- let vk = Tick.Verifier.vk_of_backend_vk vk in
- let g1 = Tick.Inner_curve.to_affine_exn in
- let g2 = Tick.Pairing.G2.Unchecked.to_affine_exn in
- let vk =
- { vk with
- query_base= g1 vk.query_base
- ; query= List.map vk.query ~f:g1
- ; delta= g2 vk.delta }
- in
- Tick.Verifier.Verification_key.(summary_unchecked (summary_input vk))
-
-let embed (x : Tick.Field.t) : Tock.Field.t =
- let n = Tick.Bigint.of_field x in
- let rec go pt acc i =
- if i = Tick.Field.size_in_bits then acc
- else
- go (Tock.Field.add pt pt)
- (if Tick.Bigint.test_bit n i then Tock.Field.add pt acc else acc)
- (i + 1)
- in
- go Tock.Field.one Tock.Field.zero 0
-
(* Let n = Tick.Field.size_in_bits.
Let k = n - 3.
The reason k = n - 3 is as follows. Inside [meets_target], we compare
@@ -639,16 +236,12 @@ module type Snark_intf = Snark_intf.S
module Group_map = struct
let to_group x =
- Group_map.to_group
- (module Tick.Field)
- ~params:(Tock_backend.bg_params ())
- x
+ Group_map.to_group (module Tick.Field) ~params:(Tock.group_map_params ()) x
module Checked = struct
let to_group x =
Snarky_group_map.Checked.to_group
(module Tick.Run)
- ~params:(Tock_backend.bg_params ())
- x
+ ~params:(Tock.group_map_params ()) x
end
end
diff --git a/src/lib/snark_worker/debug.ml b/src/lib/snark_worker/debug.ml
index b9099938ec9..d7626189837 100644
--- a/src/lib/snark_worker/debug.ml
+++ b/src/lib/snark_worker/debug.ml
@@ -7,7 +7,7 @@ module Inputs = struct
module Worker_state = struct
include Unit
- let create ~proof_level ~constraint_constants:_ () =
+ let create ~proof_level () =
match proof_level with
| Genesis_constants.Proof_level.Full ->
failwith "Unable to handle proof-level=Full"
diff --git a/src/lib/snark_worker/dune b/src/lib/snark_worker/dune
index ef07a53b2a7..38fba6a7731 100644
--- a/src/lib/snark_worker/dune
+++ b/src/lib/snark_worker/dune
@@ -4,7 +4,7 @@
(library_flags -linkall)
(inline_tests)
(libraries core async cli_lib currency snark_work_lib coda_base
- blockchain_snark transaction_snark keys_lib perf_histograms
+ blockchain_snark transaction_snark perf_histograms
core_kernel.hash_heap sparse_ledger_lib ledger_proof transaction_witness)
(preprocess
(pps
diff --git a/src/lib/snark_worker/functor.ml b/src/lib/snark_worker/functor.ml
index 7a8853c0db9..84399752397 100644
--- a/src/lib/snark_worker/functor.ml
+++ b/src/lib/snark_worker/functor.ml
@@ -122,11 +122,8 @@ module Make (Inputs : Intf.Inputs_intf) :
let main
(module Rpcs_versioned : Intf.Rpcs_versioned_S
with type Work.ledger_proof = Inputs.Ledger_proof.t) ~logger
- ~proof_level ~constraint_constants daemon_address shutdown_on_disconnect
- =
- let%bind state =
- Worker_state.create ~proof_level ~constraint_constants ()
- in
+ ~proof_level daemon_address shutdown_on_disconnect =
+ let%bind state = Worker_state.create ~proof_level () in
let wait ?(sec = 0.5) () = after (Time.Span.of_sec sec) in
(* retry interval with jitter *)
let retry_pause sec = Random.float_range (sec -. 2.0) (sec +. 2.0) in
@@ -224,9 +221,7 @@ module Make (Inputs : Intf.Inputs_intf) :
in
main
(module Rpcs_versioned)
- ~logger ~proof_level
- ~constraint_constants:Genesis_constants.Constraint_constants.compiled
- daemon_port
+ ~logger ~proof_level daemon_port
(Option.value ~default:true shutdown_on_disconnect))
let arguments ~proof_level ~daemon_address ~shutdown_on_disconnect =
diff --git a/src/lib/snark_worker/intf.ml b/src/lib/snark_worker/intf.ml
index f82d16b53ad..7cd6abe381a 100644
--- a/src/lib/snark_worker/intf.ml
+++ b/src/lib/snark_worker/intf.ml
@@ -13,10 +13,7 @@ module type Inputs_intf = sig
type t
val create :
- proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> unit
- -> t Deferred.t
+ proof_level:Genesis_constants.Proof_level.t -> unit -> t Deferred.t
val worker_wait_time : float
end
diff --git a/src/lib/snark_worker/prod.ml b/src/lib/snark_worker/prod.ml
index 51e7fdf1151..9b818a3a810 100644
--- a/src/lib/snark_worker/prod.ml
+++ b/src/lib/snark_worker/prod.ml
@@ -25,29 +25,19 @@ module Inputs = struct
module type S = Transaction_snark.S
type t =
- { m: (module S)
+ { m: (module S) option
; cache: Cache.t
- ; proof_level: Genesis_constants.Proof_level.t
- ; constraint_constants: Genesis_constants.Constraint_constants.t }
+ ; proof_level: Genesis_constants.Proof_level.t }
- let create ~proof_level ~constraint_constants () =
- let%map proving, verification =
+ let create ~proof_level () =
+ let m =
match proof_level with
| Genesis_constants.Proof_level.Full ->
- let%map proving = Snark_keys.transaction_proving ()
- and verification = Snark_keys.transaction_verification () in
- (proving, verification)
+ Some (module Transaction_snark.Make () : S)
| Check | None ->
- return Transaction_snark.Keys.(Proving.dummy, Verification.dummy)
+ None
in
- { m=
- ( module Transaction_snark.Make (struct
- let keys = {Transaction_snark.Keys.proving; verification}
- end)
- : S )
- ; cache= Cache.create ()
- ; proof_level
- ; constraint_constants }
+ Deferred.return {m; cache= Cache.create (); proof_level}
let worker_wait_time = 5.
end
@@ -59,15 +49,13 @@ module Inputs = struct
Snark_work_lib.Work.Single.Spec.t
[@@deriving sexp]
- (* TODO: Use public_key once SoK is implemented *)
- let perform_single
- ({m= (module M); cache; proof_level; constraint_constants} :
- Worker_state.t) ~message =
+ let perform_single ({m; cache; proof_level} : Worker_state.t) ~message =
let open Snark_work_lib in
let sok_digest = Coda_base.Sok_message.digest message in
fun (single : single_spec) ->
match proof_level with
| Genesis_constants.Proof_level.Full -> (
+ let (module M) = Option.value_exn m in
let statement = Work.Single.Spec.statement single in
let process k =
let start = Time.now () in
@@ -98,7 +86,7 @@ module Inputs = struct
(input, t, (w : Transaction_witness.t)) ->
process (fun () ->
Or_error.try_with (fun () ->
- M.of_transaction ~constraint_constants ~sok_digest
+ M.of_transaction ~sok_digest
~source:input.Transaction_snark.Statement.source
~target:input.target
{ Transaction_protocol_state.Poly.transaction= t
@@ -118,21 +106,21 @@ module Inputs = struct
process (fun () -> M.merge ~sok_digest proof1 proof2) ) )
| Check | None ->
(* Use a dummy proof. *)
- let stmt, proof_type =
+ let stmt =
match single with
| Work.Single.Spec.Transition (stmt, _, _) ->
- (stmt, `Base)
+ stmt
| Merge (stmt, _, _) ->
- (stmt, `Merge)
+ stmt
in
Or_error.return
@@ ( Transaction_snark.create ~source:stmt.source ~target:stmt.target
- ~proof_type ~supply_increase:stmt.supply_increase
+ ~supply_increase:stmt.supply_increase
~pending_coinbase_stack_state:
stmt.pending_coinbase_stack_state
~next_available_token_before:stmt.next_available_token_before
~next_available_token_after:stmt.next_available_token_after
~fee_excess:stmt.fee_excess ~sok_digest
- ~proof:Precomputed_values.unit_test_base_proof
+ ~proof:Proof.transaction_dummy
, Time.Span.zero )
end
diff --git a/src/lib/snarky b/src/lib/snarky
index 9a6d953114c..1209b85f513 160000
--- a/src/lib/snarky
+++ b/src/lib/snarky
@@ -1 +1 @@
-Subproject commit 9a6d953114c24595e2be41d5945033f52aae70c3
+Subproject commit 1209b85f513f691b9e8bc92d47316adda71cf513
diff --git a/src/lib/sparse_ledger_lib/sparse_ledger.ml b/src/lib/sparse_ledger_lib/sparse_ledger.ml
index b1357fc077a..02ae535363a 100644
--- a/src/lib/sparse_ledger_lib/sparse_ledger.ml
+++ b/src/lib/sparse_ledger_lib/sparse_ledger.ml
@@ -194,7 +194,7 @@ end = struct
let find_index_exn (t : t) aid =
List.Assoc.find_exn t.indexes ~equal:Account_id.equal aid
- let get_exn {T.tree; depth; _} idx =
+ let get_exn ({T.tree; depth; _} as t) idx =
let rec go i tree =
match (i < 0, tree) with
| true, Tree.Account acct ->
@@ -214,9 +214,9 @@ end = struct
" node"
in
failwithf
- "Sparse_ledger.get: Bad index %i. Expected a%s, but got a%s at \
- depth %i."
- idx expected_kind kind (depth - i) ()
+ !"Sparse_ledger.get: Bad index %i. Expected a%s, but got a%s at \
+ depth %i. Tree = %{sexp:t}"
+ idx expected_kind kind (depth - i) t ()
in
go (depth - 1) tree
diff --git a/src/lib/sponge_params/dune b/src/lib/sponge_params/dune
deleted file mode 100644
index 600a317a367..00000000000
--- a/src/lib/sponge_params/dune
+++ /dev/null
@@ -1,7 +0,0 @@
-(library
- (name sponge_params)
- (public_name sponge_params)
- (preprocessor_deps ../../config.mlh)
- (preprocess (pps ppx_version ppx_inline_test ppx_optcomp ppx_deriving.eq))
- (inline_tests)
- (libraries curve_choice))
diff --git a/src/lib/sponge_params/sponge_params.ml b/src/lib/sponge_params/sponge_params.ml
deleted file mode 100644
index 43df6d8133f..00000000000
--- a/src/lib/sponge_params/sponge_params.ml
+++ /dev/null
@@ -1,785 +0,0 @@
-[%%import
-"/src/config.mlh"]
-
-[%%ifdef
-consensus_mechanism]
-
-open Curve_choice.Tick0
-
-[%%else]
-
-open Snark_params_nonconsensus
-
-[%%endif]
-
-[%%if
-curve_size = 298]
-
-let inv_alpha =
- "432656623790237568866681136048225865041022616866203195957516123399240588461280445963602851"
-
-let mds =
- [| [| Field.of_string
- "181324588122329172048070802614406344967661900669343676997796156524662650229663511778086513"
- ; Field.of_string
- "263839662794798230944406038483748877420003467481254943330033497627810628977768312588897021"
- ; Field.of_string
- "47787034086054868794736504598805355240746067397315425760363325479582067585554122384528750"
- |]
- ; [| Field.of_string
- "391385728862913577230643656405794210023251219169789744235774373121108965138889307827345939"
- ; Field.of_string
- "368056256556859616791833365938123080683505948787537081082804782658777406001515743364112843"
- ; Field.of_string
- "249229689710372851346889167834108105226843437678081232334602983010385341756350839066179566"
- |]
- ; [| Field.of_string
- "391761630355250451965959916078641131603140945583687294349414005799846378806556028223600720"
- ; Field.of_string
- "309426222273897994989187985039896323914733463925481353595665936771905869408957537639744345"
- ; Field.of_string
- "429282034891350663871556405902853196474768911490694799502975387461169986038745882893853806"
- |] |]
-
-let round_constants =
- [| [| Field.of_string
- "78119860594733808983474265082430117124674905785489385612351809573030163625517"
- ; Field.of_string
- "41917899842730241418346215913324270532073353586134123463219061327941260175271"
- ; Field.of_string
- "74594641694171623328644944059182600919855574964222988275913344198970402906473"
- |]
- ; [| Field.of_string
- "96215759378377024990520153908983544755208851791126218239402755616994541522004"
- ; Field.of_string
- "64070601581278917442704840630680311036021557676765751754522901046069205253111"
- ; Field.of_string
- "112123228532462696722378911494343451272980413618911326680094528285518792872677"
- |]
- ; [| Field.of_string
- "84572244072021308337360477634782636535511175281144388234379224309078196768262"
- ; Field.of_string
- "45201095631123410354816854701250642083197167601967427301389500806815426216645"
- ; Field.of_string
- "23419302413627434057960523568681421397183896397903197013759822219271473949448"
- |]
- ; [| Field.of_string
- "63220724218126871510891512179599337793645245415246618202146262033908228783613"
- ; Field.of_string
- "67900966560828272306360950341997532094196196655192755442359232962244590070115"
- ; Field.of_string
- "56382132371728071364028077587343004835658613510701494793375685201885283260755"
- |]
- ; [| Field.of_string
- "80317852656339951095312898663286716255545986714650554749917139819628941702909"
- ; Field.of_string
- "110977183257428423540294096816813859894739618561444416996538397449475628658639"
- ; Field.of_string
- "25195781166503180938390820610484311038421647727795615447439501669639084690800"
- |]
- ; [| Field.of_string
- "108664438541952156416331885221418851366456449596370568350972106298760717710264"
- ; Field.of_string
- "17649294376560630922417546944777537620537408190408066211453084495108565929366"
- ; Field.of_string
- "95236435002924956844837407534938226368352771792739587594037613075251645052212"
- |]
- ; [| Field.of_string
- "43150472723422600689013423057826322506171125106415122422656432973040257528684"
- ; Field.of_string
- "77355911134402286174761911573353899889837132781450260391484427670446862700214"
- ; Field.of_string
- "8690728446593494554377477996892461126663797704587025899930929227865493269824"
- |]
- ; [| Field.of_string
- "109175231986025180460846040078523879514558355792739714578031829643740609438879"
- ; Field.of_string
- "64844253590731404811389281562033735091759746904073461140427127388042062490899"
- ; Field.of_string
- "43237071281695629980341250188156848876595681601471702180515324064382368960951"
- |]
- ; [| Field.of_string
- "2704440995725305992776846806711930876273040749514871232837487081811513368296"
- ; Field.of_string
- "66806779110388532101035294912010606217442229808784290357894909707660045365269"
- ; Field.of_string
- "25541187612624070470730890200174075890643652797181103367956318438136878170352"
- |]
- ; [| Field.of_string
- "89300613074831725721350087269266903129165086877175223066581882601662278010666"
- ; Field.of_string
- "36824076981866281177052433916337787028520068526782493484076995129329938182524"
- ; Field.of_string
- "68880449342008497744225106025198236600142055580985632884415488154606462819445"
- |]
- ; [| Field.of_string
- "68556888546596545408135887526582256648006271867854316538090068824142539400698"
- ; Field.of_string
- "111379753250206255125320675615931203940253796355491142745969887430259465111569"
- ; Field.of_string
- "101469186248899356416491489235841069222521093012237305521090058066171355672289"
- |]
- ; [| Field.of_string
- "87819793263125973233157093200229218382531712066157093399606059493857616731410"
- ; Field.of_string
- "11055386921184594780372263378420826851562920740321950336882051897732501262543"
- ; Field.of_string
- "111945832089295501567161822264292548657346358707472584179854375613919325491249"
- |]
- ; [| Field.of_string
- "95630018375719472826904441325138673248990446382783206900295723762884876505178"
- ; Field.of_string
- "94833984285990985873155989049880754188702918168949640563745233736765833491756"
- ; Field.of_string
- "77578854197021606645372788474039811639438242484066959482386065023999206730771"
- |]
- ; [| Field.of_string
- "27799616729223271646690718201487403976485619375555391888533887467404804041014"
- ; Field.of_string
- "42616502170265664498961018686434252976977548128285781725227341660941880774718"
- ; Field.of_string
- "95884094505080541517768389956970969462501217028562326732054532092615835087122"
- |]
- ; [| Field.of_string
- "107531500891040898338150732759493933154418374543568088749403053559827078391994"
- ; Field.of_string
- "17316158269457914256007584527534747738658973027567786054549020564540952112346"
- ; Field.of_string
- "51624680144452294805663893795879183520785046924484587034566439599591446246116"
- |]
- ; [| Field.of_string
- "17698087730709566968258013675219881840614043344609152682517330801348583470562"
- ; Field.of_string
- "111925747861248746962567200879629070277886617811519137515553806421564944666811"
- ; Field.of_string
- "57148554624730554436721083599187229462914514696466218614205595953570212881615"
- |]
- ; [| Field.of_string
- "92002976914130835490768248031171915767210477082066266868807636677032557847243"
- ; Field.of_string
- "58807951133460826577955909810426403194149348045831674376120801431489918282349"
- ; Field.of_string
- "93581873597000319446791963913210464830992618681307774190204379970955657554666"
- |]
- ; [| Field.of_string
- "46734218328816451470118898692627799522173317355773128175090189234250221977353"
- ; Field.of_string
- "12565476532112137808460978474958060441970941349010371267577877299656634907765"
- ; Field.of_string
- "54284813390357004119220859882274190703294683700710665367594256039714984623777"
- |]
- ; [| Field.of_string
- "92046423253202913319296401122133532555630886766139313429473309376931112550800"
- ; Field.of_string
- "15095408309586969968044201398966210357547906905122453139947200130015688526573"
- ; Field.of_string
- "76483858663950700865536712701042004661599554591777656961315837882956812689085"
- |]
- ; [| Field.of_string
- "37793510665854947576525000802927849210746292216845467892500370179796223909690"
- ; Field.of_string
- "84954934523349224038508216623641462700694917568481430996824733443763638196693"
- ; Field.of_string
- "81116649005575743294029244339854405387811058321603450814032274416116019472096"
- |]
- ; [| Field.of_string
- "28313841745366368076212445154871968929195537523489133192784916081223753077949"
- ; Field.of_string
- "17307716513182567320564075539526480893558355908652993731441220999922946005081"
- ; Field.of_string
- "63148771170858502457695904149048034226689843239981287723002468627916462842625"
- |]
- ; [| Field.of_string
- "14724939606645168531546334343600232253284320276481307778787768813885931648950"
- ; Field.of_string
- "4684996260500305121238590806572541849891754312215139285622888510153705963000"
- ; Field.of_string
- "63682763879011752475568476861367553456179860221069473817315669232908763409259"
- |]
- ; [| Field.of_string
- "47776179656187399887062096850541192680190218704758942820514561435612697426715"
- ; Field.of_string
- "42017618175533328439486588850450028995049195954365035474995309904751824054581"
- ; Field.of_string
- "39169739448648613641258102792190571431737464735838931948313779997907435855102"
- |]
- ; [| Field.of_string
- "37525991163523321662699819448962967746703579202577998445997476955224037837979"
- ; Field.of_string
- "67759173441312327668891803222741396828094999063019622301649400178376863820046"
- ; Field.of_string
- "23041132473771739182071223620364590606653086905326129708428084432335332411661"
- |]
- ; [| Field.of_string
- "77778894465896892167598828497939467663479992533052348475467490972714790615441"
- ; Field.of_string
- "20821227542001445006023346122554483849065713580779858784021328359824080462519"
- ; Field.of_string
- "47217242463811495777303984778653549585537750303740616187093690846833142245039"
- |]
- ; [| Field.of_string
- "42826871300142174590405062658305130206548405024021455479047593769907201224399"
- ; Field.of_string
- "8850081254230234130482383430433176873344633494243110112848647064077741649744"
- ; Field.of_string
- "1819639941546179668398979507053724449231350395599747300736218202072168364980"
- |]
- ; [| Field.of_string
- "21219092773772827667886204262476112905428217689703647484316763603169544906986"
- ; Field.of_string
- "35036730416829620763976972888493029852952403098232484869595671405553221294746"
- ; Field.of_string
- "35487050610902505183766069070898136230610758743267437784506875078109148276407"
- |]
- ; [| Field.of_string
- "62560813042054697786535634928462520639989597995560367915904328183428481834648"
- ; Field.of_string
- "112205708104999693686115882430330200785082630634036862526175634736046083007596"
- ; Field.of_string
- "109084747126382177842005646092084591250172358815974554434100716599544229364287"
- |]
- ; [| Field.of_string
- "63740884245554590221521941789197287379354311786803164550686696984009448418872"
- ; Field.of_string
- "58779928727649398559174292364061339806256990859940639552881479945324304668069"
- ; Field.of_string
- "20614241966717622390914334053622572167995367802051836931454426877074875942253"
- |]
- ; [| Field.of_string
- "41621411615229558798583846330993607380846912281220890296433013153854774573504"
- ; Field.of_string
- "20530621481603446397085836296967350209890164029268319619481535419199429275412"
- ; Field.of_string
- "99914592017824500091708233310179001698739309503141229228952777264267035511439"
- |]
- ; [| Field.of_string
- "9497854724940806346676139162466690071592872530638144182764466319052293463165"
- ; Field.of_string
- "7549205476288061047040852944548942878112823732145584918107208536541712726277"
- ; Field.of_string
- "30898915730863004722886730649661235919513859500318540107289237568593577554645"
- |]
- ; [| Field.of_string
- "22697249754607337581727259086359907309326296469394183645633378468855554942575"
- ; Field.of_string
- "72771100592475003378969523202338527077495914171905204927442739996373603143216"
- ; Field.of_string
- "84509851995167666169868678185342549983568150803791023831909660012392522615426"
- |]
- ; [| Field.of_string
- "36601166816771446688370845080961015541431660429079281633209182736773260407536"
- ; Field.of_string
- "19555759172327736128240171000715903945570888389700763573790859521156095228287"
- ; Field.of_string
- "82844424532983875300577689116331373756526403900340445449185486212503235782229"
- |]
- ; [| Field.of_string
- "40833119728631657038301474658571416779079199343770917422783737091842927892625"
- ; Field.of_string
- "68922359316478675184342553333343300163568193749010867527082189412217781430311"
- ; Field.of_string
- "91516472400306837063911995909475588197278444979245081960087094196120449075833"
- |]
- ; [| Field.of_string
- "21304716730402869084944080869903443431235336418077153507261240151959530377653"
- ; Field.of_string
- "106551237424345741137570659736231801772439680702621554106791455938098031620471"
- ; Field.of_string
- "104392597313271110590927764888829150750277653499050463757708547416538850601163"
- |]
- ; [| Field.of_string
- "16907937154215020261110468963982390213438461071031811101554056252102505124726"
- ; Field.of_string
- "23183141532591565112222057191012766855134687114504142337903677590107533245206"
- ; Field.of_string
- "96725517880771645283128624101279195709280644465575982072053504613644938879246"
- |]
- ; [| Field.of_string
- "84556507395241990875812091718422997082915179448604219593521819129312718969906"
- ; Field.of_string
- "100646525819453650494590571397259055384579251368754179569362740802641255820576"
- ; Field.of_string
- "50316555026297423940834952362583934362215303629664094841692233643882339493043"
- |]
- ; [| Field.of_string
- "77363534410783423412630139556441807611393685349073113946053979350631229049878"
- ; Field.of_string
- "54905073434434959485893381841839373267383966385817882684657825178181863944371"
- ; Field.of_string
- "110016011331508430102821620395154714608084938556260733745010992614542669817451"
- |]
- ; [| Field.of_string
- "52040139270046094723964229965823921970388683619580004402190656733318120479093"
- ; Field.of_string
- "495546618036723566920914648951352373868059898268055487677897567226892784967"
- ; Field.of_string
- "2528292188392170914010448139211586215817069915670005292953294092269979070980"
- |]
- ; [| Field.of_string
- "36842840134449713950999812540127591123318806680559982063089906871196226758113"
- ; Field.of_string
- "112314504940338253416202605695368724580971154020421327790335219348068041886245"
- ; Field.of_string
- "51653712314537383078368021242008468828072907802445786549975419682333073143987"
- |]
- ; [| Field.of_string
- "27179054135131403873076215577181710354069071017096145081169516607932870071868"
- ; Field.of_string
- "93264325401956094073193527739715293258814405715822269809955952297346626219055"
- ; Field.of_string
- "75336695567377817226085396912086909560962335091652231383627608374094112503635"
- |]
- ; [| Field.of_string
- "42536477740858058164730818130587261149155820207748153094480456895727052896150"
- ; Field.of_string
- "45297707210835305388426482743535401273114010430724989418303851665124351001731"
- ; Field.of_string
- "28263543670875633354854018109712021307749750769690268127459707194207091046997"
- |]
- ; [| Field.of_string
- "40809484989590048522440442751358616303471639779690405026946053699354967624695"
- ; Field.of_string
- "51589519265418587649124543325590658874910911006853535317847189422703251228717"
- ; Field.of_string
- "73459936981642894525955700397592343967482441686326322443228255968694436816673"
- |]
- ; [| Field.of_string
- "87298777232393189731949522229743081866971743270330772607820990832164835738703"
- ; Field.of_string
- "23328534428894097247289332213412175849711532153957647506361455182140450133738"
- ; Field.of_string
- "51807348624578081645565456865744011145427112815128832643950401419083788780028"
- |]
- ; [| Field.of_string
- "62003629107726929116302469001779155132709624140360743951550189738290955064278"
- ; Field.of_string
- "109311858027068383034683875948676795998030610067675200794951297783857157095297"
- ; Field.of_string
- "2085588517087605436136379278738013214233743532079287631079316773925068862732"
- |]
- ; [| Field.of_string
- "9513664655545306376987968929852776467090105742275395185801917554996684570014"
- ; Field.of_string
- "91103467624252027317764670613760419385374004736848754250298970998535616755199"
- ; Field.of_string
- "39500000352127197728032684892425352332461947514533659433380855624868454474623"
- |]
- ; [| Field.of_string
- "75175260486328125629270378861920310368403601365269629778076078053196928460032"
- ; Field.of_string
- "56923881233337629517433981230592855430598464522180216309153828833928801967999"
- ; Field.of_string
- "20981004218820236011689230170078809973840534961691702543937445515733151438851"
- |]
- ; [| Field.of_string
- "73175203586574092105626230272409823792532423094740797516874387144340145138310"
- ; Field.of_string
- "45186992623753580336479418079070607289916086076906975839720879934817804495460"
- ; Field.of_string
- "96084125187548549854900995260973117424750860440064269432639526863495781270780"
- |]
- ; [| Field.of_string
- "53530507055579550362119832302266967544350117012822630711681736383163390079758"
- ; Field.of_string
- "24484677147631687826970700541691541659768738376645174313438582486313045584324"
- ; Field.of_string
- "99915577684197600584703320523786830947563355229812244982453188909016758004559"
- |]
- ; [| Field.of_string
- "73101441225016284181831039876112223954723401962484828024235461623078642642543"
- ; Field.of_string
- "57434882751817972247799186935032874577110609253567900895922769490031350316077"
- ; Field.of_string
- "73837027842771758252813592393497967898989365991569964687267097531033696791279"
- |] |]
-
-[%%elif
-curve_size = 753]
-
-let inv_alpha =
- "38089537243562684911222013446582397389246099927230862792530457200932138920519187975508085239809399019470973610807689524839248234083267140972451128958905814696110378477590967674064016488951271336010850653690825603837076796509091"
-
-let mds =
- [| [| Field.of_string
- "18008368437737423474309001369890301521976028259557869102888851965525650962978826556079921598599098888590302388431866694591858505845787597588918688371226882207991627422083815074127761663891796146172734531991290402968541914191945"
- ; Field.of_string
- "32962087054539410523956712909309686802653898657605569239066684150412875533806729129396719808139132458477579312916467576544007112173179883702872518317566248974424872120316787037296877442303550788674087649228607529914336317231815"
- ; Field.of_string
- "5483644920564955035638567475101171013329909513705951195576914157062781400017095978578204379959018576768230785151221956162299596069033091214145892295417145700700562355150808732841416210677611704678816316579070697592848376918151"
- |]
- ; [| Field.of_string
- "22978648816866328436434244623482365207916416489037627250857600725663194263360344221738155318310265722276036466391561221273100146793047089336717612168000266025808046352571957200240941276891050003938106626328014785436301089444973"
- ; Field.of_string
- "30994637628885441247541289546067547358628828834593234742882770745561956454298316691254641971835514862825457645395555821312465912408960063865618013131940007283956832467402859348036195396599351702172170219903104023278420827940135"
- ; Field.of_string
- "7096546890972108774287040498267941446510912236116268882520023333699636048386130304511472040490894498194252489942856762189629237475878134498814298584446894911200379613916180563419809701971057277837757006070684068787238347669992"
- |]
- ; [| Field.of_string
- "36972350749469737754741804679554799140755989986720531577443294433161553396641362942311484418395414339763390349161399190591697773588979458652577643792428305947365748981633559835484411429153283032734484874265223184021528054671667"
- ; Field.of_string
- "41737243523985324129413602960234190443256387558212939183466624464606481865667576817406507424236723364751044981130064473555650490691461017936143464747647507236853158008794221676669840197156981325463879378696484711828785706949884"
- ; Field.of_string
- "17173689835840458026597473076649786448044751322360472626284380020090825232350642484368920024327948574743378803111953285570783101340571478325610471380479472689631139762331626281838772360396878623880994496993923849428256427219637"
- |] |]
-
-let round_constants =
- [| [| Field.of_string
- "78119860594733808983474265082430117124674905785489385612351809573030163625517"
- ; Field.of_string
- "41917899842730241418346215913324270532073353586134123463219061327941260175271"
- ; Field.of_string
- "74594641694171623328644944059182600919855574964222988275913344198970402906473"
- |]
- ; [| Field.of_string
- "96215759378377024990520153908983544755208851791126218239402755616994541522004"
- ; Field.of_string
- "64070601581278917442704840630680311036021557676765751754522901046069205253111"
- ; Field.of_string
- "112123228532462696722378911494343451272980413618911326680094528285518792872677"
- |]
- ; [| Field.of_string
- "84572244072021308337360477634782636535511175281144388234379224309078196768262"
- ; Field.of_string
- "45201095631123410354816854701250642083197167601967427301389500806815426216645"
- ; Field.of_string
- "23419302413627434057960523568681421397183896397903197013759822219271473949448"
- |]
- ; [| Field.of_string
- "63220724218126871510891512179599337793645245415246618202146262033908228783613"
- ; Field.of_string
- "67900966560828272306360950341997532094196196655192755442359232962244590070115"
- ; Field.of_string
- "56382132371728071364028077587343004835658613510701494793375685201885283260755"
- |]
- ; [| Field.of_string
- "80317852656339951095312898663286716255545986714650554749917139819628941702909"
- ; Field.of_string
- "110977183257428423540294096816813859894739618561444416996538397449475628658639"
- ; Field.of_string
- "25195781166503180938390820610484311038421647727795615447439501669639084690800"
- |]
- ; [| Field.of_string
- "108664438541952156416331885221418851366456449596370568350972106298760717710264"
- ; Field.of_string
- "17649294376560630922417546944777537620537408190408066211453084495108565929366"
- ; Field.of_string
- "95236435002924956844837407534938226368352771792739587594037613075251645052212"
- |]
- ; [| Field.of_string
- "43150472723422600689013423057826322506171125106415122422656432973040257528684"
- ; Field.of_string
- "77355911134402286174761911573353899889837132781450260391484427670446862700214"
- ; Field.of_string
- "8690728446593494554377477996892461126663797704587025899930929227865493269824"
- |]
- ; [| Field.of_string
- "109175231986025180460846040078523879514558355792739714578031829643740609438879"
- ; Field.of_string
- "64844253590731404811389281562033735091759746904073461140427127388042062490899"
- ; Field.of_string
- "43237071281695629980341250188156848876595681601471702180515324064382368960951"
- |]
- ; [| Field.of_string
- "2704440995725305992776846806711930876273040749514871232837487081811513368296"
- ; Field.of_string
- "66806779110388532101035294912010606217442229808784290357894909707660045365269"
- ; Field.of_string
- "25541187612624070470730890200174075890643652797181103367956318438136878170352"
- |]
- ; [| Field.of_string
- "89300613074831725721350087269266903129165086877175223066581882601662278010666"
- ; Field.of_string
- "36824076981866281177052433916337787028520068526782493484076995129329938182524"
- ; Field.of_string
- "68880449342008497744225106025198236600142055580985632884415488154606462819445"
- |]
- ; [| Field.of_string
- "68556888546596545408135887526582256648006271867854316538090068824142539400698"
- ; Field.of_string
- "111379753250206255125320675615931203940253796355491142745969887430259465111569"
- ; Field.of_string
- "101469186248899356416491489235841069222521093012237305521090058066171355672289"
- |]
- ; [| Field.of_string
- "87819793263125973233157093200229218382531712066157093399606059493857616731410"
- ; Field.of_string
- "11055386921184594780372263378420826851562920740321950336882051897732501262543"
- ; Field.of_string
- "111945832089295501567161822264292548657346358707472584179854375613919325491249"
- |]
- ; [| Field.of_string
- "95630018375719472826904441325138673248990446382783206900295723762884876505178"
- ; Field.of_string
- "94833984285990985873155989049880754188702918168949640563745233736765833491756"
- ; Field.of_string
- "77578854197021606645372788474039811639438242484066959482386065023999206730771"
- |]
- ; [| Field.of_string
- "27799616729223271646690718201487403976485619375555391888533887467404804041014"
- ; Field.of_string
- "42616502170265664498961018686434252976977548128285781725227341660941880774718"
- ; Field.of_string
- "95884094505080541517768389956970969462501217028562326732054532092615835087122"
- |]
- ; [| Field.of_string
- "107531500891040898338150732759493933154418374543568088749403053559827078391994"
- ; Field.of_string
- "17316158269457914256007584527534747738658973027567786054549020564540952112346"
- ; Field.of_string
- "51624680144452294805663893795879183520785046924484587034566439599591446246116"
- |]
- ; [| Field.of_string
- "17698087730709566968258013675219881840614043344609152682517330801348583470562"
- ; Field.of_string
- "111925747861248746962567200879629070277886617811519137515553806421564944666811"
- ; Field.of_string
- "57148554624730554436721083599187229462914514696466218614205595953570212881615"
- |]
- ; [| Field.of_string
- "92002976914130835490768248031171915767210477082066266868807636677032557847243"
- ; Field.of_string
- "58807951133460826577955909810426403194149348045831674376120801431489918282349"
- ; Field.of_string
- "93581873597000319446791963913210464830992618681307774190204379970955657554666"
- |]
- ; [| Field.of_string
- "46734218328816451470118898692627799522173317355773128175090189234250221977353"
- ; Field.of_string
- "12565476532112137808460978474958060441970941349010371267577877299656634907765"
- ; Field.of_string
- "54284813390357004119220859882274190703294683700710665367594256039714984623777"
- |]
- ; [| Field.of_string
- "92046423253202913319296401122133532555630886766139313429473309376931112550800"
- ; Field.of_string
- "15095408309586969968044201398966210357547906905122453139947200130015688526573"
- ; Field.of_string
- "76483858663950700865536712701042004661599554591777656961315837882956812689085"
- |]
- ; [| Field.of_string
- "37793510665854947576525000802927849210746292216845467892500370179796223909690"
- ; Field.of_string
- "84954934523349224038508216623641462700694917568481430996824733443763638196693"
- ; Field.of_string
- "81116649005575743294029244339854405387811058321603450814032274416116019472096"
- |]
- ; [| Field.of_string
- "28313841745366368076212445154871968929195537523489133192784916081223753077949"
- ; Field.of_string
- "17307716513182567320564075539526480893558355908652993731441220999922946005081"
- ; Field.of_string
- "63148771170858502457695904149048034226689843239981287723002468627916462842625"
- |]
- ; [| Field.of_string
- "14724939606645168531546334343600232253284320276481307778787768813885931648950"
- ; Field.of_string
- "4684996260500305121238590806572541849891754312215139285622888510153705963000"
- ; Field.of_string
- "63682763879011752475568476861367553456179860221069473817315669232908763409259"
- |]
- ; [| Field.of_string
- "47776179656187399887062096850541192680190218704758942820514561435612697426715"
- ; Field.of_string
- "42017618175533328439486588850450028995049195954365035474995309904751824054581"
- ; Field.of_string
- "39169739448648613641258102792190571431737464735838931948313779997907435855102"
- |]
- ; [| Field.of_string
- "37525991163523321662699819448962967746703579202577998445997476955224037837979"
- ; Field.of_string
- "67759173441312327668891803222741396828094999063019622301649400178376863820046"
- ; Field.of_string
- "23041132473771739182071223620364590606653086905326129708428084432335332411661"
- |]
- ; [| Field.of_string
- "77778894465896892167598828497939467663479992533052348475467490972714790615441"
- ; Field.of_string
- "20821227542001445006023346122554483849065713580779858784021328359824080462519"
- ; Field.of_string
- "47217242463811495777303984778653549585537750303740616187093690846833142245039"
- |]
- ; [| Field.of_string
- "42826871300142174590405062658305130206548405024021455479047593769907201224399"
- ; Field.of_string
- "8850081254230234130482383430433176873344633494243110112848647064077741649744"
- ; Field.of_string
- "1819639941546179668398979507053724449231350395599747300736218202072168364980"
- |]
- ; [| Field.of_string
- "21219092773772827667886204262476112905428217689703647484316763603169544906986"
- ; Field.of_string
- "35036730416829620763976972888493029852952403098232484869595671405553221294746"
- ; Field.of_string
- "35487050610902505183766069070898136230610758743267437784506875078109148276407"
- |]
- ; [| Field.of_string
- "62560813042054697786535634928462520639989597995560367915904328183428481834648"
- ; Field.of_string
- "112205708104999693686115882430330200785082630634036862526175634736046083007596"
- ; Field.of_string
- "109084747126382177842005646092084591250172358815974554434100716599544229364287"
- |]
- ; [| Field.of_string
- "63740884245554590221521941789197287379354311786803164550686696984009448418872"
- ; Field.of_string
- "58779928727649398559174292364061339806256990859940639552881479945324304668069"
- ; Field.of_string
- "20614241966717622390914334053622572167995367802051836931454426877074875942253"
- |]
- ; [| Field.of_string
- "41621411615229558798583846330993607380846912281220890296433013153854774573504"
- ; Field.of_string
- "20530621481603446397085836296967350209890164029268319619481535419199429275412"
- ; Field.of_string
- "99914592017824500091708233310179001698739309503141229228952777264267035511439"
- |]
- ; [| Field.of_string
- "9497854724940806346676139162466690071592872530638144182764466319052293463165"
- ; Field.of_string
- "7549205476288061047040852944548942878112823732145584918107208536541712726277"
- ; Field.of_string
- "30898915730863004722886730649661235919513859500318540107289237568593577554645"
- |]
- ; [| Field.of_string
- "22697249754607337581727259086359907309326296469394183645633378468855554942575"
- ; Field.of_string
- "72771100592475003378969523202338527077495914171905204927442739996373603143216"
- ; Field.of_string
- "84509851995167666169868678185342549983568150803791023831909660012392522615426"
- |]
- ; [| Field.of_string
- "36601166816771446688370845080961015541431660429079281633209182736773260407536"
- ; Field.of_string
- "19555759172327736128240171000715903945570888389700763573790859521156095228287"
- ; Field.of_string
- "82844424532983875300577689116331373756526403900340445449185486212503235782229"
- |]
- ; [| Field.of_string
- "40833119728631657038301474658571416779079199343770917422783737091842927892625"
- ; Field.of_string
- "68922359316478675184342553333343300163568193749010867527082189412217781430311"
- ; Field.of_string
- "91516472400306837063911995909475588197278444979245081960087094196120449075833"
- |]
- ; [| Field.of_string
- "21304716730402869084944080869903443431235336418077153507261240151959530377653"
- ; Field.of_string
- "106551237424345741137570659736231801772439680702621554106791455938098031620471"
- ; Field.of_string
- "104392597313271110590927764888829150750277653499050463757708547416538850601163"
- |]
- ; [| Field.of_string
- "16907937154215020261110468963982390213438461071031811101554056252102505124726"
- ; Field.of_string
- "23183141532591565112222057191012766855134687114504142337903677590107533245206"
- ; Field.of_string
- "96725517880771645283128624101279195709280644465575982072053504613644938879246"
- |]
- ; [| Field.of_string
- "84556507395241990875812091718422997082915179448604219593521819129312718969906"
- ; Field.of_string
- "100646525819453650494590571397259055384579251368754179569362740802641255820576"
- ; Field.of_string
- "50316555026297423940834952362583934362215303629664094841692233643882339493043"
- |]
- ; [| Field.of_string
- "77363534410783423412630139556441807611393685349073113946053979350631229049878"
- ; Field.of_string
- "54905073434434959485893381841839373267383966385817882684657825178181863944371"
- ; Field.of_string
- "110016011331508430102821620395154714608084938556260733745010992614542669817451"
- |]
- ; [| Field.of_string
- "52040139270046094723964229965823921970388683619580004402190656733318120479093"
- ; Field.of_string
- "495546618036723566920914648951352373868059898268055487677897567226892784967"
- ; Field.of_string
- "2528292188392170914010448139211586215817069915670005292953294092269979070980"
- |]
- ; [| Field.of_string
- "36842840134449713950999812540127591123318806680559982063089906871196226758113"
- ; Field.of_string
- "112314504940338253416202605695368724580971154020421327790335219348068041886245"
- ; Field.of_string
- "51653712314537383078368021242008468828072907802445786549975419682333073143987"
- |]
- ; [| Field.of_string
- "27179054135131403873076215577181710354069071017096145081169516607932870071868"
- ; Field.of_string
- "93264325401956094073193527739715293258814405715822269809955952297346626219055"
- ; Field.of_string
- "75336695567377817226085396912086909560962335091652231383627608374094112503635"
- |]
- ; [| Field.of_string
- "42536477740858058164730818130587261149155820207748153094480456895727052896150"
- ; Field.of_string
- "45297707210835305388426482743535401273114010430724989418303851665124351001731"
- ; Field.of_string
- "28263543670875633354854018109712021307749750769690268127459707194207091046997"
- |]
- ; [| Field.of_string
- "40809484989590048522440442751358616303471639779690405026946053699354967624695"
- ; Field.of_string
- "51589519265418587649124543325590658874910911006853535317847189422703251228717"
- ; Field.of_string
- "73459936981642894525955700397592343967482441686326322443228255968694436816673"
- |]
- ; [| Field.of_string
- "87298777232393189731949522229743081866971743270330772607820990832164835738703"
- ; Field.of_string
- "23328534428894097247289332213412175849711532153957647506361455182140450133738"
- ; Field.of_string
- "51807348624578081645565456865744011145427112815128832643950401419083788780028"
- |]
- ; [| Field.of_string
- "62003629107726929116302469001779155132709624140360743951550189738290955064278"
- ; Field.of_string
- "109311858027068383034683875948676795998030610067675200794951297783857157095297"
- ; Field.of_string
- "2085588517087605436136379278738013214233743532079287631079316773925068862732"
- |]
- ; [| Field.of_string
- "9513664655545306376987968929852776467090105742275395185801917554996684570014"
- ; Field.of_string
- "91103467624252027317764670613760419385374004736848754250298970998535616755199"
- ; Field.of_string
- "39500000352127197728032684892425352332461947514533659433380855624868454474623"
- |]
- ; [| Field.of_string
- "75175260486328125629270378861920310368403601365269629778076078053196928460032"
- ; Field.of_string
- "56923881233337629517433981230592855430598464522180216309153828833928801967999"
- ; Field.of_string
- "20981004218820236011689230170078809973840534961691702543937445515733151438851"
- |]
- ; [| Field.of_string
- "73175203586574092105626230272409823792532423094740797516874387144340145138310"
- ; Field.of_string
- "45186992623753580336479418079070607289916086076906975839720879934817804495460"
- ; Field.of_string
- "96084125187548549854900995260973117424750860440064269432639526863495781270780"
- |]
- ; [| Field.of_string
- "53530507055579550362119832302266967544350117012822630711681736383163390079758"
- ; Field.of_string
- "24484677147631687826970700541691541659768738376645174313438582486313045584324"
- ; Field.of_string
- "99915577684197600584703320523786830947563355229812244982453188909016758004559"
- |]
- ; [| Field.of_string
- "73101441225016284181831039876112223954723401962484828024235461623078642642543"
- ; Field.of_string
- "57434882751817972247799186935032874577110609253567900895922769490031350316077"
- ; Field.of_string
- "73837027842771758252813592393497967898989365991569964687267097531033696791279"
- |] |]
-
-[%%else]
-
-[%%show
-curve_size]
-
-[%%error
-"invalid value for \"curve_size\""]
-
-[%%endif]
diff --git a/src/lib/staged_ledger/staged_ledger.ml b/src/lib/staged_ledger/staged_ledger.ml
index 456183a50ee..4ac9880c2dc 100644
--- a/src/lib/staged_ledger/staged_ledger.ml
+++ b/src/lib/staged_ledger/staged_ledger.ml
@@ -448,7 +448,6 @@ module T = struct
; supply_increase
; pending_coinbase_stack_state=
{pending_coinbase_stack_state.pc with target= pending_coinbase_target}
- ; proof_type= `Base
; sok_digest= () }
, { Stack_state_with_init_stack.pc=
{source= pending_coinbase_target; target= pending_coinbase_target}
@@ -1849,7 +1848,8 @@ let%test_module "test" =
let proofs stmts : Ledger_proof.t One_or_two.t =
let sok_digest = Sok_message.Digest.default in
One_or_two.map stmts ~f:(fun statement ->
- Ledger_proof.create ~statement ~sok_digest ~proof:Proof.dummy )
+ Ledger_proof.create ~statement ~sok_digest
+ ~proof:Proof.transaction_dummy )
let stmt_to_work_random_prover (stmts : Transaction_snark_work.Statement.t)
: Transaction_snark_work.Checked.t option =
diff --git a/src/lib/transaction_snark/dune b/src/lib/transaction_snark/dune
index 348b1ee0c6b..a3dc5389869 100644
--- a/src/lib/transaction_snark/dune
+++ b/src/lib/transaction_snark/dune
@@ -3,7 +3,7 @@
(public_name transaction_snark)
(library_flags -linkall)
(inline_tests)
- (libraries core cache_dir cached snarky coda_base sgn bignum ppx_version.runtime transaction_protocol_state coda_state)
+ (libraries core cache_dir snarky coda_base sgn bignum ppx_version.runtime transaction_protocol_state coda_state)
(preprocess
(pps ppx_snarky ppx_version ppx_jane ppx_deriving.std ppx_deriving_yojson h_list.ppx bisect_ppx -- -conditional))
(synopsis "Transaction state transition snarking library"))
diff --git a/src/lib/transaction_snark/transaction_snark.ml b/src/lib/transaction_snark/transaction_snark.ml
index bdf23e7752e..8cd4e665d99 100644
--- a/src/lib/transaction_snark/transaction_snark.ml
+++ b/src/lib/transaction_snark/transaction_snark.ml
@@ -5,14 +5,6 @@ open Snark_params
module Global_slot = Coda_numbers.Global_slot
open Currency
-let tick_input () =
- let open Tick in
- Data_spec.[Field.typ]
-
-let wrap_input = Tock.Data_spec.[Wrap_input.typ]
-
-let exists' typ ~f = Tick.(exists typ ~compute:As_prover.(map get_state ~f))
-
let top_hash_logging_enabled = ref false
let with_top_hash_logging f =
@@ -37,8 +29,6 @@ module Proof_type = struct
end]
type t = Stable.Latest.t [@@deriving sexp, hash, compare, yojson]
-
- let is_base = function `Base -> true | `Merge -> false
end
module Pending_coinbase_stack_state = struct
@@ -126,7 +116,6 @@ module Statement = struct
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
t =
{ source: 'ledger_hash
@@ -136,12 +125,11 @@ module Statement = struct
; fee_excess: 'fee_excess
; next_available_token_before: 'token_id
; next_available_token_after: 'token_id
- ; proof_type: 'proof_type
; sok_digest: 'sok_digest }
[@@deriving compare, equal, hash, sexp, yojson]
let to_latest ledger_hash amount pending_coinbase fee_excess' token_id
- proof_type' sok_digest'
+ sok_digest'
{ source
; target
; supply_increase
@@ -149,7 +137,6 @@ module Statement = struct
; fee_excess
; next_available_token_before
; next_available_token_after
- ; proof_type
; sok_digest } =
{ source= ledger_hash source
; target= ledger_hash target
@@ -159,7 +146,6 @@ module Statement = struct
; fee_excess= fee_excess' fee_excess
; next_available_token_before= token_id next_available_token_before
; next_available_token_after= token_id next_available_token_after
- ; proof_type= proof_type' proof_type
; sok_digest= sok_digest' sok_digest }
end
end]
@@ -169,7 +155,6 @@ module Statement = struct
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
t =
( 'ledger_hash
@@ -177,7 +162,6 @@ module Statement = struct
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
Stable.Latest.t =
{ source: 'ledger_hash
@@ -187,12 +171,11 @@ module Statement = struct
; fee_excess: 'fee_excess
; next_available_token_before: 'token_id
; next_available_token_after: 'token_id
- ; proof_type: 'proof_type
; sok_digest: 'sok_digest }
[@@deriving compare, equal, hash, sexp, yojson, hlist]
- let typ ledger_hash amount pending_coinbase fee_excess token_id proof_type
- sok_digest =
+ let typ ledger_hash amount pending_coinbase fee_excess token_id sok_digest
+ =
Tick.Typ.of_hlistable
[ ledger_hash
; ledger_hash
@@ -201,7 +184,6 @@ module Statement = struct
; fee_excess
; token_id
; token_id
- ; proof_type
; sok_digest ]
~var_to_hlist:to_hlist ~var_of_hlist:of_hlist ~value_to_hlist:to_hlist
~value_of_hlist:of_hlist
@@ -212,7 +194,6 @@ module Statement = struct
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
poly =
( 'ledger_hash
@@ -220,7 +201,6 @@ module Statement = struct
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
Poly.t =
{ source: 'ledger_hash
@@ -230,7 +210,6 @@ module Statement = struct
; fee_excess: 'fee_excess
; next_available_token_before: 'token_id
; next_available_token_after: 'token_id
- ; proof_type: 'proof_type
; sok_digest: 'sok_digest }
[@@deriving compare, equal, hash, sexp, yojson]
@@ -243,7 +222,6 @@ module Statement = struct
, Pending_coinbase_stack_state.Stable.V1.t
, Fee_excess.Stable.V1.t
, Token_id.Stable.V1.t
- , Proof_type.Stable.V1.t
, unit )
Poly.Stable.V1.t
[@@deriving compare, equal, hash, sexp, yojson]
@@ -258,7 +236,6 @@ module Statement = struct
, Pending_coinbase_stack_state.t
, Fee_excess.t
, Token_id.t
- , Proof_type.t
, unit )
Poly.t
[@@deriving sexp, hash, compare, yojson]
@@ -273,16 +250,15 @@ module Statement = struct
, Pending_coinbase_stack_state.Stable.V1.t
, Fee_excess.Stable.V1.t
, Token_id.Stable.V1.t
- , unit
, Sok_message.Digest.Stable.V1.t )
Poly.Stable.V1.t
- [@@deriving compare, equal, hash, sexp, yojson]
+ [@@deriving compare, equal, hash, sexp, to_yojson]
let to_latest = Fn.id
end
end]
- type t = Stable.Latest.t [@@deriving sexp, hash, compare, yojson]
+ type t = Stable.Latest.t [@@deriving sexp, hash, compare, to_yojson]
type var =
( Frozen_ledger_hash.var
@@ -290,14 +266,13 @@ module Statement = struct
, Pending_coinbase_stack_state.var
, Fee_excess.var
, Token_id.var
- , unit
, Sok_message.Digest.Checked.t )
Poly.t
let typ : (var, t) Tick.Typ.t =
Poly.typ Frozen_ledger_hash.typ Currency.Amount.typ
Pending_coinbase_stack_state.typ Fee_excess.typ Token_id.typ
- Tick.Typ.unit Sok_message.Digest.typ
+ Sok_message.Digest.typ
let to_input
{ source
@@ -307,7 +282,6 @@ module Statement = struct
; fee_excess
; next_available_token_before
; next_available_token_after
- ; proof_type= _
; sok_digest } =
let input =
Array.reduce_exn ~f:Random_oracle.Input.append
@@ -330,6 +304,8 @@ module Statement = struct
let to_field_elements t = Random_oracle.pack_input (to_input t)
module Checked = struct
+ type t = var
+
let to_input
{ source
; target
@@ -338,7 +314,6 @@ module Statement = struct
; fee_excess
; next_available_token_before
; next_available_token_after
- ; proof_type= _
; sok_digest } =
let open Tick in
let open Checked.Let_syntax in
@@ -387,7 +362,7 @@ module Statement = struct
let to_field_elements t =
let open Tick.Checked.Let_syntax in
- to_input t >>| Random_oracle.Checked.pack_input
+ Tick.Run.run_checked (to_input t >>| Random_oracle.Checked.pack_input)
end
end
@@ -416,7 +391,6 @@ module Statement = struct
; fee_excess
; next_available_token_before= s1.next_available_token_before
; next_available_token_after= s2.next_available_token_after
- ; proof_type= `Merge
; supply_increase
; pending_coinbase_stack_state=
{ source= s1.pending_coinbase_stack_state.source
@@ -439,15 +413,12 @@ module Statement = struct
let%map token1 = Token_id.gen_non_default
and token2 = Token_id.gen_non_default in
(Token_id.min token1 token2, Token_id.max token1 token2)
- and proof_type =
- Bool.quickcheck_generator >>| fun b -> if b then `Merge else `Base
in
( { source
; target
; fee_excess
; next_available_token_before
; next_available_token_after
- ; proof_type
; supply_increase
; pending_coinbase_stack_state=
{source= pending_coinbase_before; target= pending_coinbase_after}
@@ -455,142 +426,58 @@ module Statement = struct
: t )
end
-[%%versioned
-module Stable = struct
- module V1 = struct
- type t =
- { source: Frozen_ledger_hash.Stable.V1.t
- ; target: Frozen_ledger_hash.Stable.V1.t
- ; proof_type: Proof_type.Stable.V1.t
- ; supply_increase: Amount.Stable.V1.t
- ; pending_coinbase_stack_state: Pending_coinbase_stack_state.Stable.V1.t
- ; fee_excess: Fee_excess.Stable.V1.t
- ; next_available_token_before: Token_id.Stable.V1.t
- ; next_available_token_after: Token_id.Stable.V1.t
- ; sok_digest:
- (Sok_message.Digest.Stable.V1.t[@to_yojson
- fun _ -> `String ""])
- ; proof: Proof.Stable.V1.t }
- [@@deriving compare, fields, sexp, version, to_yojson]
-
- let to_latest = Fn.id
- end
-end]
+module Proof = struct
+ open Pickles_types
+ module T = Pickles.Proof.Make (Nat.N2) (Nat.N2)
-type t = Stable.Latest.t =
- { source: Frozen_ledger_hash.t
- ; target: Frozen_ledger_hash.t
- ; proof_type: Proof_type.t
- ; supply_increase: Amount.t
- ; pending_coinbase_stack_state: Pending_coinbase_stack_state.t
- ; fee_excess: Fee_excess.t
- ; next_available_token_before: Token_id.t
- ; next_available_token_after: Token_id.t
- ; sok_digest: Sok_message.Digest.t
- ; proof: Proof.t }
-[@@deriving fields, sexp]
-
-let to_yojson = Stable.Latest.to_yojson
-
-let statement
- ({ source
- ; target
- ; proof_type
- ; fee_excess
- ; next_available_token_before
- ; next_available_token_after
- ; supply_increase
- ; pending_coinbase_stack_state
- ; sok_digest= _
- ; proof= _ } :
- t) : Statement.t =
- { source
- ; target
- ; proof_type
- ; supply_increase
- ; pending_coinbase_stack_state
- ; fee_excess
- ; next_available_token_before
- ; next_available_token_after
- ; sok_digest= () }
-
-let create = Fields.create
-
-let base_top_hash t =
- Random_oracle.hash ~init:Hash_prefix.base_snark
- (Statement.With_sok.to_field_elements t)
-
-let merge_top_hash wrap_vk_bits t =
- Random_oracle.hash ~init:wrap_vk_bits
- (Statement.With_sok.to_field_elements t)
-
-module Verification_keys = struct
- [%%versioned_asserted
+ [%%versioned
module Stable = struct
module V1 = struct
- type t =
- { base: Tick.Verification_key.t
- ; wrap: Tock.Verification_key.t
- ; merge: Tick.Verification_key.t }
+ type t = T.t
+ [@@deriving version {asserted}, yojson, bin_io, compare, sexp]
let to_latest = Fn.id
end
-
- module Tests = struct
- let%test "verification keys v1" =
- let base = Tick.Verification_key.of_string "base key" in
- let wrap = Tock.Verification_key.of_string "wrap key" in
- let merge = Tick.Verification_key.of_string "merge key" in
- let keys = V1.{base; wrap; merge} in
- let known_good_digest = "1cade6287d659338ae1f2c3971ee8d06" in
- Ppx_version_runtime.Serialization.check_serialization
- (module V1)
- keys known_good_digest
- end
end]
- type t = Stable.Latest.t =
- { base: Tick.Verification_key.t
- ; wrap: Tock.Verification_key.t
- ; merge: Tick.Verification_key.t }
-
- let dummy : t =
- let groth16 =
- Tick_backend.Verification_key.get_dummy
- ~input_size:(Tick.Data_spec.size (tick_input ()))
- in
- { merge= groth16
- ; base= groth16
- ; wrap= Tock_backend.Verification_key.get_dummy ~input_size:Wrap_input.size
- }
+ type t = Stable.Latest.t [@@deriving yojson, compare, sexp]
end
-module Keys0 = struct
- module Verification = Verification_keys
-
- module Proving = struct
+[%%versioned
+module Stable = struct
+ module V1 = struct
type t =
- { base: Tick.Proving_key.t
- ; wrap: Tock.Proving_key.t
- ; merge: Tick.Proving_key.t }
-
- let dummy =
- { merge= Dummy_values.Tick.Groth16.proving_key
- ; base= Dummy_values.Tick.Groth16.proving_key
- ; wrap= Dummy_values.Tock.Bowe_gabizon18.proving_key }
- end
+ {statement: Statement.With_sok.Stable.V1.t; proof: Proof.Stable.V1.t}
+ [@@deriving compare, fields, sexp, version, to_yojson]
- module T = struct
- type t = {proving: Proving.t; verification: Verification.t}
+ let to_latest = Fn.id
end
+end]
- include T
-end
+type t = Stable.Latest.t = {statement: Statement.With_sok.t; proof: Proof.t}
+[@@deriving sexp, to_yojson]
+
+let proof t = t.proof
+
+let statement t = {t.statement with sok_digest= ()}
+
+let sok_digest t = t.statement.sok_digest
+
+let to_yojson = Stable.Latest.to_yojson
-(* Staging:
- first make tick base.
- then make tick merge (which top_hashes in the tock wrap vk)
- then make tock wrap (which branches on the tick vk) *)
+let create ~source ~target ~supply_increase ~pending_coinbase_stack_state
+ ~fee_excess ~next_available_token_before ~next_available_token_after
+ ~sok_digest ~proof =
+ { statement=
+ { source
+ ; target
+ ; next_available_token_before
+ ; next_available_token_after
+ ; supply_increase
+ ; pending_coinbase_stack_state
+ ; fee_excess
+ ; sok_digest }
+ ; proof }
module Base = struct
open Tick
@@ -600,7 +487,6 @@ module Base = struct
| Transaction : Transaction_union.t Snarky.Request.t
| State_body : Coda_state.Protocol_state.Body.Value.t Snarky.Request.t
| Init_stack : Pending_coinbase.Stack.t Snarky.Request.t
- | Next_available_token : Token_id.t Snarky.Request.t
module User_command_failure = struct
(** The various ways that a user command may fail. These should be computed
@@ -1870,15 +1756,6 @@ module Base = struct
- apply a transaction and stuff in the wrong target hash
*)
- module Prover_state = struct
- type t =
- { state1: Frozen_ledger_hash.t
- ; state2: Frozen_ledger_hash.t
- ; pending_coinbase_stack_state: Pending_coinbase_stack_state.t
- ; sok_digest: Sok_message.Digest.t }
- [@@deriving fields]
- end
-
(* spec for [main top_hash]:
constraints pass iff
there exist
@@ -1891,42 +1768,30 @@ module Base = struct
such that
H(l1, l2, pending_coinbase_stack_state.source, pending_coinbase_stack_state.target, fee_excess, supply_increase) = top_hash,
applying [t] to ledger with merkle hash [l1] results in ledger with merkle hash [l2]. *)
- let%snarkydef main ~constraint_constants top_hash =
+ let%snarkydef main ~constraint_constants
+ (statement : Statement.With_sok.Checked.t) =
let%bind (module Shifted) = Tick.Inner_curve.Checked.Shifted.create () in
- let%bind root_before =
- exists' Frozen_ledger_hash.typ ~f:Prover_state.state1
- in
let%bind t =
with_label __LOC__
(exists Transaction_union.typ ~request:(As_prover.return Transaction))
in
- let%bind pending_coinbase_before =
- exists' Pending_coinbase.Stack.typ ~f:(fun s ->
- (Prover_state.pending_coinbase_stack_state s).source )
- in
- let%bind pending_coinbase_after =
- exists' Pending_coinbase.Stack.typ ~f:(fun s ->
- (Prover_state.pending_coinbase_stack_state s).target )
- in
let%bind pending_coinbase_init =
exists Pending_coinbase.Stack.typ ~request:(As_prover.return Init_stack)
in
- let%bind next_available_token_before =
- exists Token_id.typ ~request:(As_prover.return Next_available_token)
- in
let%bind state_body =
exists
(Coda_state.Protocol_state.Body.typ ~constraint_constants)
~request:(As_prover.return State_body)
in
+ let pc = statement.pending_coinbase_stack_state in
let%bind ( root_after
, fee_excess
, supply_increase
, next_available_token_after ) =
apply_tagged_transaction ~constraint_constants
(module Shifted)
- root_before pending_coinbase_init pending_coinbase_before
- pending_coinbase_after next_available_token_before state_body t
+ statement.source pending_coinbase_init pc.source pc.target
+ statement.next_available_token_before state_body t
in
let%bind fee_excess =
(* Use the default token for the fee excess if it is zero.
@@ -1947,38 +1812,25 @@ module Base = struct
; fee_token_r= Token_id.(var_of_t default)
; fee_excess_r= Fee.Signed.(Checked.constant zero) }
in
- let%map () =
- [%with_label "Check that the computed hash matches the input hash"]
- (let%bind sok_digest =
- [%with_label "Fetch the sok_digest"]
- (exists' Sok_message.Digest.typ ~f:Prover_state.sok_digest)
- in
- let%bind input =
- Statement.With_sok.Checked.to_field_elements
- { source= root_before
- ; target= root_after
- ; fee_excess
- ; next_available_token_before
- ; next_available_token_after
- ; supply_increase
- ; pending_coinbase_stack_state=
- { source= pending_coinbase_before
- ; target= pending_coinbase_after }
- ; proof_type= ()
- ; sok_digest }
- in
- [%with_label "Compare the hashes"]
- ( make_checked (fun () ->
- Random_oracle.Checked.(
- hash ~init:Hash_prefix.base_snark input) )
- >>= Field.Checked.Assert.equal top_hash ))
- in
- ()
+ Checked.all_unit
+ [ Frozen_ledger_hash.assert_equal root_after statement.target
+ ; Currency.Amount.Checked.assert_equal supply_increase
+ statement.supply_increase
+ ; Fee_excess.assert_equal_checked fee_excess statement.fee_excess
+ ; Token_id.Checked.Assert.equal next_available_token_after
+ statement.next_available_token_after ]
+
+ let rule ~constraint_constants : _ Pickles.Inductive_rule.t =
+ { prevs= []
+ ; main=
+ (fun [] x ->
+ Run.run_checked (main ~constraint_constants x) ;
+ [] )
+ ; main_value= (fun [] _ -> []) }
let transaction_union_handler handler (transaction : Transaction_union.t)
(state_body : Coda_state.Protocol_state.Body.Value.t)
- (init_stack : Pending_coinbase.Stack.t)
- (next_available_token : Token_id.t) : Snarky.Request.request -> _ =
+ (init_stack : Pending_coinbase.Stack.t) : Snarky.Request.request -> _ =
fun (With {request; respond} as r) ->
match request with
| Transaction ->
@@ -1987,77 +1839,13 @@ module Base = struct
respond (Provide state_body)
| Init_stack ->
respond (Provide init_stack)
- | Next_available_token ->
- respond (Provide next_available_token)
| _ ->
handler r
-
- let create_keys () =
- generate_keypair
- (main
- ~constraint_constants:Genesis_constants.Constraint_constants.compiled)
- ~exposing:(tick_input ())
-
- let transaction_union_proof ?(preeval = false) ~constraint_constants
- ~proving_key sok_digest state1 state2 init_stack
- pending_coinbase_stack_state next_available_token_before
- next_available_token_after (transaction : Transaction_union.t) state_body
- handler =
- if preeval then failwith "preeval currently disabled" ;
- let prover_state : Prover_state.t =
- {state1; state2; sok_digest; pending_coinbase_stack_state}
- in
- let handler =
- transaction_union_handler handler transaction state_body init_stack
- next_available_token_before
- in
- let main top_hash = handle (main ~constraint_constants top_hash) handler in
- let statement : Statement.With_sok.t =
- { source= state1
- ; target= state2
- ; supply_increase= Transaction_union.supply_increase transaction
- ; pending_coinbase_stack_state
- ; fee_excess= Transaction_union.fee_excess transaction
- ; next_available_token_before
- ; next_available_token_after
- ; proof_type= ()
- ; sok_digest }
- in
- let top_hash = base_top_hash statement in
- (top_hash, prove proving_key (tick_input ()) prover_state main top_hash)
-
- let cached =
- let load =
- let open Cached.Let_syntax in
- let%map verification =
- Cached.component ~label:"transaction_snark_base_verification"
- ~f:Keypair.vk
- (module Verification_key)
- and proving =
- Cached.component ~label:"transaction_snark_base_proving" ~f:Keypair.pk
- (module Proving_key)
- in
- (verification, {proving with value= ()})
- in
- Cached.Spec.create ~load ~name:"transaction-snark base keys"
- ~autogen_path:Cache_dir.autogen_path
- ~manual_install_path:Cache_dir.manual_install_path
- ~brew_install_path:Cache_dir.brew_install_path
- ~s3_install_path:Cache_dir.s3_install_path
- ~digest_input:(fun x ->
- Md5.to_hex (R1CS_constraint_system.digest (Lazy.force x)) )
- ~input:
- ( lazy
- (constraint_system ~exposing:(tick_input ())
- (main
- ~constraint_constants:
- Genesis_constants.Constraint_constants.compiled)) )
- ~create_env:(fun x -> Keypair.generate (Lazy.force x))
end
module Transition_data = struct
type t =
- { proof: Proof_type.t * Tock_backend.Proof.t
+ { proof: Proof_type.t
; supply_increase: Amount.t
; fee_excess: Fee_excess.t
; sok_digest: Sok_message.Digest.t
@@ -2067,88 +1855,6 @@ end
module Merge = struct
open Tick
- open Let_syntax
-
- module Prover_state = struct
- type t =
- { tock_vk: Tock_backend.Verification_key.t
- ; sok_digest: Sok_message.Digest.t
- ; ledger_hash1: Frozen_ledger_hash.t
- ; ledger_hash2: Frozen_ledger_hash.t
- ; transition12: Transition_data.t
- ; ledger_hash3: Frozen_ledger_hash.t
- ; transition23: Transition_data.t
- ; next_available_token1: Token_id.t
- ; next_available_token2: Token_id.t
- ; next_available_token3: Token_id.t
- ; pending_coinbase_stack1: Pending_coinbase.Stack.t
- ; pending_coinbase_stack2: Pending_coinbase.Stack.t
- ; pending_coinbase_stack3: Pending_coinbase.Stack.t
- ; pending_coinbase_stack4: Pending_coinbase.Stack.t }
- [@@deriving fields]
- end
-
- let input = tick_input
-
- let wrap_input_size = Tock.Data_spec.size wrap_input
-
- module Verifier = Tick.Verifier
-
- let hash_state_if b ~then_ ~else_ =
- make_checked (fun () ->
- Random_oracle.State.map2 then_ else_ ~f:(fun then_ else_ ->
- Run.Field.if_ b ~then_ ~else_ ) )
-
- (* spec for [verify_transition tock_vk proof_field s1 s2]:
- returns a bool which is true iff
- there is a snark proving making tock_vk
- accept on one of [ H(s1, s2, excess); H(s1, s2, excess, tock_vk) ] *)
- let%snarkydef verify_transition tock_vk tock_vk_precomp wrap_vk_hash_state
- get_transition_data s1 s2 ~pending_coinbase_stack1
- ~pending_coinbase_stack2 supply_increase ~fee_excess
- ~next_available_token_before ~next_available_token_after =
- let%bind is_base =
- let get_type s = get_transition_data s |> Transition_data.proof |> fst in
- with_label __LOC__
- (exists' Boolean.typ ~f:(fun s -> Proof_type.is_base (get_type s)))
- in
- let%bind sok_digest =
- exists' Sok_message.Digest.typ
- ~f:(Fn.compose Transition_data.sok_digest get_transition_data)
- in
- let%bind top_hash_init =
- hash_state_if is_base
- ~then_:
- (Random_oracle.State.map ~f:Run.Field.constant Hash_prefix.base_snark)
- ~else_:wrap_vk_hash_state
- in
- let%bind input =
- let%bind input =
- Statement.With_sok.Checked.to_field_elements
- { source= s1
- ; target= s2
- ; fee_excess
- ; next_available_token_before
- ; next_available_token_after
- ; supply_increase
- ; pending_coinbase_stack_state=
- {source= pending_coinbase_stack1; target= pending_coinbase_stack2}
- ; proof_type= ()
- ; sok_digest }
- in
- make_checked (fun () ->
- Random_oracle.Checked.(digest (update ~state:top_hash_init input)) )
- >>= Wrap_input.Checked.tick_field_to_scalars
- in
- let%bind proof =
- exists Verifier.Proof.typ
- ~compute:
- As_prover.(
- map get_state ~f:(fun s ->
- get_transition_data s |> Transition_data.proof |> snd
- |> Verifier.proof_of_backend_proof ))
- in
- Verifier.verify tock_vk tock_vk_precomp input proof
(* spec for [main top_hash]:
constraints pass iff
@@ -2157,376 +1863,109 @@ module Merge = struct
verify_transition tock_vk _ s1 s2 pending_coinbase_stack12.source, pending_coinbase_stack12.target is true
verify_transition tock_vk _ s2 s3 pending_coinbase_stack23.source, pending_coinbase_stack23.target is true
*)
- let%snarkydef main (top_hash : Random_oracle.Checked.Digest.t) =
- let%bind tock_vk =
- exists' (Verifier.Verification_key.typ ~input_size:wrap_input_size)
- ~f:(fun {Prover_state.tock_vk; _} -> Verifier.vk_of_backend_vk tock_vk
- )
- and s1 = exists' Frozen_ledger_hash.typ ~f:Prover_state.ledger_hash1
- and s2 = exists' Frozen_ledger_hash.typ ~f:Prover_state.ledger_hash2
- and s3 = exists' Frozen_ledger_hash.typ ~f:Prover_state.ledger_hash3
- and fee_excess12 =
- exists' Fee_excess.typ
- ~f:(Fn.compose Transition_data.fee_excess Prover_state.transition12)
- and fee_excess23 =
- exists' Fee_excess.typ
- ~f:(Fn.compose Transition_data.fee_excess Prover_state.transition23)
- and supply_increase12 =
- exists' Amount.typ
- ~f:
- (Fn.compose Transition_data.supply_increase Prover_state.transition12)
- and supply_increase23 =
- exists' Amount.typ
- ~f:
- (Fn.compose Transition_data.supply_increase Prover_state.transition23)
- and next_available_token1 =
- exists' Token_id.typ ~f:Prover_state.next_available_token1
- and next_available_token2 =
- exists' Token_id.typ ~f:Prover_state.next_available_token2
- and next_available_token3 =
- exists' Token_id.typ ~f:Prover_state.next_available_token3
- and pending_coinbase1 =
- exists' Pending_coinbase.Stack.typ
- ~f:Prover_state.pending_coinbase_stack1
- and pending_coinbase2 =
- exists' Pending_coinbase.Stack.typ
- ~f:Prover_state.pending_coinbase_stack2
- and pending_coinbase3 =
- exists' Pending_coinbase.Stack.typ
- ~f:Prover_state.pending_coinbase_stack3
- and pending_coinbase4 =
- exists' Pending_coinbase.Stack.typ
- ~f:Prover_state.pending_coinbase_stack4
+ let%snarkydef main
+ ([s1; s2] :
+ (Statement.With_sok.var * (Statement.With_sok.var * _))
+ Pickles_types.Hlist.HlistId.t) (s : Statement.With_sok.Checked.t) =
+ let%bind fee_excess =
+ Fee_excess.combine_checked s1.Statement.fee_excess
+ s2.Statement.fee_excess
in
let%bind () =
with_label __LOC__
(let%bind valid_pending_coinbase_stack_transition =
Pending_coinbase.Stack.Checked.check_merge
- ~transition1:(pending_coinbase1, pending_coinbase2)
- ~transition2:(pending_coinbase3, pending_coinbase4)
+ ~transition1:
+ ( s1.pending_coinbase_stack_state.source
+ , s1.pending_coinbase_stack_state.target )
+ ~transition2:
+ ( s2.pending_coinbase_stack_state.source
+ , s2.pending_coinbase_stack_state.target )
in
Boolean.Assert.is_true valid_pending_coinbase_stack_transition)
in
- let%bind wrap_vk_hash_state =
- make_checked (fun () ->
- Random_oracle.(
- Checked.update
- ~state:
- (State.map Hash_prefix_states.merge_snark ~f:Run.Field.constant)
- (Verifier.Verification_key.to_field_elements tock_vk)) )
- in
- let%bind tock_vk_precomp =
- Verifier.Verification_key.Precomputation.create tock_vk
- in
- let%bind () =
- [%with_label "Check top hash"]
- (let%bind fee_excess =
- Fee_excess.combine_checked fee_excess12 fee_excess23
- in
- let%bind supply_increase =
- Amount.Checked.add supply_increase12 supply_increase23
- in
- let%bind input =
- let%bind sok_digest =
- exists' Sok_message.Digest.typ ~f:Prover_state.sok_digest
- in
- let%bind input =
- Statement.With_sok.Checked.to_field_elements
- { source= s1
- ; target= s3
- ; fee_excess
- ; next_available_token_before= next_available_token1
- ; next_available_token_after= next_available_token3
- ; supply_increase
- ; pending_coinbase_stack_state=
- {source= pending_coinbase1; target= pending_coinbase4}
- ; proof_type= ()
- ; sok_digest }
- in
- make_checked (fun () ->
- Random_oracle.Checked.(
- digest (update ~state:wrap_vk_hash_state input)) )
- in
- Field.Checked.Assert.equal top_hash input)
- and verify_12 =
- [%with_label "Verify left transition"]
- (verify_transition tock_vk tock_vk_precomp wrap_vk_hash_state
- Prover_state.transition12 s1 s2
- ~pending_coinbase_stack1:pending_coinbase1
- ~pending_coinbase_stack2:pending_coinbase2 supply_increase12
- ~fee_excess:fee_excess12
- ~next_available_token_before:next_available_token1
- ~next_available_token_after:next_available_token2)
- and verify_23 =
- [%with_label "Verify right transition"]
- (verify_transition tock_vk tock_vk_precomp wrap_vk_hash_state
- Prover_state.transition23 s2 s3
- ~pending_coinbase_stack1:pending_coinbase3
- ~pending_coinbase_stack2:pending_coinbase4 supply_increase23
- ~fee_excess:fee_excess23
- ~next_available_token_before:next_available_token2
- ~next_available_token_after:next_available_token3)
+ let%bind supply_increase =
+ Amount.Checked.add s1.supply_increase s2.supply_increase
in
- Boolean.Assert.all [verify_12; verify_23]
-
- let create_keys () = generate_keypair ~exposing:(input ()) main
-
- let cached =
- let load =
- let open Cached.Let_syntax in
- let%map verification =
- Cached.component ~label:"transaction_snark_merge_verification"
- ~f:Keypair.vk
- (module Verification_key)
- and proving =
- Cached.component ~label:"transaction_snark_merge_proving" ~f:Keypair.pk
- (module Proving_key)
- in
- (verification, {proving with value= ()})
+ Checked.all_unit
+ [ Fee_excess.assert_equal_checked fee_excess s.fee_excess
+ ; Amount.Checked.assert_equal supply_increase s.supply_increase
+ ; Frozen_ledger_hash.assert_equal s.source s1.source
+ ; Frozen_ledger_hash.assert_equal s1.target s2.source
+ ; Frozen_ledger_hash.assert_equal s2.target s.target
+ ; Token_id.Checked.Assert.equal s.next_available_token_before
+ s1.next_available_token_before
+ ; Token_id.Checked.Assert.equal s1.next_available_token_after
+ s2.next_available_token_before
+ ; Token_id.Checked.Assert.equal s2.next_available_token_after
+ s.next_available_token_after ]
+
+ let rule self : _ Pickles.Inductive_rule.t =
+ let prev_should_verify =
+ match Genesis_constants.Proof_level.compiled with
+ | Full ->
+ true
+ | _ ->
+ false
in
- Cached.Spec.create ~load ~name:"transaction-snark merge keys"
- ~autogen_path:Cache_dir.autogen_path
- ~manual_install_path:Cache_dir.manual_install_path
- ~brew_install_path:Cache_dir.brew_install_path
- ~s3_install_path:Cache_dir.s3_install_path
- ~digest_input:(fun x ->
- Md5.to_hex (R1CS_constraint_system.digest (Lazy.force x)) )
- ~input:(lazy (constraint_system ~exposing:(input ()) main))
- ~create_env:(fun x -> Keypair.generate (Lazy.force x))
+ let b = Boolean.var_of_value prev_should_verify in
+ { prevs= [self; self]
+ ; main=
+ (fun ps x ->
+ Run.run_checked (main ps x) ;
+ [b; b] )
+ ; main_value= (fun _ _ -> [prev_should_verify; prev_should_verify]) }
end
-module Verification = struct
- module Keys = Verification_keys
+open Pickles_types
+
+type tag =
+ ( Statement.With_sok.Checked.t
+ , Statement.With_sok.t
+ , Nat.N2.n
+ , Nat.N2.n )
+ Pickles.Tag.t
+
+let time lab f =
+ let start = Time.now () in
+ let x = f () in
+ let stop = Time.now () in
+ printf "%s: %s\n%!" lab (Time.Span.to_string_hum (Time.diff stop start)) ;
+ x
+
+let system ~constraint_constants =
+ time "Transaction_snark.system" (fun () ->
+ Pickles.compile ~cache:Cache_dir.cache
+ (module Statement.With_sok.Checked)
+ (module Statement.With_sok)
+ ~typ:Statement.With_sok.typ
+ ~branches:(module Nat.N2)
+ ~max_branching:(module Nat.N2)
+ ~name:"transaction-snark"
+ ~choices:(fun ~self ->
+ [Base.rule ~constraint_constants; Merge.rule self] ) )
+module Verification = struct
module type S = sig
- val verify : (t * Sok_message.t) list -> bool
-
- val verify_against_digest : t -> bool
-
- val verify_complete_merge :
- Sok_message.Digest.Checked.t
- -> Frozen_ledger_hash.var
- -> Frozen_ledger_hash.var
- -> Pending_coinbase.Stack.var
- -> Pending_coinbase.Stack.var
- -> Currency.Amount.var
- -> Token_id.var
- -> Token_id.var
- -> (Tock.Proof.t, 's) Tick.As_prover.t
- -> (Tick.Boolean.var, 's) Tick.Checked.t
- end
-
- module Make (K : sig
- val keys : Keys.t
- end) =
- struct
- open K
-
- let wrap_vk_state =
- Random_oracle.update ~state:Hash_prefix.merge_snark
- Snark_params.Tick.Verifier.(
- let vk = vk_of_backend_vk keys.wrap in
- let g1 = Tick.Inner_curve.to_affine_exn in
- let g2 = Tick.Pairing.G2.Unchecked.to_affine_exn in
- Verification_key.to_field_elements
- { vk with
- query_base= g1 vk.query_base
- ; query= List.map ~f:g1 vk.query
- ; delta= g2 vk.delta })
-
- (* someday: Reorganize this module so that the inputs are separated from the proof. *)
- let verify_against_digest
- { source
- ; target
- ; proof
- ; proof_type
- ; fee_excess
- ; next_available_token_before
- ; next_available_token_after
- ; sok_digest
- ; supply_increase
- ; pending_coinbase_stack_state } =
- let (stmt : Statement.With_sok.t) =
- { source
- ; target
- ; proof_type= ()
- ; fee_excess
- ; next_available_token_before
- ; next_available_token_after
- ; sok_digest
- ; supply_increase
- ; pending_coinbase_stack_state }
- in
- let input =
- match proof_type with
- | `Base ->
- base_top_hash stmt
- | `Merge ->
- merge_top_hash wrap_vk_state stmt
- in
- Tock.verify proof keys.wrap wrap_input (Wrap_input.of_tick_field input)
+ val tag : tag
- let verify_one t ~message =
- Sok_message.Digest.equal t.sok_digest (Sok_message.digest message)
- && verify_against_digest t
-
- let verify = List.for_all ~f:(fun (t, m) -> verify_one t ~message:m)
+ val verify : (t * Sok_message.t) list -> bool
- (* spec for [verify_merge s1 s2 _]:
- Returns a boolean which is true if there exists a tock proof proving
- (against the wrap verification key) H(s1, s2, Amount.Signed.zero, wrap_vk).
- This in turn should only happen if there exists a tick proof proving
- (against the merge verification key) H(s1, s2, Amount.Signed.zero, wrap_vk).
+ val id : Pickles.Verification_key.Id.t Lazy.t
- We precompute the parts of the pedersen involving wrap_vk and
- Amount.Signed.zero outside the SNARK since this saves us many constraints.
- *)
+ val verification_key : Pickles.Verification_key.t Lazy.t
- let wrap_vk = Merge.Verifier.(constant_vk (vk_of_backend_vk keys.wrap))
-
- let wrap_precomp =
- Merge.Verifier.(
- Verification_key.Precomputation.create_constant
- (vk_of_backend_vk keys.wrap))
-
- let verify_complete_merge sok_digest s1 s2
- (pending_coinbase_stack1 : Pending_coinbase.Stack.var)
- (pending_coinbase_stack2 : Pending_coinbase.Stack.var) supply_increase
- next_available_token_before next_available_token_after get_proof =
- let open Tick in
- let%bind top_hash =
- let%bind input =
- Statement.With_sok.Checked.to_field_elements
- { source= s1
- ; target= s2
- ; fee_excess= Fee_excess.(var_of_t empty)
- ; next_available_token_before
- ; next_available_token_after
- ; supply_increase
- ; pending_coinbase_stack_state=
- { source= pending_coinbase_stack1
- ; target= pending_coinbase_stack2 }
- ; proof_type= ()
- ; sok_digest }
- in
- make_checked (fun () ->
- Random_oracle.Checked.(
- digest
- (update
- ~state:
- (Random_oracle.State.map wrap_vk_state
- ~f:Run.Field.constant)
- input)) )
- in
- let%bind input = Wrap_input.Checked.tick_field_to_scalars top_hash in
- let%map result =
- let%bind proof =
- exists Merge.Verifier.Proof.typ
- ~compute:
- (As_prover.map get_proof ~f:Merge.Verifier.proof_of_backend_proof)
- in
- Merge.Verifier.verify wrap_vk wrap_precomp input proof
- in
- result
+ val verify_against_digest : t -> bool
end
end
-module Wrap (Vk : sig
- val merge : Tick.Verification_key.t
-
- val base : Tick.Verification_key.t
-end) =
-struct
- open Tock
- module Verifier = Tock.Groth_verifier
-
- let merge_vk = Verifier.vk_of_backend_vk Vk.merge
-
- let merge_vk_precomp =
- Verifier.Verification_key.Precomputation.create_constant merge_vk
-
- let base_vk = Verifier.vk_of_backend_vk Vk.base
-
- let base_vk_precomp =
- Verifier.Verification_key.Precomputation.create_constant base_vk
-
- module Prover_state = struct
- type t = {proof_type: Proof_type.t; proof: Tick.Proof.t}
- [@@deriving fields]
- end
-
- let exists' typ ~f = exists typ ~compute:As_prover.(map get_state ~f)
-
- (* spec for [main input]:
- constraints pass iff
- (b1, b2, .., bn) = unpack input,
- there is a proof making one of [ base_vk; merge_vk ] accept (b1, b2, .., bn) *)
- let%snarkydef main (input : Wrap_input.var) =
- let%bind input = with_label __LOC__ (Wrap_input.Checked.to_scalar input) in
- let%bind is_base =
- exists' Boolean.typ ~f:(fun {Prover_state.proof_type; _} ->
- Proof_type.is_base proof_type )
- in
- let%bind verification_key_precomp =
- with_label __LOC__
- (Verifier.Verification_key.Precomputation.if_ is_base
- ~then_:base_vk_precomp ~else_:merge_vk_precomp)
- in
- let%bind verification_key =
- with_label __LOC__
- (Verifier.Verification_key.if_ is_base
- ~then_:(Verifier.constant_vk base_vk)
- ~else_:(Verifier.constant_vk merge_vk))
- in
- let%bind result =
- let%bind proof =
- exists Verifier.Proof.typ
- ~compute:
- As_prover.(
- map get_state
- ~f:
- (Fn.compose Verifier.proof_of_backend_proof
- Prover_state.proof))
- in
- with_label __LOC__
- (Verifier.verify verification_key verification_key_precomp [input]
- proof)
- in
- with_label __LOC__ (Boolean.Assert.is_true result)
-
- let create_keys () = generate_keypair ~exposing:wrap_input main
-
- let cached =
- let load =
- let open Cached.Let_syntax in
- let%map verification =
- Cached.component ~label:"transaction_snark_wrap_verification"
- ~f:Keypair.vk
- (module Verification_key)
- and proving =
- Cached.component ~label:"transaction_snark_wrap_proving" ~f:Keypair.pk
- (module Proving_key)
- in
- (verification, {proving with value= ()})
- in
- Cached.Spec.create ~load ~name:"transaction-snark wrap keys"
- ~autogen_path:Cache_dir.autogen_path
- ~manual_install_path:Cache_dir.manual_install_path
- ~brew_install_path:Cache_dir.brew_install_path
- ~s3_install_path:Cache_dir.s3_install_path
- ~digest_input:(fun x ->
- Md5.to_hex (R1CS_constraint_system.digest (Lazy.force x)) )
- ~input:(lazy (constraint_system ~exposing:wrap_input main))
- ~create_env:(fun x -> Keypair.generate (Lazy.force x))
-end
-
module type S = sig
include Verification.S
+ val cache_handle : Pickles.Cache_handle.t
+
val of_transaction :
- ?preeval:bool
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> sok_digest:Sok_message.Digest.t
+ sok_digest:Sok_message.Digest.t
-> source:Frozen_ledger_hash.t
-> target:Frozen_ledger_hash.t
-> init_stack:Pending_coinbase.Stack.t
@@ -2538,8 +1977,7 @@ module type S = sig
-> t
val of_user_command :
- constraint_constants:Genesis_constants.Constraint_constants.t
- -> sok_digest:Sok_message.Digest.t
+ sok_digest:Sok_message.Digest.t
-> source:Frozen_ledger_hash.t
-> target:Frozen_ledger_hash.t
-> init_stack:Pending_coinbase.Stack.t
@@ -2551,8 +1989,7 @@ module type S = sig
-> t
val of_fee_transfer :
- constraint_constants:Genesis_constants.Constraint_constants.t
- -> sok_digest:Sok_message.Digest.t
+ sok_digest:Sok_message.Digest.t
-> source:Frozen_ledger_hash.t
-> target:Frozen_ledger_hash.t
-> init_stack:Pending_coinbase.Stack.t
@@ -2572,12 +2009,8 @@ let check_transaction_union ?(preeval = false) ~constraint_constants
state_body handler =
if preeval then failwith "preeval currently disabled" ;
let sok_digest = Sok_message.digest sok_message in
- let prover_state : Base.Prover_state.t =
- {state1= source; state2= target; sok_digest; pending_coinbase_stack_state}
- in
let handler =
Base.transaction_union_handler handler transaction state_body init_stack
- next_available_token_before
in
let statement : Statement.With_sok.t =
{ source
@@ -2587,18 +2020,20 @@ let check_transaction_union ?(preeval = false) ~constraint_constants
; fee_excess= Transaction_union.fee_excess transaction
; next_available_token_before
; next_available_token_after
- ; proof_type= ()
; sok_digest }
in
- let top_hash = base_top_hash statement in
let open Tick in
- let main top_hash =
- handle (Base.main ~constraint_constants top_hash) handler
- in
- let main =
- Checked.map (main (Field.Var.constant top_hash)) ~f:As_prover.return
- in
- Or_error.ok_exn (run_and_check main prover_state) |> ignore
+ Or_error.ok_exn
+ (run_and_check
+ (handle
+ (Checked.map ~f:As_prover.return
+ (let open Checked in
+ exists Statement.With_sok.typ
+ ~compute:(As_prover.return statement)
+ >>= Base.main ~constraint_constants))
+ handler)
+ ())
+ |> ignore
let check_transaction ?preeval ~constraint_constants ~sok_message ~source
~target ~init_stack ~pending_coinbase_stack_state
@@ -2639,12 +2074,8 @@ let generate_transaction_union_witness ?(preeval = false) ~constraint_constants
Transaction_protocol_state.block_data transaction_in_block
in
let sok_digest = Sok_message.digest sok_message in
- let prover_state : Base.Prover_state.t =
- {state1= source; state2= target; sok_digest; pending_coinbase_stack_state}
- in
let handler =
Base.transaction_union_handler handler transaction state_body init_stack
- next_available_token_before
in
let statement : Statement.With_sok.t =
{ source
@@ -2654,15 +2085,11 @@ let generate_transaction_union_witness ?(preeval = false) ~constraint_constants
; fee_excess= Transaction_union.fee_excess transaction
; next_available_token_before
; next_available_token_after
- ; proof_type= ()
; sok_digest }
in
- let top_hash = base_top_hash statement in
let open Tick in
- let main top_hash =
- handle (Base.main ~constraint_constants top_hash) handler
- in
- generate_auxiliary_input (tick_input ()) prover_state main top_hash
+ let main x = handle (Base.main ~constraint_constants x) handler in
+ generate_auxiliary_input [Statement.With_sok.typ] () main statement
let generate_transaction_witness ?preeval ~constraint_constants ~sok_message
~source ~target ~init_stack ~pending_coinbase_stack_state
@@ -2679,102 +2106,61 @@ let generate_transaction_witness ?preeval ~constraint_constants ~sok_message
init_stack next_available_token_before next_available_token_after
pending_coinbase_stack_state handler
-let verification_keys_of_keys {Keys0.verification; _} = verification
-
-module Make (K : sig
- val keys : Keys0.t
-end) =
-struct
- open K
-
- include Verification.Make (struct
- let keys = verification_keys_of_keys keys
- end)
-
- module Wrap = Wrap (struct
- let merge = keys.verification.merge
-
- let base = keys.verification.base
- end)
-
- let wrap proof_type proof input =
- let prover_state = {Wrap.Prover_state.proof; proof_type} in
- Tock.prove keys.proving.wrap wrap_input prover_state Wrap.main
- (Wrap_input.of_tick_field input)
-
- let merge_proof sok_digest ledger_hash1 ledger_hash2 ledger_hash3
- next_available_token1 next_available_token2 next_available_token3
- transition12 transition23 =
- let fee_excess =
- Or_error.ok_exn
- @@ Fee_excess.combine transition12.Transition_data.fee_excess
- transition23.Transition_data.fee_excess
- in
- let supply_increase =
- Amount.add transition12.supply_increase transition23.supply_increase
- |> Option.value_exn
- in
- let statement : Statement.With_sok.t =
- { source= ledger_hash1
- ; target= ledger_hash3
- ; supply_increase
- ; pending_coinbase_stack_state=
- { source= transition12.pending_coinbase_stack_state.source
- ; target= transition23.pending_coinbase_stack_state.target }
- ; fee_excess
- ; next_available_token_before= next_available_token1
- ; next_available_token_after= next_available_token2
- ; proof_type= ()
- ; sok_digest }
- in
- let top_hash = merge_top_hash wrap_vk_state statement in
- let prover_state =
- { Merge.Prover_state.sok_digest
- ; ledger_hash1
- ; ledger_hash2
- ; ledger_hash3
- ; next_available_token1
- ; next_available_token2
- ; next_available_token3
- ; pending_coinbase_stack1=
- transition12.pending_coinbase_stack_state.source
- ; pending_coinbase_stack2=
- transition12.pending_coinbase_stack_state.target
- ; pending_coinbase_stack3=
- transition23.pending_coinbase_stack_state.source
- ; pending_coinbase_stack4=
- transition23.pending_coinbase_stack_state.target
- ; transition12
- ; transition23
- ; tock_vk= keys.verification.wrap }
- in
- ( top_hash
- , Tick.prove keys.proving.merge (tick_input ()) prover_state Merge.main
- top_hash )
-
- let of_transaction_union ?preeval ~constraint_constants sok_digest source
- target ~init_stack ~pending_coinbase_stack_state
- ~next_available_token_before ~next_available_token_after transaction
- state_body handler =
- let top_hash, proof =
- Base.transaction_union_proof ?preeval ~constraint_constants sok_digest
- ~proving_key:keys.proving.base source target init_stack
- pending_coinbase_stack_state next_available_token_before
- next_available_token_after transaction state_body handler
+let verify (ts : (t * _) list) ~key =
+ List.for_all ts ~f:(fun ({statement; _}, message) ->
+ Sok_message.Digest.equal
+ (Sok_message.digest message)
+ statement.sok_digest )
+ && Pickles.verify
+ (module Nat.N2)
+ (module Statement.With_sok)
+ key
+ (List.map ts ~f:(fun ({statement; proof}, _) -> (statement, proof)))
+
+module Make () = struct
+ let tag, cache_handle, p, Pickles.Provers.[base; merge] =
+ system
+ ~constraint_constants:Genesis_constants.Constraint_constants.compiled
+
+ module Proof = (val p)
+
+ let id = Proof.id
+
+ let verification_key = Proof.verification_key
+
+ let verify_against_digest {statement; proof} =
+ Proof.verify [(statement, proof)]
+
+ let verify ts =
+ List.for_all ts ~f:(fun (p, m) ->
+ Sok_message.Digest.equal (Sok_message.digest m) p.statement.sok_digest
+ )
+ && Proof.verify
+ (List.map ts ~f:(fun ({statement; proof}, _) -> (statement, proof)))
+
+ let of_transaction_union sok_digest source target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
+ ~next_available_token_after transaction state_body handler =
+ let s =
+ { Statement.source
+ ; target
+ ; sok_digest
+ ; next_available_token_before
+ ; next_available_token_after
+ ; fee_excess= Transaction_union.fee_excess transaction
+ ; supply_increase= Transaction_union.supply_increase transaction
+ ; pending_coinbase_stack_state }
in
- { source
- ; sok_digest
- ; target
- ; proof_type= `Base
- ; fee_excess= Transaction_union.fee_excess transaction
- ; next_available_token_before
- ; next_available_token_after
- ; pending_coinbase_stack_state
- ; supply_increase= Transaction_union.supply_increase transaction
- ; proof= wrap `Base proof top_hash }
-
- let of_transaction ?preeval ~constraint_constants ~sok_digest ~source ~target
- ~init_stack ~pending_coinbase_stack_state ~next_available_token_before
+ { statement= s
+ ; proof=
+ base []
+ ~handler:
+ (Base.transaction_union_handler handler transaction state_body
+ init_stack)
+ s }
+
+ let of_transaction ~sok_digest ~source ~target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
~next_available_token_after transaction_in_block handler =
let transaction =
Transaction_protocol_state.transaction transaction_in_block
@@ -2782,17 +2168,17 @@ struct
let state_body =
Transaction_protocol_state.block_data transaction_in_block
in
- of_transaction_union ?preeval ~constraint_constants sok_digest source
- target ~init_stack ~pending_coinbase_stack_state
- ~next_available_token_before ~next_available_token_after
+ of_transaction_union sok_digest source target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
+ ~next_available_token_after
(Transaction_union.of_transaction transaction)
state_body handler
- let of_user_command ~constraint_constants ~sok_digest ~source ~target
- ~init_stack ~pending_coinbase_stack_state ~next_available_token_before
+ let of_user_command ~sok_digest ~source ~target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
~next_available_token_after user_command_in_block handler =
- of_transaction ~constraint_constants ~sok_digest ~source ~target
- ~init_stack ~pending_coinbase_stack_state ~next_available_token_before
+ of_transaction ~sok_digest ~source ~target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
~next_available_token_after
{ user_command_in_block with
transaction=
@@ -2800,11 +2186,11 @@ struct
(Transaction_protocol_state.transaction user_command_in_block) }
handler
- let of_fee_transfer ~constraint_constants ~sok_digest ~source ~target
- ~init_stack ~pending_coinbase_stack_state ~next_available_token_before
+ let of_fee_transfer ~sok_digest ~source ~target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
~next_available_token_after transfer_in_block handler =
- of_transaction ~constraint_constants ~sok_digest ~source ~target
- ~init_stack ~pending_coinbase_stack_state ~next_available_token_before
+ of_transaction ~sok_digest ~source ~target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
~next_available_token_after
{ transfer_in_block with
transaction=
@@ -2812,223 +2198,47 @@ struct
(Transaction_protocol_state.transaction transfer_in_block) }
handler
- let merge t1 t2 ~sok_digest =
- if not (Frozen_ledger_hash.( = ) t1.target t2.source) then
+ let merge ({statement= t12; _} as x12) ({statement= t23; _} as x23)
+ ~sok_digest =
+ if not (Frozen_ledger_hash.( = ) t12.target t23.source) then
failwithf
- !"Transaction_snark.merge: t1.target <> t2.source \
+ !"Transaction_snark.merge: t12.target <> t23.source \
(%{sexp:Frozen_ledger_hash.t} vs %{sexp:Frozen_ledger_hash.t})"
- t1.target t2.source () ;
+ t12.target t23.source () ;
if
not
- (Token_id.( = ) t1.next_available_token_after
- t2.next_available_token_before)
+ (Token_id.( = ) t12.next_available_token_after
+ t23.next_available_token_before)
then
failwithf
- !"Transaction_snark.merge: t1.next_available_token_befre <> \
- t2.next_available_token_after (%{sexp:Token_id.t} vs \
+          !"Transaction_snark.merge: t12.next_available_token_after <> \
+            t23.next_available_token_before (%{sexp:Token_id.t} vs \
%{sexp:Token_id.t})"
- t1.next_available_token_after t2.next_available_token_before () ;
- let input, proof =
- merge_proof sok_digest t1.source t1.target t2.target
- t1.next_available_token_before t1.next_available_token_after
- t2.next_available_token_after
- { Transition_data.proof= (t1.proof_type, t1.proof)
- ; fee_excess= t1.fee_excess
- ; supply_increase= t1.supply_increase
- ; sok_digest= t1.sok_digest
- ; pending_coinbase_stack_state= t1.pending_coinbase_stack_state }
- { Transition_data.proof= (t2.proof_type, t2.proof)
- ; fee_excess= t2.fee_excess
- ; supply_increase= t2.supply_increase
- ; sok_digest= t2.sok_digest
- ; pending_coinbase_stack_state= t2.pending_coinbase_stack_state }
- in
+ t12.next_available_token_after t23.next_available_token_before () ;
let open Or_error.Let_syntax in
- let%map fee_excess = Fee_excess.combine t1.fee_excess t2.fee_excess
+ let%map fee_excess = Fee_excess.combine t12.fee_excess t23.fee_excess
and supply_increase =
- Amount.add t1.supply_increase t2.supply_increase
+ Amount.add t12.supply_increase t23.supply_increase
|> Option.value_map ~f:Or_error.return
~default:
(Or_error.errorf
"Transaction_snark.merge: Supply change amount overflow")
in
- { source= t1.source
- ; target= t2.target
- ; sok_digest
- ; fee_excess
- ; next_available_token_before= t1.next_available_token_before
- ; next_available_token_after= t2.next_available_token_after
- ; supply_increase
- ; pending_coinbase_stack_state=
- { source= t1.pending_coinbase_stack_state.source
- ; target= t2.pending_coinbase_stack_state.target }
- ; proof_type= `Merge
- ; proof= wrap `Merge proof input }
-end
-
-module Keys = struct
- module Storage = Storage.List.Make (Storage.Disk)
-
- module Per_snark_location = struct
- module T = struct
- type t =
- { base: Storage.location
- ; merge: Storage.location
- ; wrap: Storage.location }
- [@@deriving sexp]
- end
-
- include T
- include Sexpable.To_stringable (T)
- end
-
- let checksum ~prefix ~base ~merge ~wrap =
- Md5.digest_string
- ( "Transaction_snark_" ^ prefix ^ Md5.to_hex base ^ Md5.to_hex merge
- ^ Md5.to_hex wrap )
-
- module Verification = struct
- include Keys0.Verification
- module Location = Per_snark_location
-
- let checksum ~base ~merge ~wrap =
- checksum ~prefix:"transaction_snark_verification" ~base ~merge ~wrap
-
- let load ({merge; base; wrap} : Location.t) =
- let open Storage in
- let logger = Logger.create () in
- let tick_controller =
- Controller.create ~logger (module Tick.Verification_key)
- in
- let tock_controller =
- Controller.create ~logger (module Tock.Verification_key)
- in
- let open Async in
- let load c p =
- match%map load_with_checksum c p with
- | Ok x ->
- x
- | Error _e ->
- failwithf
- !"Transaction_snark: load failed on %{sexp:Storage.location}"
- p ()
- in
- let%map base = load tick_controller base
- and merge = load tick_controller merge
- and wrap = load tock_controller wrap in
- let t = {base= base.data; merge= merge.data; wrap= wrap.data} in
- ( t
- , checksum ~base:base.checksum ~merge:merge.checksum ~wrap:wrap.checksum
- )
- end
-
- module Proving = struct
- include Keys0.Proving
- module Location = Per_snark_location
-
- let checksum ~base ~merge ~wrap =
- checksum ~prefix:"transaction_snark_proving" ~base ~merge ~wrap
-
- let load ({merge; base; wrap} : Location.t) =
- let open Storage in
- let logger = Logger.create () in
- let tick_controller =
- Controller.create ~logger (module Tick.Proving_key)
- in
- let tock_controller =
- Controller.create ~logger (module Tock.Proving_key)
- in
- let open Async in
- let load c p =
- match%map load_with_checksum c p with
- | Ok x ->
- x
- | Error _e ->
- failwithf
- !"Transaction_snark: load failed on %{sexp:Storage.location}"
- p ()
- in
- let%map base = load tick_controller base
- and merge = load tick_controller merge
- and wrap = load tock_controller wrap in
- let t = {base= base.data; merge= merge.data; wrap= wrap.data} in
- ( t
- , checksum ~base:base.checksum ~merge:merge.checksum ~wrap:wrap.checksum
- )
- end
-
- module Location = struct
- module T = struct
- type t =
- {proving: Proving.Location.t; verification: Verification.Location.t}
- [@@deriving sexp]
- end
-
- include T
- include Sexpable.To_stringable (T)
- end
-
- include Keys0.T
-
- module Checksum = struct
- type t = {proving: Md5.t; verification: Md5.t}
- end
-
- let create () =
- let base = Base.create_keys () in
- let merge = Merge.create_keys () in
- let wrap =
- let module Wrap = Wrap (struct
- let base = Tick.Keypair.vk base
-
- let merge = Tick.Keypair.vk merge
- end) in
- Wrap.create_keys ()
- in
- { proving=
- { base= Tick.Keypair.pk base
- ; merge= Tick.Keypair.pk merge
- ; wrap= Tock.Keypair.pk wrap }
- ; verification=
- { base= Tick.Keypair.vk base
- ; merge= Tick.Keypair.vk merge
- ; wrap= Tock.Keypair.vk wrap } }
-
- let cached () =
- let paths path = Cache_dir.possible_paths (Filename.basename path) in
- let open Cached.Deferred_with_track_generated.Let_syntax in
- let%bind base_vk, base_pk = Cached.run Base.cached in
- let%bind merge_vk, merge_pk = Cached.run Merge.cached in
- let%map wrap_vk, wrap_pk =
- let module Wrap = Wrap (struct
- let base = base_vk.value
-
- let merge = merge_vk.value
- end) in
- Cached.run Wrap.cached
- in
- let t : Verification.t =
- {base= base_vk.value; merge= merge_vk.value; wrap= wrap_vk.value}
- in
- let location : Location.t =
- { proving=
- { base= paths base_pk.path
- ; merge= paths merge_pk.path
- ; wrap= paths wrap_pk.path }
- ; verification=
- { base= paths base_vk.path
- ; merge= paths merge_vk.path
- ; wrap= paths wrap_vk.path } }
- in
- let checksum =
- { Checksum.proving=
- Proving.checksum ~base:base_pk.checksum ~merge:merge_pk.checksum
- ~wrap:wrap_pk.checksum
- ; verification=
- Verification.checksum ~base:base_vk.checksum ~merge:merge_vk.checksum
- ~wrap:wrap_vk.checksum }
+ let s : Statement.With_sok.t =
+ { Statement.source= t12.source
+ ; target= t23.target
+ ; supply_increase
+ ; fee_excess
+ ; next_available_token_before= t12.next_available_token_before
+ ; next_available_token_after= t23.next_available_token_after
+ ; pending_coinbase_stack_state=
+ { source= t12.pending_coinbase_stack_state.source
+ ; target= t23.pending_coinbase_stack_state.target }
+ ; sok_digest }
in
- (location, t, checksum)
+ { statement= s
+ ; proof= merge [(x12.statement, x12.proof); (x23.statement, x23.proof)] s
+ }
end
let%test_module "transaction_snark" =
@@ -3114,11 +2324,7 @@ let%test_module "transaction_snark" =
~receiver_pk:(Account.public_key receiver.account)
~fee_token ~token amt fee nonce memo
- let keys = Keys.create ()
-
- include Make (struct
- let keys = keys
- end)
+ include Make ()
let state_body =
let compile_time_genesis =
@@ -3163,8 +2369,8 @@ let%test_module "transaction_snark" =
{ Transaction_protocol_state.Poly.transaction= user_command
; block_data= state_body }
in
- of_user_command ~constraint_constants ~sok_digest ~source ~target
- ~init_stack ~pending_coinbase_stack_state ~next_available_token_before
+ of_user_command ~sok_digest ~source ~target ~init_stack
+ ~pending_coinbase_stack_state ~next_available_token_before
~next_available_token_after user_command_in_block handler
(*
@@ -3531,6 +2737,12 @@ let%test_module "transaction_snark" =
(Account.identifier other.account)
(fees - account_fee) ledger ) )
+ module Pc_with_init_stack = struct
+ type t =
+ { pc: Pending_coinbase_stack_state.t
+ ; init_stack: Pending_coinbase.Stack.t }
+ end
+
let test_base_and_merge ~state_hash_and_body1 ~state_hash_and_body2
~carryforward1 ~carryforward2 =
Test_util.with_randomness 123456789 (fun () ->
@@ -3568,7 +2780,6 @@ let%test_module "transaction_snark" =
~prover:wallets.(0).account.public_key
|> Sok_message.digest
in
- let state1 = Ledger.merkle_root ledger in
let next_available_token1 = Ledger.next_available_token ledger in
let sparse_ledger =
Sparse_ledger.of_ledger_subset_exn ledger
@@ -3600,12 +2811,14 @@ let%test_module "transaction_snark" =
if carryforward1 then (stack_with_state, stack_with_state)
else (init_stack1, stack_with_state)
in
- { Pending_coinbase_stack_state.source= source_stack
- ; target= target_stack }
+ { Pc_with_init_stack.pc=
+ {source= source_stack; target= target_stack}
+ ; init_stack= init_stack1 }
in
let proof12 =
- of_user_command' sok_digest ledger t1 init_stack1
- pending_coinbase_stack_state1 state_body1
+ of_user_command' sok_digest ledger t1
+ pending_coinbase_stack_state1.init_stack
+ pending_coinbase_stack_state1.pc state_body1
(unstage @@ Sparse_ledger.handler sparse_ledger)
in
let current_global_slot =
@@ -3617,8 +2830,8 @@ let%test_module "transaction_snark" =
~txn_global_slot:current_global_slot sparse_ledger
(t1 :> User_command.t)
in
- let pending_coinbase_stack_state2, state_body2, init_stack2 =
- let previous_stack = pending_coinbase_stack_state1.target in
+ let pending_coinbase_stack_state2, state_body2 =
+ let previous_stack = pending_coinbase_stack_state1.pc.target in
let stack_with_state2 =
Pending_coinbase.Stack.(
push_state state_body_hash2 previous_stack)
@@ -3642,10 +2855,10 @@ let%test_module "transaction_snark" =
, previous_stack
, state_body2 )
in
- ( { Pending_coinbase_stack_state.source= source_stack
- ; target= target_stack }
- , state_body2
- , init_stack )
+ ( { Pc_with_init_stack.pc=
+ {source= source_stack; target= target_stack}
+ ; init_stack }
+ , state_body2 )
in
Ledger.apply_user_command ~constraint_constants ledger
~txn_global_slot:current_global_slot t1
@@ -3654,8 +2867,9 @@ let%test_module "transaction_snark" =
(Ledger.merkle_root ledger)
(Sparse_ledger.merkle_root sparse_ledger) ;
let proof23 =
- of_user_command' sok_digest ledger t2 init_stack2
- pending_coinbase_stack_state2 state_body2
+ of_user_command' sok_digest ledger t2
+ pending_coinbase_stack_state2.init_stack
+ pending_coinbase_stack_state2.pc state_body2
(unstage @@ Sparse_ledger.handler sparse_ledger)
in
let current_global_slot =
@@ -3667,47 +2881,16 @@ let%test_module "transaction_snark" =
~txn_global_slot:current_global_slot sparse_ledger
(t2 :> User_command.t)
in
- let pending_coinbase_stack_state_merge =
- Pending_coinbase_stack_state.
- { source= pending_coinbase_stack_state1.source
- ; target= pending_coinbase_stack_state2.target }
- in
Ledger.apply_user_command ledger ~constraint_constants
~txn_global_slot:current_global_slot t2
|> Or_error.ok_exn |> ignore ;
[%test_eq: Frozen_ledger_hash.t]
(Ledger.merkle_root ledger)
(Sparse_ledger.merkle_root sparse_ledger) ;
- let total_fees =
- let open Fee in
- let magnitude =
- User_command_payload.fee (t1 :> User_command.t).payload
- + User_command_payload.fee (t2 :> User_command.t).payload
- |> Option.value_exn
- in
- Signed.create ~magnitude ~sgn:Sgn.Pos
- in
- let state3 = Sparse_ledger.merkle_root sparse_ledger in
- let next_available_token3 = Ledger.next_available_token ledger in
let proof13 =
merge ~sok_digest proof12 proof23 |> Or_error.ok_exn
in
- let statement : Statement.With_sok.t =
- { source= state1
- ; target= state3
- ; supply_increase= Amount.zero
- ; pending_coinbase_stack_state=
- pending_coinbase_stack_state_merge
- ; fee_excess=
- Fee_excess.of_single (Token_id.default, total_fees)
- ; next_available_token_before= next_available_token1
- ; next_available_token_after= next_available_token3
- ; proof_type= ()
- ; sok_digest }
- in
- Tock.verify proof13.proof keys.verification.wrap wrap_input
- (Wrap_input.of_tick_field
- (merge_top_hash wrap_vk_state statement)) ) )
+ Proof.verify [(proof13.statement, proof13.proof)] ) )
let%test "base_and_merge: transactions in one block (t1,t2 in b1), \
carryforward the state from a previous transaction t0 in b1" =
@@ -5284,21 +4467,19 @@ let%test_module "account timing check" =
end )
let constraint_system_digests () =
- let module W = Wrap (struct
- let merge = Verification_keys.dummy.merge
-
- let base = Verification_keys.dummy.base
- end) in
let digest = Tick.R1CS_constraint_system.digest in
- let digest' = Tock.R1CS_constraint_system.digest in
[ ( "transaction-merge"
- , digest Merge.(Tick.constraint_system ~exposing:(input ()) main) )
+ , digest
+ Merge.(
+ Tick.constraint_system ~exposing:[Statement.With_sok.typ] (fun x ->
+ let open Tick in
+ let%bind x1 = exists Statement.With_sok.typ in
+ let%bind x2 = exists Statement.With_sok.typ in
+ main [x1; x2] x )) )
; ( "transaction-base"
, digest
Base.(
- Tick.constraint_system ~exposing:(tick_input ())
+ Tick.constraint_system ~exposing:[Statement.With_sok.typ]
(main
~constraint_constants:
- Genesis_constants.Constraint_constants.compiled)) )
- ; ( "transaction-wrap"
- , digest' W.(Tock.constraint_system ~exposing:wrap_input main) ) ]
+ Genesis_constants.Constraint_constants.compiled)) ) ]
diff --git a/src/lib/transaction_snark/transaction_snark.mli b/src/lib/transaction_snark/transaction_snark.mli
index 1c89dc48c60..7243552e787 100644
--- a/src/lib/transaction_snark/transaction_snark.mli
+++ b/src/lib/transaction_snark/transaction_snark.mli
@@ -5,18 +5,6 @@ open Snark_params
(** For debugging. Logs to stderr the inputs to the top hash. *)
val with_top_hash_logging : (unit -> 'a) -> 'a
-module Proof_type : sig
- module Stable : sig
- module V1 : sig
- type t = [`Base | `Merge] [@@deriving bin_io, sexp, yojson]
- end
-
- module Latest = V1
- end
-
- type t = Stable.Latest.t [@@deriving sexp, yojson]
-end
-
module Pending_coinbase_stack_state : sig
module Init_stack : sig
[%%versioned:
@@ -90,7 +78,6 @@ module Statement : sig
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
t =
{ source: 'ledger_hash
@@ -100,7 +87,6 @@ module Statement : sig
; fee_excess: 'fee_excess
; next_available_token_before: 'token_id
; next_available_token_after: 'token_id
- ; proof_type: 'proof_type
; sok_digest: 'sok_digest }
[@@deriving compare, equal, hash, sexp, yojson]
@@ -110,14 +96,12 @@ module Statement : sig
-> ('pending_coinbase -> 'pending_coinbase')
-> ('fee_excess -> 'fee_excess')
-> ('token_id -> 'token_id')
- -> ('proof_type -> 'proof_type')
-> ('sok_digest -> 'sok_digest')
-> ( 'ledger_hash
, 'amount
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
t
-> ( 'ledger_hash'
@@ -125,7 +109,6 @@ module Statement : sig
, 'pending_coinbase'
, 'fee_excess'
, 'token_id'
- , 'proof_type'
, 'sok_digest' )
t
end
@@ -136,7 +119,6 @@ module Statement : sig
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
t =
( 'ledger_hash
@@ -144,7 +126,6 @@ module Statement : sig
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
Stable.Latest.t =
{ source: 'ledger_hash
@@ -154,7 +135,6 @@ module Statement : sig
; fee_excess: 'fee_excess
; next_available_token_before: 'token_id
; next_available_token_after: 'token_id
- ; proof_type: 'proof_type
; sok_digest: 'sok_digest }
[@@deriving compare, equal, hash, sexp, yojson]
end
@@ -164,7 +144,6 @@ module Statement : sig
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
poly =
( 'ledger_hash
@@ -172,7 +151,6 @@ module Statement : sig
, 'pending_coinbase
, 'fee_excess
, 'token_id
- , 'proof_type
, 'sok_digest )
Poly.t =
{ source: 'ledger_hash
@@ -182,7 +160,6 @@ module Statement : sig
; fee_excess: 'fee_excess
; next_available_token_before: 'token_id
; next_available_token_after: 'token_id
- ; proof_type: 'proof_type
; sok_digest: 'sok_digest }
[@@deriving compare, equal, hash, sexp, yojson]
@@ -195,7 +172,6 @@ module Statement : sig
, Pending_coinbase_stack_state.Stable.V1.t
, Fee_excess.Stable.V1.t
, Token_id.Stable.V1.t
- , Proof_type.Stable.V1.t
, unit )
Poly.Stable.V1.t
[@@deriving compare, equal, hash, sexp, yojson]
@@ -208,7 +184,6 @@ module Statement : sig
, Pending_coinbase_stack_state.t
, Fee_excess.t
, Token_id.t
- , Proof_type.t
, unit )
Poly.t
[@@deriving sexp, hash, compare, yojson]
@@ -223,10 +198,9 @@ module Statement : sig
, Pending_coinbase_stack_state.Stable.V1.t
, Fee_excess.Stable.V1.t
, Token_id.Stable.V1.t
- , unit
, Sok_message.Digest.Stable.V1.t )
Poly.Stable.V1.t
- [@@deriving compare, equal, hash, sexp, yojson]
+ [@@deriving compare, equal, hash, sexp, to_yojson]
end
end]
@@ -236,10 +210,9 @@ module Statement : sig
, Pending_coinbase_stack_state.t
, Fee_excess.t
, Token_id.t
- , unit
, Sok_message.Digest.t )
Poly.t
- [@@deriving sexp, hash, compare, yojson]
+ [@@deriving sexp, hash, compare, to_yojson]
type var =
( Frozen_ledger_hash.var
@@ -247,7 +220,6 @@ module Statement : sig
, Pending_coinbase_stack_state.var
, Fee_excess.var
, Token_id.var
- , unit
, Sok_message.Digest.Checked.t )
Poly.Stable.V1.t
@@ -260,10 +232,13 @@ module Statement : sig
val to_field_elements : t -> Field.t array
module Checked : sig
+ type t = var
+
val to_input :
var -> ((Field.Var.t, Boolean.var) Random_oracle.Input.t, _) Checked.t
- val to_field_elements : var -> (Field.Var.t array, _) Checked.t
+ (* This is actually a checked function. *)
+ val to_field_elements : var -> Field.Var.t array
end
end
@@ -288,102 +263,44 @@ type t = Stable.Latest.t [@@deriving sexp, to_yojson]
val create :
source:Frozen_ledger_hash.t
-> target:Frozen_ledger_hash.t
- -> proof_type:Proof_type.t
-> supply_increase:Currency.Amount.t
-> pending_coinbase_stack_state:Pending_coinbase_stack_state.t
-> fee_excess:Fee_excess.t
-> next_available_token_before:Token_id.t
-> next_available_token_after:Token_id.t
-> sok_digest:Sok_message.Digest.t
- -> proof:Tock.Proof.t
+ -> proof:Coda_base.Proof.t
-> t
-val proof : t -> Tock.Proof.t
+val proof : t -> Coda_base.Proof.t
val statement : t -> Statement.t
val sok_digest : t -> Sok_message.Digest.t
-module Keys : sig
- module Proving : sig
- type t =
- { base: Tick.Proving_key.t
- ; wrap: Tock.Proving_key.t
- ; merge: Tick.Proving_key.t }
-
- val dummy : t
-
- module Location : Stringable.S
-
- val load : Location.t -> (t * Md5.t) Async.Deferred.t
- end
-
- module Verification : sig
- [%%versioned:
- module Stable : sig
- module V1 : sig
- type t =
- { base: Tick.Verification_key.t
- ; wrap: Tock.Verification_key.t
- ; merge: Tick.Verification_key.t }
- end
- end]
-
- type t = Stable.Latest.t =
- { base: Tick.Verification_key.t
- ; wrap: Tock.Verification_key.t
- ; merge: Tick.Verification_key.t }
+open Pickles_types
- val dummy : t
-
- module Location : Stringable.S
-
- val load : Location.t -> (t * Md5.t) Async.Deferred.t
- end
+type tag =
+ ( Statement.With_sok.Checked.t
+ , Statement.With_sok.t
+ , Nat.N2.n
+ , Nat.N2.n )
+ Pickles.Tag.t
- module Location : sig
- type t =
- {proving: Proving.Location.t; verification: Verification.Location.t}
-
- include Stringable.S with type t := t
- end
-
- module Checksum : sig
- type t = {proving: Md5.t; verification: Md5.t}
- end
-
- type t = {proving: Proving.t; verification: Verification.t}
-
- val create : unit -> t
-
- val cached :
- unit
- -> (Location.t * Verification.t * Checksum.t)
- Cached.Deferred_with_track_generated.t
-end
+val verify : (t * Sok_message.t) list -> key:Pickles.Verification_key.t -> bool
module Verification : sig
module type S = sig
+ val tag : tag
+
val verify : (t * Sok_message.t) list -> bool
- val verify_against_digest : t -> bool
+ val id : Pickles.Verification_key.Id.t Lazy.t
- val verify_complete_merge :
- Sok_message.Digest.Checked.t
- -> Frozen_ledger_hash.var
- -> Frozen_ledger_hash.var
- -> Pending_coinbase.Stack.var
- -> Pending_coinbase.Stack.var
- -> Currency.Amount.var
- -> Token_id.var
- -> Token_id.var
- -> (Tock.Proof.t, 's) Tick.As_prover.t
- -> (Tick.Boolean.var, 's) Tick.Checked.t
- end
+ val verification_key : Pickles.Verification_key.t Lazy.t
- module Make (K : sig
- val keys : Keys.Verification.t
- end) : S
+ val verify_against_digest : t -> bool
+ end
end
val check_transaction :
@@ -430,10 +347,10 @@ val generate_transaction_witness :
module type S = sig
include Verification.S
+ val cache_handle : Pickles.Cache_handle.t
+
val of_transaction :
- ?preeval:bool
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> sok_digest:Sok_message.Digest.t
+ sok_digest:Sok_message.Digest.t
-> source:Frozen_ledger_hash.t
-> target:Frozen_ledger_hash.t
-> init_stack:Pending_coinbase.Stack.t
@@ -445,8 +362,7 @@ module type S = sig
-> t
val of_user_command :
- constraint_constants:Genesis_constants.Constraint_constants.t
- -> sok_digest:Sok_message.Digest.t
+ sok_digest:Sok_message.Digest.t
-> source:Frozen_ledger_hash.t
-> target:Frozen_ledger_hash.t
-> init_stack:Pending_coinbase.Stack.t
@@ -458,8 +374,7 @@ module type S = sig
-> t
val of_fee_transfer :
- constraint_constants:Genesis_constants.Constraint_constants.t
- -> sok_digest:Sok_message.Digest.t
+ sok_digest:Sok_message.Digest.t
-> source:Frozen_ledger_hash.t
-> target:Frozen_ledger_hash.t
-> init_stack:Pending_coinbase.Stack.t
@@ -473,8 +388,6 @@ module type S = sig
val merge : t -> t -> sok_digest:Sok_message.Digest.t -> t Or_error.t
end
-module Make (K : sig
- val keys : Keys.t
-end) : S
+module Make () : S
val constraint_system_digests : unit -> (string * Md5.t) list
diff --git a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml
index bd723af3fb1..f96e6ad085b 100644
--- a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml
+++ b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml
@@ -236,7 +236,6 @@ let create_expected_statement ~constraint_constants
; pending_coinbase_stack_state=
{ statement.pending_coinbase_stack_state with
target= pending_coinbase_after }
- ; proof_type= `Base
; sok_digest= () }
let completed_work_to_scanable_work (job : job) (fee, current_proof, prover) :
@@ -279,7 +278,6 @@ let completed_work_to_scanable_work (job : job) (fee, current_proof, prover) :
; fee_excess
; next_available_token_before= s.next_available_token_before
; next_available_token_after= s'.next_available_token_after
- ; proof_type= `Merge
; sok_digest= () }
in
( Ledger_proof.create ~statement ~sok_digest ~proof
@@ -424,7 +422,6 @@ struct
; next_available_token_after
; supply_increase= _
; pending_coinbase_stack_state= _ (*TODO: check pending coinbases?*)
- ; proof_type= _
; sok_digest= () } ->
let open Or_error.Let_syntax in
let%map () =
@@ -509,7 +506,6 @@ let statement_of_job : job -> Transaction_snark.Statement.t option = function
; fee_excess
; next_available_token_before= stmt1.next_available_token_before
; next_available_token_after= stmt2.next_available_token_after
- ; proof_type= `Merge
; sok_digest= () }
: Transaction_snark.Statement.t )
diff --git a/src/lib/transition_frontier/frontier_base/breadcrumb.ml b/src/lib/transition_frontier/frontier_base/breadcrumb.ml
index 65098f27485..d393858c61e 100644
--- a/src/lib/transition_frontier/frontier_base/breadcrumb.ml
+++ b/src/lib/transition_frontier/frontier_base/breadcrumb.ml
@@ -267,8 +267,8 @@ module For_tests = struct
; proofs=
One_or_two.map stmts ~f:(fun statement ->
Ledger_proof.create ~statement
- ~sok_digest:Sok_message.Digest.default ~proof:Proof.dummy
- )
+ ~sok_digest:Sok_message.Digest.default
+ ~proof:Proof.transaction_dummy )
; prover }
in
let current_global_slot, state_and_body_hash =
@@ -356,7 +356,7 @@ module For_tests = struct
Protocol_version.(set_current zero) ;
let next_external_transition =
External_transition.For_tests.create ~protocol_state
- ~protocol_state_proof:Proof.dummy
+ ~protocol_state_proof:Proof.blockchain_dummy
~staged_ledger_diff:(Staged_ledger_diff.forget staged_ledger_diff)
~validation_callback:Fn.ignore
~delta_transition_chain_proof:(previous_state_hash, []) ()
diff --git a/src/lib/verifier/dune b/src/lib/verifier/dune
index 07c6b9b387d..7e050393c95 100644
--- a/src/lib/verifier/dune
+++ b/src/lib/verifier/dune
@@ -1,6 +1,6 @@
(library
(name verifier)
(public_name verifier)
- (libraries core_kernel async_kernel rpc_parallel coda_base coda_state blockchain_snark memory_stats snark_keys snark_params ledger_proof logger child_processes)
+ (libraries precomputed_values core_kernel async_kernel rpc_parallel coda_base coda_state blockchain_snark memory_stats snark_params ledger_proof logger child_processes)
(preprocessor_deps "../../config.mlh")
(preprocess (pps ppx_coda ppx_version ppx_optcomp ppx_bin_prot ppx_let ppx_deriving.std ppx_deriving_yojson)))
diff --git a/src/lib/verifier/prod.ml b/src/lib/verifier/prod.ml
index de1290d83a1..f14ee3d0356 100644
--- a/src/lib/verifier/prod.ml
+++ b/src/lib/verifier/prod.ml
@@ -5,13 +5,12 @@ open Async
open Coda_base
open Coda_state
open Blockchain_snark
-open Snark_params
type ledger_proof = Ledger_proof.Prod.t
module Worker_state = struct
module type S = sig
- val verify_wrap : Protocol_state.Value.t -> Tock.Proof.t -> bool
+ val verify_blockchain_snark : Protocol_state.Value.t -> Proof.t -> bool
val verify_transaction_snarks :
(Transaction_snark.t * Sok_message.t) list -> bool
@@ -24,40 +23,25 @@ module Worker_state = struct
; proof_level: Genesis_constants.Proof_level.Stable.Latest.t }
[@@deriving bin_io_unversioned]
- type t = (module S) Deferred.t
+ type t = (module S)
let create {logger; proof_level; _} : t Deferred.t =
Memory_stats.log_memory_stats logger ~process:"verifier" ;
match proof_level with
| Full ->
Deferred.return
- (let%map bc_vk = Snark_keys.blockchain_verification ()
- and tx_vk = Snark_keys.transaction_verification () in
- let module T = Transaction_snark.Verification.Make (struct
- let keys = tx_vk
- end) in
+ (let bc_vk = Precomputed_values.blockchain_verification ()
+ and tx_vk = Precomputed_values.transaction_verification () in
let module M = struct
- let instance_hash =
- unstage (Blockchain_transition.instance_hash bc_vk.wrap)
+ let verify_blockchain_snark state proof =
+ Blockchain_snark.Blockchain_snark_state.verify state proof
+ ~key:bc_vk
- let verify_wrap state proof =
+ let verify_transaction_snarks ts =
match
Or_error.try_with (fun () ->
- Tock.verify proof bc_vk.wrap
- Tock.Data_spec.[Wrap_input.typ]
- (Wrap_input.of_tick_field (instance_hash state)) )
+ Transaction_snark.verify ~key:tx_vk ts )
with
- | Ok result ->
- result
- | Error e ->
- [%log error]
- ~metadata:[("error", `String (Error.to_string_hum e))]
- "Verifier threw an exception while verifying blockchain \
- snark" ;
- failwith "Verifier crashed"
-
- let verify_transaction_snarks ts =
- match Or_error.try_with (fun () -> T.verify ts) with
| Ok result ->
result
| Error e ->
@@ -69,9 +53,9 @@ module Worker_state = struct
end in
(module M : S))
| Check | None ->
- Deferred.return @@ Deferred.return
+ Deferred.return
@@ ( module struct
- let verify_wrap _ _ = true
+ let verify_blockchain_snark _ _ = true
let verify_transaction_snarks _ = true
end
@@ -104,12 +88,12 @@ module Worker = struct
and type connection_state := Connection_state.t) =
struct
let verify_blockchain (w : Worker_state.t) (chain : Blockchain.t) =
- let%map (module M) = Worker_state.get w in
- M.verify_wrap chain.state chain.proof
+ let (module M) = Worker_state.get w in
+ Deferred.return (M.verify_blockchain_snark chain.state chain.proof)
let verify_transaction_snarks (w : Worker_state.t) ts =
- let%map (module M) = Worker_state.get w in
- M.verify_transaction_snarks ts
+ let (module M) = Worker_state.get w in
+ Deferred.return (M.verify_transaction_snarks ts)
let functions =
let f (i, o, f) =
diff --git a/src/lib/zexe b/src/lib/zexe
index be94eb3d5bb..7fc275e176b 160000
--- a/src/lib/zexe
+++ b/src/lib/zexe
@@ -1 +1 @@
-Subproject commit be94eb3d5bb00f8fe0e56816f03152162b48aa49
+Subproject commit 7fc275e176b74ada6350b9ae96e7011c6ecd319b
diff --git a/src/nonconsensus/random_oracle/dune b/src/nonconsensus/random_oracle/dune
index 62b84c8a8c7..342085e7884 100644
--- a/src/nonconsensus/random_oracle/dune
+++ b/src/nonconsensus/random_oracle/dune
@@ -7,6 +7,5 @@
(libraries
core_kernel
random_oracle_input
- sponge_params_nonconsensus
snark_params_nonconsensus
sponge ))
diff --git a/src/nonconsensus/snark_params/dune b/src/nonconsensus/snark_params/dune
index caa82d0efae..4586a7c32c9 100644
--- a/src/nonconsensus/snark_params/dune
+++ b/src/nonconsensus/snark_params/dune
@@ -4,5 +4,5 @@
(library_flags -linkall)
(libraries core_kernel bignum_bigint snarkette)
(preprocessor_deps ../../config.mlh)
- (preprocess (pps ppx_coda ppx_version ppx_optcomp ppx_bin_prot ppx_sexp_conv ppx_let))
+ (preprocess (pps ppx_custom_printf ppx_coda ppx_version ppx_optcomp ppx_bin_prot ppx_sexp_conv ppx_let))
(synopsis "Field definition for nonconsensus code"))
diff --git a/src/nonconsensus/snark_params/snark_params_nonconsensus.ml b/src/nonconsensus/snark_params/snark_params_nonconsensus.ml
index 3f661b963aa..ae2eb553d21 100644
--- a/src/nonconsensus/snark_params/snark_params_nonconsensus.ml
+++ b/src/nonconsensus/snark_params/snark_params_nonconsensus.ml
@@ -15,13 +15,10 @@ consensus_mechanism]
open Snarkette
[%%if
-curve_size = 753]
+curve_size = 255]
(* only size we should be building nonconsensus code for *)
-module Mnt4 = Mnt4753
-module Mnt6 = Mnt6753
-
[%%else]
[%%show
@@ -32,13 +29,11 @@ curve_size]
[%%endif]
-module Field0 = Mnt6.Fq
-
[%%inject
"ledger_depth", ledger_depth]
module Field = struct
- include Field0
+ include Tweedle.Fq
let size = order |> Snarkette.Nat.to_string |> Bigint.of_string
@@ -53,58 +48,44 @@ end
module Tock = struct
module Field = struct
- type t = Mnt4.Fq.t
+ type t = Tweedle.Fp.t
- let unpack t = Mnt4.Fq.to_bits t
+ let unpack (t : t) = Tweedle.Fp.to_bits t
+
+ let size_in_bits = Tweedle.Fp.length_in_bits
let project bits =
Core_kernel.Option.value_exn
~message:"Snark_params_nonconsensus.Tock.Field.project"
- (Mnt4.Fq.of_bits bits)
+ (Tweedle.Fp.of_bits bits)
end
end
module Inner_curve = struct
- type t = Mnt6.G1.t [@@deriving sexp]
+ type t = Tweedle.Dee.t [@@deriving sexp]
- module Coefficients = Mnt6.G1.Coefficients
+ module Coefficients = Tweedle.Dee.Coefficients
let find_y x =
- let open Mnt6.Fq in
+ let open Field in
let y2 = (x * square x) + (Coefficients.a * x) + Coefficients.b in
if is_square y2 then Some (sqrt y2) else None
[%%define_locally
- Mnt6.G1.(of_affine, to_affine, to_affine_exn, one, ( + ), negate)]
+ Tweedle.Dee.(of_affine, to_affine, to_affine_exn, one, ( + ), negate)]
module Scalar = struct
(* though we have bin_io, not versioned here; this type exists for Private_key.t,
where it is versioned-asserted and its serialization tested
*)
- type t = Mnt4.Fq.t [@@deriving bin_io_unversioned, sexp]
+ type t = Tweedle.Fp.t [@@deriving bin_io_unversioned, sexp]
type _unused = unit constraint t = Tock.Field.t
- (* the Inner_curve.Scalar.size for the consensus case is derived from a C++ call; here, we inline the value *)
- [%%if
- curve_size = 753]
-
- let size =
- Mnt4.Fq.of_string
- "41898490967918953402344214791240637128170709919953949071783502921025352812571106773058893763790338921418070971888253786114353726529584385201591605722013126468931404347949840543007986327743462853720628051692141265303114721689601"
-
- [%%else]
-
- [%%show
- curve_size]
-
- [%%error
- "invalid value for \"curve_size\""]
-
- [%%endif]
+ let size = Tweedle.Fp.order
[%%define_locally
- Mnt4.Fq.
+ Tweedle.Fp.
( to_string
, of_string
, equal
@@ -119,19 +100,19 @@ module Inner_curve = struct
, negate
, hash_fold_t )]
- (* Mnt4.Fq.gen uses the interval starting at zero
+ (* Tweedle.Fp.gen uses the interval starting at zero
here we follow the gen in Snark_params.Make_inner_curve_scalar, using
an interval starting at one
*)
- let gen = Mnt4.Fq.(gen_incl one (size - one))
+ let gen = Tweedle.Fp.(gen_incl one (zero - one))
- let gen_uniform = gen_uniform_incl one (size - one)
+ let gen_uniform = gen_uniform_incl one (zero - one)
let of_bits bits = Tock.Field.project bits
end
- let scale t (scalar : Scalar.t) = Mnt6.G1.scale t (scalar :> Nat.t)
+ let scale t (scalar : Scalar.t) = Tweedle.Dee.scale t (scalar :> Nat.t)
- let scale_field t x = scale t (Mnt4.Fq.of_bigint x :> Scalar.t)
+ let scale_field t x = scale t (Tweedle.Fp.of_bigint x :> Scalar.t)
end
diff --git a/src/nonconsensus/sponge_params/dune b/src/nonconsensus/sponge_params/dune
deleted file mode 100644
index 70aea8dc20f..00000000000
--- a/src/nonconsensus/sponge_params/dune
+++ /dev/null
@@ -1,7 +0,0 @@
-(library
- (name sponge_params_nonconsensus)
- (public_name sponge_params_nonconsensus)
- (preprocessor_deps ../../config.mlh)
- (preprocess (pps ppx_version ppx_inline_test ppx_optcomp ppx_deriving.eq))
- (inline_tests)
- (libraries snark_params_nonconsensus))
diff --git a/src/nonconsensus/sponge_params/sponge_params.ml b/src/nonconsensus/sponge_params/sponge_params.ml
deleted file mode 120000
index 70aa24b9215..00000000000
--- a/src/nonconsensus/sponge_params/sponge_params.ml
+++ /dev/null
@@ -1 +0,0 @@
-../../lib/sponge_params/sponge_params.ml
\ No newline at end of file
diff --git a/src/sponge_params.opam b/src/sponge_params.opam
deleted file mode 100644
index a1b56499734..00000000000
--- a/src/sponge_params.opam
+++ /dev/null
@@ -1,5 +0,0 @@
-opam-version: "1.2"
-version: "0.1"
-build: [
- ["dune" "build" "--only" "src" "--root" "." "-j" jobs "@install"]
-]
diff --git a/src/sponge_params_nonconsensus.opam b/src/sponge_params_nonconsensus.opam
deleted file mode 100644
index a1b56499734..00000000000
--- a/src/sponge_params_nonconsensus.opam
+++ /dev/null
@@ -1,5 +0,0 @@
-opam-version: "1.2"
-version: "0.1"
-build: [
- ["dune" "build" "--only" "src" "--root" "." "-j" jobs "@install"]
-]