diff --git a/.github/workflows/light-system-programs-tests.yml b/.github/workflows/light-system-programs-tests.yml index b647ed1486..8a0b9d6c76 100644 --- a/.github/workflows/light-system-programs-tests.yml +++ b/.github/workflows/light-system-programs-tests.yml @@ -71,6 +71,11 @@ jobs: - name: Setup and build uses: ./.github/actions/setup-and-build + - name: Build CLI + run: | + source ./scripts/devenv.sh + npx nx build @lightprotocol/zk-compression-cli + - name: build-programs run: | source ./scripts/devenv.sh diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 9f5eabc94c..2abfc1973f 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -40,6 +40,11 @@ jobs: - name: Setup and build uses: ./.github/actions/setup-and-build + - name: Build CLI + run: | + source ./scripts/devenv.sh + npx nx build @lightprotocol/zk-compression-cli + # concurrent merkle tree crate must run in isolation because otherwise it # tries to use the idl-build feature (which fails). - name: Test workspace (excluding light-concurrent-merkle-tree) diff --git a/circuit-lib/light-prover-client/Cargo.toml b/circuit-lib/light-prover-client/Cargo.toml index e5e34e785a..6bc8ca7ba3 100644 --- a/circuit-lib/light-prover-client/Cargo.toml +++ b/circuit-lib/light-prover-client/Cargo.toml @@ -8,7 +8,8 @@ edition = "2021" [features] gnark = ["tokio", "reqwest"] -default = ["gnark"] +default = ["gnark", "devenv"] +devenv = [] [dependencies] diff --git a/circuit-lib/light-prover-client/scripts/prover.sh b/circuit-lib/light-prover-client/scripts/prover.sh deleted file mode 100755 index dcd05940d8..0000000000 --- a/circuit-lib/light-prover-client/scripts/prover.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env bash - -kill_light_prover() { - prover_pid=$(lsof -t -i :3001) - if [ -n "$prover_pid" ]; then - kill $prover_pid - echo "Killed process with PID $prover_pid bound to port 3001" - else - echo "No process found running on port 3001" - fi -} - -build_prover() { - cd "$root_dir/light-prover" || exit - go build || { - echo "light-prover build failed. Check for errors." - exit 1 - } -} - -if [[ $# -eq 0 ]]; then - echo "Error: Please provide at least one argument containing light-prover options." - echo "Allowed options: inclusion, non-inclusion, combined (individually or combined)" - exit 1 -fi - -root_dir=$(git rev-parse --show-toplevel 2>/dev/null) || { - echo "Error: Not in a Git repository or 'git' command not found." - exit 1 -} - -build_prover - -options=("$@") -inclusion=false -non_inclusion=false -combined=false -append=false -update=false - -for option in "${options[@]}"; do - case $option in - inclusion) - inclusion=true - ;; - non-inclusion) - non_inclusion=true - ;; - combined) - combined=true - ;; - append) - append=true - ;; - update) - update=true - ;; - *) - echo "Error: Invalid option '$option'. Allowed options: inclusion, non-inclusion, combined" - exit 1 - ;; - esac -done - -keys_dir="$root_dir/light-prover/proving-keys/" -cmd="$root_dir/light-prover/light-prover start --keys-dir=$keys_dir" -if [ "$inclusion" = true ]; then cmd="$cmd --inclusion=true"; fi -if [ "$non_inclusion" = true ]; then cmd="$cmd --non-inclusion=true"; fi -if [ "$combined" = true ]; then cmd="$cmd --combined=true"; fi -if [ "$append" = true ]; then cmd="$cmd --append=true"; fi -if [ "$update" = true ]; then cmd="$cmd --update=true"; fi - -cmd="$cmd --run-mode=test" - -echo "Running command: $cmd" - -kill_light_prover && $cmd & - -echo "Command completed with status code $?" 
\ No newline at end of file diff --git a/circuit-lib/light-prover-client/src/gnark/helpers.rs b/circuit-lib/light-prover-client/src/gnark/helpers.rs index fbbb10a8c5..7a3336ffd6 100644 --- a/circuit-lib/light-prover-client/src/gnark/helpers.rs +++ b/circuit-lib/light-prover-client/src/gnark/helpers.rs @@ -1,5 +1,6 @@ use log::info; use std::{ + ffi::OsStr, fmt::{Display, Formatter}, process::Command, sync::atomic::{AtomicBool, Ordering}, @@ -14,12 +15,40 @@ use serde_json::json; use sysinfo::{Signal, System}; static IS_LOADING: AtomicBool = AtomicBool::new(false); -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum ProverMode { + Rpc, + Forester, + ForesterTest, + Full, + FullTest, +} + +impl Display for ProverMode { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + match self { + ProverMode::Rpc => "rpc", + ProverMode::Forester => "forester", + ProverMode::ForesterTest => "forester-test", + ProverMode::Full => "full", + ProverMode::FullTest => "full-test", + } + ) + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] pub enum ProofType { Inclusion, NonInclusion, + Combined, BatchAppend, BatchUpdate, + BatchAppendTest, + BatchUpdateTest, } impl Display for ProofType { @@ -30,31 +59,58 @@ impl Display for ProofType { match self { ProofType::Inclusion => "inclusion", ProofType::NonInclusion => "non-inclusion", + ProofType::Combined => "combined", ProofType::BatchAppend => "append", ProofType::BatchUpdate => "update", + ProofType::BatchAppendTest => "append-test", + ProofType::BatchUpdateTest => "update-test", } ) } } -pub async fn spawn_prover(restart: bool, proof_types: &[ProofType]) { - if let Some(project_root) = get_project_root() { - let path = "circuit-lib/light-prover-client/scripts/prover.sh"; - let absolute_path = format!("{}/{}", project_root.trim(), path); +#[derive(Debug, Clone)] +pub struct ProverConfig { + pub run_mode: Option<ProverMode>, + pub circuits: Vec<ProofType>, +} + +pub async fn spawn_prover(restart: bool, config: ProverConfig) { + if let Some(_project_root) = get_project_root() { + let prover_path: &str = { + #[cfg(feature = "devenv")] + { + &format!("{}/{}", _project_root.trim(), "cli/test_bin/run") + } + #[cfg(not(feature = "devenv"))] + { + "light" + } + }; + if restart { + println!("Killing prover..."); kill_prover(); } if !health_check(1, 3).await && !IS_LOADING.load(Ordering::Relaxed) { IS_LOADING.store(true, Ordering::Relaxed); - let proof_type_args: Vec<String> = proof_types.iter().map(|p| p.to_string()).collect(); - let proof_type_str = proof_type_args.join(" "); - Command::new("sh") - .arg("-c") - .arg(format!("{} {}", absolute_path, proof_type_str)) - .spawn() - .expect("Failed to start server process"); + let mut command = Command::new(prover_path); + command.arg("start-prover"); + + if let Some(ref mode) = config.run_mode { + command.arg("--run-mode").arg(mode.to_string()); + } + + for circuit in config.circuits.clone() { + command.arg("--circuit").arg(circuit.to_string()); + } + + println!("Starting prover with command: {:?}", command); + + let _ = command.spawn().expect("Failed to start prover process"); + let health_result = health_check(20, 5).await; if health_result { info!("Prover started successfully"); @@ -68,15 +124,60 @@ pub async fn spawn_prover(restart: bool, proof_types: &[ProofType]) { } } -pub fn kill_prover() { +pub fn kill_process(process_name: &str) { let mut system = System::new_all(); system.refresh_all(); for process in system.processes().values() { - if process.name() == "light-prover" { -
process.kill_with(Signal::Term); + let process_name_str = process.name().to_string_lossy(); + let process_cmd = process.cmd().join(OsStr::new(" ")); + let process_cmd_str = process_cmd.to_string_lossy(); + + // Kill any process whose name contains the target name + if process_name_str.contains(process_name) { + println!( + "Attempting to kill process: PID={}, Name={}, Cmd={}", + process.pid(), + process_name_str, + process_cmd_str + ); + if process.kill_with(Signal::Kill).is_some() { + println!("Successfully killed process: PID={}", process.pid()); + } else { + eprintln!("Failed to kill process: PID={}", process.pid()); + } } } + + // Double-check if matching processes are still running + system.refresh_all(); + let remaining_processes: Vec<_> = system + .processes() + .values() + .filter(|process| { + let process_name_str = process.name().to_string_lossy(); + process_name_str.contains(process_name) + }) + .collect(); + + if !remaining_processes.is_empty() { + eprintln!( + "Warning: {} processes still running after kill attempt", + remaining_processes.len() + ); + for process in remaining_processes { + eprintln!( + "Remaining process: PID={}, Name={}, Cmd={}", + process.pid(), + process.name().to_string_lossy(), + process.cmd().join(OsStr::new(" ")).to_string_lossy() + ); + } + } +} + +pub fn kill_prover() { + kill_process("prover"); } pub async fn health_check(retries: usize, timeout: usize) -> bool { @@ -146,3 +247,52 @@ where Err(_) => panic!("Merkle tree data invalid"), } } + +#[derive(Debug)] +pub struct LightValidatorConfig { + pub enable_indexer: bool, + pub prover_config: Option<ProverConfig>, + pub wait_time: u64, +} + +impl Default for LightValidatorConfig { + fn default() -> Self { + Self { + enable_indexer: false, + prover_config: None, + wait_time: 35, + } + } +} + +pub async fn spawn_validator(config: LightValidatorConfig) { + if let Some(project_root) = get_project_root() { + let path = "cli/test_bin/run test-validator"; + let mut path = format!("{}/{}", project_root.trim(), path); + println!("Starting validator..."); + println!("Config: {:?}", config); + if !config.enable_indexer { + path.push_str(" --skip-indexer"); + } + if let Some(prover_config) = config.prover_config { + prover_config.circuits.iter().for_each(|circuit| { + path.push_str(&format!(" --circuit {}", circuit)); + }); + if let Some(prover_mode) = prover_config.run_mode { + path.push_str(&format!(" --prover-run-mode {}", prover_mode)); + } + } else { + path.push_str(" --skip-prover"); + } + + println!("Starting validator with command: {}", path); + + Command::new("sh") + .arg("-c") + .arg(path) + .spawn() + .expect("Failed to start server process"); + tokio::time::sleep(tokio::time::Duration::from_secs(config.wait_time)).await; + println!("Validator started successfully"); + } +} diff --git a/circuit-lib/light-prover-client/tests/gnark.rs b/circuit-lib/light-prover-client/tests/gnark.rs index c5b00d9ab4..7cdfbd8304 100644 --- a/circuit-lib/light-prover-client/tests/gnark.rs +++ b/circuit-lib/light-prover-client/tests/gnark.rs @@ -4,7 +4,7 @@ use light_prover_client::batch_append::{calculate_hash_chain, get_batch_append_i use light_prover_client::batch_update::get_batch_update_inputs; use light_prover_client::gnark::batch_append_json_formatter::append_inputs_string; use light_prover_client::gnark::batch_update_json_formatter::update_inputs_string; -use light_prover_client::gnark::helpers::{spawn_prover, ProofType}; +use light_prover_client::gnark::helpers::{spawn_prover, ProofType, ProverConfig}; use light_prover_client::{ gnark::{ constants::{PROVE_PATH,
SERVER_ADDRESS}, @@ -19,7 +19,14 @@ use reqwest::Client; #[ignore] async fn prove_inclusion_full() { init_logger(); - spawn_prover(false, &[ProofType::Inclusion]).await; + spawn_prover( + false, + ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion, { ProofType::BatchUpdateTest }], + }, + ) + .await; let client = Client::new(); for number_of_utxos in &[1, 2, 3, 4, 8] { let (inputs, _) = inclusion_inputs_string(*number_of_utxos as usize); @@ -35,15 +42,18 @@ async fn prove_inclusion_full() { } #[tokio::test] -async fn test_inclusion_batch_append_batch_update() { +async fn prove_inclusion() { init_logger(); spawn_prover( - false, - &[ - ProofType::Inclusion, - ProofType::BatchUpdate, - ProofType::BatchAppend, - ], + true, + ProverConfig { + run_mode: None, + circuits: vec![ + ProofType::Inclusion, + ProofType::BatchUpdateTest, + ProofType::BatchAppendTest, + ], + }, ) .await; let client = Client::new(); @@ -57,8 +67,15 @@ async fn test_inclusion_batch_append_batch_update() { .expect("Failed to execute request."); assert!(response_result.status().is_success()); - spawn_prover(false, &[ProofType::BatchUpdate]).await; - const HEIGHT: usize = 10; + spawn_prover( + false, + ProverConfig { + run_mode: None, + circuits: vec![ProofType::BatchUpdate], + }, + ) + .await; + const HEIGHT: usize = 26; const CANOPY: usize = 0; let num_insertions = 10; @@ -93,7 +110,6 @@ async fn test_inclusion_batch_append_batch_update() { ); let client = Client::new(); let inputs = update_inputs_string(&inputs); - let response_result = client .post(&format!("{}{}", SERVER_ADDRESS, PROVE_PATH)) .header("Content-Type", "text/plain; charset=utf-8") @@ -101,7 +117,8 @@ async fn test_inclusion_batch_append_batch_update() { .send() .await .expect("Failed to execute request."); - assert!(response_result.status().is_success()); + let status = response_result.status(); + assert!(status.is_success()); let num_insertions = 10; diff --git a/circuit-lib/verifier/tests/test.rs b/circuit-lib/verifier/tests/test.rs index d206590b23..5b19ab8a16 100644 --- a/circuit-lib/verifier/tests/test.rs +++ b/circuit-lib/verifier/tests/test.rs @@ -1,9 +1,10 @@ #[cfg(test)] mod test { + use light_prover_client::gnark::helpers::{ProofType, ProverConfig}; use light_prover_client::{ gnark::{ constants::{PROVE_PATH, SERVER_ADDRESS}, - helpers::{kill_prover, spawn_prover, ProofType}, + helpers::{kill_prover, spawn_prover}, inclusion_json_formatter::inclusion_inputs_string, proof_helpers::{compress_proof, deserialize_gnark_proof_json, proof_from_json_struct}, }, @@ -15,7 +16,14 @@ mod test { #[tokio::test] async fn prove_inclusion() { init_logger(); - spawn_prover(false, &[ProofType::Inclusion]).await; + spawn_prover( + false, + ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }, + ) + .await; let client = Client::new(); for number_of_compressed_accounts in &[1usize, 2, 3] { let (inputs, big_int_inputs) = inclusion_inputs_string(*number_of_compressed_accounts); @@ -57,7 +65,14 @@ mod test { #[ignore] async fn prove_inclusion_full() { init_logger(); - spawn_prover(false, &[ProofType::Inclusion]).await; + spawn_prover( + false, + ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }, + ) + .await; let client = Client::new(); for number_of_compressed_accounts in &[1usize, 2, 3, 4, 8] { let (inputs, big_int_inputs) = inclusion_inputs_string(*number_of_compressed_accounts); diff --git a/cli/src/commands/start-prover/index.ts b/cli/src/commands/start-prover/index.ts index 17178b4a89..d4e4cc2ac4 
100644 --- a/cli/src/commands/start-prover/index.ts +++ b/cli/src/commands/start-prover/index.ts @@ -9,25 +9,30 @@ class StartProver extends Command { } static flags = { - "skip-prove-compressed-accounts": Flags.boolean({ - description: "Skip proving of compressed accounts.", - default: false, - char: "c", - }), - "skip-prove-new-addresses": Flags.boolean({ - description: "Skip proving of new addresses.", - default: false, - char: "n", - }), "prover-port": Flags.integer({ description: "Enable Light Prover server on this port.", required: false, default: 3001, }), "run-mode": Flags.string({ - description: "Specify the running mode (test or full)", - options: ["test", "full"], - default: "full", + description: + "Specify the running mode (forester, forester-test, rpc, full, or full-test)", + options: ["rpc", "forester", "forester-test", "full", "full-test"], + required: false, + }), + circuit: Flags.string({ + description: "Specify individual circuits to enable.", + options: [ + "inclusion", + "non-inclusion", + "combined", + "append", + "update", + "append-test", + "update-test", + ], + multiple: true, + required: false, }), }; @@ -36,11 +41,15 @@ class StartProver extends Command { const loader = new CustomLoader("Performing setup tasks...\n"); loader.start(); + if (!flags["run-mode"] && !flags["circuit"]) { + this.log("Please specify --run-mode or --circuit."); + return; + } + await startProver( flags["prover-port"], - !flags["skip-prove-compressed-accounts"], - !flags["skip-prove-new-addresses"], flags["run-mode"], + flags["circuit"], ); this.log("\nSetup tasks completed successfully \x1b[32m✔\x1b[0m"); } diff --git a/cli/src/commands/test-validator/index.ts b/cli/src/commands/test-validator/index.ts index d17ee6408d..018def31b6 100644 --- a/cli/src/commands/test-validator/index.ts +++ b/cli/src/commands/test-validator/index.ts @@ -24,16 +24,6 @@ class SetupCommand extends Command { "Runs a test validator without initialized light system accounts.", default: false, }), - "prove-compressed-accounts": Flags.boolean({ - description: "Enable proving of compressed accounts.", - default: true, - exclusive: ["skip-prover"], - }), - "prove-new-addresses": Flags.boolean({ - description: "Enable proving of new addresses.", - default: true, - exclusive: ["skip-prover"], - }), "relax-indexer-version-constraint": Flags.boolean({ description: "Disables indexer version check. 
Only use if you know what you are doing.", @@ -65,9 +55,31 @@ class SetupCommand extends Command { exclusive: ["skip-prover"], }), "prover-run-mode": Flags.string({ - description: "Specify the running mode for the prover (test or full)", - options: ["test", "full"] as const, - default: "full", + description: + "Specify the running mode for the prover (forester, forester-test, rpc, or full)", + options: [ + "rpc", + "forester", + "forester-test", + "full", + "full-test", + ] as const, + required: false, + exclusive: ["skip-prover"], + }), + circuit: Flags.string({ + description: "Specify individual circuits to enable.", + options: [ + "inclusion", + "non-inclusion", + "combined", + "append", + "update", + "append-test", + "update-test", + ], + multiple: true, + required: false, exclusive: ["skip-prover"], }), "limit-ledger-size": Flags.integer({ @@ -94,6 +106,15 @@ class SetupCommand extends Command { const loader = new CustomLoader("Performing setup tasks...\n"); loader.start(); + if ( + !flags["skip-prover"] && + !flags["prover-run-mode"] && + !flags["circuit"] + ) { + this.log("Please specify --prover-run-mode or --circuit."); + return; + } + if (flags["stop"] === true) { await stopTestEnv({ indexer: !flags["skip-indexer"], @@ -110,11 +131,18 @@ class SetupCommand extends Command { gossipHost: flags["gossip-host"], indexerPort: flags["indexer-port"], proverPort: flags["prover-port"], - proveCompressedAccounts: flags["prove-compressed-accounts"], - proveNewAddresses: flags["prove-new-addresses"], prover: !flags["skip-prover"], skipSystemAccounts: flags["skip-system-accounts"], - proverRunMode: flags["prover-run-mode"] as "test" | "full" | undefined, + proverRunMode: flags["prover-run-mode"] as + | "inclusion" + | "non-inclusion" + | "forester" + | "forester-test" + | "rpc" + | "full" + | "full-test" + | undefined, + circuits: flags["circuit"], }); this.log("\nSetup tasks completed successfully \x1b[32m✔\x1b[0m"); } diff --git a/cli/src/utils/initTestEnv.ts b/cli/src/utils/initTestEnv.ts index a847f60cbe..f939df9206 100644 --- a/cli/src/utils/initTestEnv.ts +++ b/cli/src/utils/initTestEnv.ts @@ -60,12 +60,11 @@ export async function initTestEnv({ indexerPort = 8784, proverPort = 3001, gossipHost = "127.0.0.1", - proveCompressedAccounts = true, - proveNewAddresses = false, checkPhotonVersion = true, photonDatabaseUrl, limitLedgerSize, - proverRunMode = "test", + proverRunMode, + circuits, }: { additionalPrograms?: { address: string; path: string }[]; skipSystemAccounts?: boolean; @@ -75,12 +74,18 @@ export async function initTestEnv({ indexerPort?: number; proverPort?: number; gossipHost?: string; - proveCompressedAccounts?: boolean; - proveNewAddresses?: boolean; checkPhotonVersion?: boolean; photonDatabaseUrl?: string; limitLedgerSize?: number; - proverRunMode?: "test" | "full"; + proverRunMode?: + | "inclusion" + | "non-inclusion" + | "forester" + | "forester-test" + | "rpc" + | "full" + | "full-test"; + circuits?: string[]; }) { const initAccounts = async () => { const anchorProvider = await setAnchorProvider(); @@ -119,12 +124,7 @@ export async function initTestEnv({ const config = getConfig(); config.proverUrl = `http://127.0.0.1:${proverPort}`; setConfig(config); - await startProver( - proverPort, - proveCompressedAccounts, - proveNewAddresses, - proverRunMode, - ); + await startProver(proverPort, proverRunMode, circuits); } } diff --git a/cli/src/utils/process.ts b/cli/src/utils/process.ts index db34d30cec..22490f2313 100644 --- a/cli/src/utils/process.ts +++ 
b/cli/src/utils/process.ts @@ -15,7 +15,6 @@ export async function killProcess(processName: string) { ); for (const proc of targetProcesses) { - console.log(`Killing process with PID and name: ${proc.pid} ${proc.name}`); try { process.kill(proc.pid, "SIGKILL"); } catch (error) { diff --git a/cli/src/utils/processProverServer.ts b/cli/src/utils/processProverServer.ts index 2e0f703797..d4b55dc04b 100644 --- a/cli/src/utils/processProverServer.ts +++ b/cli/src/utils/processProverServer.ts @@ -12,37 +12,46 @@ const KEYS_DIR = "proving-keys/"; export async function killProver() { await killProcess(getProverNameByArch()); - // Temporary fix for the case when prover is instantiated via prover.sh: await killProcess(LIGHT_PROVER_PROCESS_NAME); } export async function startProver( proverPort: number, - proveCompressedAccounts: boolean, - proveNewAddresses: boolean, - runMode: string = "full", + runMode: string | undefined, + circuits: string[] | undefined = [], ) { - if (!proveCompressedAccounts && !proveNewAddresses) { - console.log( - "No flags provided. Please provide at least one flag to start the prover.", - ); - process.exit(1); - } console.log("Kill existing prover process..."); await killProver(); await killProcessByPort(proverPort); const keysDir = path.join(__dirname, "../..", "bin", KEYS_DIR); const args = ["start"]; - args.push(`--inclusion=${proveCompressedAccounts ? "true" : "false"}`); - args.push(`--non-inclusion=${proveNewAddresses ? "true" : "false"}`); args.push("--keys-dir", keysDir); args.push("--prover-address", `0.0.0.0:${proverPort}`); - args.push("--run-mode", runMode); - console.log(`Starting prover in ${runMode} mode...`); + if (runMode != null) { + args.push("--run-mode", runMode); + } + + for (const circuit of circuits) { + console.log(`Adding circuit: ${circuit}`); + args.push("--circuit", circuit); + } + + if (runMode != null) { + console.log(`Starting prover in ${runMode} mode...`); + } else if (circuits && circuits.length > 0) { + console.log(`Starting prover with circuits: ${circuits.join(", ")}...`); + } + + if ((!circuits || circuits.length === 0) && runMode == null) { + runMode = "rpc"; + args.push("--run-mode", runMode); + console.log(`Starting prover with fallback ${runMode} mode...`); + } + spawnBinary(getProverPathByArch(), args); await waitForServers([{ port: proverPort, path: "/" }]); - console.log(`Prover started successfully in ${runMode} mode!`); + console.log(`Prover started successfully!`); } export function getProverNameByArch(): string { diff --git a/client/src/indexer/test_indexer.rs b/client/src/indexer/test_indexer.rs index 2ebafc02ec..3ddd8a3da2 100644 --- a/client/src/indexer/test_indexer.rs +++ b/client/src/indexer/test_indexer.rs @@ -1,9 +1,15 @@ -use std::{marker::PhantomData, thread::sleep, time::Duration}; +use std::{marker::PhantomData, time::Duration}; +use crate::{ + indexer::Indexer, + rpc::{merkle_tree::MerkleTreeExt, RpcConnection}, + transaction_params::FeeConfig, +}; use borsh::BorshDeserialize; use light_hasher::Poseidon; use light_indexed_merkle_tree::{array::IndexedArray, reference::IndexedMerkleTree}; use light_merkle_tree_reference::MerkleTree; +use light_prover_client::gnark::helpers::{spawn_prover, ProofType, ProverConfig}; use light_prover_client::{ gnark::{ combined_json_formatter::CombinedJsonStruct, @@ -34,12 +40,6 @@ use num_traits::FromBytes; use reqwest::Client; use solana_sdk::pubkey::Pubkey; -use crate::{ - indexer::Indexer, - rpc::{merkle_tree::MerkleTreeExt, RpcConnection}, - transaction_params::FeeConfig, -}; - 
use super::{ AddressMerkleTreeAccounts, AddressMerkleTreeBundle, StateMerkleTreeAccounts, StateMerkleTreeBundle, @@ -349,34 +349,20 @@ where .map(|accounts| Self::add_address_merkle_tree_bundle(accounts)) .collect::>(); - let mut types = vec!["start-prover"]; - if !inclusion { - types.push("-c"); - } - if !non_inclusion { - types.push("-n"); - } + let mut prover_config = ProverConfig { + circuits: vec![], + run_mode: None, + }; - #[cfg(feature = "devenv")] - { - let project_root = light_prover_client::gnark::helpers::get_project_root().unwrap(); - let project_root = project_root.trim_end_matches('\n').to_string(); - let cli_bin_path = format!("{}/cli/test_bin", project_root); - std::process::Command::new("./run") - .args(types.as_slice()) - .current_dir(cli_bin_path) - .spawn() - .expect("Failed to start prover"); - sleep(Duration::from_secs(1)); + if inclusion { + prover_config.circuits.push(ProofType::Inclusion); } - #[cfg(not(feature = "devenv"))] - { - std::process::Command::new("light") - .args(types.as_slice()) - .spawn() - .expect("Failed to start prover"); - sleep(Duration::from_secs(1)); + if non_inclusion { + prover_config.circuits.push(ProofType::NonInclusion); } + + spawn_prover(true, prover_config).await; + health_check(20, 1).await; Self { diff --git a/client/src/rpc/solana_rpc.rs b/client/src/rpc/solana_rpc.rs index 746c4acf03..fc703bd7b8 100644 --- a/client/src/rpc/solana_rpc.rs +++ b/client/src/rpc/solana_rpc.rs @@ -54,8 +54,8 @@ pub struct RetryConfig { impl Default for RetryConfig { fn default() -> Self { RetryConfig { - max_retries: 10, - retry_delay: Duration::from_millis(100), + max_retries: 20, + retry_delay: Duration::from_secs(1), timeout: Duration::from_secs(60), } } diff --git a/examples/token-escrow/programs/token-escrow/tests/test.rs b/examples/token-escrow/programs/token-escrow/tests/test.rs index 150e984fd6..9ccd2f23d5 100644 --- a/examples/token-escrow/programs/token-escrow/tests/test.rs +++ b/examples/token-escrow/programs/token-escrow/tests/test.rs @@ -11,6 +11,7 @@ // release compressed tokens use light_hasher::Poseidon; +use light_prover_client::gnark::helpers::{ProofType, ProverConfig}; use light_system_program::sdk::{compressed_account::MerkleContext, event::PublicTransactionEvent}; use light_test_utils::indexer::TestIndexer; use light_test_utils::spl::{create_mint_helper, mint_tokens_helper}; @@ -50,7 +51,14 @@ async fn test_escrow_pda() { let payer = rpc.get_payer().insecure_clone(); let payer_pubkey = payer.pubkey(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let test_indexer = TestIndexer::init_from_env(&payer, &env, true, false); + let test_indexer = TestIndexer::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ); let mint = create_mint_helper(&mut rpc, &payer).await; let mut test_indexer = test_indexer.await; diff --git a/examples/token-escrow/programs/token-escrow/tests/test_compressed_pda.rs b/examples/token-escrow/programs/token-escrow/tests/test_compressed_pda.rs index 146578f17a..7694b9f6a4 100644 --- a/examples/token-escrow/programs/token-escrow/tests/test_compressed_pda.rs +++ b/examples/token-escrow/programs/token-escrow/tests/test_compressed_pda.rs @@ -15,6 +15,7 @@ use anchor_lang::AnchorDeserialize; use light_hasher::{Hasher, Poseidon}; +use light_prover_client::gnark::helpers::{ProverConfig, ProverMode}; use light_system_program::sdk::address::derive_address; use light_system_program::sdk::compressed_account::MerkleContext; use 
light_system_program::sdk::event::PublicTransactionEvent; @@ -41,7 +42,14 @@ async fn test_escrow_with_compressed_pda() { .await; let payer = rpc.get_payer().insecure_clone(); - let test_indexer = TestIndexer::init_from_env(&payer, &env, true, true); + let test_indexer = TestIndexer::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ); let mint = create_mint_helper(&mut rpc, &payer).await; let mut test_indexer = test_indexer.await; diff --git a/forester/src/utils.rs b/forester/src/utils.rs index faa0ffd24c..802469a8cc 100644 --- a/forester/src/utils.rs +++ b/forester/src/utils.rs @@ -1,68 +1,9 @@ use light_client::rpc::RpcConnection; use light_registry::protocol_config::state::{ProtocolConfig, ProtocolConfigPda}; use light_registry::utils::get_protocol_config_pda_address; -use std::process::Command; use std::time::{SystemTime, UNIX_EPOCH}; -use sysinfo::{Signal, System}; use tracing::debug; -#[derive(Debug)] -pub struct LightValidatorConfig { - pub path: String, - pub enable_indexer: bool, - pub enable_prover: bool, - pub wait_time: u64, -} - -impl Default for LightValidatorConfig { - fn default() -> Self { - Self { - path: "../cli/test_bin/run test-validator".to_string(), - enable_indexer: false, - enable_prover: false, - wait_time: 35, - } - } -} - -pub async fn spawn_validator(config: LightValidatorConfig) { - debug!("Starting validator..."); - debug!("Config: {:?}", config); - let mut path = config.path.clone(); - if !config.enable_indexer { - path.push_str(" --skip-indexer"); - } - if !config.enable_prover { - path.push_str(" --skip-prover"); - } - debug!("Starting validator with command: {}", path); - - Command::new("sh") - .arg("-c") - .arg(path) - .spawn() - .expect("Failed to start server process"); - tokio::time::sleep(tokio::time::Duration::from_secs(config.wait_time)).await; - debug!("Validator started successfully"); -} - -pub async fn restart_photon() { - kill_photon(); - Command::new("photon") - .spawn() - .expect("Failed to start server process"); -} - -pub fn kill_photon() { - let mut system = System::new_all(); - system.refresh_all(); - - for process in system.processes().values() { - if process.name() == "photon" { - process.kill_with(Signal::Term); - } - } -} pub fn decode_hash(account: &str) -> [u8; 32] { let bytes = bs58::decode(account).into_vec().unwrap(); let mut arr = [0u8; 32]; @@ -70,13 +11,6 @@ pub fn decode_hash(account: &str) -> [u8; 32] { arr } -pub fn u8_arr_to_hex_string(arr: &[u8]) -> String { - arr.iter() - .map(|b| format!("{:02x}", b)) - .collect::>() - .join("") -} - pub async fn get_protocol_config(rpc: &mut R) -> ProtocolConfig { let authority_pda = get_protocol_config_pda_address(); let protocol_config_account = rpc diff --git a/forester/tests/e2e_test.rs b/forester/tests/e2e_test.rs index 343dac9ed4..18030a9d0b 100644 --- a/forester/tests/e2e_test.rs +++ b/forester/tests/e2e_test.rs @@ -2,12 +2,13 @@ use account_compression::utils::constants::{ADDRESS_QUEUE_VALUES, STATE_NULLIFIE use account_compression::AddressMerkleTreeAccount; use forester::queue_helpers::fetch_queue_item_data; use forester::run_pipeline; -use forester::utils::{get_protocol_config, LightValidatorConfig}; +use forester::utils::get_protocol_config; use forester_utils::indexer::{AddressMerkleTreeAccounts, StateMerkleTreeAccounts}; use forester_utils::registry::register_test_forester; use light_client::rpc::solana_rpc::SolanaRpcUrl; use light_client::rpc::{RpcConnection, RpcError, SolanaRpcConnection}; use 
light_client::rpc_pool::SolanaRpcPool; +use light_prover_client::gnark::helpers::{LightValidatorConfig, ProverConfig, ProverMode}; use light_registry::utils::{get_epoch_pda_address, get_forester_epoch_pda_from_authority}; use light_registry::{EpochPda, ForesterEpochPda}; use light_test_utils::e2e_test_env::E2ETestEnv; @@ -33,9 +34,11 @@ use test_utils::*; async fn test_epoch_monitor_with_test_indexer_and_1_forester() { init(Some(LightValidatorConfig { enable_indexer: false, - enable_prover: true, wait_time: 10, - ..LightValidatorConfig::default() + prover_config: Some(ProverConfig { + run_mode: Some(ProverMode::Forester), + circuits: vec![], + }), })) .await; @@ -98,7 +101,7 @@ async fn test_epoch_monitor_with_test_indexer_and_1_forester() { let config = Arc::new(config); let indexer: TestIndexer = - TestIndexer::init_from_env(&config.payer_keypair, &env_accounts, false, false).await; + TestIndexer::init_from_env(&config.payer_keypair, &env_accounts, None).await; let mut env = E2ETestEnv::>::new( rpc, @@ -263,9 +266,11 @@ pub async fn assert_queue_len( async fn test_epoch_monitor_with_2_foresters() { init(Some(LightValidatorConfig { enable_indexer: false, - enable_prover: true, - wait_time: 10, - ..LightValidatorConfig::default() + wait_time: 15, + prover_config: Some(ProverConfig { + run_mode: Some(ProverMode::Forester), + circuits: vec![], + }), })) .await; let forester_keypair1 = Keypair::new(); @@ -353,7 +358,7 @@ async fn test_epoch_monitor_with_2_foresters() { let config2 = Arc::new(config2); let indexer: TestIndexer = - TestIndexer::init_from_env(&config1.payer_keypair, &env_accounts, false, false).await; + TestIndexer::init_from_env(&config1.payer_keypair, &env_accounts, None).await; let mut env = E2ETestEnv::>::new( rpc, @@ -489,7 +494,7 @@ async fn test_epoch_monitor_with_2_foresters() { panic!("Test timed out after {:?}", TIMEOUT_DURATION); } - assert_trees_are_rollledover( + assert_trees_are_rolledover( &pool, &state_tree_with_rollover_threshold_0, &address_tree_with_rollover_threshold_0, @@ -529,7 +534,7 @@ async fn test_epoch_monitor_with_2_foresters() { service_handle1.await.unwrap().unwrap(); service_handle2.await.unwrap().unwrap(); } -pub async fn assert_trees_are_rollledover( +pub async fn assert_trees_are_rolledover( pool: &SolanaRpcPool, state_tree_with_rollover_threshold_0: &Pubkey, address_tree_with_rollover_threshold_0: &Pubkey, @@ -598,9 +603,11 @@ async fn test_epoch_double_registration() { println!("*****************************************************************"); init(Some(LightValidatorConfig { enable_indexer: false, - enable_prover: true, wait_time: 10, - ..LightValidatorConfig::default() + prover_config: Some(ProverConfig { + run_mode: Some(ProverMode::Forester), + circuits: vec![], + }), })) .await; @@ -663,7 +670,7 @@ async fn test_epoch_double_registration() { let config = Arc::new(config); let indexer: TestIndexer = - TestIndexer::init_from_env(&config.payer_keypair, &env_accounts, false, false).await; + TestIndexer::init_from_env(&config.payer_keypair, &env_accounts, None).await; let indexer = Arc::new(Mutex::new(indexer)); @@ -694,7 +701,7 @@ async fn test_epoch_double_registration() { let current_epoch = protocol_config.get_current_epoch(solana_slot); let forester_epoch_pda_address = - get_forester_epoch_pda_from_authority(&&config.derivation_pubkey, current_epoch).0; + get_forester_epoch_pda_from_authority(&config.derivation_pubkey, current_epoch).0; let forester_epoch_pda = rpc .get_anchor_account::(&forester_epoch_pda_address) diff --git 
a/forester/tests/test_utils.rs b/forester/tests/test_utils.rs index d2543bb20a..0a82fa35fc 100644 --- a/forester/tests/test_utils.rs +++ b/forester/tests/test_utils.rs @@ -3,10 +3,10 @@ use forester::config::{ExternalServicesConfig, GeneralConfig}; use forester::metrics::register_metrics; use forester::photon_indexer::PhotonIndexer; use forester::telemetry::setup_telemetry; -use forester::utils::{spawn_validator, LightValidatorConfig}; use forester::ForesterConfig; use forester_utils::indexer::{Indexer, IndexerError, NewAddressProofWithContext}; use light_client::rpc::{RpcConnection, SolanaRpcConnection}; +use light_prover_client::gnark::helpers::{spawn_validator, LightValidatorConfig}; use light_test_utils::e2e_test_env::{GeneralActionConfig, KeypairActionConfig, User}; use light_test_utils::indexer::TestIndexer; use light_test_utils::test_env::get_test_env_accounts; @@ -22,16 +22,8 @@ pub async fn init(config: Option) { #[allow(dead_code)] pub async fn spawn_test_validator(config: Option) { - if let Some(config) = config { - spawn_validator(config).await; - } else { - let config = LightValidatorConfig { - enable_indexer: false, - enable_prover: false, - ..LightValidatorConfig::default() - }; - spawn_validator(config).await; - } + let config = config.unwrap_or_default(); + spawn_validator(config).await; } #[allow(dead_code)] diff --git a/js/compressed-token/package.json b/js/compressed-token/package.json index cff51421f4..a537df4ce3 100644 --- a/js/compressed-token/package.json +++ b/js/compressed-token/package.json @@ -26,7 +26,7 @@ "test-all": "vitest run", "test:unit:all": "EXCLUDE_E2E=true vitest run", "test-all:verbose": "vitest run --reporter=verbose", - "test-validator": "./../../cli/test_bin/run test-validator --prover-run-mode test", + "test-validator": "./../../cli/test_bin/run test-validator --prover-run-mode rpc", "test:e2e:create-mint": "pnpm test-validator && vitest run tests/e2e/create-mint.test.ts", "test:e2e:create-token-pool": "pnpm test-validator && vitest run tests/e2e/create-token-pool.test.ts", "test:e2e:mint-to": "pnpm test-validator && vitest run tests/e2e/mint-to.test.ts --reporter=verbose", diff --git a/js/compressed-token/src/idl/light_compressed_token.ts b/js/compressed-token/src/idl/light_compressed_token.ts index 1aa2415b3f..c9fcbd8c9b 100644 --- a/js/compressed-token/src/idl/light_compressed_token.ts +++ b/js/compressed-token/src/idl/light_compressed_token.ts @@ -1549,23 +1549,128 @@ export type LightCompressedToken = { errors: [ { code: 6000; - name: 'SignerCheckFailed'; - msg: 'Signer check failed'; + name: 'PublicKeyAmountMissmatch'; + msg: 'public keys and amounts must be of same length'; }, { code: 6001; - name: 'CreateTransferInstructionFailed'; - msg: 'Create transfer instruction failed'; + name: 'ComputeInputSumFailed'; + msg: 'ComputeInputSumFailed'; }, { code: 6002; - name: 'AccountNotFound'; - msg: 'Account not found'; + name: 'ComputeOutputSumFailed'; + msg: 'ComputeOutputSumFailed'; }, { code: 6003; - name: 'SerializationError'; - msg: 'Serialization error'; + name: 'ComputeCompressSumFailed'; + msg: 'ComputeCompressSumFailed'; + }, + { + code: 6004; + name: 'ComputeDecompressSumFailed'; + msg: 'ComputeDecompressSumFailed'; + }, + { + code: 6005; + name: 'SumCheckFailed'; + msg: 'SumCheckFailed'; + }, + { + code: 6006; + name: 'DecompressRecipientUndefinedForDecompress'; + msg: 'DecompressRecipientUndefinedForDecompress'; + }, + { + code: 6007; + name: 'CompressedPdaUndefinedForDecompress'; + msg: 'CompressedPdaUndefinedForDecompress'; + }, + { 
+ code: 6008; + name: 'DeCompressAmountUndefinedForDecompress'; + msg: 'DeCompressAmountUndefinedForDecompress'; + }, + { + code: 6009; + name: 'CompressedPdaUndefinedForCompress'; + msg: 'CompressedPdaUndefinedForCompress'; + }, + { + code: 6010; + name: 'DeCompressAmountUndefinedForCompress'; + msg: 'DeCompressAmountUndefinedForCompress'; + }, + { + code: 6011; + name: 'DelegateSignerCheckFailed'; + msg: 'DelegateSignerCheckFailed'; + }, + { + code: 6012; + name: 'MintTooLarge'; + msg: 'Minted amount greater than u64::MAX'; + }, + { + code: 6013; + name: 'SplTokenSupplyMismatch'; + msg: 'SplTokenSupplyMismatch'; + }, + { + code: 6014; + name: 'HeapMemoryCheckFailed'; + msg: 'HeapMemoryCheckFailed'; + }, + { + code: 6015; + name: 'InstructionNotCallable'; + msg: 'The instruction is not callable'; + }, + { + code: 6016; + name: 'ArithmeticUnderflow'; + msg: 'ArithmeticUnderflow'; + }, + { + code: 6017; + name: 'HashToFieldError'; + msg: 'HashToFieldError'; + }, + { + code: 6018; + name: 'InvalidAuthorityMint'; + msg: 'Expected the authority to be also a mint authority'; + }, + { + code: 6019; + name: 'InvalidFreezeAuthority'; + msg: 'Provided authority is not the freeze authority'; + }, + { + code: 6020; + name: 'InvalidDelegateIndex'; + }, + { + code: 6021; + name: 'TokenPoolPdaUndefined'; + }, + { + code: 6022; + name: 'IsTokenPoolPda'; + msg: 'Compress or decompress recipient is the same account as the token pool pda.'; + }, + { + code: 6023; + name: 'InvalidTokenPoolPda'; + }, + { + code: 6024; + name: 'NoInputTokenAccountsProvided'; + }, + { + code: 6025; + name: 'NoInputsProvided'; }, ]; }; @@ -3125,23 +3230,128 @@ export const IDL: LightCompressedToken = { errors: [ { code: 6000, - name: 'SignerCheckFailed', - msg: 'Signer check failed', + name: 'PublicKeyAmountMissmatch', + msg: 'public keys and amounts must be of same length', }, { code: 6001, - name: 'CreateTransferInstructionFailed', - msg: 'Create transfer instruction failed', + name: 'ComputeInputSumFailed', + msg: 'ComputeInputSumFailed', }, { code: 6002, - name: 'AccountNotFound', - msg: 'Account not found', + name: 'ComputeOutputSumFailed', + msg: 'ComputeOutputSumFailed', }, { code: 6003, - name: 'SerializationError', - msg: 'Serialization error', + name: 'ComputeCompressSumFailed', + msg: 'ComputeCompressSumFailed', + }, + { + code: 6004, + name: 'ComputeDecompressSumFailed', + msg: 'ComputeDecompressSumFailed', + }, + { + code: 6005, + name: 'SumCheckFailed', + msg: 'SumCheckFailed', + }, + { + code: 6006, + name: 'DecompressRecipientUndefinedForDecompress', + msg: 'DecompressRecipientUndefinedForDecompress', + }, + { + code: 6007, + name: 'CompressedPdaUndefinedForDecompress', + msg: 'CompressedPdaUndefinedForDecompress', + }, + { + code: 6008, + name: 'DeCompressAmountUndefinedForDecompress', + msg: 'DeCompressAmountUndefinedForDecompress', + }, + { + code: 6009, + name: 'CompressedPdaUndefinedForCompress', + msg: 'CompressedPdaUndefinedForCompress', + }, + { + code: 6010, + name: 'DeCompressAmountUndefinedForCompress', + msg: 'DeCompressAmountUndefinedForCompress', + }, + { + code: 6011, + name: 'DelegateSignerCheckFailed', + msg: 'DelegateSignerCheckFailed', + }, + { + code: 6012, + name: 'MintTooLarge', + msg: 'Minted amount greater than u64::MAX', + }, + { + code: 6013, + name: 'SplTokenSupplyMismatch', + msg: 'SplTokenSupplyMismatch', + }, + { + code: 6014, + name: 'HeapMemoryCheckFailed', + msg: 'HeapMemoryCheckFailed', + }, + { + code: 6015, + name: 'InstructionNotCallable', + msg: 'The instruction is not 
callable', + }, + { + code: 6016, + name: 'ArithmeticUnderflow', + msg: 'ArithmeticUnderflow', + }, + { + code: 6017, + name: 'HashToFieldError', + msg: 'HashToFieldError', + }, + { + code: 6018, + name: 'InvalidAuthorityMint', + msg: 'Expected the authority to be also a mint authority', + }, + { + code: 6019, + name: 'InvalidFreezeAuthority', + msg: 'Provided authority is not the freeze authority', + }, + { + code: 6020, + name: 'InvalidDelegateIndex', + }, + { + code: 6021, + name: 'TokenPoolPdaUndefined', + }, + { + code: 6022, + name: 'IsTokenPoolPda', + msg: 'Compress or decompress recipient is the same account as the token pool pda.', + }, + { + code: 6023, + name: 'InvalidTokenPoolPda', + }, + { + code: 6024, + name: 'NoInputTokenAccountsProvided', + }, + { + code: 6025, + name: 'NoInputsProvided', }, ], }; diff --git a/js/stateless.js/package.json b/js/stateless.js/package.json index 9599fd569a..155ab8f6e7 100644 --- a/js/stateless.js/package.json +++ b/js/stateless.js/package.json @@ -78,7 +78,7 @@ "test": "pnpm test:unit:all && pnpm test:e2e:all", "test-all": "vitest run", "test:unit:all": "EXCLUDE_E2E=true vitest run", - "test-validator": "./../../cli/test_bin/run test-validator --prover-run-mode test", + "test-validator": "./../../cli/test_bin/run test-validator --prover-run-mode rpc", "test:e2e:transfer": "pnpm test-validator && vitest run tests/e2e/transfer.test.ts --reporter=verbose", "test:e2e:compress": "pnpm test-validator && vitest run tests/e2e/compress.test.ts --reporter=verbose", "test:e2e:test-rpc": "pnpm test-validator && vitest run tests/e2e/test-rpc.test.ts", diff --git a/js/stateless.js/src/idls/light_compressed_token.ts b/js/stateless.js/src/idls/light_compressed_token.ts index 1aa2415b3f..c9fcbd8c9b 100644 --- a/js/stateless.js/src/idls/light_compressed_token.ts +++ b/js/stateless.js/src/idls/light_compressed_token.ts @@ -1549,23 +1549,128 @@ export type LightCompressedToken = { errors: [ { code: 6000; - name: 'SignerCheckFailed'; - msg: 'Signer check failed'; + name: 'PublicKeyAmountMissmatch'; + msg: 'public keys and amounts must be of same length'; }, { code: 6001; - name: 'CreateTransferInstructionFailed'; - msg: 'Create transfer instruction failed'; + name: 'ComputeInputSumFailed'; + msg: 'ComputeInputSumFailed'; }, { code: 6002; - name: 'AccountNotFound'; - msg: 'Account not found'; + name: 'ComputeOutputSumFailed'; + msg: 'ComputeOutputSumFailed'; }, { code: 6003; - name: 'SerializationError'; - msg: 'Serialization error'; + name: 'ComputeCompressSumFailed'; + msg: 'ComputeCompressSumFailed'; + }, + { + code: 6004; + name: 'ComputeDecompressSumFailed'; + msg: 'ComputeDecompressSumFailed'; + }, + { + code: 6005; + name: 'SumCheckFailed'; + msg: 'SumCheckFailed'; + }, + { + code: 6006; + name: 'DecompressRecipientUndefinedForDecompress'; + msg: 'DecompressRecipientUndefinedForDecompress'; + }, + { + code: 6007; + name: 'CompressedPdaUndefinedForDecompress'; + msg: 'CompressedPdaUndefinedForDecompress'; + }, + { + code: 6008; + name: 'DeCompressAmountUndefinedForDecompress'; + msg: 'DeCompressAmountUndefinedForDecompress'; + }, + { + code: 6009; + name: 'CompressedPdaUndefinedForCompress'; + msg: 'CompressedPdaUndefinedForCompress'; + }, + { + code: 6010; + name: 'DeCompressAmountUndefinedForCompress'; + msg: 'DeCompressAmountUndefinedForCompress'; + }, + { + code: 6011; + name: 'DelegateSignerCheckFailed'; + msg: 'DelegateSignerCheckFailed'; + }, + { + code: 6012; + name: 'MintTooLarge'; + msg: 'Minted amount greater than u64::MAX'; + }, + { + code: 6013; 
+ name: 'SplTokenSupplyMismatch'; + msg: 'SplTokenSupplyMismatch'; + }, + { + code: 6014; + name: 'HeapMemoryCheckFailed'; + msg: 'HeapMemoryCheckFailed'; + }, + { + code: 6015; + name: 'InstructionNotCallable'; + msg: 'The instruction is not callable'; + }, + { + code: 6016; + name: 'ArithmeticUnderflow'; + msg: 'ArithmeticUnderflow'; + }, + { + code: 6017; + name: 'HashToFieldError'; + msg: 'HashToFieldError'; + }, + { + code: 6018; + name: 'InvalidAuthorityMint'; + msg: 'Expected the authority to be also a mint authority'; + }, + { + code: 6019; + name: 'InvalidFreezeAuthority'; + msg: 'Provided authority is not the freeze authority'; + }, + { + code: 6020; + name: 'InvalidDelegateIndex'; + }, + { + code: 6021; + name: 'TokenPoolPdaUndefined'; + }, + { + code: 6022; + name: 'IsTokenPoolPda'; + msg: 'Compress or decompress recipient is the same account as the token pool pda.'; + }, + { + code: 6023; + name: 'InvalidTokenPoolPda'; + }, + { + code: 6024; + name: 'NoInputTokenAccountsProvided'; + }, + { + code: 6025; + name: 'NoInputsProvided'; }, ]; }; @@ -3125,23 +3230,128 @@ export const IDL: LightCompressedToken = { errors: [ { code: 6000, - name: 'SignerCheckFailed', - msg: 'Signer check failed', + name: 'PublicKeyAmountMissmatch', + msg: 'public keys and amounts must be of same length', }, { code: 6001, - name: 'CreateTransferInstructionFailed', - msg: 'Create transfer instruction failed', + name: 'ComputeInputSumFailed', + msg: 'ComputeInputSumFailed', }, { code: 6002, - name: 'AccountNotFound', - msg: 'Account not found', + name: 'ComputeOutputSumFailed', + msg: 'ComputeOutputSumFailed', }, { code: 6003, - name: 'SerializationError', - msg: 'Serialization error', + name: 'ComputeCompressSumFailed', + msg: 'ComputeCompressSumFailed', + }, + { + code: 6004, + name: 'ComputeDecompressSumFailed', + msg: 'ComputeDecompressSumFailed', + }, + { + code: 6005, + name: 'SumCheckFailed', + msg: 'SumCheckFailed', + }, + { + code: 6006, + name: 'DecompressRecipientUndefinedForDecompress', + msg: 'DecompressRecipientUndefinedForDecompress', + }, + { + code: 6007, + name: 'CompressedPdaUndefinedForDecompress', + msg: 'CompressedPdaUndefinedForDecompress', + }, + { + code: 6008, + name: 'DeCompressAmountUndefinedForDecompress', + msg: 'DeCompressAmountUndefinedForDecompress', + }, + { + code: 6009, + name: 'CompressedPdaUndefinedForCompress', + msg: 'CompressedPdaUndefinedForCompress', + }, + { + code: 6010, + name: 'DeCompressAmountUndefinedForCompress', + msg: 'DeCompressAmountUndefinedForCompress', + }, + { + code: 6011, + name: 'DelegateSignerCheckFailed', + msg: 'DelegateSignerCheckFailed', + }, + { + code: 6012, + name: 'MintTooLarge', + msg: 'Minted amount greater than u64::MAX', + }, + { + code: 6013, + name: 'SplTokenSupplyMismatch', + msg: 'SplTokenSupplyMismatch', + }, + { + code: 6014, + name: 'HeapMemoryCheckFailed', + msg: 'HeapMemoryCheckFailed', + }, + { + code: 6015, + name: 'InstructionNotCallable', + msg: 'The instruction is not callable', + }, + { + code: 6016, + name: 'ArithmeticUnderflow', + msg: 'ArithmeticUnderflow', + }, + { + code: 6017, + name: 'HashToFieldError', + msg: 'HashToFieldError', + }, + { + code: 6018, + name: 'InvalidAuthorityMint', + msg: 'Expected the authority to be also a mint authority', + }, + { + code: 6019, + name: 'InvalidFreezeAuthority', + msg: 'Provided authority is not the freeze authority', + }, + { + code: 6020, + name: 'InvalidDelegateIndex', + }, + { + code: 6021, + name: 'TokenPoolPdaUndefined', + }, + { + code: 6022, + name: 
'IsTokenPoolPda', + msg: 'Compress or decompress recipient is the same account as the token pool pda.', + }, + { + code: 6023, + name: 'InvalidTokenPoolPda', + }, + { + code: 6024, + name: 'NoInputTokenAccountsProvided', + }, + { + code: 6025, + name: 'NoInputsProvided', }, ], }; diff --git a/light-prover/integration_test.go b/light-prover/integration_test.go index 39d5058672..73616b6449 100644 --- a/light-prover/integration_test.go +++ b/light-prover/integration_test.go @@ -29,9 +29,12 @@ func proveEndpoint() string { func StartServer(isLightweight bool) { logging.Logger().Info().Msg("Setting up the prover") - var circuitTypes = []prover.CircuitType{prover.Inclusion, prover.NonInclusion, prover.Combined, prover.BatchAppend, prover.BatchUpdate} - var keys = prover.GetKeys("./proving-keys/", circuitTypes, isLightweight) - + var keys []string + if isLightweight { + keys = prover.GetKeys("./proving-keys/", prover.FullTest, []string{}) + } else { + keys = prover.GetKeys("./proving-keys/", prover.Full, []string{}) + } var pssv1 []*prover.ProvingSystemV1 var pssv2 []*prover.ProvingSystemV2 @@ -163,13 +166,13 @@ func runFullOnlyTests(t *testing.T) { func runLightweightOnlyTests(t *testing.T) { t.Run("testInclusionHappyPath26_1", testInclusionHappyPath26_1) - t.Run("testBatchAppendHappyPath10_10", testBatchAppendHappyPath10_10) - t.Run("testBatchAppendWithPreviousState10_10", testBatchAppendWithPreviousState10_10) + t.Run("testBatchAppendHappyPath26_10", testBatchAppendHappyPath26_10) + t.Run("testBatchAppendWithPreviousState26_10", testBatchAppendWithPreviousState26_10) - t.Run("testBatchUpdateHappyPath10_10", testBatchUpdateHappyPath10_10) - t.Run("testBatchUpdateWithPreviousState10_10", testBatchUpdateWithPreviousState10_10) - t.Run("testBatchUpdateWithSequentialFilling10_10", testBatchUpdateWithSequentialFilling10_10) - t.Run("testBatchUpdateInvalidInput10_10", testBatchUpdateInvalidInput10_10) + t.Run("testBatchUpdateHappyPath26_10", testBatchUpdateHappyPath26_10) + t.Run("testBatchUpdateWithPreviousState26_10", testBatchUpdateWithPreviousState26_10) + t.Run("testBatchUpdateWithSequentialFilling26_10", testBatchUpdateWithSequentialFilling26_10) + t.Run("testBatchUpdateInvalidInput26_10", testBatchUpdateInvalidInput26_10) } func testWrongMethod(t *testing.T) { @@ -500,8 +503,8 @@ func testBatchAppendHappyPath26_1000(t *testing.T) { } } -func testBatchAppendHappyPath10_10(t *testing.T) { - treeDepth := uint32(10) +func testBatchAppendHappyPath26_10(t *testing.T) { + treeDepth := uint32(26) batchSize := uint32(10) startIndex := uint32(0) params := prover.BuildAndUpdateBatchAppendParameters(treeDepth, batchSize, startIndex, nil) @@ -549,8 +552,8 @@ func testBatchAppendWithPreviousState26_100(t *testing.T) { } } -func testBatchAppendWithPreviousState10_10(t *testing.T) { - treeDepth := uint32(10) +func testBatchAppendWithPreviousState26_10(t *testing.T) { + treeDepth := uint32(26) batchSize := uint32(10) startIndex := uint32(0) @@ -578,8 +581,8 @@ func testBatchAppendWithPreviousState10_10(t *testing.T) { } } -func testBatchUpdateWithSequentialFilling10_10(t *testing.T) { - treeDepth := uint32(10) +func testBatchUpdateWithSequentialFilling26_10(t *testing.T) { + treeDepth := uint32(26) batchSize := uint32(10) startIndex := uint32(0) params := prover.BuildTestBatchUpdateTree(int(treeDepth), int(batchSize), nil, &startIndex) @@ -605,8 +608,8 @@ func testBatchUpdateWithSequentialFilling10_10(t *testing.T) { } } -func testBatchUpdateWithPreviousState10_10(t *testing.T) { - treeDepth := uint32(10) 
+func testBatchUpdateWithPreviousState26_10(t *testing.T) { + treeDepth := uint32(26) batchSize := uint32(10) // First batch @@ -637,8 +640,8 @@ func testBatchUpdateWithPreviousState10_10(t *testing.T) { } } -func testBatchUpdateInvalidInput10_10(t *testing.T) { - treeDepth := uint32(10) +func testBatchUpdateInvalidInput26_10(t *testing.T) { + treeDepth := uint32(26) batchSize := uint32(10) params := prover.BuildTestBatchUpdateTree(int(treeDepth), int(batchSize), nil, nil) @@ -662,8 +665,8 @@ func testBatchUpdateInvalidInput10_10(t *testing.T) { } } -func testBatchUpdateHappyPath10_10(t *testing.T) { - runBatchUpdateTest(t, 10, 10) +func testBatchUpdateHappyPath26_10(t *testing.T) { + runBatchUpdateTest(t, 26, 10) } func testBatchUpdateHappyPath26_100(t *testing.T) { diff --git a/light-prover/main.go b/light-prover/main.go index d1b9c8e196..dd431b45c4 100644 --- a/light-prover/main.go +++ b/light-prover/main.go @@ -3,21 +3,17 @@ package main import ( "bytes" _ "embed" - "encoding/hex" "encoding/json" "fmt" "io" "light/light-prover/logging" "light/light-prover/prover" "light/light-prover/server" - "math/big" "os" "os/signal" "path/filepath" - "strings" "github.com/consensys/gnark/constraint" - gnarkio "github.com/consensys/gnark/io" gnarkLogger "github.com/consensys/gnark/logger" "github.com/urfave/cli/v2" ) @@ -34,7 +30,7 @@ func runCli() { { Name: "setup", Flags: []cli.Flag{ - &cli.StringFlag{Name: "circuit", Usage: "Type of circuit (\"inclusion\" / \"non-inclusion\" / \"combined\" / \"append\" )", Required: true}, + &cli.StringFlag{Name: "circuit", Usage: "Type of circuit (\"inclusion\" / \"non-inclusion\" / \"combined\" / \"append\" \"update\" )", Required: true}, &cli.StringFlag{Name: "output", Usage: "Output file", Required: true}, &cli.StringFlag{Name: "output-vkey", Usage: "Output file", Required: true}, &cli.UintFlag{Name: "inclusion-tree-height", Usage: "[Inclusion]: Merkle tree height", Required: false}, @@ -42,13 +38,13 @@ func runCli() { &cli.UintFlag{Name: "non-inclusion-tree-height", Usage: "[Non-inclusion]: merkle tree height", Required: false}, &cli.UintFlag{Name: "non-inclusion-compressed-accounts", Usage: "[Non-inclusion]: number of compressed accounts", Required: false}, &cli.UintFlag{Name: "append-tree-height", Usage: "[Batch append]: tree height", Required: false}, - &cli.UintFlag{Name: "append-batch-size", Usage: "[Batch append]: barch size", Required: false}, + &cli.UintFlag{Name: "append-batch-size", Usage: "[Batch append]: batch size", Required: false}, &cli.UintFlag{Name: "update-tree-height", Usage: "[Batch update]: tree height", Required: false}, &cli.UintFlag{Name: "update-batch-size", Usage: "[Batch update]: batch size", Required: false}, }, Action: func(context *cli.Context) error { circuit := prover.CircuitType(context.String("circuit")) - if circuit != prover.Inclusion && circuit != prover.NonInclusion && circuit != prover.Combined && circuit != prover.BatchAppend && circuit != prover.BatchUpdate { + if circuit != prover.InclusionCircuitType && circuit != prover.NonInclusionCircuitType && circuit != prover.CombinedCircuitType && circuit != prover.BatchAppendCircuitType && circuit != prover.BatchUpdateCircuitType { return fmt.Errorf("invalid circuit type %s", circuit) } @@ -63,15 +59,15 @@ func runCli() { batchUpdateTreeHeight := uint32(context.Uint("update-tree-height")) batchUpdateBatchSize := uint32(context.Uint("update-batch-size")) - if (inclusionTreeHeight == 0 || inclusionNumberOfCompressedAccounts == 0) && circuit == 
prover.Inclusion { + if (inclusionTreeHeight == 0 || inclusionNumberOfCompressedAccounts == 0) && circuit == prover.InclusionCircuitType { return fmt.Errorf("inclusion tree height and number of compressed accounts must be provided") } - if (nonInclusionTreeHeight == 0 || nonInclusionNumberOfCompressedAccounts == 0) && circuit == prover.NonInclusion { + if (nonInclusionTreeHeight == 0 || nonInclusionNumberOfCompressedAccounts == 0) && circuit == prover.NonInclusionCircuitType { return fmt.Errorf("non-inclusion tree height and number of compressed accounts must be provided") } - if circuit == prover.Combined { + if circuit == prover.CombinedCircuitType { if inclusionTreeHeight == 0 || inclusionNumberOfCompressedAccounts == 0 { return fmt.Errorf("inclusion tree height and number of compressed accounts must be provided") } @@ -80,36 +76,36 @@ func runCli() { } } - if (batchAppendTreeHeight == 0 || batchAppendBatchSize == 0) && circuit == prover.BatchAppend { + if (batchAppendTreeHeight == 0 || batchAppendBatchSize == 0) && circuit == prover.BatchAppendCircuitType { return fmt.Errorf("[Batch append]: tree height and batch size must be provided") } - if (batchUpdateTreeHeight == 0 || batchUpdateBatchSize == 0) && circuit == prover.BatchUpdate { + if (batchUpdateTreeHeight == 0 || batchUpdateBatchSize == 0) && circuit == prover.BatchUpdateCircuitType { return fmt.Errorf("[Batch update]: tree height and batch size must be provided") } logging.Logger().Info().Msg("Running setup") var err error - if circuit == prover.BatchAppend { + if circuit == prover.BatchAppendCircuitType { var system *prover.ProvingSystemV2 - system, err = prover.SetupCircuitV2(prover.BatchAppend, batchAppendTreeHeight, batchAppendBatchSize) + system, err = prover.SetupCircuitV2(prover.BatchAppendCircuitType, batchAppendTreeHeight, batchAppendBatchSize) if err != nil { return err } - err = writeProvingSystem(system, path, pathVkey) - } else if circuit == prover.BatchUpdate { + err = prover.WriteProvingSystem(system, path, pathVkey) + } else if circuit == prover.BatchUpdateCircuitType { var system *prover.ProvingSystemV2 - system, err = prover.SetupCircuitV2(prover.BatchUpdate, batchUpdateTreeHeight, batchUpdateBatchSize) + system, err = prover.SetupCircuitV2(prover.BatchUpdateCircuitType, batchUpdateTreeHeight, batchUpdateBatchSize) if err != nil { return err } - err = writeProvingSystem(system, path, pathVkey) + err = prover.WriteProvingSystem(system, path, pathVkey) } else { var system *prover.ProvingSystemV1 system, err = prover.SetupCircuitV1(circuit, inclusionTreeHeight, inclusionNumberOfCompressedAccounts, nonInclusionTreeHeight, nonInclusionNumberOfCompressedAccounts) if err != nil { return err } - err = writeProvingSystem(system, path, pathVkey) + err = prover.WriteProvingSystem(system, path, pathVkey) } if err != nil { @@ -136,7 +132,7 @@ func runCli() { }, Action: func(context *cli.Context) error { circuit := prover.CircuitType(context.String("circuit")) - if circuit != prover.Inclusion && circuit != prover.NonInclusion && circuit != prover.Combined && circuit != prover.BatchAppend { + if circuit != prover.InclusionCircuitType && circuit != prover.NonInclusionCircuitType && circuit != prover.CombinedCircuitType && circuit != prover.BatchAppendCircuitType { return fmt.Errorf("invalid circuit type %s", circuit) } @@ -167,11 +163,11 @@ func runCli() { } } - if (batchAppendTreeHeight == 0 || batchAppendBatchSize == 0) && circuit == prover.BatchAppend { + if (batchAppendTreeHeight == 0 || batchAppendBatchSize == 0) && 
circuit == prover.BatchAppendCircuitType { return fmt.Errorf("[Batch append]: tree height and batch size must be provided") } - if (batchUpdateTreeHeight == 0 || batchUpdateBatchSize == 0) && circuit == prover.BatchUpdate { + if (batchUpdateTreeHeight == 0 || batchUpdateBatchSize == 0) && circuit == prover.BatchUpdateCircuitType { return fmt.Errorf("[Batch update]: tree height and batch size must be provided") } @@ -180,15 +176,15 @@ func runCli() { var cs constraint.ConstraintSystem var err error - if circuit == prover.Inclusion { + if circuit == prover.InclusionCircuitType { cs, err = prover.R1CSInclusion(inclusionTreeHeight, inclusionNumberOfCompressedAccounts) - } else if circuit == prover.NonInclusion { + } else if circuit == prover.NonInclusionCircuitType { cs, err = prover.R1CSNonInclusion(nonInclusionTreeHeight, nonInclusionNumberOfCompressedAccounts) - } else if circuit == prover.Combined { + } else if circuit == prover.CombinedCircuitType { cs, err = prover.R1CSCombined(inclusionTreeHeight, inclusionNumberOfCompressedAccounts, nonInclusionTreeHeight, nonInclusionNumberOfCompressedAccounts) - } else if circuit == prover.BatchAppend { + } else if circuit == prover.BatchAppendCircuitType { cs, err = prover.R1CSBatchAppend(batchAppendTreeHeight, batchAppendBatchSize) - } else if circuit == prover.BatchUpdate { + } else if circuit == prover.BatchUpdateCircuitType { cs, err = prover.R1CSBatchUpdate(batchUpdateTreeHeight, batchUpdateBatchSize) } else { return fmt.Errorf("invalid circuit type %s", circuit) @@ -263,7 +259,7 @@ func runCli() { if err != nil { return err } - err = writeProvingSystem(system, path, "") + err = prover.WriteProvingSystem(system, path, "") } else if circuit == "update" { if batchUpdateTreeHeight == 0 || batchUpdateBatchSize == 0 { return fmt.Errorf("append tree height and batch size must be provided") @@ -273,7 +269,7 @@ func runCli() { if err != nil { return err } - err = writeProvingSystem(system, path, "") + err = prover.WriteProvingSystem(system, path, "") } else { if circuit == "inclusion" || circuit == "combined" { if inclusionTreeHeight == 0 || inclusionNumberOfCompressedAccounts == 0 { @@ -298,7 +294,7 @@ func runCli() { if err != nil { return err } - err = writeProvingSystem(system, path, "") + err = prover.WriteProvingSystem(system, path, "") } if err != nil { @@ -392,15 +388,14 @@ func runCli() { &cli.BoolFlag{Name: "json-logging", Usage: "enable JSON logging", Required: false}, &cli.StringFlag{Name: "prover-address", Usage: "address for the prover server", Value: "0.0.0.0:3001", Required: false}, &cli.StringFlag{Name: "metrics-address", Usage: "address for the metrics server", Value: "0.0.0.0:9998", Required: false}, - &cli.BoolFlag{Name: "inclusion", Usage: "Run inclusion circuit", Required: false, Value: false}, - &cli.BoolFlag{Name: "non-inclusion", Usage: "Run non-inclusion circuit", Required: false}, - &cli.BoolFlag{Name: "append", Usage: "Run batch append circuit", Required: false}, - &cli.BoolFlag{Name: "update", Usage: "Run batch update circuit", Required: false}, &cli.StringFlag{Name: "keys-dir", Usage: "Directory where key files are stored", Value: "./proving-keys/", Required: false}, + &cli.StringSliceFlag{ + Name: "circuit", + Usage: "Specify the circuits to enable (inclusion, non-inclusion, combined, append, update, append-test, update-test)", + }, &cli.StringFlag{ Name: "run-mode", - Usage: "Specify the running mode (test or full)", - Value: "full", + Usage: "Specify the running mode (rpc, forester, forester-test, full, or full-test)", 
}, }, Action: func(context *cli.Context) error { @@ -408,16 +403,17 @@ func runCli() { logging.SetJSONOutput() } - runMode := context.String("run-mode") - isTestMode := runMode == "test" - - if isTestMode { - logging.Logger().Info().Msg("Running in test mode") - } else { - logging.Logger().Info().Msg("Running in full mode") + circuits := context.StringSlice("circuit") + runMode, err := parseRunMode(context.String("run-mode")) + if err != nil { + if len(circuits) == 0 { + return err + } } - psv1, psv2, err := LoadKeys(context, isTestMode) + var keysDirPath = context.String("keys-dir") + + psv1, psv2, err := prover.LoadKeys(keysDirPath, runMode, circuits) if err != nil { return err } @@ -450,23 +446,27 @@ func runCli() { &cli.BoolFlag{Name: "update", Usage: "Run batch update circuit", Required: false}, &cli.StringFlag{Name: "keys-dir", Usage: "Directory where circuit key files are stored", Value: "./proving-keys/", Required: false}, &cli.StringSliceFlag{Name: "keys-file", Aliases: []string{"k"}, Value: cli.NewStringSlice(), Usage: "Proving system file"}, + &cli.StringSliceFlag{ + Name: "circuit", + Usage: "Specify the circuits to enable (inclusion, non-inclusion, combined, append, update, append-test, update-test)", + Value: cli.NewStringSlice("inclusion", "non-inclusion", "combined", "append", "update", "append-test", "update-test"), + }, &cli.StringFlag{ Name: "run-mode", - Usage: "Specify the running mode (test or full)", - Value: "full", + Usage: "Specify the running mode (forester, forester-test, rpc, or full)", }, }, Action: func(context *cli.Context) error { - runMode := context.String("run-mode") - isTestMode := runMode == "test" - - if isTestMode { - logging.Logger().Info().Msg("Running in test mode") - } else { - logging.Logger().Info().Msg("Running in full mode") + circuits := context.StringSlice("circuit") + runMode, err := parseRunMode(context.String("run-mode")) + if err != nil { + if len(circuits) == 0 { + return err + } } + var keysDirPath = context.String("keys-dir") - psv1, psv2, err := LoadKeys(context, isTestMode) + psv1, psv2, err := prover.LoadKeys(keysDirPath, runMode, circuits) if err != nil { return err } @@ -620,7 +620,7 @@ func runCli() { switch s := system.(type) { case *prover.ProvingSystemV1: rootsStr := context.String("roots") - roots, err := parseHexStringList(rootsStr) + roots, err := prover.ParseHexStringList(rootsStr) if err != nil { return fmt.Errorf("failed to parse roots: %v", err) } @@ -628,24 +628,24 @@ func runCli() { switch circuit { case "inclusion": leavesStr := context.String("leaves") - leaves, err := parseHexStringList(leavesStr) + leaves, err := prover.ParseHexStringList(leavesStr) if err != nil { return fmt.Errorf("failed to parse leaves: %v", err) } verifyErr = s.VerifyInclusion(roots, leaves, &proof) case "non-inclusion": - values, err := parseHexStringList(context.String("values")) + values, err := prover.ParseHexStringList(context.String("values")) if err != nil { return fmt.Errorf("failed to parse values: %v", err) } verifyErr = s.VerifyNonInclusion(roots, values, &proof) case "combined": - leaves, err := parseHexStringList(context.String("leaves")) + leaves, err := prover.ParseHexStringList(context.String("leaves")) if err != nil { return fmt.Errorf("failed to parse leaves: %v", err) } - values, err := parseHexStringList(context.String("values")) + values, err := prover.ParseHexStringList(context.String("values")) if err != nil { return fmt.Errorf("failed to parse values: %v", err) } @@ -657,19 +657,19 @@ func runCli() { if circuit != 
"append" { return fmt.Errorf("invalid circuit type for ProvingSystemV2: %s", circuit) } - oldSubTreeHashChain, err := parseBigInt(context.String("old-sub-tree-hash-chain")) + oldSubTreeHashChain, err := prover.ParseBigInt(context.String("old-sub-tree-hash-chain")) if err != nil { return fmt.Errorf("failed to parse old sub-tree hash chain: %v", err) } - newSubTreeHashChain, err := parseBigInt(context.String("new-sub-tree-hash-chain")) + newSubTreeHashChain, err := prover.ParseBigInt(context.String("new-sub-tree-hash-chain")) if err != nil { return fmt.Errorf("failed to parse new sub-tree hash chain: %v", err) } - newRoot, err := parseBigInt(context.String("new-root")) + newRoot, err := prover.ParseBigInt(context.String("new-root")) if err != nil { return fmt.Errorf("failed to parse new root: %v", err) } - hashchainHash, err := parseBigInt(context.String("hashchain-hash")) + hashchainHash, err := prover.ParseBigInt(context.String("hashchain-hash")) if err != nil { return fmt.Errorf("failed to parse hashchain hash: %v", err) } @@ -730,166 +730,26 @@ func runCli() { } } -func LoadKeys(context *cli.Context, isTestMode bool) ([]*prover.ProvingSystemV1, []*prover.ProvingSystemV2, error) { - keys, _ := getKeysByArgs(context, isTestMode) - var pssv1 []*prover.ProvingSystemV1 - var pssv2 []*prover.ProvingSystemV2 - - for _, key := range keys { - logging.Logger().Info().Msg("Reading proving system from file " + key + "...") - system, err := prover.ReadSystemFromFile(key) - if err != nil { - return nil, nil, err - } - switch s := system.(type) { - case *prover.ProvingSystemV1: - pssv1 = append(pssv1, s) - logging.Logger().Info(). - Uint32("inclusionTreeHeight", s.InclusionTreeHeight). - Uint32("inclusionCompressedAccounts", s.InclusionNumberOfCompressedAccounts). - Uint32("nonInclusionTreeHeight", s.NonInclusionTreeHeight). - Uint32("nonInclusionCompressedAccounts", s.NonInclusionNumberOfCompressedAccounts). - Msg("Read ProvingSystem") - case *prover.ProvingSystemV2: - pssv2 = append(pssv2, s) - logging.Logger().Info(). - Uint32("treeHeight", s.TreeHeight). - Uint32("batchSize", s.BatchSize). 
- Msg("Read BatchAppendProvingSystem") - default: - return nil, nil, fmt.Errorf("unknown proving system type") - } - } - return pssv1, pssv2, nil -} - -func getKeysByArgs(context *cli.Context, isTestMode bool) ([]string, error) { - var keysDir = context.String("keys-dir") - var inclusion = context.Bool("inclusion") - var nonInclusion = context.Bool("non-inclusion") - var batchAppend = context.Bool("append") - var batchUpdate = context.Bool("update") - var circuitTypes []prover.CircuitType = make([]prover.CircuitType, 0) - - if batchAppend { - circuitTypes = append(circuitTypes, prover.BatchAppend) - } - - if batchUpdate { - circuitTypes = append(circuitTypes, prover.BatchUpdate) - } - - if inclusion { - circuitTypes = append(circuitTypes, prover.Inclusion) - } - - if nonInclusion { - circuitTypes = append(circuitTypes, prover.NonInclusion) - } - - if inclusion && nonInclusion { - circuitTypes = append(circuitTypes, prover.Combined) - } - - return prover.GetKeys(keysDir, circuitTypes, isTestMode), nil -} - -func createFileAndWriteBytes(filePath string, data []byte) error { - fmt.Println("Writing", len(data), "bytes to", filePath) - file, err := os.Create(filePath) - if err != nil { - return err // Return the error to the caller - } - defer func(file *os.File) { - err := file.Close() - if err != nil { - return - } - }(file) - - _, err = io.WriteString(file, fmt.Sprintf("%d", data)) - if err != nil { - return err // Return any error that occurs during writing - } - fmt.Println("Wrote", len(data), "bytes to", filePath) - return nil -} - -func writeProvingSystem(system interface{}, path string, pathVkey string) error { - file, err := os.Create(path) - if err != nil { - return err - } - defer file.Close() - - var written int64 - switch s := system.(type) { - case *prover.ProvingSystemV1: - written, err = s.WriteTo(file) - case *prover.ProvingSystemV2: - written, err = s.WriteTo(file) +func parseRunMode(runModeString string) (prover.RunMode, error) { + runMode := prover.Rpc + switch runModeString { + case "rpc": + logging.Logger().Info().Msg("Running in rpc mode") + runMode = prover.Rpc + case "forester": + logging.Logger().Info().Msg("Running in forester mode") + runMode = prover.Forester + case "forester-test": + logging.Logger().Info().Msg("Running in forester test mode") + runMode = prover.ForesterTest + case "full": + logging.Logger().Info().Msg("Running in full mode") + runMode = prover.Full + case "full-test": + logging.Logger().Info().Msg("Running in full mode") + runMode = prover.FullTest default: - return fmt.Errorf("unknown proving system type") - } - - if err != nil { - return err - } - - logging.Logger().Info().Int64("bytesWritten", written).Msg("Proving system written to file") - - // Write verification key - var vk interface{} - switch s := system.(type) { - case *prover.ProvingSystemV1: - vk = s.VerifyingKey - case *prover.ProvingSystemV2: - vk = s.VerifyingKey - } - - var buf bytes.Buffer - _, err = vk.(gnarkio.WriterRawTo).WriteRawTo(&buf) - if err != nil { - return err + return "", fmt.Errorf("invalid run mode %s", runModeString) } - - proofBytes := buf.Bytes() - err = createFileAndWriteBytes(pathVkey, proofBytes) - if err != nil { - return err - } - - return nil -} - -func parseHexStringList(input string) ([]big.Int, error) { - hexStrings := strings.Split(input, ",") - result := make([]big.Int, len(hexStrings)) - - for i, hexString := range hexStrings { - hexString = strings.TrimSpace(hexString) - hexString = strings.TrimPrefix(hexString, "0x") - - bytes, err := 
hex.DecodeString(hexString) - if err != nil { - return nil, fmt.Errorf("invalid hex string: %s", hexString) - } - - result[i].SetBytes(bytes) - } - - return result, nil -} - -func parseBigInt(input string) (*big.Int, error) { - input = strings.TrimSpace(input) - input = strings.TrimPrefix(input, "0x") - - bytes, err := hex.DecodeString(input) - if err != nil { - return nil, fmt.Errorf("invalid hex string: %s", input) - } - - bigInt := new(big.Int).SetBytes(bytes) - return bigInt, nil + return runMode, nil } diff --git a/light-prover/prover/circuit_builder.go b/light-prover/prover/circuit_builder.go index 9b9d097b5b..db15e7f5d7 100644 --- a/light-prover/prover/circuit_builder.go +++ b/light-prover/prover/circuit_builder.go @@ -8,20 +8,20 @@ import ( type CircuitType string const ( - Combined CircuitType = "combined" - Inclusion CircuitType = "inclusion" - NonInclusion CircuitType = "non-inclusion" - BatchAppend CircuitType = "append" - BatchUpdate CircuitType = "update" + CombinedCircuitType CircuitType = "combined" + InclusionCircuitType CircuitType = "inclusion" + NonInclusionCircuitType CircuitType = "non-inclusion" + BatchAppendCircuitType CircuitType = "append" + BatchUpdateCircuitType CircuitType = "update" ) func SetupCircuitV1(circuit CircuitType, inclusionTreeHeight uint32, inclusionNumberOfCompressedAccounts uint32, nonInclusionTreeHeight uint32, nonInclusionNumberOfCompressedAccounts uint32) (*ProvingSystemV1, error) { switch circuit { - case Inclusion: + case InclusionCircuitType: return SetupInclusion(inclusionTreeHeight, inclusionNumberOfCompressedAccounts) - case NonInclusion: + case NonInclusionCircuitType: return SetupNonInclusion(nonInclusionTreeHeight, nonInclusionNumberOfCompressedAccounts) - case Combined: + case CombinedCircuitType: return SetupCombined(inclusionTreeHeight, inclusionNumberOfCompressedAccounts, nonInclusionTreeHeight, nonInclusionNumberOfCompressedAccounts) default: return nil, fmt.Errorf("invalid circuit: %s", circuit) @@ -30,9 +30,9 @@ func SetupCircuitV1(circuit CircuitType, inclusionTreeHeight uint32, inclusionNu func SetupCircuitV2(circuit CircuitType, height uint32, batchSize uint32) (*ProvingSystemV2, error) { switch circuit { - case BatchAppend: + case BatchAppendCircuitType: return SetupBatchAppend(height, batchSize) - case BatchUpdate: + case BatchUpdateCircuitType: return SetupBatchUpdate(height, batchSize) default: return nil, fmt.Errorf("invalid circuit: %s", circuit) @@ -54,24 +54,15 @@ func ParseCircuitType(data []byte) (CircuitType, error) { _, hasNewMerkleProofs := inputs["newMerkleProofs"] if hasInputCompressedAccounts && hasNewAddresses { - return Combined, nil + return CombinedCircuitType, nil } else if hasInputCompressedAccounts { - return Inclusion, nil + return InclusionCircuitType, nil } else if hasNewAddresses { - return NonInclusion, nil + return NonInclusionCircuitType, nil } else if hasOldSubTreeHashChain && hasNewSubTreeHashChain && hasLeaves { - return BatchAppend, nil + return BatchAppendCircuitType, nil } else if hasNewMerkleProofs { - return BatchUpdate, nil + return BatchUpdateCircuitType, nil } return "", fmt.Errorf("unknown schema") } - -func IsCircuitEnabled(s []CircuitType, e CircuitType) bool { - for _, a := range s { - if a == e { - return true - } - } - return false -} diff --git a/light-prover/prover/circuit_utils.go b/light-prover/prover/circuit_utils.go index 19761251a7..4cb678367b 100644 --- a/light-prover/prover/circuit_utils.go +++ b/light-prover/prover/circuit_utils.go @@ -1,13 +1,9 @@ package prover 
import ( - "fmt" - "light/light-prover/logging" "light/light-prover/prover/poseidon" "math/big" - "os" - "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/frontend" @@ -219,88 +215,3 @@ func (gadget MerkleRootUpdateGadget) DefineGadget(api frontend.API) interface{} return newRoot } - -// Trusted setup utility functions -// Taken from: https://github.com/bnb-chain/zkbnb/blob/master/common/prove/proof_keys.go#L19 -func LoadProvingKey(filepath string) (pk groth16.ProvingKey, err error) { - logging.Logger().Info().Msg("start reading proving key") - pk = groth16.NewProvingKey(ecc.BN254) - f, _ := os.Open(filepath) - _, err = pk.ReadFrom(f) - if err != nil { - return pk, fmt.Errorf("read file error") - } - err = f.Close() - if err != nil { - return nil, err - } - return pk, nil -} - -// Taken from: https://github.com/bnb-chain/zkbnb/blob/master/common/prove/proof_keys.go#L32 -func LoadVerifyingKey(filepath string) (verifyingKey groth16.VerifyingKey, err error) { - logging.Logger().Info().Msg("start reading verifying key") - verifyingKey = groth16.NewVerifyingKey(ecc.BN254) - f, _ := os.Open(filepath) - _, err = verifyingKey.ReadFrom(f) - if err != nil { - return verifyingKey, fmt.Errorf("read file error") - } - err = f.Close() - if err != nil { - return nil, err - } - - return verifyingKey, nil -} -func GetKeys(keysDir string, circuitTypes []CircuitType, isTestMode bool) []string { - var keys []string - - if IsCircuitEnabled(circuitTypes, Inclusion) { - keys = append(keys, keysDir+"inclusion_26_1.key") - keys = append(keys, keysDir+"inclusion_26_2.key") - keys = append(keys, keysDir+"inclusion_26_3.key") - keys = append(keys, keysDir+"inclusion_26_4.key") - keys = append(keys, keysDir+"inclusion_26_8.key") - } - if IsCircuitEnabled(circuitTypes, NonInclusion) { - keys = append(keys, keysDir+"non-inclusion_26_1.key") - keys = append(keys, keysDir+"non-inclusion_26_2.key") - } - if IsCircuitEnabled(circuitTypes, Combined) { - keys = append(keys, keysDir+"combined_26_1_1.key") - keys = append(keys, keysDir+"combined_26_1_2.key") - keys = append(keys, keysDir+"combined_26_2_1.key") - keys = append(keys, keysDir+"combined_26_2_2.key") - keys = append(keys, keysDir+"combined_26_3_1.key") - keys = append(keys, keysDir+"combined_26_3_2.key") - keys = append(keys, keysDir+"combined_26_4_1.key") - keys = append(keys, keysDir+"combined_26_4_2.key") - } - - if IsCircuitEnabled(circuitTypes, BatchAppend) { - if isTestMode { - keys = append(keys, keysDir+"append_10_10.key") - } else { - keys = append(keys, keysDir+"append_26_1.key") - keys = append(keys, keysDir+"append_26_10.key") - keys = append(keys, keysDir+"append_26_100.key") - keys = append(keys, keysDir+"append_26_500.key") - keys = append(keys, keysDir+"append_26_1000.key") - } - } - - if IsCircuitEnabled(circuitTypes, BatchUpdate) { - if isTestMode { - keys = append(keys, keysDir+"update_10_10.key") - } else { - keys = append(keys, keysDir+"update_26_1.key") - keys = append(keys, keysDir+"update_26_10.key") - keys = append(keys, keysDir+"update_26_100.key") - keys = append(keys, keysDir+"update_26_500.key") - keys = append(keys, keysDir+"update_26_1000.key") - } - } - - return keys -} diff --git a/light-prover/prover/marshal.go b/light-prover/prover/marshal.go index e9b3e49317..2a8c210d4c 100644 --- a/light-prover/prover/marshal.go +++ b/light-prover/prover/marshal.go @@ -181,7 +181,7 @@ func (ps 
*ProvingSystemV1) UnsafeReadFrom(r io.Reader) (int64, error) { func ReadSystemFromFile(path string) (interface{}, error) { if strings.Contains(strings.ToLower(path), "append") { ps := new(ProvingSystemV2) - ps.CircuitType = BatchAppend + ps.CircuitType = BatchAppendCircuitType file, err := os.Open(path) if err != nil { return nil, err @@ -195,7 +195,7 @@ func ReadSystemFromFile(path string) (interface{}, error) { return ps, nil } else if strings.Contains(strings.ToLower(path), "update") { ps := new(ProvingSystemV2) - ps.CircuitType = BatchUpdate + ps.CircuitType = BatchUpdateCircuitType file, err := os.Open(path) if err != nil { return nil, err @@ -209,10 +209,10 @@ func ReadSystemFromFile(path string) (interface{}, error) { return ps, nil } else { ps := new(ProvingSystemV1) - file, err := os.Open(path) - if err != nil { + file, err := os.Open(path) + if err != nil { return nil, err - } + } defer file.Close() _, err = ps.UnsafeReadFrom(file) @@ -223,30 +223,6 @@ func ReadSystemFromFile(path string) (interface{}, error) { } } -func ReadProvingSystemFromFile(path string) (*ProvingSystemV1, error) { - system, err := ReadSystemFromFile(path) - if err != nil { - return nil, err - } - ps, ok := system.(*ProvingSystemV1) - if !ok { - return nil, fmt.Errorf("file does not contain a ProvingSystem") - } - return ps, nil -} - -func ReadBatchAppendProvingSystemFromFile(path string) (*ProvingSystemV2, error) { - system, err := ReadSystemFromFile(path) - if err != nil { - return nil, err - } - baps, ok := system.(*ProvingSystemV2) - if !ok { - return nil, fmt.Errorf("file does not contain a BatchAppendProvingSystem") - } - return baps, nil -} - func (ps *ProvingSystemV2) WriteTo(w io.Writer) (int64, error) { var totalWritten int64 = 0 var intBuf [4]byte diff --git a/light-prover/prover/proving_keys_utils.go b/light-prover/prover/proving_keys_utils.go new file mode 100644 index 0000000000..cbbdac2203 --- /dev/null +++ b/light-prover/prover/proving_keys_utils.go @@ -0,0 +1,258 @@ +package prover + +import ( + "bytes" + "fmt" + "io" + "light/light-prover/logging" + "os" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark/backend/groth16" + gnarkio "github.com/consensys/gnark/io" +) + +type RunMode string + +const ( + Forester RunMode = "forester" + ForesterTest RunMode = "forester-test" + Rpc RunMode = "rpc" + Full RunMode = "full" + FullTest RunMode = "full-test" +) + +// Trusted setup utility functions +// Taken from: https://github.com/bnb-chain/zkbnb/blob/master/common/prove/proof_keys.go#L19 +func LoadProvingKey(filepath string) (pk groth16.ProvingKey, err error) { + logging.Logger().Info().Msg("start reading proving key") + pk = groth16.NewProvingKey(ecc.BN254) + f, _ := os.Open(filepath) + _, err = pk.ReadFrom(f) + if err != nil { + return pk, fmt.Errorf("read file error") + } + err = f.Close() + if err != nil { + return nil, err + } + return pk, nil +} + +// Taken from: https://github.com/bnb-chain/zkbnb/blob/master/common/prove/proof_keys.go#L32 +func LoadVerifyingKey(filepath string) (verifyingKey groth16.VerifyingKey, err error) { + logging.Logger().Info().Msg("start reading verifying key") + verifyingKey = groth16.NewVerifyingKey(ecc.BN254) + f, _ := os.Open(filepath) + _, err = verifyingKey.ReadFrom(f) + if err != nil { + return verifyingKey, fmt.Errorf("read file error") + } + err = f.Close() + if err != nil { + return nil, err + } + + return verifyingKey, nil +} + +func GetKeys(keysDir string, runMode RunMode, circuits []string) 
[]string { + var keys []string + + var inclusionKeys []string = []string{ + keysDir + "inclusion_26_1.key", + keysDir + "inclusion_26_2.key", + keysDir + "inclusion_26_3.key", + keysDir + "inclusion_26_4.key", + keysDir + "inclusion_26_8.key", + } + + var nonInclusionKeys []string = []string{ + keysDir + "non-inclusion_26_1.key", + keysDir + "non-inclusion_26_2.key", + } + + var combinedKeys []string = []string{ + keysDir + "combined_26_1_1.key", + keysDir + "combined_26_1_2.key", + keysDir + "combined_26_2_1.key", + keysDir + "combined_26_2_2.key", + keysDir + "combined_26_3_1.key", + keysDir + "combined_26_3_2.key", + keysDir + "combined_26_4_1.key", + keysDir + "combined_26_4_2.key", + } + + var appendKeys []string = []string{ + keysDir + "append_26_1.key", + keysDir + "append_26_10.key", + keysDir + "append_26_100.key", + keysDir + "append_26_500.key", + keysDir + "append_26_1000.key", + } + var updateKeys []string = []string{ + keysDir + "update_26_1.key", + keysDir + "update_26_10.key", + keysDir + "update_26_100.key", + keysDir + "update_26_500.key", + keysDir + "update_26_1000.key", + } + + var appendTestKeys []string = []string{ + keysDir + "append_26_10.key", + } + var updateTestKeys []string = []string{ + keysDir + "update_26_10.key", + } + + switch runMode { + case Forester: // inclusion + non-inclusion + keys = append(keys, inclusionKeys...) + keys = append(keys, nonInclusionKeys...) + case ForesterTest: // append-test + update-test + keys = append(keys, appendTestKeys...) + keys = append(keys, updateTestKeys...) + case Rpc: // inclusion + non-inclusion + combined + keys = append(keys, inclusionKeys...) + keys = append(keys, nonInclusionKeys...) + keys = append(keys, combinedKeys...) + case Full: // inclusion + non-inclusion + combined + append + update + keys = append(keys, inclusionKeys...) + keys = append(keys, nonInclusionKeys...) + keys = append(keys, combinedKeys...) + keys = append(keys, appendKeys...) + keys = append(keys, updateKeys...) + case FullTest: // inclusion + non-inclusion + combined + append-test + update-test + keys = append(keys, inclusionKeys...) + keys = append(keys, nonInclusionKeys...) + keys = append(keys, combinedKeys...) + keys = append(keys, appendTestKeys...) + keys = append(keys, updateTestKeys...) + } + + fmt.Println("Keys: ", keys) + fmt.Println("Circuits: ", circuits) + + for _, circuit := range circuits { + switch circuit { + case "inclusion": + keys = append(keys, inclusionKeys...) + case "non-inclusion": + keys = append(keys, nonInclusionKeys...) + case "combined": + keys = append(keys, combinedKeys...) + case "append": + keys = append(keys, appendKeys...) + case "update": + keys = append(keys, updateKeys...) + case "append-test": + keys = append(keys, appendTestKeys...) + case "update-test": + keys = append(keys, updateTestKeys...) + } + } + + return keys +} + +func LoadKeys(keysDirPath string, runMode RunMode, circuits []string) ([]*ProvingSystemV1, []*ProvingSystemV2, error) { + var pssv1 []*ProvingSystemV1 + var pssv2 []*ProvingSystemV2 + + keys := GetKeys(keysDirPath, runMode, circuits) + + for _, key := range keys { + logging.Logger().Info().Msg("Reading proving system from file " + key + "...") + system, err := ReadSystemFromFile(key) + if err != nil { + return nil, nil, err + } + switch s := system.(type) { + case *ProvingSystemV1: + pssv1 = append(pssv1, s) + logging.Logger().Info(). + Uint32("inclusionTreeHeight", s.InclusionTreeHeight). + Uint32("inclusionCompressedAccounts", s.InclusionNumberOfCompressedAccounts). 
+ Uint32("nonInclusionTreeHeight", s.NonInclusionTreeHeight). + Uint32("nonInclusionCompressedAccounts", s.NonInclusionNumberOfCompressedAccounts). + Msg("Read ProvingSystem") + case *ProvingSystemV2: + pssv2 = append(pssv2, s) + logging.Logger().Info(). + Uint32("treeHeight", s.TreeHeight). + Uint32("batchSize", s.BatchSize). + Msg("Read BatchAppendProvingSystem") + default: + return nil, nil, fmt.Errorf("unknown proving system type") + } + } + return pssv1, pssv2, nil +} + +func createFileAndWriteBytes(filePath string, data []byte) error { + fmt.Println("Writing", len(data), "bytes to", filePath) + file, err := os.Create(filePath) + if err != nil { + return err // Return the error to the caller + } + defer func(file *os.File) { + err := file.Close() + if err != nil { + return + } + }(file) + + _, err = io.WriteString(file, fmt.Sprintf("%d", data)) + if err != nil { + return err // Return any error that occurs during writing + } + fmt.Println("Wrote", len(data), "bytes to", filePath) + return nil +} + +func WriteProvingSystem(system interface{}, path string, pathVkey string) error { + file, err := os.Create(path) + if err != nil { + return err + } + defer file.Close() + + var written int64 + switch s := system.(type) { + case *ProvingSystemV1: + written, err = s.WriteTo(file) + case *ProvingSystemV2: + written, err = s.WriteTo(file) + default: + return fmt.Errorf("unknown proving system type") + } + + if err != nil { + return err + } + + logging.Logger().Info().Int64("bytesWritten", written).Msg("Proving system written to file") + + // Write verification key + var vk interface{} + switch s := system.(type) { + case *ProvingSystemV1: + vk = s.VerifyingKey + case *ProvingSystemV2: + vk = s.VerifyingKey + } + + var buf bytes.Buffer + _, err = vk.(gnarkio.WriterRawTo).WriteRawTo(&buf) + if err != nil { + return err + } + + proofBytes := buf.Bytes() + err = createFileAndWriteBytes(pathVkey, proofBytes) + if err != nil { + return err + } + + return nil +} diff --git a/light-prover/prover/string_utils.go b/light-prover/prover/string_utils.go new file mode 100644 index 0000000000..52f1809a34 --- /dev/null +++ b/light-prover/prover/string_utils.go @@ -0,0 +1,40 @@ +package prover + +import ( + "encoding/hex" + "fmt" + "math/big" + "strings" +) + +func ParseHexStringList(input string) ([]big.Int, error) { + hexStrings := strings.Split(input, ",") + result := make([]big.Int, len(hexStrings)) + + for i, hexString := range hexStrings { + hexString = strings.TrimSpace(hexString) + hexString = strings.TrimPrefix(hexString, "0x") + + bytes, err := hex.DecodeString(hexString) + if err != nil { + return nil, fmt.Errorf("invalid hex string: %s", hexString) + } + + result[i].SetBytes(bytes) + } + + return result, nil +} + +func ParseBigInt(input string) (*big.Int, error) { + input = strings.TrimSpace(input) + input = strings.TrimPrefix(input, "0x") + + bytes, err := hex.DecodeString(input) + if err != nil { + return nil, fmt.Errorf("invalid hex string: %s", input) + } + + bigInt := new(big.Int).SetBytes(bytes) + return bigInt, nil +} diff --git a/light-prover/scripts/download_keys.sh b/light-prover/scripts/download_keys.sh index 19d94b5057..85510be784 100755 --- a/light-prover/scripts/download_keys.sh +++ b/light-prover/scripts/download_keys.sh @@ -54,10 +54,10 @@ LIGHTWEIGHT_FILES=( "combined_26_4_1.vkey" "combined_26_4_2.key" "combined_26_4_2.vkey" - "append_10_10.key" - "append_10_10.vkey" - "update_10_10.key" - "update_10_10.vkey" + "append_26_10.key" + "append_26_10.vkey" + "update_26_10.key" + 
"update_26_10.vkey" ) FULL_FILES=( diff --git a/light-prover/server/server.go b/light-prover/server/server.go index f17ef9327f..87808552a6 100644 --- a/light-prover/server/server.go +++ b/light-prover/server/server.go @@ -139,15 +139,15 @@ func (handler proveHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { } switch circuitType { - case prover.Inclusion: + case prover.InclusionCircuitType: proof, proofError = handler.inclusionProof(buf) - case prover.NonInclusion: + case prover.NonInclusionCircuitType: proof, proofError = handler.nonInclusionProof(buf) - case prover.Combined: + case prover.CombinedCircuitType: proof, proofError = handler.combinedProof(buf) - case prover.BatchAppend: + case prover.BatchAppendCircuitType: proof, proofError = handler.batchAppendProof(buf) - case prover.BatchUpdate: + case prover.BatchUpdateCircuitType: proof, proofError = handler.batchUpdateProof(buf) default: proofError = malformedBodyError(fmt.Errorf("unknown circuit type")) @@ -188,7 +188,7 @@ func (handler proveHandler) batchAppendProof(buf []byte) (*prover.Proof, *Error) var ps *prover.ProvingSystemV2 for _, provingSystem := range handler.provingSystemsV2 { - if provingSystem.CircuitType == prover.BatchAppend && provingSystem.BatchSize == batchSize && provingSystem.TreeHeight == params.TreeHeight { + if provingSystem.CircuitType == prover.BatchAppendCircuitType && provingSystem.BatchSize == batchSize && provingSystem.TreeHeight == params.TreeHeight { ps = provingSystem break } @@ -220,7 +220,7 @@ func (handler proveHandler) batchUpdateProof(buf []byte) (*prover.Proof, *Error) var ps *prover.ProvingSystemV2 for _, provingSystem := range handler.provingSystemsV2 { - if provingSystem.CircuitType == prover.BatchUpdate && provingSystem.TreeHeight == treeHeight && provingSystem.BatchSize == batchSize { + if provingSystem.CircuitType == prover.BatchUpdateCircuitType && provingSystem.TreeHeight == treeHeight && provingSystem.BatchSize == batchSize { ps = provingSystem break } diff --git a/test-programs/compressed-token-test/tests/test.rs b/test-programs/compressed-token-test/tests/test.rs index 5a36a363f6..a158a6fdd4 100644 --- a/test-programs/compressed-token-test/tests/test.rs +++ b/test-programs/compressed-token-test/tests/test.rs @@ -16,7 +16,7 @@ use light_compressed_token::process_transfer::transfer_sdk::create_transfer_inst use light_compressed_token::process_transfer::{get_cpi_authority_pda, TokenTransferOutputData}; use light_compressed_token::token_data::AccountState; use light_compressed_token::{token_data::TokenData, ErrorCode}; -use light_prover_client::gnark::helpers::kill_prover; +use light_prover_client::gnark::helpers::{kill_prover, ProofType, ProverConfig}; use light_system_program::{ invoke::processor::CompressedProof, sdk::compressed_account::{CompressedAccountWithMerkleContext, MerkleContext}, @@ -179,8 +179,15 @@ async fn test_failing_create_token_pool() { async fn test_wrapped_sol() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let native_mint = spl_token::native_mint::ID; let token_account_keypair = Keypair::new(); create_token_account(&mut rpc, &native_mint, &token_account_keypair, &payer) @@ -239,7 +246,7 @@ async fn test_mint_to(amounts: Vec, 
iterations: usize, lamports: Option::init_from_env(&payer, &env, false, false).await; + TestIndexer::::init_from_env(&payer, &env, None).await; let recipients = amounts .iter() @@ -771,8 +778,15 @@ async fn perform_transfer_test(inputs: usize, outputs: usize, amount: u64) { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let mint = create_mint_helper(&mut rpc, &payer).await; let sender = Keypair::new(); mint_tokens_helper_with_lamports( @@ -819,8 +833,15 @@ async fn test_decompression() { let (mut context, env) = setup_test_programs_with_accounts(None).await; let payer = context.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut context, &sender.pubkey(), 1_000_000_000) .await @@ -883,8 +904,15 @@ async fn test_delegation( let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -997,8 +1025,15 @@ async fn test_delegation_mixed() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -1213,8 +1248,15 @@ async fn test_approve_failing() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -1493,8 +1535,15 @@ async fn test_revoke(num_inputs: usize, mint_amount: u64, delegated_amount: u64) let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, 
+ &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -1601,8 +1650,15 @@ async fn test_revoke_failing() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -1801,8 +1857,15 @@ async fn test_burn() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -1930,8 +1993,15 @@ async fn failing_tests_burn() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -2152,8 +2222,15 @@ async fn test_freeze_and_thaw(mint_amount: u64, delegated_amount: u64) { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -2303,8 +2380,15 @@ async fn test_failing_freeze() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -2535,8 +2619,15 @@ async fn test_failing_thaw() { let (mut rpc, env) = setup_test_programs_with_accounts(None).await; let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: 
vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut rpc, &sender.pubkey(), 1_000_000_000) .await @@ -2788,8 +2879,15 @@ async fn test_failing_decompression() { let (mut context, env) = setup_test_programs_with_accounts(None).await; let payer = context.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let sender = Keypair::new(); airdrop_lamports(&mut context, &sender.pubkey(), 1_000_000_000) .await @@ -3181,8 +3279,15 @@ async fn test_invalid_inputs() { let payer = rpc.get_payer().insecure_clone(); let merkle_tree_pubkey = env.merkle_tree_pubkey; let nullifier_queue_pubkey = env.nullifier_queue_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let recipient_keypair = Keypair::new(); airdrop_lamports(&mut rpc, &recipient_keypair.pubkey(), 1_000_000_000) .await diff --git a/test-programs/e2e-test/tests/test.rs b/test-programs/e2e-test/tests/test.rs index 150ee7d604..281f2f22d9 100644 --- a/test-programs/e2e-test/tests/test.rs +++ b/test-programs/e2e-test/tests/test.rs @@ -22,8 +22,7 @@ async fn test_10_all() { let indexer: TestIndexer = TestIndexer::init_from_env( &env_accounts.forester.insecure_clone(), &env_accounts, - KeypairActionConfig::all_default().inclusion(), - KeypairActionConfig::all_default().non_inclusion(), + Some(KeypairActionConfig::all_default().prover_config()), ) .await; @@ -60,8 +59,7 @@ async fn test_10000_all() { let indexer: TestIndexer = TestIndexer::init_from_env( &env_accounts.forester.insecure_clone(), &env_accounts, - KeypairActionConfig::all_default().inclusion(), - KeypairActionConfig::all_default().non_inclusion(), + Some(KeypairActionConfig::all_default().prover_config()), ) .await; diff --git a/test-programs/registry-test/Cargo.toml b/test-programs/registry-test/Cargo.toml index 992eefe344..52538a72f5 100644 --- a/test-programs/registry-test/Cargo.toml +++ b/test-programs/registry-test/Cargo.toml @@ -25,7 +25,7 @@ solana-program-test = { workspace = true } light-test-utils = { version = "1.2.0", path = "../../test-utils", features=["devenv"] } reqwest = "0.11.26" tokio = { workspace = true } -light-prover-client = {path = "../../circuit-lib/light-prover-client" } +light-prover-client = {path = "../../circuit-lib/light-prover-client", features = ["devenv"] } num-bigint = "0.4.6" num-traits = "0.2.19" spl-token = { workspace = true } diff --git a/test-programs/system-cpi-test/tests/test.rs b/test-programs/system-cpi-test/tests/test.rs index 86e7592966..7b253e49bb 100644 --- a/test-programs/system-cpi-test/tests/test.rs +++ b/test-programs/system-cpi-test/tests/test.rs @@ -4,6 +4,7 @@ use anchor_lang::AnchorDeserialize; use light_compressed_token::process_transfer::InputTokenDataWithContext; use light_compressed_token::token_data::AccountState; use light_hasher::{Hasher, Poseidon}; +use light_prover_client::gnark::helpers::{ProverConfig, ProverMode}; use light_system_program::errors::SystemProgramError; use light_system_program::sdk::address::derive_address; use light_system_program::sdk::compressed_account::{ @@ 
-50,7 +51,15 @@ async fn only_test_create_pda() { let (mut rpc, env) = setup_test_programs_with_accounts(Some(vec![(String::from("system_cpi_test"), ID)])).await; let payer = rpc.get_payer().insecure_clone(); - let mut test_indexer = TestIndexer::init_from_env(&payer, &env, true, true).await; + let mut test_indexer = TestIndexer::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ) + .await; let seed = [1u8; 32]; let data = [2u8; 31]; @@ -289,7 +298,15 @@ async fn test_approve_revoke_burn_freeze_thaw_with_cpi_context() { setup_test_programs_with_accounts(Some(vec![(String::from("system_cpi_test"), ID)])).await; let payer = rpc.get_payer().insecure_clone(); - let mut test_indexer = TestIndexer::init_from_env(&payer, &env, true, true).await; + let mut test_indexer = TestIndexer::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ) + .await; let mint = create_mint_helper(&mut rpc, &payer).await; let amount = 10000u64; mint_tokens_helper( @@ -453,7 +470,15 @@ async fn test_create_pda_in_program_owned_merkle_trees() { setup_test_programs_with_accounts(Some(vec![(String::from("system_cpi_test"), ID)])).await; let payer = rpc.get_payer().insecure_clone(); - let mut test_indexer = TestIndexer::init_from_env(&payer, &env, true, true).await; + let mut test_indexer = TestIndexer::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ) + .await; // Failing test 1 invalid address Merkle tree ---------------------------------------------- let program_owned_address_merkle_tree_keypair = Keypair::new(); let program_owned_address_queue_keypair = Keypair::new(); diff --git a/test-programs/system-cpi-test/tests/test_program_owned_trees.rs b/test-programs/system-cpi-test/tests/test_program_owned_trees.rs index 2e1245ca3d..af518b1159 100644 --- a/test-programs/system-cpi-test/tests/test_program_owned_trees.rs +++ b/test-programs/system-cpi-test/tests/test_program_owned_trees.rs @@ -9,6 +9,7 @@ use account_compression::{ use anchor_lang::{system_program, InstructionData, ToAccountMetas}; use light_compressed_token::mint_sdk::create_mint_to_instruction; use light_hasher::Poseidon; +use light_prover_client::gnark::helpers::{ProverConfig, ProverMode}; use light_registry::account_compression_cpi::sdk::{ create_nullify_instruction, get_registered_program_pda, CreateNullifyInstructionInputs, }; @@ -52,8 +53,15 @@ async fn test_program_owned_merkle_tree() { let program_owned_nullifier_queue_keypair = Keypair::new(); let cpi_context_keypair = Keypair::new(); - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, true).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ) + .await; test_indexer .add_state_merkle_tree( &mut rpc, diff --git a/test-programs/system-test/tests/test.rs b/test-programs/system-test/tests/test.rs index 7cfd6f7823..ba74d4780c 100644 --- a/test-programs/system-test/tests/test.rs +++ b/test-programs/system-test/tests/test.rs @@ -3,6 +3,7 @@ use account_compression::errors::AccountCompressionErrorCode; use anchor_lang::error::ErrorCode; use anchor_lang::{AnchorSerialize, InstructionData, ToAccountMetas}; use light_hasher::Poseidon; +use light_prover_client::gnark::helpers::{ProofType, ProverConfig, ProverMode}; use light_registry::protocol_config::state::ProtocolConfig; use 
light_system_program::{ errors::SystemProgramError, @@ -46,7 +47,6 @@ use solana_sdk::{ }; use solana_sdk::{signature::Keypair, transaction::TransactionError}; use tokio::fs::write as async_write; - // TODO: use lazy_static to spawn the server once /// invoke_failing_test @@ -102,8 +102,15 @@ async fn invoke_failing_test() { .await .unwrap(); - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, true).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ) + .await; // circuit instantiations allow for 1, 2, 3, 4, 8 inclusion proofs let options = [0usize, 1usize, 2usize, 3usize, 4usize, 8usize]; @@ -884,8 +891,15 @@ async fn invoke_test() { let (mut context, env) = setup_test_programs_with_accounts(None).await; let payer = context.get_payer().insecure_clone(); - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, true).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ) + .await; let payer_pubkey = payer.pubkey(); @@ -1143,8 +1157,15 @@ async fn invoke_test() { async fn test_with_address() { let (mut context, env) = setup_test_programs_with_accounts(None).await; let payer = context.get_payer().insecure_clone(); - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, true).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: Some(ProverMode::Rpc), + circuits: vec![], + }), + ) + .await; let payer_pubkey = payer.pubkey(); let merkle_tree_pubkey = env.merkle_tree_pubkey; @@ -1329,8 +1350,15 @@ async fn test_with_compression() { let merkle_tree_pubkey = env.merkle_tree_pubkey; let nullifier_queue_pubkey = env.nullifier_queue_pubkey; - let mut test_indexer = - TestIndexer::::init_from_env(&payer, &env, true, false).await; + let mut test_indexer = TestIndexer::::init_from_env( + &payer, + &env, + Some(ProverConfig { + run_mode: None, + circuits: vec![ProofType::Inclusion], + }), + ) + .await; let compress_amount = 1_000_000; let output_compressed_accounts = vec![CompressedAccount { lamports: compress_amount + 1, diff --git a/test-utils/src/e2e_test_env.rs b/test-utils/src/e2e_test_env.rs index 626c9a91eb..83c7cd9a46 100644 --- a/test-utils/src/e2e_test_env.rs +++ b/test-utils/src/e2e_test_env.rs @@ -69,6 +69,7 @@ // refactor all tests to work with that so that we can run all tests with a test validator and concurrency use light_compressed_token::token_data::AccountState; +use light_prover_client::gnark::helpers::{ProofType, ProverConfig}; use light_registry::protocol_config::state::{ProtocolConfig, ProtocolConfigPda}; use light_registry::sdk::create_finalize_registration_instruction; use light_registry::utils::get_protocol_config_pda_address; @@ -135,6 +136,7 @@ use crate::indexer::TestIndexer; use light_client::rpc::errors::RpcError; use light_client::rpc::RpcConnection; use light_client::transaction_params::{FeeConfig, TransactionParams}; +use light_prover_client::gnark::helpers::ProverMode; pub struct User { pub keypair: Keypair, @@ -206,8 +208,10 @@ pub async fn init_program_test_env( let indexer: TestIndexer = TestIndexer::init_from_env( &env_accounts.forester.insecure_clone(), env_accounts, - KeypairActionConfig::all_default().inclusion(), - KeypairActionConfig::all_default().non_inclusion(), + Some(ProverConfig { + run_mode: 
Some(ProverMode::Rpc), + circuits: vec![], + }), ) .await; @@ -2143,6 +2147,23 @@ pub struct KeypairActionConfig { } impl KeypairActionConfig { + pub fn prover_config(&self) -> ProverConfig { + let mut config = ProverConfig { + run_mode: None, + circuits: vec![], + }; + + if self.inclusion() { + config.circuits.push(ProofType::Inclusion); + } + + if self.non_inclusion() { + config.circuits.push(ProofType::NonInclusion); + } + + config + } + pub fn inclusion(&self) -> bool { self.transfer_sol.is_some() || self.transfer_spl.is_some() } diff --git a/test-utils/src/indexer/test_indexer.rs b/test-utils/src/indexer/test_indexer.rs index e2bd3a884b..f6d304fc02 100644 --- a/test-utils/src/indexer/test_indexer.rs +++ b/test-utils/src/indexer/test_indexer.rs @@ -23,6 +23,7 @@ use light_client::transaction_params::FeeConfig; use light_compressed_token::constants::TOKEN_COMPRESSED_ACCOUNT_DISCRIMINATOR; use light_compressed_token::mint_sdk::create_create_token_pool_instruction; use light_compressed_token::{get_token_pool_pda, TokenData}; +use light_prover_client::gnark::helpers::{ProverConfig, ProverMode}; use light_utils::bigint::bigint_to_be_bytes_array; use { crate::test_env::{create_state_merkle_tree_and_queue_account, EnvAccounts}, @@ -38,7 +39,7 @@ use { gnark::{ combined_json_formatter::CombinedJsonStruct, constants::{PROVE_PATH, SERVER_ADDRESS}, - helpers::{spawn_prover, ProofType}, + helpers::spawn_prover, inclusion_json_formatter::BatchInclusionJsonStruct, non_inclusion_json_formatter::BatchNonInclusionJsonStruct, proof_helpers::{compress_proof, deserialize_gnark_proof_json, proof_from_json_struct}, @@ -80,7 +81,7 @@ pub struct TestIndexer { pub token_compressed_accounts: Vec, pub token_nullified_compressed_accounts: Vec, pub events: Vec, - pub proof_types: Vec, + pub prover_config: Option, phantom: PhantomData, } @@ -519,7 +520,9 @@ impl Indexer for TestIndexer { } else { warn!("Error: {}", response_result.text().await.unwrap()); tokio::time::sleep(Duration::from_secs(1)).await; - spawn_prover(true, self.proof_types.as_slice()).await; + if let Some(ref prover_config) = self.prover_config { + spawn_prover(true, prover_config.clone()).await; + } retries -= 1; } } @@ -598,8 +601,7 @@ impl TestIndexer { pub async fn init_from_env( payer: &Keypair, env: &EnvAccounts, - inclusion: bool, - non_inclusion: bool, + prover_config: Option, ) -> Self { Self::new( vec![StateMerkleTreeAccounts { @@ -613,8 +615,7 @@ impl TestIndexer { }], payer.insecure_clone(), env.group_pda, - inclusion, - non_inclusion, + prover_config, ) .await } @@ -624,18 +625,10 @@ impl TestIndexer { address_merkle_tree_accounts: Vec, payer: Keypair, group_pda: Pubkey, - inclusion: bool, - non_inclusion: bool, + prover_config: Option, ) -> Self { - let mut vec_proof_types = vec![]; - if inclusion { - vec_proof_types.push(ProofType::Inclusion); - } - if non_inclusion { - vec_proof_types.push(ProofType::NonInclusion); - } - if !vec_proof_types.is_empty() { - spawn_prover(true, vec_proof_types.as_slice()).await; + if let Some(ref prover_config) = prover_config { + spawn_prover(true, prover_config.clone()).await; } let mut state_merkle_trees = Vec::new(); for state_merkle_tree_account in state_merkle_tree_accounts.iter() { @@ -666,7 +659,7 @@ impl TestIndexer { events: vec![], token_compressed_accounts: vec![], token_nullified_compressed_accounts: vec![], - proof_types: vec_proof_types, + prover_config, phantom: Default::default(), group_pda, }
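
Usage note: the start command now selects proving keys through --run-mode (rpc, forester, forester-test, full, full-test) and repeatable --circuit flags instead of the old per-circuit boolean flags, with prover.GetKeys and prover.LoadKeys resolving the key files for that selection. Below is a minimal sketch of how a caller could inspect the resolved key set; the standalone main wrapper and the print loop are illustrative assumptions, while GetKeys, the ForesterTest run mode, and the ./proving-keys/ default directory are taken from the new prover package and CLI defaults.

package main

import (
	"fmt"

	"light/light-prover/prover"
)

func main() {
	// Keys resolved for forester-test mode (append_26_10 and update_26_10),
	// plus one explicitly requested extra circuit ("inclusion").
	keys := prover.GetKeys("./proving-keys/", prover.ForesterTest, []string{"inclusion"})
	for _, key := range keys {
		fmt.Println(key)
	}
}

The Rust tests mirror the same selection: TestIndexer::init_from_env now takes Some(ProverConfig { run_mode, circuits }) in place of the former inclusion/non-inclusion boolean pair, and a config with run_mode: Some(ProverMode::Rpc) and an empty circuits list loads the rpc key set (inclusion, non-inclusion, combined).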