From 17e564c6f7914cc6c4304626592fd81f611ac83c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 13:42:13 -0300 Subject: [PATCH 01/98] fmt --- prover/prover_cli/Cargo.toml | 17 ++++++++++++++++ prover/prover_cli/src/cli.rs | 26 ++++++++++++++++++++++++ prover/prover_cli/src/commands/mod.rs | 1 + prover/prover_cli/src/commands/status.rs | 4 ++++ prover/prover_cli/src/lib.rs | 2 ++ prover/prover_cli/src/main.rs | 10 +++++++++ 6 files changed, 60 insertions(+) create mode 100644 prover/prover_cli/Cargo.toml create mode 100644 prover/prover_cli/src/cli.rs create mode 100644 prover/prover_cli/src/commands/mod.rs create mode 100644 prover/prover_cli/src/commands/status.rs create mode 100644 prover/prover_cli/src/lib.rs create mode 100644 prover/prover_cli/src/main.rs diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml new file mode 100644 index 000000000000..7effe24198a1 --- /dev/null +++ b/prover/prover_cli/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "prover_cli" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +tokio = { version = "1", features = ["rt-multi-thread", "macros"] } +env_logger = "0.10" +log = "0.4" +clap = { version = "4.3", features = ["derive"] } +eyre = "0.6" diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs new file mode 100644 index 000000000000..6d09f3b0171e --- /dev/null +++ b/prover/prover_cli/src/cli.rs @@ -0,0 +1,26 @@ +use crate::commands::status; + +use clap::{command, Parser, Subcommand}; + +pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); + +#[derive(Parser)] +#[command(name="prover-cli", version=VERSION_STRING, about, long_about = None)] +struct ProverCLI { + #[command(subcommand)] + command: ProverCommand, +} + +#[derive(Subcommand)] +enum ProverCommand { + Status, +} + +pub async fn start() -> eyre::Result<()> { + let ProverCLI { command } = ProverCLI::parse(); + match command { + ProverCommand::Status => status::run().await?, + }; + + Ok(()) +} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs new file mode 100644 index 000000000000..3d7dcd739867 --- /dev/null +++ b/prover/prover_cli/src/commands/mod.rs @@ -0,0 +1 @@ +pub(crate) mod status; diff --git a/prover/prover_cli/src/commands/status.rs b/prover/prover_cli/src/commands/status.rs new file mode 100644 index 000000000000..5edf73beb816 --- /dev/null +++ b/prover/prover_cli/src/commands/status.rs @@ -0,0 +1,4 @@ +pub(crate) async fn run() -> eyre::Result<()> { + log::info!("Prover Status"); + Ok(()) +} diff --git a/prover/prover_cli/src/lib.rs b/prover/prover_cli/src/lib.rs new file mode 100644 index 000000000000..3ef8b313f0c2 --- /dev/null +++ b/prover/prover_cli/src/lib.rs @@ -0,0 +1,2 @@ +pub mod cli; +mod commands; diff --git a/prover/prover_cli/src/main.rs b/prover/prover_cli/src/main.rs new file mode 100644 index 000000000000..f2a7dd710267 --- /dev/null +++ b/prover/prover_cli/src/main.rs @@ -0,0 +1,10 @@ +use prover_cli::cli; + +#[tokio::main] +async fn main() { + env_logger::builder() + .filter_level(log::LevelFilter::Debug) + .init(); + + cli::start().await.unwrap(); +} From 245b0a4e75ddd1c6a968b07152cfcfffe70cc2c0 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 16:09:36 -0300 Subject: [PATCH 02/98] wip --- prover/prover_cli/Cargo.toml | 20 +++++++++++++++ 
prover/prover_cli/src/cli.rs | 6 +++-- .../src/commands/get_proof_progress.rs | 25 +++++++++++++++++++ prover/prover_cli/src/commands/mod.rs | 1 + 4 files changed, 50 insertions(+), 2 deletions(-) create mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index 7effe24198a1..ae0dddd885dc 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -15,3 +15,23 @@ env_logger = "0.10" log = "0.4" clap = { version = "4.3", features = ["derive"] } eyre = "0.6" + +anyhow.workspace = true +zksync_config.workspace = true +zksync_env_config.workspace = true +zksync_db_connection.workspace = true +zksync_basic_types.workspace = true +zksync_types.workspace = true +prover_dal.workspace = true +sqlx = { workspace = true, features = [ + "runtime-tokio", + "tls-native-tls", + "macros", + "postgres", + "bigdecimal", + "rust_decimal", + "chrono", + "json", + "migrate", + "ipnetwork", +] } diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index 6d09f3b0171e..ed1c083a4caa 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,7 +1,7 @@ -use crate::commands::status; - use clap::{command, Parser, Subcommand}; +use crate::commands::{get_proof_progress, status}; + pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); #[derive(Parser)] @@ -14,12 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { Status, + GetProofProgress, } pub async fn start() -> eyre::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::Status => status::run().await?, + ProverCommand::GetProofProgress => get_proof_progress::run().await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs new file mode 100644 index 000000000000..2f5f6ecf8141 --- /dev/null +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -0,0 +1,25 @@ +use anyhow::Context as _; +use prover_dal::{ConnectionPool, Prover, ProverDal}; +use tokio::{ + sync::{oneshot, watch::Receiver}, + task::JoinHandle, +}; +use zksync_config::configs::{ + fri_prover_group::FriProverGroupConfig, FriProverConfig, ObservabilityConfig, PostgresConfig, +}; +use zksync_env_config::{ + object_store::{ProverObjectStoreConfig, PublicObjectStoreConfig}, + FromEnv, +}; + +pub(crate) async fn run() -> eyre::Result<()> { + log::info!("Proof Progress"); + + let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; + let pool = ConnectionPool::singleton(postgres_config.prover_url()?) 
+ .build() + .await + .context("failed to build a connection pool")?; + + Ok(()) +} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 3d7dcd739867..b36d7c3d75ee 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1 +1,2 @@ +pub(crate) mod get_proof_progress; pub(crate) mod status; From 8e84243ea40be2e1b4c523413fff32d9575e86d3 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 16:12:37 -0300 Subject: [PATCH 03/98] add cargo.toml cargo.lock --- prover/Cargo.lock | 50 +++++++++++++++++++++++++++++++++++++++++++++++ prover/Cargo.toml | 1 + 2 files changed, 51 insertions(+) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index a0d1b1dcb840..b9e7119a6c2e 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -1755,7 +1755,11 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" dependencies = [ + "humantime", + "is-terminal", "log", + "regex", + "termcolor", ] [[package]] @@ -1866,6 +1870,16 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + [[package]] name = "fastrand" version = "2.0.1" @@ -2712,6 +2726,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + [[package]] name = "indexmap" version = "1.9.3" @@ -2747,6 +2767,17 @@ dependencies = [ "serde", ] +[[package]] +name = "is-terminal" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +dependencies = [ + "hermit-abi 0.3.6", + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "itertools" version = "0.10.5" @@ -4314,6 +4345,25 @@ dependencies = [ "thiserror", ] +[[package]] +name = "prover_cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.4.6", + "env_logger 0.10.2", + "eyre", + "log", + "prover_dal", + "sqlx", + "tokio", + "zksync_basic_types", + "zksync_config", + "zksync_db_connection", + "zksync_env_config", + "zksync_types", +] + [[package]] name = "prover_dal" version = "0.1.0" diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 2e9ee65ce7a7..7f33fa4a18c3 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -13,6 +13,7 @@ members = [ "prover_fri_gateway", "proof_fri_compressor", "tools", + "prover_cli", ] resolver = "2" From 17517b144741b5e7bb05dc105c8cedc55b3d9a32 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 17:36:57 -0300 Subject: [PATCH 04/98] go back to anihow --- prover/prover_cli/src/cli.rs | 2 +- .../src/commands/get_proof_progress.rs | 36 +++++++++++-------- prover/prover_cli/src/commands/status.rs | 2 +- 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index ed1c083a4caa..0b0d87f1c9d3 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -17,7 +17,7 @@ enum 
ProverCommand { GetProofProgress, } -pub async fn start() -> eyre::Result<()> { +pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::Status => status::run().await?, diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index 2f5f6ecf8141..c4ff753afec3 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -1,25 +1,33 @@ use anyhow::Context as _; -use prover_dal::{ConnectionPool, Prover, ProverDal}; -use tokio::{ - sync::{oneshot, watch::Receiver}, - task::JoinHandle, -}; -use zksync_config::configs::{ - fri_prover_group::FriProverGroupConfig, FriProverConfig, ObservabilityConfig, PostgresConfig, -}; -use zksync_env_config::{ - object_store::{ProverObjectStoreConfig, PublicObjectStoreConfig}, - FromEnv, -}; +use zksync_config::PostgresConfig; +use zksync_db_connection::connection_pool::ConnectionPool; +use zksync_env_config::FromEnv; +use prover_dal::Prover; -pub(crate) async fn run() -> eyre::Result<()> { +pub(crate) async fn run() -> anyhow::Result<()> { log::info!("Proof Progress"); let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; - let pool = ConnectionPool::singleton(postgres_config.prover_url()?) + + println!("{:?}", postgres_config); + + let pool = ConnectionPool::::singleton(postgres_config.prover_url()?) .build() .await .context("failed to build a connection pool")?; + // let asd = sqlx::query_as!( + // StorageL1BatchHeader, + // r#" + // SELECT + // id + // FROM + // prover_jobs_fri + // "#, + // ) + // .fetch_all(pool) + // .await?; + Ok(()) } + diff --git a/prover/prover_cli/src/commands/status.rs b/prover/prover_cli/src/commands/status.rs index 5edf73beb816..5d0b9b085260 100644 --- a/prover/prover_cli/src/commands/status.rs +++ b/prover/prover_cli/src/commands/status.rs @@ -1,4 +1,4 @@ -pub(crate) async fn run() -> eyre::Result<()> { +pub(crate) async fn run() -> anyhow::Result<()> { log::info!("Prover Status"); Ok(()) } From 4ab7ce87d6757d72e5eaed78da62946f0b3b7083 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 8 Apr 2024 17:37:13 -0300 Subject: [PATCH 05/98] fmt --- prover/prover_cli/src/commands/get_proof_progress.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index c4ff753afec3..9328c355dded 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -1,8 +1,8 @@ use anyhow::Context as _; +use prover_dal::Prover; use zksync_config::PostgresConfig; use zksync_db_connection::connection_pool::ConnectionPool; use zksync_env_config::FromEnv; -use prover_dal::Prover; pub(crate) async fn run() -> anyhow::Result<()> { log::info!("Proof Progress"); @@ -30,4 +30,3 @@ pub(crate) async fn run() -> anyhow::Result<()> { Ok(()) } - From 46552017c1a736ec85ac812b1ff1bac75b4346d1 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 9 Apr 2024 13:50:12 -0300 Subject: [PATCH 06/98] rm tool workspace --- prover/Cargo.lock | 7 +++ prover/Cargo.toml | 1 - prover/prover_cli/Cargo.toml | 9 +++- prover/{tools => prover_cli}/README.md | 4 +- prover/prover_cli/src/cli.rs | 8 ++-- .../src/commands/get_file_info.rs} | 47 ++++--------------- .../src/commands/get_proof_progress.rs | 32 ------------- prover/prover_cli/src/commands/mod.rs | 3 +- 
prover/prover_cli/src/commands/status.rs | 4 -- prover/tools/Cargo.toml | 20 -------- 10 files changed, 31 insertions(+), 104 deletions(-) rename prover/{tools => prover_cli}/README.md (93%) rename prover/{tools/src/main.rs => prover_cli/src/commands/get_file_info.rs} (90%) delete mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs delete mode 100644 prover/prover_cli/src/commands/status.rs delete mode 100644 prover/tools/Cargo.toml diff --git a/prover/Cargo.lock b/prover/Cargo.lock index b9e7119a6c2e..40a9ad15394f 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -4350,17 +4350,24 @@ name = "prover_cli" version = "0.1.0" dependencies = [ "anyhow", + "bincode", "clap 4.4.6", + "colored", "env_logger 0.10.2", "eyre", + "hex", "log", "prover_dal", "sqlx", "tokio", + "tracing", + "tracing-subscriber", "zksync_basic_types", "zksync_config", "zksync_db_connection", "zksync_env_config", + "zksync_prover_fri_types", + "zksync_prover_interface", "zksync_types", ] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 7f33fa4a18c3..bd2b25bd3420 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -12,7 +12,6 @@ members = [ "witness_vector_generator", "prover_fri_gateway", "proof_fri_compressor", - "tools", "prover_cli", ] diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index ae0dddd885dc..3952b12bd36c 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -13,15 +13,22 @@ categories.workspace = true tokio = { version = "1", features = ["rt-multi-thread", "macros"] } env_logger = "0.10" log = "0.4" -clap = { version = "4.3", features = ["derive"] } eyre = "0.6" +clap = { workspace = true, features = ["derive"] } +tracing.workspace = true +tracing-subscriber = { workspace = true, features = ["env-filter"] } +bincode.workspace = true +colored.workspace = true +hex.workspace = true anyhow.workspace = true zksync_config.workspace = true zksync_env_config.workspace = true zksync_db_connection.workspace = true zksync_basic_types.workspace = true zksync_types.workspace = true +zksync_prover_fri_types.workspace = true +zksync_prover_interface.workspace = true prover_dal.workspace = true sqlx = { workspace = true, features = [ "runtime-tokio", diff --git a/prover/tools/README.md b/prover/prover_cli/README.md similarity index 93% rename from prover/tools/README.md rename to prover/prover_cli/README.md index 35778faa687a..25bbe95fc4d5 100644 --- a/prover/tools/README.md +++ b/prover/prover_cli/README.md @@ -1,9 +1,9 @@ -# Tool to better understand and debug provers +# CLI to better understand and debug provers For now, it has only one command 'file-info' ``` -cargo run --release file-info /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin +cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin ``` Example outputs: diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index 0b0d87f1c9d3..844387f983e0 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_proof_progress, status}; +use crate::commands::get_file_info; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -13,15 +13,13 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { - Status, - GetProofProgress, + FileInfo(get_file_info::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { - ProverCommand::Status => 
status::run().await?, - ProverCommand::GetProofProgress => get_proof_progress::run().await?, + ProverCommand::FileInfo(args) => get_file_info::run(args).await?, }; Ok(()) diff --git a/prover/tools/src/main.rs b/prover/prover_cli/src/commands/get_file_info.rs similarity index 90% rename from prover/tools/src/main.rs rename to prover/prover_cli/src/commands/get_file_info.rs index f7df2147fac9..ff32f56a22c6 100644 --- a/prover/tools/src/main.rs +++ b/prover/prover_cli/src/commands/get_file_info.rs @@ -1,8 +1,7 @@ use std::fs; -use clap::{Parser, Subcommand}; +use clap::Args as ClapArgs; use colored::Colorize; -use tracing::level_filters::LevelFilter; use zksync_prover_fri_types::{ circuit_definitions::{ boojum::{ @@ -18,23 +17,10 @@ use zksync_prover_fri_types::{ }; use zksync_prover_interface::outputs::L1BatchProofForL1; -#[derive(Debug, Parser)] -#[command( - author = "Matter Labs", - version, - about = "Debugging tools for prover related things", - long_about = None -)] - -struct Cli { - #[command(subcommand)] - command: Command, -} - -#[derive(Debug, Subcommand)] -enum Command { - #[command(name = "file-info")] - FileInfo { file_path: String }, +#[derive(ClapArgs)] +pub(crate) struct Args { + #[clap(short, long)] + file_path: String, } fn pretty_print_size_hint(size_hint: (Option, Option)) { @@ -204,7 +190,8 @@ fn pretty_print_l1_proof(result: &L1BatchProofForL1) { println!(" This proof will pass on L1, if L1 executor computes the block commitment that is matching exactly the Inputs value above"); } -fn file_info(path: String) { +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + let path = args.file_path; println!("Reading file {} and guessing the type.", path.bold()); let bytes = fs::read(path).unwrap(); @@ -214,14 +201,14 @@ fn file_info(path: String) { if let Some(circuit) = maybe_circuit { println!(" Parsing file as CircuitWrapper."); pretty_print_circuit_wrapper(&circuit); - return; + return Ok(()); } println!(" NOT a CircuitWrapper."); let maybe_fri_proof: Option = bincode::deserialize(&bytes).ok(); if let Some(fri_proof) = maybe_fri_proof { println!(" Parsing file as FriProofWrapper."); pretty_print_proof(&fri_proof); - return; + return Ok(()); } println!(" NOT a FriProofWrapper."); @@ -232,19 +219,5 @@ fn file_info(path: String) { } else { println!(" NOT a L1BatchProof."); } -} - -fn main() { - tracing_subscriber::fmt() - .with_env_filter( - tracing_subscriber::EnvFilter::builder() - .with_default_directive(LevelFilter::INFO.into()) - .from_env_lossy(), - ) - .init(); - - let opt = Cli::parse(); - match opt.command { - Command::FileInfo { file_path } => file_info(file_path), - } + Ok(()) } diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs deleted file mode 100644 index 9328c355dded..000000000000 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ /dev/null @@ -1,32 +0,0 @@ -use anyhow::Context as _; -use prover_dal::Prover; -use zksync_config::PostgresConfig; -use zksync_db_connection::connection_pool::ConnectionPool; -use zksync_env_config::FromEnv; - -pub(crate) async fn run() -> anyhow::Result<()> { - log::info!("Proof Progress"); - - let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; - - println!("{:?}", postgres_config); - - let pool = ConnectionPool::::singleton(postgres_config.prover_url()?) 
- .build() - .await - .context("failed to build a connection pool")?; - - // let asd = sqlx::query_as!( - // StorageL1BatchHeader, - // r#" - // SELECT - // id - // FROM - // prover_jobs_fri - // "#, - // ) - // .fetch_all(pool) - // .await?; - - Ok(()) -} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index b36d7c3d75ee..3e9a45cb72ac 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1 @@ -pub(crate) mod get_proof_progress; -pub(crate) mod status; +pub(crate) mod get_file_info; diff --git a/prover/prover_cli/src/commands/status.rs b/prover/prover_cli/src/commands/status.rs deleted file mode 100644 index 5d0b9b085260..000000000000 --- a/prover/prover_cli/src/commands/status.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub(crate) async fn run() -> anyhow::Result<()> { - log::info!("Prover Status"); - Ok(()) -} diff --git a/prover/tools/Cargo.toml b/prover/tools/Cargo.toml deleted file mode 100644 index 66df1e99db4e..000000000000 --- a/prover/tools/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "tools" -version.workspace = true -edition.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true -license.workspace = true -keywords.workspace = true -categories.workspace = true - -[dependencies] -clap = { workspace = true, features = ["derive"] } -tracing.workspace = true -tracing-subscriber = { workspace = true, features = ["env-filter"] } -zksync_prover_fri_types.workspace = true -bincode.workspace = true -colored.workspace = true -zksync_prover_interface.workspace = true -hex.workspace = true \ No newline at end of file From c32d1aa427711b3123735a6b12bbd891c9889270 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 10 Apr 2024 16:12:21 -0300 Subject: [PATCH 07/98] fmt --- prover/Cargo.lock | 14 ----- prover/prover_cli/src/cli.rs | 4 +- .../src/commands/get_proof_progress.rs | 57 +++++++++++++++++++ prover/prover_cli/src/commands/mod.rs | 1 + 4 files changed, 61 insertions(+), 15 deletions(-) create mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 40a9ad15394f..16c9ad3c20a5 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -6302,20 +6302,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tools" -version = "0.1.0" -dependencies = [ - "bincode", - "clap 4.4.6", - "colored", - "hex", - "tracing", - "tracing-subscriber", - "zksync_prover_fri_types", - "zksync_prover_interface", -] - [[package]] name = "tower" version = "0.4.13" diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index 844387f983e0..dbf1b910b044 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::get_file_info; +use crate::commands::{get_file_info, get_proof_progress}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,12 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), + ProofProgress(get_proof_progress::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, + ProverCommand::ProofProgress(args) => get_proof_progress::run(args).await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs new file mode 
100644 index 000000000000..36b1033a5b5b --- /dev/null +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -0,0 +1,57 @@ +use anyhow::Context as _; +use clap::Args as ClapArgs; +use sqlx::postgres::PgPoolOptions; +use sqlx::Row; +use zksync_config::PostgresConfig; +use zksync_env_config::FromEnv; + +#[derive(ClapArgs)] +pub(crate) struct Args { + #[clap(short, long)] + l1_batch_number: i32, +} + +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + let pool = PgPoolOptions::new() + .max_connections(5) + .connect("postgres://postgres:notsecurepassword@localhost/prover_local") + .await?; + + let query = sqlx::query( + "SELECT + id, + status, + error, + processing_started_at, + time_taken, + is_node_final_proof + FROM + prover_jobs_fri + WHERE + l1_batch_number = $1", + ) + .bind(&args.l1_batch_number) + .fetch_all(&pool) + .await?; + + let total_jobs = query.len(); + let successful_jobs = query + .iter() + .filter(|row| row.get::("status") == "successfull") + .count(); + let failed_jobs = query + .iter() + .filter(|row| row.get::("status") == "failed") + .count(); + let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; + + println!("= Prover progress ="); + println!("Batch number: {}", args.l1_batch_number); + println!( + "Progress: {:.2}% ({}/{})", + progress, successful_jobs, total_jobs + ); + println!("Failed: {}", failed_jobs); + + Ok(()) +} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 3e9a45cb72ac..87d92ecb0557 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1 +1,2 @@ pub(crate) mod get_file_info; +pub(crate) mod get_proof_progress; From 8e49e04f1c5d4a184b43770432bbd987cca9dd8e Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 10 Apr 2024 16:13:03 -0300 Subject: [PATCH 08/98] zk fmt --- prover/prover_cli/src/commands/get_proof_progress.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index 36b1033a5b5b..e497b7afca1b 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -1,7 +1,6 @@ use anyhow::Context as _; use clap::Args as ClapArgs; -use sqlx::postgres::PgPoolOptions; -use sqlx::Row; +use sqlx::{postgres::PgPoolOptions, Row}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; From 80e13daf9163b77b268bbbab66be8e047addae88 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 10 Apr 2024 16:18:57 -0300 Subject: [PATCH 09/98] fix typo --- prover/prover_cli/src/commands/get_proof_progress.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs index e497b7afca1b..f23fe7de9829 100644 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ b/prover/prover_cli/src/commands/get_proof_progress.rs @@ -36,7 +36,7 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { let total_jobs = query.len(); let successful_jobs = query .iter() - .filter(|row| row.get::("status") == "successfull") + .filter(|row| row.get::("status") == "successful") .count(); let failed_jobs = query .iter() From 0f2ebd8b639f7246c18b58a7615ceb8fd0faa53c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 12 Apr 2024 18:51:18 -0300 Subject: [PATCH 10/98] move query to prover_dal --- prover/prover_cli/src/cli.rs | 6 +- 
.../src/commands/get_proof_progress.rs | 56 -------------- prover/prover_cli/src/commands/mod.rs | 2 +- prover/prover_cli/src/commands/progress.rs | 74 +++++++++++++++++++ prover/prover_dal/src/fri_prover_dal.rs | 53 +++++++++++++ 5 files changed, 131 insertions(+), 60 deletions(-) delete mode 100644 prover/prover_cli/src/commands/get_proof_progress.rs create mode 100644 prover/prover_cli/src/commands/progress.rs diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index dbf1b910b044..b72731a9f646 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_file_info, get_proof_progress}; +use crate::commands::{get_file_info, progress}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,14 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), - ProofProgress(get_proof_progress::Args), + Progress(progress::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::ProofProgress(args) => get_proof_progress::run(args).await?, + ProverCommand::Progress(args) => progress::run(args).await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/get_proof_progress.rs b/prover/prover_cli/src/commands/get_proof_progress.rs deleted file mode 100644 index f23fe7de9829..000000000000 --- a/prover/prover_cli/src/commands/get_proof_progress.rs +++ /dev/null @@ -1,56 +0,0 @@ -use anyhow::Context as _; -use clap::Args as ClapArgs; -use sqlx::{postgres::PgPoolOptions, Row}; -use zksync_config::PostgresConfig; -use zksync_env_config::FromEnv; - -#[derive(ClapArgs)] -pub(crate) struct Args { - #[clap(short, long)] - l1_batch_number: i32, -} - -pub(crate) async fn run(args: Args) -> anyhow::Result<()> { - let pool = PgPoolOptions::new() - .max_connections(5) - .connect("postgres://postgres:notsecurepassword@localhost/prover_local") - .await?; - - let query = sqlx::query( - "SELECT - id, - status, - error, - processing_started_at, - time_taken, - is_node_final_proof - FROM - prover_jobs_fri - WHERE - l1_batch_number = $1", - ) - .bind(&args.l1_batch_number) - .fetch_all(&pool) - .await?; - - let total_jobs = query.len(); - let successful_jobs = query - .iter() - .filter(|row| row.get::("status") == "successful") - .count(); - let failed_jobs = query - .iter() - .filter(|row| row.get::("status") == "failed") - .count(); - let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; - - println!("= Prover progress ="); - println!("Batch number: {}", args.l1_batch_number); - println!( - "Progress: {:.2}% ({}/{})", - progress, successful_jobs, total_jobs - ); - println!("Failed: {}", failed_jobs); - - Ok(()) -} diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 87d92ecb0557..027cee560ad1 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1,2 @@ pub(crate) mod get_file_info; -pub(crate) mod get_proof_progress; +pub(crate) mod progress; diff --git a/prover/prover_cli/src/commands/progress.rs b/prover/prover_cli/src/commands/progress.rs new file mode 100644 index 000000000000..cbc01ad260fa --- /dev/null +++ b/prover/prover_cli/src/commands/progress.rs @@ -0,0 +1,74 @@ +use anyhow::Context as _; +use clap::Args as ClapArgs; +use prover_dal::{ConnectionPool, Prover, ProverDal}; +use 
zksync_basic_types::L1BatchNumber; +use zksync_config::PostgresConfig; +use zksync_env_config::FromEnv; + +#[derive(ClapArgs)] +pub(crate) struct Args { + #[clap(short, long, conflicts_with = "all", required_unless_present = "all", num_args = 0..)] + proof: Option>, + #[clap(short, long, default_value("false"))] + verbose: bool, + #[clap(short, long, conflicts_with = "proof")] + all: bool, +} + +fn pretty_print_job_status( + l1_batch_number: &L1BatchNumber, + total_jobs: usize, + successful_jobs: usize, + failed_jobs: usize, +) { + let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; + println!("Batch number: {}", l1_batch_number); + println!( + "Progress: {:.2}% ({}/{})", + progress, successful_jobs, total_jobs + ); + println!("Failed: {}", failed_jobs); +} + +async fn get_one_batch_progress(l1_batches_numbers: Vec) -> anyhow::Result<()> { + let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; + + let prover_connection_pool = ConnectionPool::::builder( + postgres_config.prover_url()?, + postgres_config.max_connections()?, + ) + .build() + .await + .context("failed to build a prover_connection_pool")?; + + let mut conn = prover_connection_pool.connection().await.unwrap(); + let stats = conn + .fri_prover_jobs_dal() + .get_prover_jobs_stats_for_batch(l1_batches_numbers) + .await; + + for row in &stats { + let (l1_batch_number, statistics) = row; + let total_jobs = + statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; + pretty_print_job_status( + l1_batch_number, + total_jobs, + statistics.successful, + statistics.failed, + ) + } + Ok(()) +} + +async fn get_all_batches_progress() -> anyhow::Result<()> { + Ok(()) +} +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + println!("{:?}", args.proof); + if let Some(l1_batch_number) = args.proof { + get_one_batch_progress(l1_batch_number).await + } else { + get_all_batches_progress().await + } +} diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 2da9f69d311a..ab4f3e131753 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -594,4 +594,57 @@ impl FriProverDal<'_, '_> { .unwrap() .unwrap_or(0) as usize } + + pub async fn get_prover_jobs_stats_for_batch( + &mut self, + l1_batches_numbers: Vec, + ) -> HashMap { + { + sqlx::query!( + r#" + SELECT + COUNT(*) AS "count!", + l1_batch_number AS "l1_batch_number!", + status AS "status!" 
+ FROM + prover_jobs_fri + WHERE + l1_batch_number = ANY ($1) + GROUP BY + l1_batch_number, + status + "#, + &l1_batches_numbers + .into_iter() + .map(|x| i64::from(x.0)) + .collect::>() + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| (row.l1_batch_number, row.status, row.count as usize)) + .fold( + HashMap::new(), + |mut acc, (l1_batch_number, status, value)| { + let stats = acc.entry(L1BatchNumber(l1_batch_number as u32)).or_insert( + JobCountStatistics { + queued: 0, + in_progress: 0, + failed: 0, + successful: 0, + }, + ); + match status.as_ref() { + "queued" => stats.queued = value, + "in_progress" => stats.in_progress = value, + "failed" => stats.failed = value, + "successful" => stats.successful = value, + _ => (), + } + acc + }, + ) + } + } } From 69d2104ae2ab1f219ce23160057100f20a7c6813 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 15 Apr 2024 13:08:31 -0300 Subject: [PATCH 11/98] remove --all flag --- prover/prover_cli/src/commands/progress.rs | 59 +++++++++------------- 1 file changed, 24 insertions(+), 35 deletions(-) diff --git a/prover/prover_cli/src/commands/progress.rs b/prover/prover_cli/src/commands/progress.rs index cbc01ad260fa..54a5bf045983 100644 --- a/prover/prover_cli/src/commands/progress.rs +++ b/prover/prover_cli/src/commands/progress.rs @@ -1,36 +1,40 @@ use anyhow::Context as _; use clap::Args as ClapArgs; use prover_dal::{ConnectionPool, Prover, ProverDal}; -use zksync_basic_types::L1BatchNumber; +use zksync_basic_types::{prover_dal::JobCountStatistics, L1BatchNumber}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; #[derive(ClapArgs)] pub(crate) struct Args { - #[clap(short, long, conflicts_with = "all", required_unless_present = "all", num_args = 0..)] - proof: Option>, + #[clap(short, long, num_args = 0..)] + batch: Vec, #[clap(short, long, default_value("false"))] verbose: bool, - #[clap(short, long, conflicts_with = "proof")] - all: bool, } fn pretty_print_job_status( l1_batch_number: &L1BatchNumber, - total_jobs: usize, - successful_jobs: usize, - failed_jobs: usize, + statistics: &JobCountStatistics, + verbose: bool, ) { - let progress = (successful_jobs as f32 / total_jobs as f32) * 100.0; + let total_jobs = + statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; + let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0; println!("Batch number: {}", l1_batch_number); println!( "Progress: {:.2}% ({}/{})", - progress, successful_jobs, total_jobs + progress, statistics.successful, total_jobs ); - println!("Failed: {}", failed_jobs); + if verbose { + println!("In progress: {}", statistics.in_progress); + println!("Queued: {}", statistics.in_progress); + println!("Successful: {}", statistics.in_progress); + } + println!("Failed: {}", statistics.failed); } -async fn get_one_batch_progress(l1_batches_numbers: Vec) -> anyhow::Result<()> { +pub(crate) async fn run(args: Args) -> anyhow::Result<()> { let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; let prover_connection_pool = ConnectionPool::::builder( @@ -44,31 +48,16 @@ async fn get_one_batch_progress(l1_batches_numbers: Vec) -> anyho let mut conn = prover_connection_pool.connection().await.unwrap(); let stats = conn .fri_prover_jobs_dal() - .get_prover_jobs_stats_for_batch(l1_batches_numbers) + .get_prover_jobs_stats_for_batch(args.batch) .await; + if stats.len() > 0 { + for row in &stats { + let (l1_batch_number, statistics) = row; - for row in 
&stats { - let (l1_batch_number, statistics) = row; - let total_jobs = - statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; - pretty_print_job_status( - l1_batch_number, - total_jobs, - statistics.successful, - statistics.failed, - ) - } - Ok(()) -} - -async fn get_all_batches_progress() -> anyhow::Result<()> { - Ok(()) -} -pub(crate) async fn run(args: Args) -> anyhow::Result<()> { - println!("{:?}", args.proof); - if let Some(l1_batch_number) = args.proof { - get_one_batch_progress(l1_batch_number).await + pretty_print_job_status(l1_batch_number, statistics, args.verbose) + } } else { - get_all_batches_progress().await + println!("No batches found.") } + Ok(()) } From c9e6e5e9262d35dca538e0f5bbb1025d6cb5b2c2 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 11:12:34 -0300 Subject: [PATCH 12/98] change command name --- prover/prover_cli/src/cli.rs | 7 +++---- .../src/commands/{progress.rs => jobs_status.rs} | 0 prover/prover_cli/src/commands/mod.rs | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) rename prover/prover_cli/src/commands/{progress.rs => jobs_status.rs} (100%) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index b72731a9f646..8105cccdd6ea 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_file_info, progress}; +use crate::commands::{get_file_info, jobs_status}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,15 +14,14 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), - Progress(progress::Args), + StatusJobs(jobs_status::Args), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::Progress(args) => progress::run(args).await?, + ProverCommand::StatusJobs(args) => jobs_status::run(args).await?, }; - Ok(()) } diff --git a/prover/prover_cli/src/commands/progress.rs b/prover/prover_cli/src/commands/jobs_status.rs similarity index 100% rename from prover/prover_cli/src/commands/progress.rs rename to prover/prover_cli/src/commands/jobs_status.rs diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 027cee560ad1..57da8f98a9c9 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1,2 @@ pub(crate) mod get_file_info; -pub(crate) mod progress; +pub(crate) mod jobs_status; From cdbb46775e702ff03cf602208c3f661d36927764 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 11:18:01 -0300 Subject: [PATCH 13/98] fmt --- prover/prover_cli/src/cli.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index c8c96cc0dad0..aae354d09eab 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,7 @@ -use crate::commands::{get_file_info, jobs_status}; use clap::{command, Parser, Subcommand}; +use crate::commands::{get_file_info, jobs_status}; + pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); #[derive(Parser)] From d1f3994ab7beacae3aff51486d53892e12a7f784 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 11:18:37 -0300 Subject: [PATCH 14/98] add sqlx query json --- ...8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json | 34 +++++++++++++++++++ 1 file changed, 34 
insertions(+) create mode 100644 prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json diff --git a/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json b/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json new file mode 100644 index 000000000000..b65a594a8ed2 --- /dev/null +++ b/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n status\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "status!", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8Array" + ] + }, + "nullable": [ + null, + false, + false + ] + }, + "hash": "8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9" +} From fff3910a6b3f3558836abc8d73790b5650915522 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 12:36:37 -0300 Subject: [PATCH 15/98] update README --- prover/prover_cli/README.md | 53 ++++++++++++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 25bbe95fc4d5..fcc85bc87992 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -1,14 +1,59 @@ # CLI to better understand and debug provers -For now, it has only one command 'file-info' +## Usage +> Note: +> For now, its necesary to use the 'zk f' tool to set up the environment. +> The main command will later be changed to `pli`. +```bash +Usage: zk f cargo run --release -- + +Commands: + file-info + status-jobs + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help Print help + -V, --version Print version +``` +### Status-jobs + +You can get the progress for some batch proof, for a bunch of batches the `status-jobs` command: + +```bash +# Displays the proof progress of the batch 1 + zk f cargo run -- status-jobs --batch 1 +# Displays the proof progress of the batches 1 and 2 + zk f cargo run -- status-jobs --batch 1 2 +# Displays the proof progress of the batch 3, with additional information + zk f cargo run -- status-jobs --batch 3 --verbose ``` + +Example: + +```bash +$ zk f cargo run -- status-jobs --batch 1 --verbose + +Batch number: 1 +Progress: 34.88% (45/129) +In progress: 1 +Queued: 83 +Successful: 45 +Failed: 0 +``` + +### File-Info + +Displays the information about a given file: + +```bash cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin ``` Example outputs: -``` +```bash L1 proof AUX info: L1 msg linear hash: [163, 243, 172, 16, 189, 59, 100, 227, 249, 46, 226, 220, 82, 135, 213, 208, 221, 228, 49, 46, 121, 136, 78, 163, 15, 155, 199, 82, 64, 24, 172, 198] @@ -18,7 +63,7 @@ AUX info: Inputs: [Fr(0x00000000775db828700e0ebbe0384f8a017598a271dfb6c96ebb2baf22a7a572)] ``` -``` +```bash == Circuit == Type: basic. 
Id: 1 (Scheduler) Geometry: CSGeometry { num_columns_under_copy_permutation: 130, num_witness_columns: 0, num_constant_columns: 4, max_allowed_constraint_degree: 8 } @@ -34,4 +79,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` +``` \ No newline at end of file From ef906b93587453f0307164ccd4290ecb00c92006 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 12:37:27 -0300 Subject: [PATCH 16/98] zk fmt --- prover/prover_cli/README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index fcc85bc87992..890538dbf510 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -1,9 +1,9 @@ # CLI to better understand and debug provers ## Usage -> Note: -> For now, its necesary to use the 'zk f' tool to set up the environment. -> The main command will later be changed to `pli`. + +> Note: For now, its necesary to use the 'zk f' tool to set up the environment. The main command will later be changed +> to `pli`. ```bash Usage: zk f cargo run --release -- @@ -17,6 +17,7 @@ Options: -h, --help Print help -V, --version Print version ``` + ### Status-jobs You can get the progress for some batch proof, for a bunch of batches the `status-jobs` command: @@ -79,4 +80,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` \ No newline at end of file +``` From b3051b2b4c2f0c19f99a414a5faac854b0e298a4 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 16 Apr 2024 12:44:23 -0300 Subject: [PATCH 17/98] remove eol --- prover/prover_cli/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index 61a2fad3be13..b54bd0a0d448 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -28,4 +28,4 @@ zksync_basic_types.workspace = true zksync_types.workspace = true zksync_prover_fri_types.workspace = true zksync_prover_interface.workspace = true -prover_dal.workspace = true +prover_dal.workspace = true \ No newline at end of file From ee9d35bf3f77897d1175594cb570d87b217fd1c9 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:33:34 -0300 Subject: [PATCH 18/98] Move jobs cmd to status cmd module --- .../{jobs_status.rs => status/jobs.rs} | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) rename prover/prover_cli/src/commands/{jobs_status.rs => status/jobs.rs} (100%) diff --git a/prover/prover_cli/src/commands/jobs_status.rs b/prover/prover_cli/src/commands/status/jobs.rs similarity index 100% rename from prover/prover_cli/src/commands/jobs_status.rs rename to prover/prover_cli/src/commands/status/jobs.rs index 54a5bf045983..01205964752b 100644 --- a/prover/prover_cli/src/commands/jobs_status.rs +++ b/prover/prover_cli/src/commands/status/jobs.rs @@ -13,27 +13,6 @@ pub(crate) struct Args { verbose: bool, } -fn pretty_print_job_status( - l1_batch_number: &L1BatchNumber, - statistics: &JobCountStatistics, - verbose: bool, -) { - let total_jobs = - 
statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; - let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0; - println!("Batch number: {}", l1_batch_number); - println!( - "Progress: {:.2}% ({}/{})", - progress, statistics.successful, total_jobs - ); - if verbose { - println!("In progress: {}", statistics.in_progress); - println!("Queued: {}", statistics.in_progress); - println!("Successful: {}", statistics.in_progress); - } - println!("Failed: {}", statistics.failed); -} - pub(crate) async fn run(args: Args) -> anyhow::Result<()> { let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; @@ -61,3 +40,24 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { } Ok(()) } + +fn pretty_print_job_status( + l1_batch_number: &L1BatchNumber, + statistics: &JobCountStatistics, + verbose: bool, +) { + let total_jobs = + statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; + let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0; + println!("Batch number: {}", l1_batch_number); + println!( + "Progress: {:.2}% ({}/{})", + progress, statistics.successful, total_jobs + ); + if verbose { + println!("In progress: {}", statistics.in_progress); + println!("Queued: {}", statistics.in_progress); + println!("Successful: {}", statistics.in_progress); + } + println!("Failed: {}", statistics.failed); +} From 82faf32849c2904f7e5162c561731476cd1de9ab Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:34:23 -0300 Subject: [PATCH 19/98] Refactor status cmd --- prover/prover_cli/src/cli.rs | 7 ++++--- prover/prover_cli/src/commands/mod.rs | 10 +++++++++- prover/prover_cli/src/commands/status/mod.rs | 9 +++++++++ 3 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 prover/prover_cli/src/commands/status/mod.rs diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index aae354d09eab..cafdd2dff99c 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{get_file_info, jobs_status}; +use crate::commands::{self, get_file_info, status}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -14,14 +14,15 @@ struct ProverCLI { #[derive(Subcommand)] enum ProverCommand { FileInfo(get_file_info::Args), - StatusJobs(jobs_status::Args), + #[command(subcommand)] + Status(commands::StatusCommand), } pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::StatusJobs(args) => jobs_status::run(args).await?, + ProverCommand::Status(status_cmd) => status::run(status_cmd).await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index 57da8f98a9c9..e53bb47fdc34 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,2 +1,10 @@ +use clap::Subcommand; +use status::jobs; + pub(crate) mod get_file_info; -pub(crate) mod jobs_status; +pub(crate) mod status; + +#[derive(Subcommand)] +pub(crate) enum StatusCommand { + Jobs(jobs::Args), +} diff --git a/prover/prover_cli/src/commands/status/mod.rs b/prover/prover_cli/src/commands/status/mod.rs new file mode 100644 index 000000000000..761bf53c57a0 --- /dev/null +++ b/prover/prover_cli/src/commands/status/mod.rs @@ -0,0 +1,9 @@ +use super::StatusCommand; + 
+pub(crate) mod jobs; + +pub(crate) async fn run(status_cmd: StatusCommand) -> anyhow::Result<()> { + match status_cmd { + StatusCommand::Jobs(args) => jobs::run(args).await, + } +} From aa448645ace357a4be504695cde5735e7327aff1 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:34:35 -0300 Subject: [PATCH 20/98] Update Cargo.lock --- prover/Cargo.lock | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index edaf63a367ac..7262bd7ecdaa 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -1756,15 +1756,11 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" dependencies = [ - "humantime", - "is-terminal", "humantime", "is-terminal", "log", "regex", "termcolor", - "regex", - "termcolor", ] [[package]] @@ -1875,16 +1871,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = "eyre" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" -dependencies = [ - "indenter", - "once_cell", -] - [[package]] name = "fastrand" version = "2.0.1" @@ -2731,12 +2717,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "indenter" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" - [[package]] name = "indexmap" version = "1.9.3" @@ -4359,11 +4339,9 @@ dependencies = [ "clap 4.4.6", "colored", "env_logger 0.10.2", - "eyre", "hex", "log", "prover_dal", - "sqlx", "tokio", "tracing", "tracing-subscriber", From 3fe32b6e164a54528dca3cb4034a73c978a62d18 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Tue, 16 Apr 2024 17:40:30 -0300 Subject: [PATCH 21/98] zk fmt --- prover/prover_cli/src/main.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prover/prover_cli/src/main.rs b/prover/prover_cli/src/main.rs index f2a7dd710267..a1060476be34 100644 --- a/prover/prover_cli/src/main.rs +++ b/prover/prover_cli/src/main.rs @@ -3,6 +3,8 @@ use prover_cli::cli; #[tokio::main] async fn main() { env_logger::builder() + .filter_module("zksync_db_connection::connection_pool", log::LevelFilter::Off) + .filter_module("sqlx::query", log::LevelFilter::Off) .filter_level(log::LevelFilter::Debug) .init(); From 9977e950a62b6efba95895b53d806b2ebba3f2dc Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 17 Apr 2024 21:40:32 -0300 Subject: [PATCH 22/98] Refactor --- prover/prover_cli/src/cli.rs | 4 ++-- prover/prover_cli/src/commands/mod.rs | 8 +------- .../src/commands/status/{jobs.rs => batch.rs} | 8 ++++++-- prover/prover_cli/src/commands/status/mod.rs | 17 ++++++++++++----- prover/prover_cli/src/main.rs | 12 +++++++++--- 5 files changed, 30 insertions(+), 19 deletions(-) rename prover/prover_cli/src/commands/status/{jobs.rs => batch.rs} (92%) diff --git a/prover/prover_cli/src/cli.rs b/prover/prover_cli/src/cli.rs index cafdd2dff99c..4c01c132cea5 100644 --- a/prover/prover_cli/src/cli.rs +++ b/prover/prover_cli/src/cli.rs @@ -1,6 +1,6 @@ use clap::{command, Parser, Subcommand}; -use crate::commands::{self, get_file_info, status}; +use crate::commands::{self, get_file_info}; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -22,7 +22,7 @@ 
pub async fn start() -> anyhow::Result<()> { let ProverCLI { command } = ProverCLI::parse(); match command { ProverCommand::FileInfo(args) => get_file_info::run(args).await?, - ProverCommand::Status(status_cmd) => status::run(status_cmd).await?, + ProverCommand::Status(cmd) => cmd.run().await?, }; Ok(()) diff --git a/prover/prover_cli/src/commands/mod.rs b/prover/prover_cli/src/commands/mod.rs index e53bb47fdc34..e02f4bfb1bae 100644 --- a/prover/prover_cli/src/commands/mod.rs +++ b/prover/prover_cli/src/commands/mod.rs @@ -1,10 +1,4 @@ -use clap::Subcommand; -use status::jobs; - pub(crate) mod get_file_info; pub(crate) mod status; -#[derive(Subcommand)] -pub(crate) enum StatusCommand { - Jobs(jobs::Args), -} +pub(crate) use status::StatusCommand; diff --git a/prover/prover_cli/src/commands/status/jobs.rs b/prover/prover_cli/src/commands/status/batch.rs similarity index 92% rename from prover/prover_cli/src/commands/status/jobs.rs rename to prover/prover_cli/src/commands/status/batch.rs index 01205964752b..f5977ad34780 100644 --- a/prover/prover_cli/src/commands/status/jobs.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,4 +1,4 @@ -use anyhow::Context as _; +use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; use prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_basic_types::{prover_dal::JobCountStatistics, L1BatchNumber}; @@ -6,7 +6,7 @@ use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; #[derive(ClapArgs)] -pub(crate) struct Args { +pub struct Args { #[clap(short, long, num_args = 0..)] batch: Vec, #[clap(short, long, default_value("false"))] @@ -14,6 +14,10 @@ pub(crate) struct Args { } pub(crate) async fn run(args: Args) -> anyhow::Result<()> { + ensure!( + !args.batch.is_empty(), + "At least one batch number should be provided" + ); let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; let prover_connection_pool = ConnectionPool::::builder( diff --git a/prover/prover_cli/src/commands/status/mod.rs b/prover/prover_cli/src/commands/status/mod.rs index 761bf53c57a0..431b1060d60d 100644 --- a/prover/prover_cli/src/commands/status/mod.rs +++ b/prover/prover_cli/src/commands/status/mod.rs @@ -1,9 +1,16 @@ -use super::StatusCommand; +use clap::Subcommand; -pub(crate) mod jobs; +pub(crate) mod batch; -pub(crate) async fn run(status_cmd: StatusCommand) -> anyhow::Result<()> { - match status_cmd { - StatusCommand::Jobs(args) => jobs::run(args).await, +#[derive(Subcommand)] +pub enum StatusCommand { + Batch(batch::Args), +} + +impl StatusCommand { + pub(crate) async fn run(self) -> anyhow::Result<()> { + match self { + StatusCommand::Batch(args) => batch::run(args).await, + } } } diff --git a/prover/prover_cli/src/main.rs b/prover/prover_cli/src/main.rs index a1060476be34..b979a36ed508 100644 --- a/prover/prover_cli/src/main.rs +++ b/prover/prover_cli/src/main.rs @@ -3,10 +3,16 @@ use prover_cli::cli; #[tokio::main] async fn main() { env_logger::builder() - .filter_module("zksync_db_connection::connection_pool", log::LevelFilter::Off) - .filter_module("sqlx::query", log::LevelFilter::Off) + .filter_module("zksync_db_connection", log::LevelFilter::Off) + .filter_module("sqlx", log::LevelFilter::Off) .filter_level(log::LevelFilter::Debug) .init(); - cli::start().await.unwrap(); + match cli::start().await { + Ok(_) => {} + Err(err) => { + log::error!("{err:?}"); + std::process::exit(1); + } + } } From 0120a09319bff428eaa379158f987a021f7312a1 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 19 Apr 2024 
10:50:32 -0300 Subject: [PATCH 23/98] fix typo --- prover/prover_cli/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 890538dbf510..99869b36bf25 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -2,7 +2,7 @@ ## Usage -> Note: For now, its necesary to use the 'zk f' tool to set up the environment. The main command will later be changed +> Note: For now, its necessary to use the 'zk f' tool to set up the environment. The main command will later be changed > to `pli`. ```bash From 9a11a94b9c067a373372af2eb5f84850b6425796 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 19 Apr 2024 17:34:22 -0300 Subject: [PATCH 24/98] modify query to include aggregation_round --- .../prover_cli/src/commands/status/batch.rs | 7 ++--- ...76096de57cdba25831f86c1428081ca0a14f.json} | 10 +++++-- prover/prover_dal/src/fri_prover_dal.rs | 26 ++++++++++++++----- 3 files changed, 31 insertions(+), 12 deletions(-) rename prover/prover_dal/.sqlx/{query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json => query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json} (50%) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index f5977ad34780..a6fe4e95a0f7 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -35,9 +35,10 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { .await; if stats.len() > 0 { for row in &stats { - let (l1_batch_number, statistics) = row; - - pretty_print_job_status(l1_batch_number, statistics, args.verbose) + let ((l1_batch_number, aggregation_round), statistics) = row; + if *aggregation_round == 0 { + pretty_print_job_status(l1_batch_number, statistics, args.verbose) + } } } else { println!("No batches found.") diff --git a/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json b/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json similarity index 50% rename from prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json rename to prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json index b65a594a8ed2..b578881deeb1 100644 --- a/prover/prover_dal/.sqlx/query-8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9.json +++ b/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n status\n ", + "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n aggregation_round AS \"aggregation_round!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n aggregation_round,\n status\n ", "describe": { "columns": [ { @@ -15,6 +15,11 @@ }, { "ordinal": 2, + "name": "aggregation_round!", + "type_info": "Int2" + }, + { + "ordinal": 3, "name": "status!", "type_info": "Text" } @@ -27,8 +32,9 @@ "nullable": [ null, false, + false, false ] }, - "hash": "8f9c609a78ca4f854c0b24621ee8ad95ba0b3b5b7b3a81592a4720bc094c32f9" + "hash": 
"676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f" } diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index ab4f3e131753..88b4e7f9def5 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -598,13 +598,14 @@ impl FriProverDal<'_, '_> { pub async fn get_prover_jobs_stats_for_batch( &mut self, l1_batches_numbers: Vec, - ) -> HashMap { + ) -> HashMap<(L1BatchNumber, u8), JobCountStatistics> { { sqlx::query!( r#" SELECT COUNT(*) AS "count!", l1_batch_number AS "l1_batch_number!", + aggregation_round AS "aggregation_round!", status AS "status!" FROM prover_jobs_fri @@ -612,6 +613,7 @@ impl FriProverDal<'_, '_> { l1_batch_number = ANY ($1) GROUP BY l1_batch_number, + aggregation_round, status "#, &l1_batches_numbers @@ -623,18 +625,28 @@ impl FriProverDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number, row.status, row.count as usize)) + .map(|row| { + ( + row.l1_batch_number, + row.aggregation_round, + row.status, + row.count as usize, + ) + }) .fold( HashMap::new(), - |mut acc, (l1_batch_number, status, value)| { - let stats = acc.entry(L1BatchNumber(l1_batch_number as u32)).or_insert( - JobCountStatistics { + |mut acc, (l1_batch_number, aggregation_round, status, value)| { + let stats = acc + .entry(( + L1BatchNumber(l1_batch_number as u32), + aggregation_round as u8, + )) + .or_insert(JobCountStatistics { queued: 0, in_progress: 0, failed: 0, successful: 0, - }, - ); + }); match status.as_ref() { "queued" => stats.queued = value, "in_progress" => stats.in_progress = value, From 1c92e9578bf800edf6db2f5d3a19a4de9df6f727 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 24 Apr 2024 15:34:45 -0300 Subject: [PATCH 25/98] change query return type --- prover/prover_dal/src/fri_prover_dal.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 88b4e7f9def5..0639d33e144a 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -598,7 +598,7 @@ impl FriProverDal<'_, '_> { pub async fn get_prover_jobs_stats_for_batch( &mut self, l1_batches_numbers: Vec, - ) -> HashMap<(L1BatchNumber, u8), JobCountStatistics> { + ) -> HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics> { { sqlx::query!( r#" @@ -639,7 +639,7 @@ impl FriProverDal<'_, '_> { let stats = acc .entry(( L1BatchNumber(l1_batch_number as u32), - aggregation_round as u8, + AggregationRound::from(aggregation_round as u8), )) .or_insert(JobCountStatistics { queued: 0, From 05e163646639b859a74fba69480b5ab1cf8fe7e7 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 15:37:46 -0300 Subject: [PATCH 26/98] Update Cargo files --- prover/Cargo.lock | 1 + prover/prover_cli/Cargo.toml | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 031791106d83..e690f7ddf1fe 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -4363,6 +4363,7 @@ dependencies = [ "hex", "log", "prover_dal", + "strum", "tokio", "tracing", "tracing-subscriber", diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index b54bd0a0d448..a09d012f5b4a 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -13,12 +13,12 @@ categories.workspace = true tokio = { version = "1", features = ["rt-multi-thread", "macros"] } env_logger = "0.10" log = "0.4" +colored = "2.1.0" 
clap = { workspace = true, features = ["derive"] } tracing.workspace = true tracing-subscriber = { workspace = true, features = ["env-filter"] } bincode.workspace = true -colored.workspace = true hex.workspace = true anyhow.workspace = true zksync_config.workspace = true @@ -28,4 +28,5 @@ zksync_basic_types.workspace = true zksync_types.workspace = true zksync_prover_fri_types.workspace = true zksync_prover_interface.workspace = true -prover_dal.workspace = true \ No newline at end of file +prover_dal.workspace = true +strum.workspace = true From 58d47ad72c56d87e37d83c0cf0891720437f2baf Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 15:38:58 -0300 Subject: [PATCH 27/98] Setle status cmd baseline --- .../prover_cli/src/commands/status/batch.rs | 74 +++++++------------ 1 file changed, 26 insertions(+), 48 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index a6fe4e95a0f7..cd3a97f420b7 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,68 +1,46 @@ +use super::utils::BatchData; +use crate::commands::status::utils::postgres_config; use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; -use prover_dal::{ConnectionPool, Prover, ProverDal}; -use zksync_basic_types::{prover_dal::JobCountStatistics, L1BatchNumber}; -use zksync_config::PostgresConfig; -use zksync_env_config::FromEnv; + +use prover_dal::{ConnectionPool, Prover}; +use zksync_types::L1BatchNumber; #[derive(ClapArgs)] pub struct Args { - #[clap(short, long, num_args = 0..)] - batch: Vec, + #[clap(short = 'n', num_args = 1..)] + batches: Vec, #[clap(short, long, default_value("false"))] verbose: bool, } pub(crate) async fn run(args: Args) -> anyhow::Result<()> { ensure!( - !args.batch.is_empty(), + !args.batches.is_empty(), "At least one batch number should be provided" ); - let postgres_config = PostgresConfig::from_env().context("PostgresConfig::from_env()")?; - let prover_connection_pool = ConnectionPool::::builder( - postgres_config.prover_url()?, - postgres_config.max_connections()?, - ) - .build() - .await - .context("failed to build a prover_connection_pool")?; + let batches_data = get_batches_data(args.batches).await?; - let mut conn = prover_connection_pool.connection().await.unwrap(); - let stats = conn - .fri_prover_jobs_dal() - .get_prover_jobs_stats_for_batch(args.batch) - .await; - if stats.len() > 0 { - for row in &stats { - let ((l1_batch_number, aggregation_round), statistics) = row; - if *aggregation_round == 0 { - pretty_print_job_status(l1_batch_number, statistics, args.verbose) - } - } - } else { - println!("No batches found.") + for batch_data in batches_data { + println!("{batch_data:?}"); } + Ok(()) } -fn pretty_print_job_status( - l1_batch_number: &L1BatchNumber, - statistics: &JobCountStatistics, - verbose: bool, -) { - let total_jobs = - statistics.queued + statistics.in_progress + statistics.failed + statistics.successful; - let progress = (statistics.successful as f32 / total_jobs as f32) * 100.0; - println!("Batch number: {}", l1_batch_number); - println!( - "Progress: {:.2}% ({}/{})", - progress, statistics.successful, total_jobs - ); - if verbose { - println!("In progress: {}", statistics.in_progress); - println!("Queued: {}", statistics.in_progress); - println!("Successful: {}", statistics.in_progress); - } - println!("Failed: {}", statistics.failed); +async fn get_batches_data(_batches: Vec) -> anyhow::Result> { + let config = 
postgres_config()?; + + let prover_connection_pool = + ConnectionPool::::builder(config.prover_url()?, config.max_connections()?) + .build() + .await + .context("failed to build a prover_connection_pool")?; + + let _conn = prover_connection_pool.connection().await.unwrap(); + + // Queries here... + + Ok(vec![BatchData::default()]) } From f7787597afb90b5b0fc4c491c507eedfd4d95d5a Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 15:39:43 -0300 Subject: [PATCH 28/98] Add status cmd utils --- prover/prover_cli/src/commands/status/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/prover/prover_cli/src/commands/status/mod.rs b/prover/prover_cli/src/commands/status/mod.rs index 431b1060d60d..142f9e1feea0 100644 --- a/prover/prover_cli/src/commands/status/mod.rs +++ b/prover/prover_cli/src/commands/status/mod.rs @@ -1,6 +1,7 @@ use clap::Subcommand; pub(crate) mod batch; +mod utils; #[derive(Subcommand)] pub enum StatusCommand { From e945df2a0ec50aebc8a35144760a2325ce15716d Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 16:41:46 -0300 Subject: [PATCH 29/98] Add status utils --- .../prover_cli/src/commands/status/utils.rs | 137 ++++++++++++++++++ 1 file changed, 137 insertions(+) create mode 100644 prover/prover_cli/src/commands/status/utils.rs diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs new file mode 100644 index 000000000000..e499489620ca --- /dev/null +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -0,0 +1,137 @@ +use std::{collections::HashMap, fmt::Debug}; + +use colored::*; +use strum::{Display, EnumString}; +use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; +use zksync_config::PostgresConfig; +use zksync_env_config::FromEnv; +use zksync_types::L1BatchNumber; + +pub fn postgres_config() -> anyhow::Result { + Ok(PostgresConfig::from_env()?) 
+} + +pub struct BatchData { + pub batch_number: L1BatchNumber, + pub basic_witness_generator: Task, + pub leaf_witness_generator: Task, + pub node_witness_generator: Task, + pub recursion_tip: Task, + pub scheduler: Task, + pub compressor: Task, +} + +impl Debug for BatchData { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!( + f, + "== {} ==", + format!("Batch {} Status", self.batch_number).bold() + )?; + writeln!(f)?; + writeln!(f, "= {} =", format!("Proving Stages").bold())?; + writeln!(f, "{:?}", self.basic_witness_generator)?; + writeln!(f, "{:?}", self.leaf_witness_generator)?; + writeln!(f, "{:?}", self.node_witness_generator)?; + writeln!(f, "{:?}", self.recursion_tip)?; + writeln!(f, "{:?}", self.scheduler)?; + writeln!(f, "{:?}", self.compressor) + } +} + +impl Default for BatchData { + fn default() -> Self { + BatchData { + batch_number: L1BatchNumber::default(), + basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), + leaf_witness_generator: Task::LeafWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_0_prover_jobs_data: ProverJobsData::default(), + }, + node_witness_generator: Task::NodeWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_1_prover_jobs_data: ProverJobsData::default(), + }, + recursion_tip: Task::RecursionTip { + status: TaskStatus::WaitingForProofs, + aggregation_round_2_prover_jobs_data: ProverJobsData::default(), + }, + scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), + compressor: Task::Compressor(TaskStatus::WaitingForProofs), + } + } +} + +#[derive(Debug, EnumString, Clone, Display)] +pub enum TaskStatus { + /// A task is considered queued when all of its jobs is queued. + #[strum(to_string = "Queued 📥")] + Queued, + /// A task is considered in progress when at least one of its jobs differs in its status. + #[strum(to_string = "In Progress ⌛️")] + InProgress, + /// A task is considered successful when all of its jobs were processed successfully. + #[strum(to_string = "Successful ✅")] + Successful, + /// A task is considered waiting for proofs when all of its jobs are waiting for proofs. + #[strum(to_string = "Waiting for Proof ⏱️")] + WaitingForProofs, + /// A task is considered stuck when at least one of its jobs is stuck. + #[strum(to_string = "Stuck 🛑")] + Stuck, +} + +impl Default for TaskStatus { + fn default() -> Self { + TaskStatus::Queued + } +} + +impl Copy for TaskStatus {} + +type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; + +#[derive(EnumString, Clone, Display)] +pub enum Task { + #[strum(to_string = "Basic Witness Generator")] + BasicWitnessGenerator(TaskStatus), + #[strum(to_string = "Leaf Witness Generator")] + LeafWitnessGenerator { + status: TaskStatus, + aggregation_round_0_prover_jobs_data: ProverJobsData, + }, + #[strum(to_string = "Node Witness Generator")] + NodeWitnessGenerator { + status: TaskStatus, + aggregation_round_1_prover_jobs_data: ProverJobsData, + }, + #[strum(to_string = "Recursion Tip")] + RecursionTip { + status: TaskStatus, + aggregation_round_2_prover_jobs_data: ProverJobsData, + }, + #[strum(to_string = "Scheduler")] + Scheduler(TaskStatus), + #[strum(to_string = "Compressor")] + Compressor(TaskStatus), +} + +impl Task { + fn status(&self) -> TaskStatus { + match self { + Task::BasicWitnessGenerator(status) + | Task::LeafWitnessGenerator { status, .. } + | Task::NodeWitnessGenerator { status, .. } + | Task::RecursionTip { status, .. 
} + | Task::Scheduler(status) + | Task::Compressor(status) => *status, + } + } +} + +impl Debug for Task { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!(f, "-- {} --", self.to_string().bold())?; + writeln!(f, "> {}", self.status().to_string()) + } +} From 65853781a9c9195fcf78a5ed61b897fac7424cb0 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 16:42:04 -0300 Subject: [PATCH 30/98] zk fmt --- prover/prover_cli/src/commands/status/batch.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index cd3a97f420b7..30528d35d0ef 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,11 +1,11 @@ -use super::utils::BatchData; -use crate::commands::status::utils::postgres_config; use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; - use prover_dal::{ConnectionPool, Prover}; use zksync_types::L1BatchNumber; +use super::utils::BatchData; +use crate::commands::status::utils::postgres_config; + #[derive(ClapArgs)] pub struct Args { #[clap(short = 'n', num_args = 1..)] From 47df898ade3b1957958eaea44719a77b60e467ea Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 17:18:06 -0300 Subject: [PATCH 31/98] Document enums --- prover/prover_cli/src/commands/status/utils.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index e499489620ca..68e191e9c2b6 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -11,13 +11,21 @@ pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) } +/// Represents the proving data of a batch. pub struct BatchData { + /// The number of the batch. pub batch_number: L1BatchNumber, + /// The basic witness generator data. pub basic_witness_generator: Task, + /// The leaf witness generator data. pub leaf_witness_generator: Task, + /// The node witness generator data. pub node_witness_generator: Task, + /// The recursion tip data. pub recursion_tip: Task, + /// The scheduler data. pub scheduler: Task, + /// The compressor data. pub compressor: Task, } @@ -93,25 +101,31 @@ type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatist #[derive(EnumString, Clone, Display)] pub enum Task { + /// Represents the basic witness generator task and its status. #[strum(to_string = "Basic Witness Generator")] BasicWitnessGenerator(TaskStatus), + /// Represents the leaf witness generator task, its status and the aggregation round 0 prover jobs data. #[strum(to_string = "Leaf Witness Generator")] LeafWitnessGenerator { status: TaskStatus, aggregation_round_0_prover_jobs_data: ProverJobsData, }, + /// Represents the node witness generator task, its status and the aggregation round 1 prover jobs data. #[strum(to_string = "Node Witness Generator")] NodeWitnessGenerator { status: TaskStatus, aggregation_round_1_prover_jobs_data: ProverJobsData, }, + /// Represents the recursion tip task, its status and the aggregation round 2 prover jobs data. #[strum(to_string = "Recursion Tip")] RecursionTip { status: TaskStatus, aggregation_round_2_prover_jobs_data: ProverJobsData, }, + /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] Scheduler(TaskStatus), + /// Represents the compressor task and its status. 
#[strum(to_string = "Compressor")] Compressor(TaskStatus), } From ceae28985e36a09782685e8bf57fa6838bbc1864 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:26:47 -0300 Subject: [PATCH 32/98] Add BatchDataBuilder struct --- .../prover_cli/src/commands/status/utils.rs | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 68e191e9c2b6..a88d3206e7c5 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -11,6 +11,67 @@ pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) } +pub struct BatchDataBuilder { + batch_number: L1BatchNumber, + basic_witness_generator: Task, + leaf_witness_generator: Task, + node_witness_generator: Task, + recursion_tip: Task, + scheduler: Task, + compressor: Task, +} + +impl BatchDataBuilder { + pub fn new(batch_number: L1BatchNumber) -> Self { + BatchDataBuilder { + batch_number, + ..Default::default() + } + } + + pub fn basic_witness_generator(mut self, task: Task) -> Self { + self.basic_witness_generator = task; + self + } + + pub fn leaf_witness_generator(mut self, task: Task) -> Self { + self.leaf_witness_generator = task; + self + } + + pub fn node_witness_generator(mut self, task: Task) -> Self { + self.node_witness_generator = task; + self + } + + pub fn recursion_tip(mut self, task: Task) -> Self { + self.recursion_tip = task; + self + } + + pub fn scheduler(mut self, task: Task) -> Self { + self.scheduler = task; + self + } + + pub fn compressor(mut self, task: Task) -> Self { + self.compressor = task; + self + } + + pub fn build(self) -> BatchData { + BatchData { + batch_number: self.batch_number, + basic_witness_generator: self.basic_witness_generator, + leaf_witness_generator: self.leaf_witness_generator, + node_witness_generator: self.node_witness_generator, + recursion_tip: self.recursion_tip, + scheduler: self.scheduler, + compressor: self.compressor, + } + } +} + /// Represents the proving data of a batch. pub struct BatchData { /// The number of the batch. From f6d7b9acd8e4c0ff80b89e40d187c80a577972e1 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:27:14 -0300 Subject: [PATCH 33/98] Add TaskStatus::Custom enum variant --- prover/prover_cli/src/commands/status/utils.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index a88d3206e7c5..1fba80113ceb 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -133,6 +133,10 @@ impl Default for BatchData { #[derive(Debug, EnumString, Clone, Display)] pub enum TaskStatus { + /// A custom status that can be set manually. + /// Mostly used when a task has singular status. + #[strum(to_string = "{0}")] + Custom(String), /// A task is considered queued when all of its jobs is queued. 
#[strum(to_string = "Queued 📥")] Queued, @@ -152,7 +156,7 @@ pub enum TaskStatus { impl Default for TaskStatus { fn default() -> Self { - TaskStatus::Queued + TaskStatus::WaitingForProofs } } From 8e16d3506e8d20fa67806c9bc5a2b9139a679c0f Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:42:30 -0300 Subject: [PATCH 34/98] Fix BatchDataBuilder --- .../prover_cli/src/commands/status/utils.rs | 90 ++++++++++++++++--- 1 file changed, 76 insertions(+), 14 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 1fba80113ceb..171ad93800dd 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,6 +1,8 @@ use std::{collections::HashMap, fmt::Debug}; +use anyhow::ensure; use colored::*; +use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; @@ -29,34 +31,58 @@ impl BatchDataBuilder { } } - pub fn basic_witness_generator(mut self, task: Task) -> Self { + pub fn basic_witness_generator(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::BasicWitnessGenerator(_)), + "Task should be a basic witness generator" + ); self.basic_witness_generator = task; - self + Ok(self) } - pub fn leaf_witness_generator(mut self, task: Task) -> Self { + pub fn leaf_witness_generator(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::LeafWitnessGenerator { .. }), + "Task should be a leaf witness generator" + ); self.leaf_witness_generator = task; - self + Ok(self) } - pub fn node_witness_generator(mut self, task: Task) -> Self { + pub fn node_witness_generator(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::NodeWitnessGenerator { .. }), + "Task should be a node witness generator" + ); self.node_witness_generator = task; - self + Ok(self) } - pub fn recursion_tip(mut self, task: Task) -> Self { + pub fn recursion_tip(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::RecursionTip { .. 
}), + "Task should be a recursion tip" + ); self.recursion_tip = task; - self + Ok(self) } - pub fn scheduler(mut self, task: Task) -> Self { + pub fn scheduler(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::Scheduler(_)), + "Task should be a scheduler" + ); self.scheduler = task; - self + Ok(self) } - pub fn compressor(mut self, task: Task) -> Self { + pub fn compressor(mut self, task: Task) -> anyhow::Result { + ensure!( + matches!(task, Task::Compressor(_)), + "Task should be a compressor" + ); self.compressor = task; - self + Ok(self) } pub fn build(self) -> BatchData { @@ -72,6 +98,29 @@ impl BatchDataBuilder { } } +impl Default for BatchDataBuilder { + fn default() -> Self { + BatchDataBuilder { + batch_number: L1BatchNumber::default(), + basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), + leaf_witness_generator: Task::LeafWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_0_prover_jobs_data: ProverJobsData::default(), + }, + node_witness_generator: Task::NodeWitnessGenerator { + status: TaskStatus::WaitingForProofs, + aggregation_round_1_prover_jobs_data: ProverJobsData::default(), + }, + recursion_tip: Task::RecursionTip { + status: TaskStatus::WaitingForProofs, + aggregation_round_2_prover_jobs_data: ProverJobsData::default(), + }, + scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), + compressor: Task::Compressor(TaskStatus::WaitingForProofs), + } + } +} + /// Represents the proving data of a batch. pub struct BatchData { /// The number of the batch. @@ -160,7 +209,20 @@ impl Default for TaskStatus { } } -impl Copy for TaskStatus {} +impl From for TaskStatus { + fn from(status: ProofCompressionJobStatus) -> Self { + match status { + ProofCompressionJobStatus::Queued => TaskStatus::Queued, + ProofCompressionJobStatus::InProgress => TaskStatus::InProgress, + ProofCompressionJobStatus::Successful => TaskStatus::Successful, + ProofCompressionJobStatus::Failed => TaskStatus::InProgress, + ProofCompressionJobStatus::SentToServer => { + TaskStatus::Custom("Sent to server 📤".to_owned()) + } + ProofCompressionJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), + } + } +} type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; @@ -203,7 +265,7 @@ impl Task { | Task::NodeWitnessGenerator { status, .. } | Task::RecursionTip { status, .. 
} | Task::Scheduler(status) - | Task::Compressor(status) => *status, + | Task::Compressor(status) => status.clone(), } } } From a76cb32e79c0f692dcbb8d4076522d589db5dafe Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:44:08 -0300 Subject: [PATCH 35/98] Rollback --- prover/prover_cli/src/commands/status/utils.rs | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 171ad93800dd..2ec63dc8f942 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -2,7 +2,6 @@ use std::{collections::HashMap, fmt::Debug}; use anyhow::ensure; use colored::*; -use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; @@ -209,21 +208,6 @@ impl Default for TaskStatus { } } -impl From for TaskStatus { - fn from(status: ProofCompressionJobStatus) -> Self { - match status { - ProofCompressionJobStatus::Queued => TaskStatus::Queued, - ProofCompressionJobStatus::InProgress => TaskStatus::InProgress, - ProofCompressionJobStatus::Successful => TaskStatus::Successful, - ProofCompressionJobStatus::Failed => TaskStatus::InProgress, - ProofCompressionJobStatus::SentToServer => { - TaskStatus::Custom("Sent to server 📤".to_owned()) - } - ProofCompressionJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), - } - } -} - type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; #[derive(EnumString, Clone, Display)] From c460029dd6e39e4af35160f226b4c3eb72a48de9 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:45:17 -0300 Subject: [PATCH 36/98] impl From for TaskStatus --- prover/prover_cli/src/commands/status/utils.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 2ec63dc8f942..171ad93800dd 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -2,6 +2,7 @@ use std::{collections::HashMap, fmt::Debug}; use anyhow::ensure; use colored::*; +use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; @@ -208,6 +209,21 @@ impl Default for TaskStatus { } } +impl From for TaskStatus { + fn from(status: ProofCompressionJobStatus) -> Self { + match status { + ProofCompressionJobStatus::Queued => TaskStatus::Queued, + ProofCompressionJobStatus::InProgress => TaskStatus::InProgress, + ProofCompressionJobStatus::Successful => TaskStatus::Successful, + ProofCompressionJobStatus::Failed => TaskStatus::InProgress, + ProofCompressionJobStatus::SentToServer => { + TaskStatus::Custom("Sent to server 📤".to_owned()) + } + ProofCompressionJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), + } + } +} + type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; #[derive(EnumString, Clone, Display)] From e76da1fa596ad46fdb3290db92d3f69f355f2f06 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 20:57:19 -0300 Subject: [PATCH 37/98] Fix TaskStatus::Custom fmt --- prover/prover_cli/src/commands/status/utils.rs | 7 +++++-- 1 
file changed, 5 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 171ad93800dd..5367707ee2e1 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -184,7 +184,6 @@ impl Default for BatchData { pub enum TaskStatus { /// A custom status that can be set manually. /// Mostly used when a task has singular status. - #[strum(to_string = "{0}")] Custom(String), /// A task is considered queued when all of its jobs is queued. #[strum(to_string = "Queued 📥")] @@ -273,6 +272,10 @@ impl Task { impl Debug for Task { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "-- {} --", self.to_string().bold())?; - writeln!(f, "> {}", self.status().to_string()) + if let TaskStatus::Custom(msg) = self.status() { + writeln!(f, "> {msg}") + } else { + writeln!(f, "> {}", self.status().to_string()) + } } } From 4be214eb4a7444b312adca31257d7143764238ee Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:04:20 -0300 Subject: [PATCH 38/98] Add query for getting proof compression job info for a batch --- .../src/fri_proof_compressor_dal.rs | 57 ++++++++++++++++++- 1 file changed, 56 insertions(+), 1 deletion(-) diff --git a/prover/prover_dal/src/fri_proof_compressor_dal.rs b/prover/prover_dal/src/fri_proof_compressor_dal.rs index 01231d33b00e..d2910613d87a 100644 --- a/prover/prover_dal/src/fri_proof_compressor_dal.rs +++ b/prover/prover_dal/src/fri_proof_compressor_dal.rs @@ -1,7 +1,10 @@ #![doc = include_str!("../doc/FriProofCompressorDal.md")] use std::{collections::HashMap, str::FromStr, time::Duration}; -use sqlx::Row; +use sqlx::{ + types::chrono::{NaiveDateTime, NaiveTime}, + Row, +}; use strum::{Display, EnumString}; use zksync_basic_types::{ prover_dal::{JobCountStatistics, StuckJobs}, @@ -32,6 +35,20 @@ pub enum ProofCompressionJobStatus { Skipped, } +pub struct ProofCompressionJobInfo { + pub l1_batch_number: L1BatchNumber, + pub attempts: u32, + pub status: ProofCompressionJobStatus, + pub fri_proof_blob_url: Option, + pub l1_proof_blob_url: Option, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub picked_by: Option, +} + impl FriProofCompressorDal<'_, '_> { pub async fn insert_proof_compression_job( &mut self, @@ -328,4 +345,42 @@ impl FriProofCompressorDal<'_, '_> { .collect() } } + + pub async fn get_proof_compression_job_for_batch( + &mut self, + block_number: L1BatchNumber, + ) -> Option { + let row = sqlx::query!( + r#" + SELECT + * + FROM + proof_compression_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(block_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + + if let Some(row) = row { + Some(ProofCompressionJobInfo { + l1_batch_number: block_number, + attempts: row.attempts as u32, + status: ProofCompressionJobStatus::from_str(&row.status).unwrap(), + fri_proof_blob_url: row.fri_proof_blob_url, + l1_proof_blob_url: row.l1_proof_blob_url, + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + picked_by: row.picked_by, + }) + } else { + None + } + } } From e9e197f6f0762789255a68f49c1a66b423ea54c7 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:08:12 -0300 Subject: [PATCH 39/98] Add query for getting proof compression job info for a 
batch --- ...a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json | 82 +++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json diff --git a/prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json b/prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json new file mode 100644 index 000000000000..3441906e0cea --- /dev/null +++ b/prover/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json @@ -0,0 +1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "fri_proof_blob_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "l1_proof_blob_url", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 7, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 9, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 10, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9" +} From 7c9a4999419bf416a7caafbf42ee44340c79f5b0 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:08:41 -0300 Subject: [PATCH 40/98] Handle proof compression job status for batches --- .../prover_cli/src/commands/status/batch.rs | 34 +++++++++++++++---- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 30528d35d0ef..8a8113cefee8 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,9 +1,11 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; -use prover_dal::{ConnectionPool, Prover}; +use prover_dal::{ + fri_proof_compressor_dal::ProofCompressionJobStatus, ConnectionPool, Prover, ProverDal, +}; use zksync_types::L1BatchNumber; -use super::utils::BatchData; +use super::utils::{BatchData, BatchDataBuilder, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -29,7 +31,7 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { Ok(()) } -async fn get_batches_data(_batches: Vec) -> anyhow::Result> { +async fn get_batches_data(batches: Vec) -> anyhow::Result> { let config = postgres_config()?; let prover_connection_pool = @@ -38,9 +40,29 @@ async fn get_batches_data(_batches: Vec) -> anyhow::Result( + batch_number: L1BatchNumber, + conn: ConnectionPool<'a, Prover>, +) -> anyhow::Result { + conn.fri_proof_compressor_dal() + .get_proof_compression_job_for_batch(L1BatchNumber(0)) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or(TaskStatus::Custom("Compressor job not found 🚫".to_owned())) } From 
17b587555ade6173e8e9fd2176a747227637fe54 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:09:45 -0300 Subject: [PATCH 41/98] Fix get_proof_compression_job_status_for_batch --- prover/prover_cli/src/commands/status/batch.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 8a8113cefee8..2e8a9e7eb5b1 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -61,7 +61,7 @@ async fn get_proof_compression_job_status_for_batch<'a>( conn: ConnectionPool<'a, Prover>, ) -> anyhow::Result { conn.fri_proof_compressor_dal() - .get_proof_compression_job_for_batch(L1BatchNumber(0)) + .get_proof_compression_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) .unwrap_or(TaskStatus::Custom("Compressor job not found 🚫".to_owned())) From 3fdc055fb0298c64df30f5f1ddb1c93b173c9414 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Wed, 24 Apr 2024 21:14:59 -0300 Subject: [PATCH 42/98] Remove BatchDataBuilder struct It adds needless complexity to the code --- .../prover_cli/src/commands/status/utils.rs | 109 ------------------ 1 file changed, 109 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 2ec63dc8f942..e6edecc15039 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,6 +1,5 @@ use std::{collections::HashMap, fmt::Debug}; -use anyhow::ensure; use colored::*; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; @@ -12,114 +11,6 @@ pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) } -pub struct BatchDataBuilder { - batch_number: L1BatchNumber, - basic_witness_generator: Task, - leaf_witness_generator: Task, - node_witness_generator: Task, - recursion_tip: Task, - scheduler: Task, - compressor: Task, -} - -impl BatchDataBuilder { - pub fn new(batch_number: L1BatchNumber) -> Self { - BatchDataBuilder { - batch_number, - ..Default::default() - } - } - - pub fn basic_witness_generator(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::BasicWitnessGenerator(_)), - "Task should be a basic witness generator" - ); - self.basic_witness_generator = task; - Ok(self) - } - - pub fn leaf_witness_generator(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::LeafWitnessGenerator { .. }), - "Task should be a leaf witness generator" - ); - self.leaf_witness_generator = task; - Ok(self) - } - - pub fn node_witness_generator(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::NodeWitnessGenerator { .. }), - "Task should be a node witness generator" - ); - self.node_witness_generator = task; - Ok(self) - } - - pub fn recursion_tip(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::RecursionTip { .. 
}), - "Task should be a recursion tip" - ); - self.recursion_tip = task; - Ok(self) - } - - pub fn scheduler(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::Scheduler(_)), - "Task should be a scheduler" - ); - self.scheduler = task; - Ok(self) - } - - pub fn compressor(mut self, task: Task) -> anyhow::Result { - ensure!( - matches!(task, Task::Compressor(_)), - "Task should be a compressor" - ); - self.compressor = task; - Ok(self) - } - - pub fn build(self) -> BatchData { - BatchData { - batch_number: self.batch_number, - basic_witness_generator: self.basic_witness_generator, - leaf_witness_generator: self.leaf_witness_generator, - node_witness_generator: self.node_witness_generator, - recursion_tip: self.recursion_tip, - scheduler: self.scheduler, - compressor: self.compressor, - } - } -} - -impl Default for BatchDataBuilder { - fn default() -> Self { - BatchDataBuilder { - batch_number: L1BatchNumber::default(), - basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), - leaf_witness_generator: Task::LeafWitnessGenerator { - status: TaskStatus::WaitingForProofs, - aggregation_round_0_prover_jobs_data: ProverJobsData::default(), - }, - node_witness_generator: Task::NodeWitnessGenerator { - status: TaskStatus::WaitingForProofs, - aggregation_round_1_prover_jobs_data: ProverJobsData::default(), - }, - recursion_tip: Task::RecursionTip { - status: TaskStatus::WaitingForProofs, - aggregation_round_2_prover_jobs_data: ProverJobsData::default(), - }, - scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), - compressor: Task::Compressor(TaskStatus::WaitingForProofs), - } - } -} - /// Represents the proving data of a batch. pub struct BatchData { /// The number of the batch. From 3093cdeffac92ea70a311d2cb3f1bb64f6b7cb32 Mon Sep 17 00:00:00 2001 From: Ivan Litteri Date: Thu, 25 Apr 2024 10:28:15 -0300 Subject: [PATCH 43/98] Fix --- prover/prover_cli/src/commands/status/batch.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 2e8a9e7eb5b1..57576225b55d 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,11 +1,9 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; -use prover_dal::{ - fri_proof_compressor_dal::ProofCompressionJobStatus, ConnectionPool, Prover, ProverDal, -}; +use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::L1BatchNumber; -use super::utils::{BatchData, BatchDataBuilder, Task, TaskStatus}; +use super::utils::{BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -46,7 +44,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result( batch_number: L1BatchNumber, - conn: ConnectionPool<'a, Prover>, -) -> anyhow::Result { + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { conn.fri_proof_compressor_dal() .get_proof_compression_job_for_batch(batch_number) .await From fde3be1a1f8ab90019d222b984769afad3e82222 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Thu, 25 Apr 2024 10:58:03 -0300 Subject: [PATCH 44/98] Move prover_dal types to basic_types::prover_dal module --- core/lib/basic_types/src/prover_dal.rs | 33 ++++++++++++++- .../prover_cli/src/commands/status/utils.rs | 3 +- .../src/fri_proof_compressor_dal.rs | 40 ++----------------- 3 files changed, 37 insertions(+), 39 deletions(-) diff --git 
a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 41ab439a15fc..5c06e6876574 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -1,7 +1,8 @@ //! Types exposed by the prover DAL for general-purpose use. use std::{net::IpAddr, ops::Add, str::FromStr}; -use chrono::{DateTime, Duration, Utc}; +use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; +use strum::{Display, EnumString}; use crate::{basic_fri_types::AggregationRound, L1BatchNumber}; @@ -229,3 +230,33 @@ impl FromStr for GpuProverInstanceStatus { } } } + +#[derive(Debug, EnumString, Display)] +pub enum ProofCompressionJobStatus { + #[strum(serialize = "queued")] + Queued, + #[strum(serialize = "in_progress")] + InProgress, + #[strum(serialize = "successful")] + Successful, + #[strum(serialize = "failed")] + Failed, + #[strum(serialize = "sent_to_server")] + SentToServer, + #[strum(serialize = "skipped")] + Skipped, +} + +pub struct ProofCompressionJobInfo { + pub l1_batch_number: L1BatchNumber, + pub attempts: u32, + pub status: ProofCompressionJobStatus, + pub fri_proof_blob_url: Option, + pub l1_proof_blob_url: Option, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub picked_by: Option, +} diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 8704f64c1e07..e844098be567 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,12 +1,11 @@ use std::{collections::HashMap, fmt::Debug}; use colored::*; -use prover_dal::fri_proof_compressor_dal::ProofCompressionJobStatus; use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; -use zksync_types::L1BatchNumber; +use zksync_types::{prover_dal::ProofCompressionJobStatus, L1BatchNumber}; pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) 
diff --git a/prover/prover_dal/src/fri_proof_compressor_dal.rs b/prover/prover_dal/src/fri_proof_compressor_dal.rs index d2910613d87a..c32e5b0716ad 100644 --- a/prover/prover_dal/src/fri_proof_compressor_dal.rs +++ b/prover/prover_dal/src/fri_proof_compressor_dal.rs @@ -1,13 +1,11 @@ #![doc = include_str!("../doc/FriProofCompressorDal.md")] use std::{collections::HashMap, str::FromStr, time::Duration}; -use sqlx::{ - types::chrono::{NaiveDateTime, NaiveTime}, - Row, -}; -use strum::{Display, EnumString}; +use sqlx::Row; use zksync_basic_types::{ - prover_dal::{JobCountStatistics, StuckJobs}, + prover_dal::{ + JobCountStatistics, ProofCompressionJobInfo, ProofCompressionJobStatus, StuckJobs, + }, L1BatchNumber, }; use zksync_db_connection::connection::Connection; @@ -19,36 +17,6 @@ pub struct FriProofCompressorDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Prover>, } -#[derive(Debug, EnumString, Display)] -pub enum ProofCompressionJobStatus { - #[strum(serialize = "queued")] - Queued, - #[strum(serialize = "in_progress")] - InProgress, - #[strum(serialize = "successful")] - Successful, - #[strum(serialize = "failed")] - Failed, - #[strum(serialize = "sent_to_server")] - SentToServer, - #[strum(serialize = "skipped")] - Skipped, -} - -pub struct ProofCompressionJobInfo { - pub l1_batch_number: L1BatchNumber, - pub attempts: u32, - pub status: ProofCompressionJobStatus, - pub fri_proof_blob_url: Option, - pub l1_proof_blob_url: Option, - pub error: Option, - pub created_at: NaiveDateTime, - pub updated_at: NaiveDateTime, - pub processing_started_at: Option, - pub time_taken: Option, - pub picked_by: Option, -} - impl FriProofCompressorDal<'_, '_> { pub async fn insert_proof_compression_job( &mut self, From efd70acf461760f4abbb355857bff876862c05d0 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Thu, 25 Apr 2024 17:39:47 -0300 Subject: [PATCH 45/98] Refactor query --- .../src/fri_proof_compressor_dal.rs | 35 ++++++++----------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/prover/prover_dal/src/fri_proof_compressor_dal.rs b/prover/prover_dal/src/fri_proof_compressor_dal.rs index c32e5b0716ad..7016fcd64ddd 100644 --- a/prover/prover_dal/src/fri_proof_compressor_dal.rs +++ b/prover/prover_dal/src/fri_proof_compressor_dal.rs @@ -318,7 +318,7 @@ impl FriProofCompressorDal<'_, '_> { &mut self, block_number: L1BatchNumber, ) -> Option { - let row = sqlx::query!( + sqlx::query!( r#" SELECT * @@ -331,24 +331,19 @@ impl FriProofCompressorDal<'_, '_> { ) .fetch_optional(self.storage.conn()) .await - .unwrap(); - - if let Some(row) = row { - Some(ProofCompressionJobInfo { - l1_batch_number: block_number, - attempts: row.attempts as u32, - status: ProofCompressionJobStatus::from_str(&row.status).unwrap(), - fri_proof_blob_url: row.fri_proof_blob_url, - l1_proof_blob_url: row.l1_proof_blob_url, - error: row.error, - created_at: row.created_at, - updated_at: row.updated_at, - processing_started_at: row.processing_started_at, - time_taken: row.time_taken, - picked_by: row.picked_by, - }) - } else { - None - } + .unwrap() + .map(|row| ProofCompressionJobInfo { + l1_batch_number: block_number, + attempts: row.attempts as u32, + status: ProofCompressionJobStatus::from_str(&row.status).unwrap(), + fri_proof_blob_url: row.fri_proof_blob_url, + l1_proof_blob_url: row.l1_proof_blob_url, + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + picked_by: row.picked_by, + }) } } From 
1830cedeaac954acd95d1ecefdec9354d7d22698 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 25 Apr 2024 18:56:18 -0300 Subject: [PATCH 46/98] add bwg query --- core/lib/basic_types/src/prover_dal.rs | 22 ++++- .../prover_cli/src/commands/status/batch.rs | 32 ++++++- .../prover_cli/src/commands/status/utils.rs | 18 +++- ...e118cabc67b6e507efefb7b69e102f1b43c58.json | 94 +++++++++++++++++++ .../src/fri_witness_generator_dal.rs | 40 +++++++- 5 files changed, 196 insertions(+), 10 deletions(-) create mode 100644 prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 41ab439a15fc..abd36ef1f70f 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -1,9 +1,12 @@ //! Types exposed by the prover DAL for general-purpose use. use std::{net::IpAddr, ops::Add, str::FromStr}; -use chrono::{DateTime, Duration, Utc}; +use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; -use crate::{basic_fri_types::AggregationRound, L1BatchNumber}; +use crate::{ + basic_fri_types::{AggregationRound, Eip4844Blobs}, + L1BatchNumber, +}; // This currently lives in `zksync_prover_types` -- we don't want a dependency between prover types (`zkevm_test_harness`) and DAL. // This will be gone as part of 1.5.0, when EIP4844 becomes normal jobs, rather than special cased ones. @@ -229,3 +232,18 @@ impl FromStr for GpuProverInstanceStatus { } } } +pub struct BasicWitnessGeneratorJobInfo { + pub l1_batch_number: L1BatchNumber, + pub merkle_tree_paths_blob_url: Option, + pub attempts: u32, + pub status: WitnessJobStatus, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub is_blob_cleaned: Option, + pub protocol_version: Option, + pub picked_by: Option, + pub eip_4844_blobs: Option, +} diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 30528d35d0ef..3fa26dee5d0b 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,9 +1,9 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; -use prover_dal::{ConnectionPool, Prover}; +use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::L1BatchNumber; -use super::utils::BatchData; +use super::utils::{BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -29,7 +29,7 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { Ok(()) } -async fn get_batches_data(_batches: Vec) -> anyhow::Result> { +async fn get_batches_data(batches: Vec) -> anyhow::Result> { let config = postgres_config()?; let prover_connection_pool = @@ -38,9 +38,31 @@ async fn get_batches_data(_batches: Vec) -> anyhow::Result( + batch_number: L1BatchNumber, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_witness_generator_dal() + .get_basic_witness_generator_job_for_batch(batch_number) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or(TaskStatus::Custom( + "Basic witness generator job not found 🚫".to_owned(), + )) +} diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index e6edecc15039..cdbfc343944d 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ 
-5,7 +5,7 @@ use strum::{Display, EnumString}; use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; -use zksync_types::L1BatchNumber; +use zksync_types::{prover_dal::WitnessJobStatus, L1BatchNumber}; pub fn postgres_config() -> anyhow::Result { Ok(PostgresConfig::from_env()?) @@ -151,3 +151,19 @@ impl Debug for Task { writeln!(f, "> {}", self.status().to_string()) } } + +impl From for TaskStatus { + fn from(status: WitnessJobStatus) -> Self { + match status { + WitnessJobStatus::Queued => TaskStatus::Queued, + WitnessJobStatus::InProgress => TaskStatus::InProgress, + WitnessJobStatus::Successful(_) => TaskStatus::Successful, + WitnessJobStatus::Failed(_) => TaskStatus::InProgress, + WitnessJobStatus::WaitingForArtifacts => { + TaskStatus::Custom("Waiting for Artifacts ⏱️".to_owned()) + } + WitnessJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), + WitnessJobStatus::WaitingForProofs => TaskStatus::WaitingForProofs, + } + } +} diff --git a/prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json b/prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json new file mode 100644 index 000000000000..a7b8d0dc8542 --- /dev/null +++ b/prover/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json @@ -0,0 +1,94 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n witness_inputs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "merkle_tree_paths_blob_url", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 7, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 9, + "name": "is_blob_cleaned", + "type_info": "Bool" + }, + { + "ordinal": 10, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 11, + "name": "picked_by", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "eip_4844_blobs", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + true, + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58" +} diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index d2b58f5f75d9..2662183d22bf 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1,12 +1,13 @@ #![doc = include_str!("../doc/FriWitnessGeneratorDal.md")] -use std::{collections::HashMap, convert::TryFrom, time::Duration}; +use std::{collections::HashMap, convert::TryFrom, str::FromStr, time::Duration}; use sqlx::Row; use zksync_basic_types::{ basic_fri_types::{AggregationRound, Eip4844Blobs}, protocol_version::ProtocolVersionId, prover_dal::{ - JobCountStatistics, LeafAggregationJobMetadata, 
NodeAggregationJobMetadata, StuckJobs, + BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, + NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, }, L1BatchNumber, }; @@ -1151,4 +1152,39 @@ impl FriWitnessGeneratorDal<'_, '_> { .map(|id| ProtocolVersionId::try_from(id as u16).unwrap()) .unwrap() } + + pub async fn get_basic_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + witness_inputs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| BasicWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + merkle_tree_paths_blob_url: row.merkle_tree_paths_blob_url, + attempts: row.attempts as u32, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + is_blob_cleaned: row.is_blob_cleaned, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + eip_4844_blobs: row.eip_4844_blobs.map(|vec_u8| Eip4844Blobs::from(vec_u8)), + }) + } } From bbe5dc3626fc1eb15e6b976ebbfbdb704a760954 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 26 Apr 2024 17:30:47 -0300 Subject: [PATCH 47/98] add status for prover jobs --- core/lib/basic_types/src/prover_dal.rs | 32 ++++- .../prover_cli/src/commands/status/batch.rs | 26 +++- .../prover_cli/src/commands/status/utils.rs | 90 +++++++++--- ...076096de57cdba25831f86c1428081ca0a14f.json | 40 ------ ...d34a5baece02812f8c950fc84d37eeebd33a4.json | 131 ++++++++++++++++++ prover/prover_dal/src/fri_prover_dal.rs | 112 +++++++-------- 6 files changed, 302 insertions(+), 129 deletions(-) delete mode 100644 prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json create mode 100644 prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 10c87e63a7bf..f355932bb48f 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -6,6 +6,7 @@ use strum::{Display, EnumString}; use crate::{ basic_fri_types::{AggregationRound, Eip4844Blobs}, + protocol_version::ProtocolVersionId, L1BatchNumber, }; @@ -97,13 +98,13 @@ pub struct JobPosition { pub sequence_number: usize, } -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub struct ProverJobStatusFailed { pub started_at: DateTime, pub error: String, } -#[derive(Debug)] +#[derive(Debug, PartialEq)] pub struct ProverJobStatusSuccessful { pub started_at: DateTime, pub time_taken: Duration, @@ -118,7 +119,7 @@ impl Default for ProverJobStatusSuccessful { } } -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub struct ProverJobStatusInProgress { pub started_at: DateTime, } @@ -144,7 +145,7 @@ pub struct WitnessJobStatusFailed { pub error: String, } -#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr)] +#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr, PartialEq)] pub enum ProverJobStatus { #[strum(serialize = "queued")] Queued, @@ -233,6 +234,29 @@ impl FromStr for GpuProverInstanceStatus { } } } + +pub struct ProverJobFriInfo { + pub id: u32, + pub l1_batch_number: L1BatchNumber, + pub circuit_id: u32, + pub circuit_blob_url: String, + pub 
aggregation_round: AggregationRound, + pub sequence_number: u32, + pub status: ProverJobStatus, + pub error: Option, + pub attempts: u8, + pub processing_started_at: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub time_taken: Option, + pub is_blob_cleaned: Option, + pub depth: u32, + pub is_node_final_proof: bool, + pub proof_blob_url: Option, + pub protocol_version: Option, + pub picked_by: Option, +} + pub struct BasicWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, pub merkle_tree_paths_blob_url: Option, diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 84e0c9106209..4b7933f0438c 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,7 +1,7 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; -use zksync_types::L1BatchNumber; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; use super::utils::{BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; @@ -43,9 +43,16 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result( + batch_number: L1BatchNumber, + aggation_round: AggregationRound, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_prover_jobs_dal() + .get_prover_jobs_stats_for_batch(batch_number, aggation_round) + .await + .into() +} + async fn get_proof_basic_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 2a13147e696f..3b19bd65df04 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,12 +1,11 @@ -use std::{collections::HashMap, fmt::Debug}; +use std::fmt::Debug; use colored::*; use strum::{Display, EnumString}; -use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::JobCountStatistics}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; use zksync_types::{ - prover_dal::{ProofCompressionJobStatus, WitnessJobStatus}, + prover_dal::{ProofCompressionJobStatus, ProverJobFriInfo, ProverJobStatus, WitnessJobStatus}, L1BatchNumber, }; @@ -54,18 +53,21 @@ impl Default for BatchData { fn default() -> Self { BatchData { batch_number: L1BatchNumber::default(), - basic_witness_generator: Task::BasicWitnessGenerator(TaskStatus::Stuck), + basic_witness_generator: Task::BasicWitnessGenerator { + status: TaskStatus::WaitingForProofs, + prover_jobs_status: TaskStatus::default(), + }, leaf_witness_generator: Task::LeafWitnessGenerator { status: TaskStatus::WaitingForProofs, - aggregation_round_0_prover_jobs_data: ProverJobsData::default(), + prover_jobs_status: TaskStatus::default(), }, node_witness_generator: Task::NodeWitnessGenerator { status: TaskStatus::WaitingForProofs, - aggregation_round_1_prover_jobs_data: ProverJobsData::default(), + prover_jobs_status: TaskStatus::default(), }, recursion_tip: Task::RecursionTip { status: TaskStatus::WaitingForProofs, - aggregation_round_2_prover_jobs_data: ProverJobsData::default(), + prover_jobs_status: TaskStatus::default(), }, scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), compressor: Task::Compressor(TaskStatus::WaitingForProofs), @@ -101,6 +103,28 @@ impl Default for TaskStatus { } } +impl From> for TaskStatus { + fn from(jobs_vector: 
Vec) -> Self { + if jobs_vector.is_empty() { + return TaskStatus::Custom("No Jobs found ".to_owned()); + } + + if jobs_vector + .iter() + .all(|job| job.status == ProverJobStatus::Queued) + { + return TaskStatus::Queued; + } else if jobs_vector.iter().all(|job| match job.status { + ProverJobStatus::Successful(_) => true, + _ => false, + }) { + return TaskStatus::Successful; + } + + TaskStatus::InProgress + } +} + impl From for TaskStatus { fn from(status: ProofCompressionJobStatus) -> Self { match status { @@ -116,30 +140,31 @@ impl From for TaskStatus { } } -type ProverJobsData = HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics>; - #[derive(EnumString, Clone, Display)] pub enum Task { /// Represents the basic witness generator task and its status. #[strum(to_string = "Basic Witness Generator")] - BasicWitnessGenerator(TaskStatus), + BasicWitnessGenerator { + status: TaskStatus, + prover_jobs_status: TaskStatus, + }, /// Represents the leaf witness generator task, its status and the aggregation round 0 prover jobs data. #[strum(to_string = "Leaf Witness Generator")] LeafWitnessGenerator { status: TaskStatus, - aggregation_round_0_prover_jobs_data: ProverJobsData, + prover_jobs_status: TaskStatus, }, /// Represents the node witness generator task, its status and the aggregation round 1 prover jobs data. #[strum(to_string = "Node Witness Generator")] NodeWitnessGenerator { status: TaskStatus, - aggregation_round_1_prover_jobs_data: ProverJobsData, + prover_jobs_status: TaskStatus, }, /// Represents the recursion tip task, its status and the aggregation round 2 prover jobs data. #[strum(to_string = "Recursion Tip")] RecursionTip { status: TaskStatus, - aggregation_round_2_prover_jobs_data: ProverJobsData, + prover_jobs_status: TaskStatus, }, /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] @@ -152,7 +177,7 @@ pub enum Task { impl Task { fn status(&self) -> TaskStatus { match self { - Task::BasicWitnessGenerator(status) + Task::BasicWitnessGenerator { status, .. } | Task::LeafWitnessGenerator { status, .. } | Task::NodeWitnessGenerator { status, .. } | Task::RecursionTip { status, .. } @@ -162,14 +187,43 @@ impl Task { } } +impl Task { + fn prover_jobs_status(&self) -> Option { + match self { + Task::BasicWitnessGenerator { + prover_jobs_status, .. + } + | Task::LeafWitnessGenerator { + prover_jobs_status, .. + } + | Task::NodeWitnessGenerator { + prover_jobs_status, .. + } + | Task::RecursionTip { + prover_jobs_status, .. + } => Some(prover_jobs_status.clone()), + Task::Scheduler(_) => None, + Task::Compressor(_) => None, + } + } +} + impl Debug for Task { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "-- {} --", self.to_string().bold())?; - if let TaskStatus::Custom(msg) = self.status() { - writeln!(f, "> {msg}") - } else { - writeln!(f, "> {}", self.status().to_string()) + match self.status() { + TaskStatus::InProgress | TaskStatus::Successful => { + writeln!(f, "> {}", self.status().to_string())?; + if let Some(status) = self.prover_jobs_status() { + writeln!(f, "> {}", status.to_string())?; + } + } + TaskStatus::Queued | TaskStatus::WaitingForProofs | TaskStatus::Stuck => { + writeln!(f, "> {}", self.status().to_string())? 
+ } + TaskStatus::Custom(msg) => writeln!(f, "> {msg}")?, } + Ok(()) } } diff --git a/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json b/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json deleted file mode 100644 index b578881deeb1..000000000000 --- a/prover/prover_dal/.sqlx/query-676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n COUNT(*) AS \"count!\",\n l1_batch_number AS \"l1_batch_number!\",\n aggregation_round AS \"aggregation_round!\",\n status AS \"status!\"\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = ANY ($1)\n GROUP BY\n l1_batch_number,\n aggregation_round,\n status\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "l1_batch_number!", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "aggregation_round!", - "type_info": "Int2" - }, - { - "ordinal": 3, - "name": "status!", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Int8Array" - ] - }, - "nullable": [ - null, - false, - false, - false - ] - }, - "hash": "676020e89f0833cc92be1c1114a076096de57cdba25831f86c1428081ca0a14f" -} diff --git a/prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json b/prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json new file mode 100644 index 000000000000..7ced88426e4d --- /dev/null +++ b/prover/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json @@ -0,0 +1,131 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND aggregation_round = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "circuit_blob_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "aggregation_round", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "sequence_number", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 9, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 11, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 12, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 13, + "name": "is_blob_cleaned", + "type_info": "Bool" + }, + { + "ordinal": 14, + "name": "depth", + "type_info": "Int4" + }, + { + "ordinal": 15, + "name": "is_node_final_proof", + "type_info": "Bool" + }, + { + "ordinal": 16, + "name": "proof_blob_url", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 18, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int2" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + false, + true, + false, + false, + true, + true, + false, + false, + true, + true, + true + ] 
+ }, + "hash": "c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4" +} diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 0639d33e144a..a122665853f7 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -1,10 +1,13 @@ #![doc = include_str!("../doc/FriProverDal.md")] -use std::{collections::HashMap, convert::TryFrom, time::Duration}; +use std::{collections::HashMap, convert::TryFrom, str::FromStr, time::Duration}; use zksync_basic_types::{ basic_fri_types::{AggregationRound, CircuitIdRoundTuple}, protocol_version::ProtocolVersionId, - prover_dal::{FriProverJobMetadata, JobCountStatistics, StuckJobs, EIP_4844_CIRCUIT_ID}, + prover_dal::{ + FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, + EIP_4844_CIRCUIT_ID, + }, L1BatchNumber, }; use zksync_db_connection::{ @@ -597,66 +600,49 @@ impl FriProverDal<'_, '_> { pub async fn get_prover_jobs_stats_for_batch( &mut self, - l1_batches_numbers: Vec, - ) -> HashMap<(L1BatchNumber, AggregationRound), JobCountStatistics> { - { - sqlx::query!( - r#" - SELECT - COUNT(*) AS "count!", - l1_batch_number AS "l1_batch_number!", - aggregation_round AS "aggregation_round!", - status AS "status!" - FROM - prover_jobs_fri - WHERE - l1_batch_number = ANY ($1) - GROUP BY - l1_batch_number, - aggregation_round, - status - "#, - &l1_batches_numbers - .into_iter() - .map(|x| i64::from(x.0)) - .collect::>() - ) - .fetch_all(self.storage.conn()) - .await - .unwrap() - .into_iter() - .map(|row| { - ( - row.l1_batch_number, - row.aggregation_round, - row.status, - row.count as usize, - ) - }) - .fold( - HashMap::new(), - |mut acc, (l1_batch_number, aggregation_round, status, value)| { - let stats = acc - .entry(( - L1BatchNumber(l1_batch_number as u32), - AggregationRound::from(aggregation_round as u8), - )) - .or_insert(JobCountStatistics { - queued: 0, - in_progress: 0, - failed: 0, - successful: 0, - }); - match status.as_ref() { - "queued" => stats.queued = value, - "in_progress" => stats.in_progress = value, - "failed" => stats.failed = value, - "successful" => stats.successful = value, - _ => (), - } - acc - }, - ) - } + l1_batch_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) -> Vec { + sqlx::query!( + r#" + SELECT + * + FROM + prover_jobs_fri + WHERE + l1_batch_number = $1 + AND aggregation_round = $2 + "#, + i64::from(l1_batch_number.0), + aggregation_round as i16 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .iter() + .map(|row| ProverJobFriInfo { + id: row.id as u32, + l1_batch_number: l1_batch_number, + circuit_id: row.circuit_id as u32, + circuit_blob_url: row.circuit_blob_url.clone(), + aggregation_round: aggregation_round, + sequence_number: row.sequence_number as u32, + status: ProverJobStatus::from_str(&row.status).unwrap(), + error: row.error.clone(), + attempts: row.attempts as u8, + processing_started_at: row.processing_started_at, + created_at: row.created_at, + updated_at: row.updated_at, + time_taken: row.time_taken, + is_blob_cleaned: row.is_blob_cleaned, + depth: row.depth as u32, + is_node_final_proof: row.is_node_final_proof, + proof_blob_url: row.proof_blob_url.clone(), + protocol_version: row.protocol_version.map(|protocol_version| { + ProtocolVersionId::try_from(protocol_version as u16).unwrap() + }), + picked_by: row.picked_by.clone(), + }) + .collect() } } From 111884bd1db2e33c9c8f56a08cb751bb62d6411c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 
26 Apr 2024 17:55:51 -0300 Subject: [PATCH 48/98] add title for prover jobs --- prover/prover_cli/src/commands/status/utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 3b19bd65df04..c07a4c5c2d5f 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -215,7 +215,7 @@ impl Debug for Task { TaskStatus::InProgress | TaskStatus::Successful => { writeln!(f, "> {}", self.status().to_string())?; if let Some(status) = self.prover_jobs_status() { - writeln!(f, "> {}", status.to_string())?; + writeln!(f, " > Prover Jobs Status: {}", status.to_string())?; } } TaskStatus::Queued | TaskStatus::WaitingForProofs | TaskStatus::Stuck => { From 9fef23d2c3cb4254e843533e1563c68c6099ef1b Mon Sep 17 00:00:00 2001 From: ilitteri Date: Fri, 26 Apr 2024 19:30:43 -0300 Subject: [PATCH 49/98] Refactor --- .../prover_cli/src/commands/status/batch.rs | 26 ++- .../prover_cli/src/commands/status/utils.rs | 194 +++++++++++++----- 2 files changed, 152 insertions(+), 68 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 4b7933f0438c..1b7685344b74 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -3,7 +3,7 @@ use clap::Args as ClapArgs; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; -use super::utils::{BatchData, Task, TaskStatus}; +use super::utils::{AggregationRoundInfo, BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; #[derive(ClapArgs)] @@ -46,7 +46,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result( batch_number: L1BatchNumber, - aggation_round: AggregationRound, + aggregation_round: AggregationRound, conn: &mut Connection<'a, Prover>, -) -> TaskStatus { - conn.fri_prover_jobs_dal() - .get_prover_jobs_stats_for_batch(batch_number, aggation_round) +) -> AggregationRoundInfo { + let status: TaskStatus = conn + .fri_prover_jobs_dal() + .get_prover_jobs_stats_for_batch(batch_number, aggregation_round) .await - .into() + .into(); + + AggregationRoundInfo { + round: AggregationRound::BasicCircuits, + prover_jobs_status: status, + } } async fn get_proof_basic_witness_generator_status_for_batch<'a>( @@ -83,9 +89,7 @@ async fn get_proof_basic_witness_generator_status_for_batch<'a>( .get_basic_witness_generator_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) - .unwrap_or(TaskStatus::Custom( - "Basic witness generator job not found 🚫".to_owned(), - )) + .unwrap_or_default() } async fn get_proof_compression_job_status_for_batch<'a>( @@ -96,5 +100,5 @@ async fn get_proof_compression_job_status_for_batch<'a>( .get_proof_compression_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) - .unwrap_or(TaskStatus::Custom("Compressor job not found 🚫".to_owned())) + .unwrap_or_default() } diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index c07a4c5c2d5f..e2408d9c17a9 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -5,12 +5,13 @@ use strum::{Display, EnumString}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; use zksync_types::{ + basic_fri_types::AggregationRound, 
prover_dal::{ProofCompressionJobStatus, ProverJobFriInfo, ProverJobStatus, WitnessJobStatus}, L1BatchNumber, }; pub fn postgres_config() -> anyhow::Result { - Ok(PostgresConfig::from_env()?) + PostgresConfig::from_env() } /// Represents the proving data of a batch. @@ -39,7 +40,7 @@ impl Debug for BatchData { format!("Batch {} Status", self.batch_number).bold() )?; writeln!(f)?; - writeln!(f, "= {} =", format!("Proving Stages").bold())?; + writeln!(f, "= {} =", "Proving Stages".to_owned().bold())?; writeln!(f, "{:?}", self.basic_witness_generator)?; writeln!(f, "{:?}", self.leaf_witness_generator)?; writeln!(f, "{:?}", self.node_witness_generator)?; @@ -54,28 +55,46 @@ impl Default for BatchData { BatchData { batch_number: L1BatchNumber::default(), basic_witness_generator: Task::BasicWitnessGenerator { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::BasicCircuits, + prover_jobs_status: TaskStatus::default(), + }, }, leaf_witness_generator: Task::LeafWitnessGenerator { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::LeafAggregation, + prover_jobs_status: TaskStatus::default(), + }, }, node_witness_generator: Task::NodeWitnessGenerator { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::NodeAggregation, + prover_jobs_status: TaskStatus::default(), + }, }, recursion_tip: Task::RecursionTip { - status: TaskStatus::WaitingForProofs, - prover_jobs_status: TaskStatus::default(), + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::Scheduler, + prover_jobs_status: TaskStatus::default(), + }, }, - scheduler: Task::Scheduler(TaskStatus::WaitingForProofs), - compressor: Task::Compressor(TaskStatus::WaitingForProofs), + scheduler: Task::Scheduler { + status: TaskStatus::default(), + aggregation_round_info: AggregationRoundInfo { + round: AggregationRound::Scheduler, + prover_jobs_status: TaskStatus::default(), + }, + }, + compressor: Task::Compressor(TaskStatus::JobsNotFound), } } } -#[derive(Debug, EnumString, Clone, Display)] +#[derive(Default, Debug, EnumString, Clone, Display)] pub enum TaskStatus { /// A custom status that can be set manually. /// Mostly used when a task has singular status. @@ -93,35 +112,33 @@ pub enum TaskStatus { #[strum(to_string = "Waiting for Proof ⏱️")] WaitingForProofs, /// A task is considered stuck when at least one of its jobs is stuck. - #[strum(to_string = "Stuck 🛑")] + #[strum(to_string = "Stuck ⛔️")] Stuck, + /// A task has no jobs. + #[default] + #[strum(to_string = "Jobs not found 🚫")] + JobsNotFound, } -impl Default for TaskStatus { - fn default() -> Self { - TaskStatus::WaitingForProofs - } -} - +// This implementation will change to From> for AggregationRoundInfo +// once the --verbose flag is implemented. 
impl From> for TaskStatus { fn from(jobs_vector: Vec) -> Self { if jobs_vector.is_empty() { - return TaskStatus::Custom("No Jobs found ".to_owned()); - } - - if jobs_vector + TaskStatus::JobsNotFound + } else if jobs_vector .iter() - .all(|job| job.status == ProverJobStatus::Queued) + .all(|job| matches!(job.status, ProverJobStatus::Queued)) { - return TaskStatus::Queued; - } else if jobs_vector.iter().all(|job| match job.status { - ProverJobStatus::Successful(_) => true, - _ => false, - }) { - return TaskStatus::Successful; + TaskStatus::Queued + } else if jobs_vector + .iter() + .all(|job| matches!(job.status, ProverJobStatus::InProgress(_))) + { + TaskStatus::Successful + } else { + TaskStatus::InProgress } - - TaskStatus::InProgress } } @@ -146,29 +163,32 @@ pub enum Task { #[strum(to_string = "Basic Witness Generator")] BasicWitnessGenerator { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the leaf witness generator task, its status and the aggregation round 0 prover jobs data. #[strum(to_string = "Leaf Witness Generator")] LeafWitnessGenerator { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the node witness generator task, its status and the aggregation round 1 prover jobs data. #[strum(to_string = "Node Witness Generator")] NodeWitnessGenerator { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the recursion tip task, its status and the aggregation round 2 prover jobs data. #[strum(to_string = "Recursion Tip")] RecursionTip { status: TaskStatus, - prover_jobs_status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, }, /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] - Scheduler(TaskStatus), + Scheduler { + status: TaskStatus, + aggregation_round_info: AggregationRoundInfo, + }, /// Represents the compressor task and its status. #[strum(to_string = "Compressor")] Compressor(TaskStatus), @@ -181,28 +201,68 @@ impl Task { | Task::LeafWitnessGenerator { status, .. } | Task::NodeWitnessGenerator { status, .. } | Task::RecursionTip { status, .. } - | Task::Scheduler(status) + | Task::Scheduler { status, .. } | Task::Compressor(status) => status.clone(), } } -} -impl Task { + fn aggregation_round(&self) -> Option { + match self { + Task::BasicWitnessGenerator { + aggregation_round_info, + .. + } + | Task::LeafWitnessGenerator { + aggregation_round_info, + .. + } + | Task::NodeWitnessGenerator { + aggregation_round_info, + .. + } + | Task::RecursionTip { + aggregation_round_info, + .. + } + | Task::Scheduler { + aggregation_round_info, + .. + } => Some(aggregation_round_info.round), + Task::Compressor(_) => None, + } + } + + /// Returns the status of the prover jobs. + /// If the task is not in progress or successful, returns None. + /// Otherwise, returns the status of the prover jobs if the task + /// has prover jobs. fn prover_jobs_status(&self) -> Option { match self { Task::BasicWitnessGenerator { - prover_jobs_status, .. + status, + aggregation_round_info, } | Task::LeafWitnessGenerator { - prover_jobs_status, .. + status, + aggregation_round_info, } | Task::NodeWitnessGenerator { - prover_jobs_status, .. + status, + aggregation_round_info, } | Task::RecursionTip { - prover_jobs_status, .. 
- } => Some(prover_jobs_status.clone()), - Task::Scheduler(_) => None, + status, + aggregation_round_info, + } + | Task::Scheduler { + status, + aggregation_round_info, + } => match status { + TaskStatus::InProgress | TaskStatus::Successful => { + Some(aggregation_round_info.prover_jobs_status.clone()) + } + _ => None, + }, Task::Compressor(_) => None, } } @@ -210,18 +270,23 @@ impl Task { impl Debug for Task { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!(f, "-- {} --", self.to_string().bold())?; - match self.status() { - TaskStatus::InProgress | TaskStatus::Successful => { - writeln!(f, "> {}", self.status().to_string())?; - if let Some(status) = self.prover_jobs_status() { - writeln!(f, " > Prover Jobs Status: {}", status.to_string())?; - } + if let Some(aggregation_round_number) = self.aggregation_round() { + writeln!( + f, + "-- {} --", + format!("Aggregation Round {}", aggregation_round_number as u8).bold() + )?; + if let TaskStatus::Custom(msg) = self.status() { + writeln!(f, "{}: {}", self.to_string().bold(), msg)?; + } else { + writeln!(f, "{}: {}", self.to_string().bold(), self.status())?; } - TaskStatus::Queued | TaskStatus::WaitingForProofs | TaskStatus::Stuck => { - writeln!(f, "> {}", self.status().to_string())? + if let Some(prover_jobs_status) = self.prover_jobs_status() { + writeln!(f, "> Prover Jobs: {prover_jobs_status}")?; } - TaskStatus::Custom(msg) => writeln!(f, "> {msg}")?, + } else { + writeln!(f, "-- {} --", self.to_string().bold())?; + writeln!(f, "{}", self.status())?; } Ok(()) } @@ -242,3 +307,18 @@ impl From for TaskStatus { } } } + +#[derive(Clone)] +pub struct AggregationRoundInfo { + pub round: AggregationRound, + pub prover_jobs_status: TaskStatus, +} + +impl Default for AggregationRoundInfo { + fn default() -> Self { + AggregationRoundInfo { + round: AggregationRound::BasicCircuits, + prover_jobs_status: TaskStatus::default(), + } + } +} From 01ecd2f9b7ca36ddead16536cfa54da1f943a97a Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 29 Apr 2024 13:20:34 -0300 Subject: [PATCH 50/98] add leaf query --- core/lib/basic_types/src/prover_dal.rs | 19 +++- .../prover_cli/src/commands/status/batch.rs | 22 ++++ ...5d2832571464e74b5fed92cf54617573c84ec.json | 106 ++++++++++++++++++ .../src/fri_witness_generator_dal.rs | 38 ++++++- 4 files changed, 183 insertions(+), 2 deletions(-) create mode 100644 prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index f355932bb48f..7daea8a2205b 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use strum::{Display, EnumString}; use crate::{ - basic_fri_types::{AggregationRound, Eip4844Blobs}, + basic_fri_types::{AggregationRound, CircuitIdRoundTuple, Eip4844Blobs}, protocol_version::ProtocolVersionId, L1BatchNumber, }; @@ -273,6 +273,23 @@ pub struct BasicWitnessGeneratorJobInfo { pub eip_4844_blobs: Option, } +pub struct LeafWitnessGeneratorJobInfo { + pub l1_batch_number: L1BatchNumber, + pub circuit_id: u32, + pub closed_form_inputs_blob_url: Option, + pub attempts: u32, + pub status: WitnessJobStatus, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub is_blob_cleaned: Option, + pub number_of_basic_circuits: Option, + 
pub protocol_version: Option, + pub picked_by: Option, +} + #[derive(Debug, EnumString, Display)] pub enum ProofCompressionJobStatus { #[strum(serialize = "queued")] diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 4b7933f0438c..d6f33c023716 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -53,6 +53,15 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( )) } +async fn get_proof_leaf_witness_generator_status_for_batch<'a>( + batch_number: L1BatchNumber, + conn: &mut Connection<'a, Prover>, +) -> TaskStatus { + conn.fri_witness_generator_dal() + .get_leaf_witness_generator_job_for_batch(batch_number) + .await + .map(|job| TaskStatus::from(job.status)) + .unwrap_or(TaskStatus::Custom( + "Leaf witness generator job not found 🚫".to_owned(), + )) +} + async fn get_proof_compression_job_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, diff --git a/prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json b/prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json new file mode 100644 index 000000000000..9e750348decb --- /dev/null +++ b/prover/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json @@ -0,0 +1,106 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n leaf_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "closed_form_inputs_blob_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 9, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 10, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 11, + "name": "is_blob_cleaned", + "type_info": "Bool" + }, + { + "ordinal": 12, + "name": "number_of_basic_circuits", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 14, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + false, + false, + true, + false, + false, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec" +} diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 2662183d22bf..30d9bb21c977 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -7,7 +7,7 @@ use zksync_basic_types::{ protocol_version::ProtocolVersionId, prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, - NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, + LeafWitnessGeneratorJobInfo, 
NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, }, L1BatchNumber, }; @@ -1187,4 +1187,40 @@ impl FriWitnessGeneratorDal<'_, '_> { eip_4844_blobs: row.eip_4844_blobs.map(|vec_u8| Eip4844Blobs::from(vec_u8)), }) } + + pub async fn get_leaf_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + leaf_aggregation_witness_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| LeafWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + circuit_id: row.circuit_id as u32, + closed_form_inputs_blob_url: row.closed_form_inputs_blob_url, + attempts: row.attempts as u32, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + is_blob_cleaned: row.is_blob_cleaned, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + number_of_basic_circuits: row.number_of_basic_circuits, + }) + } } From 590c58bbff5576f297e73d0cf836a440f3386f3c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 29 Apr 2024 13:26:05 -0300 Subject: [PATCH 51/98] fix merge --- prover/prover_cli/src/commands/status/batch.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index bb2d1a802aae..456d2b6e29d9 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -55,7 +55,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( .get_leaf_witness_generator_job_for_batch(batch_number) .await .map(|job| TaskStatus::from(job.status)) - .unwrap_or(TaskStatus::Custom( - "Leaf witness generator job not found 🚫".to_owned(), - )) + .unwrap_or_default() } async fn get_proof_compression_job_status_for_batch<'a>( From e08b05e383f0d35512df22fda48cbe8699dbda46 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Mon, 29 Apr 2024 14:26:55 -0300 Subject: [PATCH 52/98] add querys --- core/lib/basic_types/src/prover_dal.rs | 33 +++++- .../prover_cli/src/commands/status/batch.rs | 41 +++++++ .../prover_cli/src/commands/status/utils.rs | 14 +-- ...dd8547a1ad20492ec37c3c0be5639e5d49952.json | 82 ++++++++++++++ ...9bfb838c787fc58d7536f9e9976e5e515431a.json | 106 ++++++++++++++++++ .../src/fri_witness_generator_dal.rs | 72 +++++++++++- 6 files changed, 339 insertions(+), 9 deletions(-) create mode 100644 prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json create mode 100644 prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 7daea8a2205b..0e5963801082 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use strum::{Display, EnumString}; use crate::{ - basic_fri_types::{AggregationRound, CircuitIdRoundTuple, Eip4844Blobs}, + basic_fri_types::{AggregationRound, Eip4844Blobs}, protocol_version::ProtocolVersionId, L1BatchNumber, }; @@ -290,6 +290,37 @@ pub struct LeafWitnessGeneratorJobInfo { pub picked_by: Option, } +pub struct NodeWitnessGeneratorJobInfo { + pub l1_batch_number: 
L1BatchNumber,
+    pub circuit_id: u32,
+    pub depth: u32,
+    pub status: WitnessJobStatus,
+    pub attempts: u32,
+    pub aggregations_url: Option<String>,
+    pub processing_started_at: Option<NaiveDateTime>,
+    pub time_taken: Option<NaiveTime>,
+    pub error: Option<String>,
+    pub created_at: NaiveDateTime,
+    pub updated_at: NaiveDateTime,
+    pub number_of_dependent_jobs: Option<i32>,
+    pub protocol_version: Option<i32>,
+    pub picked_by: Option<String>,
+}
+
+pub struct SchedulerWitnessGeneratorJobInfo {
+    pub l1_batch_number: L1BatchNumber,
+    pub scheduler_partial_input_blob_url: String,
+    pub status: WitnessJobStatus,
+    pub processing_started_at: Option<NaiveDateTime>,
+    pub time_taken: Option<NaiveTime>,
+    pub error: Option<String>,
+    pub created_at: NaiveDateTime,
+    pub updated_at: NaiveDateTime,
+    pub attempts: u32,
+    pub protocol_version: Option<i32>,
+    pub picked_by: Option<String>,
+}
+
 #[derive(Debug, EnumString, Display)]
 pub enum ProofCompressionJobStatus {
     #[strum(serialize = "queued")]
diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs
index 456d2b6e29d9..bd1b768ba27f 100644
--- a/prover/prover_cli/src/commands/status/batch.rs
+++ b/prover/prover_cli/src/commands/status/batch.rs
@@ -62,6 +62,25 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( .unwrap_or_default() }
+async fn get_proof_node_witness_generator_status_for_batch<'a>(
+    batch_number: L1BatchNumber,
+    conn: &mut Connection<'a, Prover>,
+) -> TaskStatus {
+    conn.fri_witness_generator_dal()
+        .get_node_witness_generator_job_for_batch(batch_number)
+        .await
+        .map(|job| TaskStatus::from(job.status))
+        .unwrap_or_default()
+}
+
+async fn get_proof_scheduler_witness_generator_status_for_batch<'a>(
+    batch_number: L1BatchNumber,
+    conn: &mut Connection<'a, Prover>,
+) -> TaskStatus {
+    conn.fri_witness_generator_dal()
+        .get_scheduler_witness_generator_job_for_batch(batch_number)
+        .await
+        .map(|job| TaskStatus::from(job.status))
+        .unwrap_or_default()
+}
+
 async fn get_proof_compression_job_status_for_batch<'a>(
     batch_number: L1BatchNumber,
     conn: &mut Connection<'a, Prover>,
diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs
index 6a0ebc1b7fb9..73daffbba61a 100644
--- a/prover/prover_cli/src/commands/status/utils.rs
+++ b/prover/prover_cli/src/commands/status/utils.rs
@@ -27,7 +27,7 @@ pub struct BatchData {
     /// The recursion tip data.
     pub recursion_tip: Task,
     /// The scheduler data.
-    pub scheduler: Task,
+    pub scheduler_witness_generator: Task,
     /// The compressor data.
     pub compressor: Task,
 }
@@ -45,7 +45,7 @@ impl Debug for BatchData {
         writeln!(f, "{:?}", self.leaf_witness_generator)?;
         writeln!(f, "{:?}", self.node_witness_generator)?;
         writeln!(f, "{:?}", self.recursion_tip)?;
-        writeln!(f, "{:?}", self.scheduler)?;
+        writeln!(f, "{:?}", self.scheduler_witness_generator)?;
         writeln!(f, "{:?}", self.compressor)
     }
 }
@@ -82,7 +82,7 @@ impl Default for BatchData {
                     prover_jobs_status: TaskStatus::default(),
                 },
             },
-            scheduler: Task::Scheduler {
+            scheduler_witness_generator: Task::SchedulerWitnessGenerator {
                 status: TaskStatus::default(),
                 aggregation_round_info: AggregationRoundInfo {
                     round: AggregationRound::Scheduler,
@@ -185,7 +185,7 @@ pub enum Task {
     },
     /// Represents the scheduler task and its status.
     #[strum(to_string = "Scheduler")]
-    Scheduler {
+    SchedulerWitnessGenerator {
         status: TaskStatus,
         aggregation_round_info: AggregationRoundInfo,
     },
@@ -201,7 +201,7 @@ impl Task {
             | Task::LeafWitnessGenerator { status, .. }
             | Task::NodeWitnessGenerator { status, .. }
             | Task::RecursionTip { status, .. 
} - | Task::Scheduler { status, .. } + | Task::SchedulerWitnessGenerator { status, .. } | Task::Compressor(status) => status.clone(), } } @@ -224,7 +224,7 @@ impl Task { aggregation_round_info, .. } - | Task::Scheduler { + | Task::SchedulerWitnessGenerator { aggregation_round_info, .. } => Some(aggregation_round_info.round), @@ -254,7 +254,7 @@ impl Task { status, aggregation_round_info, } - | Task::Scheduler { + | Task::SchedulerWitnessGenerator { status, aggregation_round_info, } => match status { diff --git a/prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json b/prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json new file mode 100644 index 000000000000..415b3e31c798 --- /dev/null +++ b/prover/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json @@ -0,0 +1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n scheduler_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "scheduler_partial_input_blob_url", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 4, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 5, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 7, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 9, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 10, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true + ] + }, + "hash": "285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952" +} diff --git a/prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json b/prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json new file mode 100644 index 000000000000..896f10a4ca3a --- /dev/null +++ b/prover/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json @@ -0,0 +1,106 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n node_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "depth", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 6, + "name": "aggregations_url", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 9, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 11, + "name": 
"updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 12, + "name": "number_of_dependent_jobs", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 14, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a" +} diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 30d9bb21c977..62464a49d7af 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -7,7 +7,8 @@ use zksync_basic_types::{ protocol_version::ProtocolVersionId, prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, - LeafWitnessGeneratorJobInfo, NodeAggregationJobMetadata, StuckJobs, WitnessJobStatus, + LeafWitnessGeneratorJobInfo, NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, + SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, }, L1BatchNumber, }; @@ -1223,4 +1224,73 @@ impl FriWitnessGeneratorDal<'_, '_> { number_of_basic_circuits: row.number_of_basic_circuits, }) } + + pub async fn get_node_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + node_aggregation_witness_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| NodeWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + circuit_id: row.circuit_id as u32, + depth: row.depth as u32, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + attempts: row.attempts as u32, + aggregations_url: row.aggregations_url, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + number_of_dependent_jobs: row.number_of_dependent_jobs, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + }) + } + + pub async fn get_scheduler_witness_generator_job_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + scheduler_witness_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| SchedulerWitnessGeneratorJobInfo { + l1_batch_number: l1_batch_number, + scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + error: row.error, + created_at: row.created_at, + updated_at: row.updated_at, + attempts: row.attempts as u32, + protocol_version: row.protocol_version, + picked_by: row.picked_by, + }) + } } From cf069b9b2def6e47e019b3dc7beeeae08e7df26c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 15:32:54 -0300 Subject: [PATCH 53/98] fix witnes jobs status --- Cargo.lock | 1 + core/lib/basic_types/Cargo.toml | 1 + core/lib/basic_types/src/prover_dal.rs | 31 +++++++++++-- prover/Cargo.lock | 1 + .../prover_cli/src/commands/status/batch.rs | 45 ++++++++++++------- 
.../prover_cli/src/commands/status/utils.rs | 25 +++++++++++ .../src/fri_witness_generator_dal.rs | 44 ++++++++++-------- 7 files changed, 110 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1d3481d0009..327543d495ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8124,6 +8124,7 @@ dependencies = [ "num_enum 0.7.2", "serde", "serde_json", + "sqlx", "strum", "web3", ] diff --git a/core/lib/basic_types/Cargo.toml b/core/lib/basic_types/Cargo.toml index 13ce9cc357d7..ef6733b0ef19 100644 --- a/core/lib/basic_types/Cargo.toml +++ b/core/lib/basic_types/Cargo.toml @@ -20,3 +20,4 @@ serde_json.workspace = true chrono.workspace = true strum = { workspace = true, features = ["derive"] } num_enum.workspace = true +sqlx = { workspace = true, feature= ["derive"]} diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 0e5963801082..d81f45e31231 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -124,7 +124,7 @@ pub struct ProverJobStatusInProgress { pub started_at: DateTime, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct WitnessJobStatusSuccessful { pub started_at: DateTime, pub time_taken: Duration, @@ -139,7 +139,7 @@ impl Default for WitnessJobStatusSuccessful { } } -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct WitnessJobStatusFailed { pub started_at: DateTime, pub error: String, @@ -161,7 +161,7 @@ pub enum ProverJobStatus { Ignored, } -#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr)] +#[derive(Debug, Clone, strum::Display, strum::EnumString, strum::AsRefStr)] pub enum WitnessJobStatus { #[strum(serialize = "failed")] Failed(WitnessJobStatusFailed), @@ -273,7 +273,31 @@ pub struct BasicWitnessGeneratorJobInfo { pub eip_4844_blobs: Option, } +// impl FromRow<'_, R> for BasicWitnessGeneratorJobInfo { +// fn from_row(row: &R) -> sqlx::Result { +// let l1_batch_number: i32 = row.try_get("l1_batch_number")?; +// let attempts: i32 = row.try_get("attempts")?; +// Ok(Self { +// l1_batch_number: L1BatchNumber(l1_batch_number as u32), +// merkle_tree_paths_blob_url: row.try_get("merkle_tree_paths_blob_url")?, +// attempts: attempts as u32, +// status: WitnessJobStatus::from_str(row.try_get("status")?).unwrap(), +// error: row.try_get("error")?, +// created_at: row.try_get("created_at")?, +// updated_at: row.try_get("updated_at")?, +// processing_started_at: row.try_get("processing_started_at")?, +// time_taken: row.try_get("time_taken")?, +// is_blob_cleaned: row.try_get("is_blob_cleaned")?, +// protocol_version: row.try_get("protocol_version")?, +// picked_by: row.try_get("picked_by")?, +// eip_4844_blobs: +// Some(Eip4844Blobs::from(row.try_get::, &str>("eip_4844_blobs")?)) +// }) +// } +// } + pub struct LeafWitnessGeneratorJobInfo { + pub id: u32, pub l1_batch_number: L1BatchNumber, pub circuit_id: u32, pub closed_form_inputs_blob_url: Option, @@ -291,6 +315,7 @@ pub struct LeafWitnessGeneratorJobInfo { } pub struct NodeWitnessGeneratorJobInfo { + pub id: u32, pub l1_batch_number: L1BatchNumber, pub circuit_id: u32, pub depth: u32, diff --git a/prover/Cargo.lock b/prover/Cargo.lock index e690f7ddf1fe..3edd696f5013 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -7516,6 +7516,7 @@ dependencies = [ "num_enum 0.7.2", "serde", "serde_json", + "sqlx", "strum", "web3", ] diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index bd1b768ba27f..0b11151e53e5 100644 --- 
a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,7 +1,9 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; -use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; +use zksync_types::{ + basic_fri_types::AggregationRound, prover_dal::WitnessJobStatus, L1BatchNumber, +}; use super::utils::{AggregationRoundInfo, BatchData, Task, TaskStatus}; use crate::commands::status::utils::postgres_config; @@ -62,21 +64,21 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - conn.fri_witness_generator_dal() - .get_leaf_witness_generator_job_for_batch(batch_number) + let status_vec: Vec = conn + .fri_witness_generator_dal() + .get_leaf_witness_generator_jobs_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() + .iter() + .map(|s| s.status.clone()) + .collect(); + TaskStatus::from(status_vec) } async fn get_proof_node_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - conn.fri_witness_generator_dal() - .get_node_witness_generator_job_for_batch(batch_number) + let status_vec: Vec = conn + .fri_witness_generator_dal() + .get_node_witness_generator_jobs_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() + .iter() + .map(|s| s.status.clone()) + .collect(); + TaskStatus::from(status_vec) } async fn get_proof_scheduler_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - conn.fri_witness_generator_dal() - .get_scheduler_witness_generator_job_for_batch(batch_number) + let status_vec: Vec = conn + .fri_witness_generator_dal() + .get_scheduler_witness_generator_jobs_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() + .iter() + .map(|s| s.status.clone()) + .collect(); + TaskStatus::from(status_vec) } async fn get_proof_compression_job_status_for_batch<'a>( diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 6a0ebc1b7fb9..73daffbba61a 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -157,6 +157,31 @@ impl From for TaskStatus { } } +impl From> for TaskStatus { + fn from(status_vector: Vec) -> Self { + if status_vector.is_empty() { + TaskStatus::JobsNotFound + } else if status_vector + .iter() + .all(|job| matches!(job, WitnessJobStatus::Queued)) + { + TaskStatus::Queued + } else if status_vector + .iter() + .all(|job| matches!(job, WitnessJobStatus::WaitingForProofs)) + { + TaskStatus::WaitingForProofs + } else if status_vector + .iter() + .all(|job| matches!(job, WitnessJobStatus::InProgress)) + { + TaskStatus::Successful + } else { + TaskStatus::InProgress + } + } +} + #[derive(EnumString, Clone, Display)] pub enum Task { /// Represents the basic witness generator task and its status. 
diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 62464a49d7af..57f14c0742dc 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1189,10 +1189,10 @@ impl FriWitnessGeneratorDal<'_, '_> { }) } - pub async fn get_leaf_witness_generator_job_for_batch( + pub async fn get_leaf_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, - ) -> Option { + ) -> Vec { sqlx::query!( r#" SELECT @@ -1204,31 +1204,34 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, i64::from(l1_batch_number.0) ) - .fetch_optional(self.storage.conn()) + .fetch_all(self.storage.conn()) .await .unwrap() + .iter() .map(|row| LeafWitnessGeneratorJobInfo { + id: row.id as u32, l1_batch_number: l1_batch_number, circuit_id: row.circuit_id as u32, - closed_form_inputs_blob_url: row.closed_form_inputs_blob_url, + closed_form_inputs_blob_url: row.closed_form_inputs_blob_url.clone(), attempts: row.attempts as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), - error: row.error, + error: row.error.clone(), created_at: row.created_at, updated_at: row.updated_at, processing_started_at: row.processing_started_at, time_taken: row.time_taken, is_blob_cleaned: row.is_blob_cleaned, protocol_version: row.protocol_version, - picked_by: row.picked_by, + picked_by: row.picked_by.clone(), number_of_basic_circuits: row.number_of_basic_circuits, }) + .collect() } - pub async fn get_node_witness_generator_job_for_batch( + pub async fn get_node_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, - ) -> Option { + ) -> Vec { sqlx::query!( r#" SELECT @@ -1240,31 +1243,34 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, i64::from(l1_batch_number.0) ) - .fetch_optional(self.storage.conn()) + .fetch_all(self.storage.conn()) .await .unwrap() + .iter() .map(|row| NodeWitnessGeneratorJobInfo { + id: row.id as u32, l1_batch_number: l1_batch_number, circuit_id: row.circuit_id as u32, depth: row.depth as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), attempts: row.attempts as u32, - aggregations_url: row.aggregations_url, + aggregations_url: row.aggregations_url.clone(), processing_started_at: row.processing_started_at, time_taken: row.time_taken, - error: row.error, + error: row.error.clone(), created_at: row.created_at, updated_at: row.updated_at, number_of_dependent_jobs: row.number_of_dependent_jobs, protocol_version: row.protocol_version, - picked_by: row.picked_by, + picked_by: row.picked_by.clone(), }) + .collect() } - pub async fn get_scheduler_witness_generator_job_for_batch( + pub async fn get_scheduler_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, - ) -> Option { + ) -> Vec { sqlx::query!( r#" SELECT @@ -1276,21 +1282,23 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, i64::from(l1_batch_number.0) ) - .fetch_optional(self.storage.conn()) + .fetch_all(self.storage.conn()) .await .unwrap() + .iter() .map(|row| SchedulerWitnessGeneratorJobInfo { l1_batch_number: l1_batch_number, - scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url, + scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url.clone(), status: WitnessJobStatus::from_str(&row.status).unwrap(), processing_started_at: row.processing_started_at, time_taken: row.time_taken, - error: row.error, + error: row.error.clone(), created_at: row.created_at, updated_at: row.updated_at, attempts: row.attempts as u32, protocol_version: 
row.protocol_version, - picked_by: row.picked_by, + picked_by: row.picked_by.clone(), }) + .collect() } } From 6ad55660dd55ee75e1985c0ffa6aa05e3a22310c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 15:35:15 -0300 Subject: [PATCH 54/98] rm comments --- core/lib/basic_types/src/prover_dal.rs | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index d81f45e31231..dc106da18a7b 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -273,29 +273,6 @@ pub struct BasicWitnessGeneratorJobInfo { pub eip_4844_blobs: Option, } -// impl FromRow<'_, R> for BasicWitnessGeneratorJobInfo { -// fn from_row(row: &R) -> sqlx::Result { -// let l1_batch_number: i32 = row.try_get("l1_batch_number")?; -// let attempts: i32 = row.try_get("attempts")?; -// Ok(Self { -// l1_batch_number: L1BatchNumber(l1_batch_number as u32), -// merkle_tree_paths_blob_url: row.try_get("merkle_tree_paths_blob_url")?, -// attempts: attempts as u32, -// status: WitnessJobStatus::from_str(row.try_get("status")?).unwrap(), -// error: row.try_get("error")?, -// created_at: row.try_get("created_at")?, -// updated_at: row.try_get("updated_at")?, -// processing_started_at: row.try_get("processing_started_at")?, -// time_taken: row.try_get("time_taken")?, -// is_blob_cleaned: row.try_get("is_blob_cleaned")?, -// protocol_version: row.try_get("protocol_version")?, -// picked_by: row.try_get("picked_by")?, -// eip_4844_blobs: -// Some(Eip4844Blobs::from(row.try_get::, &str>("eip_4844_blobs")?)) -// }) -// } -// } - pub struct LeafWitnessGeneratorJobInfo { pub id: u32, pub l1_batch_number: L1BatchNumber, From a3f1938a9a0aef5829f81ad14ed7a8758684f701 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Tue, 30 Apr 2024 15:56:08 -0300 Subject: [PATCH 55/98] Fix & refactors --- .../prover_cli/src/commands/status/batch.rs | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 0b11151e53e5..661c2eef65c6 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -40,7 +40,10 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result) -> anyhow::Result( +async fn get_aggregation_round_info_for_batch<'a>( batch_number: L1BatchNumber, aggregation_round: AggregationRound, conn: &mut Connection<'a, Prover>, @@ -106,7 +109,7 @@ async fn get_prover_jobs_data_for_batch<'a>( .into(); AggregationRoundInfo { - round: AggregationRound::BasicCircuits, + round: aggregation_round, prover_jobs_status: status, } } @@ -126,42 +129,39 @@ async fn get_proof_leaf_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - let status_vec: Vec = conn - .fri_witness_generator_dal() + conn.fri_witness_generator_dal() .get_leaf_witness_generator_jobs_for_batch(batch_number) .await .iter() .map(|s| s.status.clone()) - .collect(); - TaskStatus::from(status_vec) + .collect::>() + .into() } async fn get_proof_node_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - let status_vec: Vec = conn - .fri_witness_generator_dal() + conn.fri_witness_generator_dal() .get_node_witness_generator_jobs_for_batch(batch_number) .await 
.iter() .map(|s| s.status.clone()) - .collect(); - TaskStatus::from(status_vec) + .collect::>() + .into() } async fn get_proof_scheduler_witness_generator_status_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, ) -> TaskStatus { - let status_vec: Vec = conn - .fri_witness_generator_dal() + conn.fri_witness_generator_dal() .get_scheduler_witness_generator_jobs_for_batch(batch_number) .await .iter() .map(|s| s.status.clone()) - .collect(); - TaskStatus::from(status_vec) + .collect::>() + .into() } async fn get_proof_compression_job_status_for_batch<'a>( From 78c7cd8c7b220a76f68ac56c56a1019a01a9821e Mon Sep 17 00:00:00 2001 From: ilitteri Date: Tue, 30 Apr 2024 16:07:00 -0300 Subject: [PATCH 56/98] zk lint rust --- prover/prover_dal/src/fri_prover_dal.rs | 4 ++-- prover/prover_dal/src/fri_witness_generator_dal.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index a122665853f7..62890236b993 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -622,10 +622,10 @@ impl FriProverDal<'_, '_> { .iter() .map(|row| ProverJobFriInfo { id: row.id as u32, - l1_batch_number: l1_batch_number, + l1_batch_number, circuit_id: row.circuit_id as u32, circuit_blob_url: row.circuit_blob_url.clone(), - aggregation_round: aggregation_round, + aggregation_round, sequence_number: row.sequence_number as u32, status: ProverJobStatus::from_str(&row.status).unwrap(), error: row.error.clone(), diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 2662183d22bf..2740ca609d1c 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1172,7 +1172,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .await .unwrap() .map(|row| BasicWitnessGeneratorJobInfo { - l1_batch_number: l1_batch_number, + l1_batch_number, merkle_tree_paths_blob_url: row.merkle_tree_paths_blob_url, attempts: row.attempts as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), @@ -1184,7 +1184,7 @@ impl FriWitnessGeneratorDal<'_, '_> { is_blob_cleaned: row.is_blob_cleaned, protocol_version: row.protocol_version, picked_by: row.picked_by, - eip_4844_blobs: row.eip_4844_blobs.map(|vec_u8| Eip4844Blobs::from(vec_u8)), + eip_4844_blobs: row.eip_4844_blobs.map(Eip4844Blobs::from), }) } } From 33729b8b2baf139af81cf0b9609ee771fd3bc7d2 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Tue, 30 Apr 2024 16:09:31 -0300 Subject: [PATCH 57/98] Fix import --- prover/prover_fri_gateway/src/proof_submitter.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_fri_gateway/src/proof_submitter.rs b/prover/prover_fri_gateway/src/proof_submitter.rs index 1c5850d31a0b..025d79e2f8c3 100644 --- a/prover/prover_fri_gateway/src/proof_submitter.rs +++ b/prover/prover_fri_gateway/src/proof_submitter.rs @@ -1,7 +1,7 @@ use async_trait::async_trait; -use prover_dal::{fri_proof_compressor_dal::ProofCompressionJobStatus, ProverDal}; +use prover_dal::ProverDal; use zksync_prover_interface::api::{SubmitProofRequest, SubmitProofResponse}; -use zksync_types::L1BatchNumber; +use zksync_types::{prover_dal::ProofCompressionJobStatus, L1BatchNumber}; use crate::api_data_fetcher::{PeriodicApi, PeriodicApiStruct}; From 112ae934e763215775f33a23a61b15657da9c2bc Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 16:12:15 -0300 Subject: 
[PATCH 58/98] fix import --- prover/prover_fri_gateway/src/proof_submitter.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_fri_gateway/src/proof_submitter.rs b/prover/prover_fri_gateway/src/proof_submitter.rs index 1c5850d31a0b..025d79e2f8c3 100644 --- a/prover/prover_fri_gateway/src/proof_submitter.rs +++ b/prover/prover_fri_gateway/src/proof_submitter.rs @@ -1,7 +1,7 @@ use async_trait::async_trait; -use prover_dal::{fri_proof_compressor_dal::ProofCompressionJobStatus, ProverDal}; +use prover_dal::ProverDal; use zksync_prover_interface::api::{SubmitProofRequest, SubmitProofResponse}; -use zksync_types::L1BatchNumber; +use zksync_types::{prover_dal::ProofCompressionJobStatus, L1BatchNumber}; use crate::api_data_fetcher::{PeriodicApi, PeriodicApiStruct}; From e1854fa6e7ce89a3e9667be0afdc8afbf5b7fca4 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Tue, 30 Apr 2024 16:12:49 -0300 Subject: [PATCH 59/98] zk lint rust --- prover/prover_dal/src/fri_witness_generator_dal.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index a65e88b7c325..c336084e7baa 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1210,7 +1210,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .iter() .map(|row| LeafWitnessGeneratorJobInfo { id: row.id as u32, - l1_batch_number: l1_batch_number, + l1_batch_number, circuit_id: row.circuit_id as u32, closed_form_inputs_blob_url: row.closed_form_inputs_blob_url.clone(), attempts: row.attempts as u32, @@ -1249,7 +1249,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .iter() .map(|row| NodeWitnessGeneratorJobInfo { id: row.id as u32, - l1_batch_number: l1_batch_number, + l1_batch_number, circuit_id: row.circuit_id as u32, depth: row.depth as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), @@ -1287,7 +1287,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .unwrap() .iter() .map(|row| SchedulerWitnessGeneratorJobInfo { - l1_batch_number: l1_batch_number, + l1_batch_number, scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url.clone(), status: WitnessJobStatus::from_str(&row.status).unwrap(), processing_started_at: row.processing_started_at, From fa26c26aee21eca4fae8e13fedbb2b5fe268cefb Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 17:26:33 -0300 Subject: [PATCH 60/98] fix wg task types --- prover/prover_cli/src/commands/status/batch.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 661c2eef65c6..c29d25dc62e5 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -67,7 +67,7 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result Date: Tue, 30 Apr 2024 18:02:04 -0300 Subject: [PATCH 61/98] update README --- prover/prover_cli/README.md | 61 ++++++++++++++++++++++--------------- 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 99869b36bf25..f773bf60dcd8 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -1,6 +1,4 @@ -# CLI to better understand and debug provers - -## Usage +# Usage > Note: For now, its necessary to use the 'zk f' tool to set up the environment. 
The main command will later be changed > to `pli`. @@ -10,7 +8,7 @@ Usage: zk f cargo run --release -- Commands: file-info - status-jobs + status help Print this message or the help of the given subcommand(s) Options: @@ -18,35 +16,48 @@ Options: -V, --version Print version ``` -### Status-jobs +## Status -You can get the progress for some batch proof, for a bunch of batches the `status-jobs` command: +### Status batch -```bash -# Displays the proof progress of the batch 1 - zk f cargo run -- status-jobs --batch 1 -# Displays the proof progress of the batches 1 and 2 - zk f cargo run -- status-jobs --batch 1 2 -# Displays the proof progress of the batch 3, with additional information - zk f cargo run -- status-jobs --batch 3 --verbose -``` +Displays the proof status for a given batch or a set of batches. Example: ```bash -$ zk f cargo run -- status-jobs --batch 1 --verbose - -Batch number: 1 -Progress: 34.88% (45/129) -In progress: 1 -Queued: 83 -Successful: 45 -Failed: 0 +$ zk f run --release -- status batch -n 1 + +== Batch 1 Status == +> In Progress ⌛️ + +== Proving Stages == +-- Aggregaton Round 0 -- +Basic Witness Generator: Done ✅ +> Prover Jobs: In progress ⌛️ + +-- Aggregaton Round 1 -- +Leaf Witness Generator: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Aggregaton Round 2 -- +Node Witness Generator: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Aggregaton Round 3 -- +Recursion Tip: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Aggregaton Round 4 -- +Scheduler: In progress ⌛️ +> Prover Jobs: Waiting for proofs ⏱️ + +-- Compressor -- +> Compressor job not found 🚫 ``` -### File-Info +## File-Info -Displays the information about a given file: +Displays de information about a given file: ```bash cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin @@ -80,4 +91,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` +``` \ No newline at end of file From 10fdcb9345794294b39d2bad07f640b1822e6080 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 18:44:51 -0300 Subject: [PATCH 62/98] fix EIP Blob use --- prover/prover_cli/README.md | 2 +- prover/prover_dal/src/fri_prover_dal.rs | 3 +-- prover/prover_dal/src/fri_witness_generator_dal.rs | 7 ++++++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index f773bf60dcd8..6296f63b0b18 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -91,4 +91,4 @@ Previous block meta hash: [63, 236, 0, 236, 23, 236, 175, 242, 75, 187, 203, 193 Previous block aux hash: [200, 12, 70, 33, 103, 13, 251, 174, 96, 165, 135, 138, 34, 75, 249, 81, 93, 86, 110, 52, 30, 172, 198, 51, 155, 82, 86, 137, 156, 215, 11, 119] EIP 4844 - witnesses: None EIP 4844 - proofs: 0 -``` \ No newline at end of file +``` diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 3c23f7fc6ac1..df0525cd9774 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -5,8 +5,7 @@ use zksync_basic_types::{ basic_fri_types::{AggregationRound, CircuitIdRoundTuple}, protocol_version::ProtocolVersionId, prover_dal::{ - FriProverJobMetadata, JobCountStatistics, 
ProverJobFriInfo, ProverJobStatus, StuckJobs, - EIP_4844_CIRCUIT_ID, + z, FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, }, L1BatchNumber, }; diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index e74e9541bed8..947adf535db1 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1429,7 +1429,12 @@ impl FriWitnessGeneratorDal<'_, '_> { is_blob_cleaned: row.is_blob_cleaned, protocol_version: row.protocol_version, picked_by: row.picked_by, - eip_4844_blobs: row.eip_4844_blobs.map(Eip4844Blobs::from), + eip_4844_blobs: row + .eip_4844_blobs + .as_deref() + .map(Eip4844Blobs::decode) + .transpose() + .unwrap(), }) } From 8a1101f815abd4c0858c2b79e0a2957120d3215c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 30 Apr 2024 19:25:25 -0300 Subject: [PATCH 63/98] fix typos --- prover/prover_cli/README.md | 2 +- prover/prover_dal/src/fri_prover_dal.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index 6296f63b0b18..ceef016a1d2a 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -57,7 +57,7 @@ Scheduler: In progress ⌛️ ## File-Info -Displays de information about a given file: +Displays the information about a given file: ```bash cargo run -- file-info --file-path /zksync-era/prover/artifacts/proofs_fri/l1_batch_proof_1.bin diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index df0525cd9774..942d1c06612d 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -5,7 +5,7 @@ use zksync_basic_types::{ basic_fri_types::{AggregationRound, CircuitIdRoundTuple}, protocol_version::ProtocolVersionId, prover_dal::{ - z, FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, + FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, }, L1BatchNumber, }; From 1bbde9345b797779a132b5507f253e179eeb66aa Mon Sep 17 00:00:00 2001 From: ilitteri Date: Thu, 2 May 2024 09:33:42 -0300 Subject: [PATCH 64/98] Fix typo --- prover/prover_cli/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/prover/prover_cli/README.md b/prover/prover_cli/README.md index ceef016a1d2a..48ca52bdde13 100644 --- a/prover/prover_cli/README.md +++ b/prover/prover_cli/README.md @@ -31,23 +31,23 @@ $ zk f run --release -- status batch -n 1 > In Progress ⌛️ == Proving Stages == --- Aggregaton Round 0 -- +-- Aggregation Round 0 -- Basic Witness Generator: Done ✅ > Prover Jobs: In progress ⌛️ --- Aggregaton Round 1 -- +-- Aggregation Round 1 -- Leaf Witness Generator: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ --- Aggregaton Round 2 -- +-- Aggregation Round 2 -- Node Witness Generator: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ --- Aggregaton Round 3 -- +-- Aggregation Round 3 -- Recursion Tip: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ --- Aggregaton Round 4 -- +-- Aggregation Round 4 -- Scheduler: In progress ⌛️ > Prover Jobs: Waiting for proofs ⏱️ From 24a024751bf141b2f4477d33cb2ca9e1e3e20a12 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 2 May 2024 17:50:43 -0300 Subject: [PATCH 65/98] refactor no verbose --- core/lib/basic_types/src/prover_dal.rs | 21 +- .../prover_cli/src/commands/status/batch.rs | 163 ++++--- 
.../prover_cli/src/commands/status/utils.rs | 400 +++++++----------- 3 files changed, 284 insertions(+), 300 deletions(-) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index dc106da18a7b..f7ccb1989c60 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -98,13 +98,13 @@ pub struct JobPosition { pub sequence_number: usize, } -#[derive(Debug, Default, PartialEq)] +#[derive(Debug, Default, PartialEq, Clone)] pub struct ProverJobStatusFailed { pub started_at: DateTime, pub error: String, } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Clone)] pub struct ProverJobStatusSuccessful { pub started_at: DateTime, pub time_taken: Duration, @@ -119,7 +119,7 @@ impl Default for ProverJobStatusSuccessful { } } -#[derive(Debug, Default, PartialEq)] +#[derive(Debug, Default, PartialEq, Clone)] pub struct ProverJobStatusInProgress { pub started_at: DateTime, } @@ -145,7 +145,7 @@ pub struct WitnessJobStatusFailed { pub error: String, } -#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr, PartialEq)] +#[derive(Debug, strum::Display, strum::EnumString, strum::AsRefStr, PartialEq, Clone)] pub enum ProverJobStatus { #[strum(serialize = "queued")] Queued, @@ -235,6 +235,7 @@ impl FromStr for GpuProverInstanceStatus { } } +#[derive(Debug, Clone)] pub struct ProverJobFriInfo { pub id: u32, pub l1_batch_number: L1BatchNumber, @@ -257,6 +258,7 @@ pub struct ProverJobFriInfo { pub picked_by: Option, } +#[derive(Debug, Clone)] pub struct BasicWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, pub merkle_tree_paths_blob_url: Option, @@ -273,6 +275,7 @@ pub struct BasicWitnessGeneratorJobInfo { pub eip_4844_blobs: Option, } +#[derive(Debug, Clone)] pub struct LeafWitnessGeneratorJobInfo { pub id: u32, pub l1_batch_number: L1BatchNumber, @@ -291,6 +294,7 @@ pub struct LeafWitnessGeneratorJobInfo { pub picked_by: Option, } +#[derive(Debug, Clone)] pub struct NodeWitnessGeneratorJobInfo { pub id: u32, pub l1_batch_number: L1BatchNumber, @@ -309,6 +313,12 @@ pub struct NodeWitnessGeneratorJobInfo { pub picked_by: Option, } +#[derive(Debug, Clone)] +pub struct RecursionTipWitnessGeneratorJobInfo { + pub status: WitnessJobStatus, +} + +#[derive(Debug, Clone)] pub struct SchedulerWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, pub scheduler_partial_input_blob_url: String, @@ -323,7 +333,7 @@ pub struct SchedulerWitnessGeneratorJobInfo { pub picked_by: Option, } -#[derive(Debug, EnumString, Display)] +#[derive(Debug, EnumString, Display, Clone)] pub enum ProofCompressionJobStatus { #[strum(serialize = "queued")] Queued, @@ -339,6 +349,7 @@ pub enum ProofCompressionJobStatus { Skipped, } +#[derive(Debug, Clone)] pub struct ProofCompressionJobInfo { pub l1_batch_number: L1BatchNumber, pub attempts: u32, diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index c29d25dc62e5..5e4ad4136927 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,12 +1,18 @@ use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; +use colored::*; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::{ - basic_fri_types::AggregationRound, prover_dal::WitnessJobStatus, L1BatchNumber, + basic_fri_types::AggregationRound, + prover_dal::{ + BasicWitnessGeneratorJobInfo, LeafWitnessGeneratorJobInfo, NodeWitnessGeneratorJobInfo, + ProofCompressionJobInfo, 
ProverJobFriInfo, SchedulerWitnessGeneratorJobInfo, + }, + L1BatchNumber, }; -use super::utils::{AggregationRoundInfo, BatchData, Task, TaskStatus}; -use crate::commands::status::utils::postgres_config; +use super::utils::{BatchData, StageInfo}; +use crate::commands::status::utils::{postgres_config, Status}; #[derive(ClapArgs)] pub struct Args { @@ -25,7 +31,11 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { let batches_data = get_batches_data(args.batches).await?; for batch_data in batches_data { - println!("{batch_data:?}"); + if !args.verbose { + display_batch_status(batch_data); + } else { + println!("WIP") + } } Ok(()) @@ -49,47 +59,61 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result) -> anyhow::Result( +async fn get_prover_jobs_info_for_batch<'a>( batch_number: L1BatchNumber, aggregation_round: AggregationRound, conn: &mut Connection<'a, Prover>, -) -> AggregationRoundInfo { - let status: TaskStatus = conn - .fri_prover_jobs_dal() +) -> Vec { + conn.fri_prover_jobs_dal() .get_prover_jobs_stats_for_batch(batch_number, aggregation_round) .await - .into(); - - AggregationRoundInfo { - round: aggregation_round, - prover_jobs_status: status, - } } -async fn get_proof_basic_witness_generator_status_for_batch<'a>( +async fn get_proof_basic_witness_generator_into_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, -) -> TaskStatus { +) -> Option { conn.fri_witness_generator_dal() .get_basic_witness_generator_job_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() } -async fn get_proof_leaf_witness_generator_status_for_batch<'a>( +async fn get_proof_leaf_witness_generator_info_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, -) -> TaskStatus { +) -> Vec { conn.fri_witness_generator_dal() .get_leaf_witness_generator_jobs_for_batch(batch_number) .await - .iter() - .map(|s| s.status.clone()) - .collect::>() - .into() } -async fn get_proof_node_witness_generator_status_for_batch<'a>( +async fn get_proof_node_witness_generator_info_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, -) -> TaskStatus { +) -> Vec { conn.fri_witness_generator_dal() .get_node_witness_generator_jobs_for_batch(batch_number) .await - .iter() - .map(|s| s.status.clone()) - .collect::>() - .into() } -async fn get_proof_scheduler_witness_generator_status_for_batch<'a>( +async fn get_proof_scheduler_witness_generator_info_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, -) -> TaskStatus { +) -> Vec { conn.fri_witness_generator_dal() .get_scheduler_witness_generator_jobs_for_batch(batch_number) .await - .iter() - .map(|s| s.status.clone()) - .collect::>() - .into() } -async fn get_proof_compression_job_status_for_batch<'a>( +async fn get_proof_compression_job_info_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, -) -> TaskStatus { +) -> Option { conn.fri_proof_compressor_dal() .get_proof_compression_job_for_batch(batch_number) .await - .map(|job| TaskStatus::from(job.status)) - .unwrap_or_default() +} + +fn display_batch_status(batch_data: BatchData) { + println!( + "== {} == \n", + format!("Batch {} Status", batch_data.batch_number) + ); + display_status_for_stage(batch_data.basic_witness_generator); + display_status_for_stage(batch_data.leaf_witness_generator); + display_status_for_stage(batch_data.node_witness_generator); + display_status_for_stage(batch_data.recursion_tip_witness_generator); + 
display_status_for_stage(batch_data.scheduler_witness_generator); +} + +fn display_status_for_stage(stage_info: StageInfo) { + println!( + "-- {} --", + format!( + "Aggregation Round {}", + stage_info + .aggregation_round() + .expect("No aggregation round found") as u8 + ) + .bold() + ); + match stage_info.witness_generator_jobs_status() { + Status::Custom(msg) => { + println!("{}: {}", stage_info.to_string().bold(), msg); + } + Status::Queued | Status::WaitingForProofs | Status::Stuck | Status::JobsNotFound => { + println!( + "{}: {} \n", + stage_info.to_string().bold(), + stage_info.witness_generator_jobs_status() + ) + } + Status::InProgress | Status::Successful => { + println!( + "{}: {}", + stage_info.to_string().bold(), + stage_info.witness_generator_jobs_status() + ); + println!( + "> Prover Jobs: {} \n", + stage_info + .prover_jobs_status() + .expect("Unable to check status") + ); + } + } } diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 73daffbba61a..525cbf34f514 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,12 +1,15 @@ use std::fmt::Debug; -use colored::*; use strum::{Display, EnumString}; use zksync_config::PostgresConfig; use zksync_env_config::FromEnv; use zksync_types::{ basic_fri_types::AggregationRound, - prover_dal::{ProofCompressionJobStatus, ProverJobFriInfo, ProverJobStatus, WitnessJobStatus}, + prover_dal::{ + BasicWitnessGeneratorJobInfo, LeafWitnessGeneratorJobInfo, NodeWitnessGeneratorJobInfo, + ProofCompressionJobInfo, ProofCompressionJobStatus, ProverJobFriInfo, ProverJobStatus, + RecursionTipWitnessGeneratorJobInfo, SchedulerWitnessGeneratorJobInfo, WitnessJobStatus, + }, L1BatchNumber, }; @@ -19,83 +22,21 @@ pub struct BatchData { /// The number of the batch. pub batch_number: L1BatchNumber, /// The basic witness generator data. - pub basic_witness_generator: Task, + pub basic_witness_generator: StageInfo, /// The leaf witness generator data. - pub leaf_witness_generator: Task, + pub leaf_witness_generator: StageInfo, /// The node witness generator data. - pub node_witness_generator: Task, + pub node_witness_generator: StageInfo, /// The recursion tip data. - pub recursion_tip: Task, + pub recursion_tip_witness_generator: StageInfo, /// The scheduler data. - pub scheduler_witness_generator: Task, + pub scheduler_witness_generator: StageInfo, /// The compressor data. 
- pub compressor: Task, -} - -impl Debug for BatchData { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!( - f, - "== {} ==", - format!("Batch {} Status", self.batch_number).bold() - )?; - writeln!(f)?; - writeln!(f, "= {} =", "Proving Stages".to_owned().bold())?; - writeln!(f, "{:?}", self.basic_witness_generator)?; - writeln!(f, "{:?}", self.leaf_witness_generator)?; - writeln!(f, "{:?}", self.node_witness_generator)?; - writeln!(f, "{:?}", self.recursion_tip)?; - writeln!(f, "{:?}", self.scheduler_witness_generator)?; - writeln!(f, "{:?}", self.compressor) - } -} - -impl Default for BatchData { - fn default() -> Self { - BatchData { - batch_number: L1BatchNumber::default(), - basic_witness_generator: Task::BasicWitnessGenerator { - status: TaskStatus::default(), - aggregation_round_info: AggregationRoundInfo { - round: AggregationRound::BasicCircuits, - prover_jobs_status: TaskStatus::default(), - }, - }, - leaf_witness_generator: Task::LeafWitnessGenerator { - status: TaskStatus::default(), - aggregation_round_info: AggregationRoundInfo { - round: AggregationRound::LeafAggregation, - prover_jobs_status: TaskStatus::default(), - }, - }, - node_witness_generator: Task::NodeWitnessGenerator { - status: TaskStatus::default(), - aggregation_round_info: AggregationRoundInfo { - round: AggregationRound::NodeAggregation, - prover_jobs_status: TaskStatus::default(), - }, - }, - recursion_tip: Task::RecursionTip { - status: TaskStatus::default(), - aggregation_round_info: AggregationRoundInfo { - round: AggregationRound::Scheduler, - prover_jobs_status: TaskStatus::default(), - }, - }, - scheduler_witness_generator: Task::SchedulerWitnessGenerator { - status: TaskStatus::default(), - aggregation_round_info: AggregationRoundInfo { - round: AggregationRound::Scheduler, - prover_jobs_status: TaskStatus::default(), - }, - }, - compressor: Task::Compressor(TaskStatus::JobsNotFound), - } - } + pub compressor: StageInfo, } #[derive(Default, Debug, EnumString, Clone, Display)] -pub enum TaskStatus { +pub enum Status { /// A custom status that can be set manually. /// Mostly used when a task has singular status. Custom(String), @@ -120,230 +61,211 @@ pub enum TaskStatus { JobsNotFound, } -// This implementation will change to From> for AggregationRoundInfo -// once the --verbose flag is implemented. 
-impl From> for TaskStatus { - fn from(jobs_vector: Vec) -> Self { - if jobs_vector.is_empty() { - TaskStatus::JobsNotFound - } else if jobs_vector +impl From for Status { + fn from(status: WitnessJobStatus) -> Self { + match status { + WitnessJobStatus::Queued => Status::Queued, + WitnessJobStatus::InProgress => Status::InProgress, + WitnessJobStatus::Successful(_) => Status::Successful, + WitnessJobStatus::Failed(_) => Status::InProgress, + WitnessJobStatus::WaitingForArtifacts => { + Status::Custom("Waiting for Artifacts ⏱️".to_owned()) + } + WitnessJobStatus::Skipped => Status::Custom("Skipped ⏩".to_owned()), + WitnessJobStatus::WaitingForProofs => Status::WaitingForProofs, + } + } +} + +impl From> for Status { + fn from(status_vector: Vec) -> Self { + if status_vector.is_empty() { + Status::JobsNotFound + } else if status_vector .iter() - .all(|job| matches!(job.status, ProverJobStatus::Queued)) + .all(|job| matches!(job, WitnessJobStatus::Queued)) { - TaskStatus::Queued - } else if jobs_vector + Status::Queued + } else if status_vector .iter() - .all(|job| matches!(job.status, ProverJobStatus::InProgress(_))) + .all(|job| matches!(job, WitnessJobStatus::WaitingForProofs)) + { + Status::WaitingForProofs + } else if status_vector + .iter() + .all(|job| matches!(job, WitnessJobStatus::InProgress)) { - TaskStatus::Successful + Status::Successful } else { - TaskStatus::InProgress + Status::InProgress } } } -impl From for TaskStatus { +impl From> for Status { + fn from(leaf_info_vector: Vec) -> Self { + leaf_info_vector + .iter() + .map(|s| s.status.clone()) + .collect::>() + .into() + } +} + +impl From> for Status { + fn from(node_info_vector: Vec) -> Self { + node_info_vector + .iter() + .map(|s| s.status.clone()) + .collect::>() + .into() + } +} + +impl From> for Status { + fn from(scheduler_info_vector: Vec) -> Self { + scheduler_info_vector + .iter() + .map(|s| s.status.clone()) + .collect::>() + .into() + } +} + +impl From> for Status { + fn from(scheduler_info_vector: Vec) -> Self { + scheduler_info_vector + .iter() + .map(|s| s.status.clone()) + .collect::>() + .into() + } +} + +impl From for Status { fn from(status: ProofCompressionJobStatus) -> Self { match status { - ProofCompressionJobStatus::Queued => TaskStatus::Queued, - ProofCompressionJobStatus::InProgress => TaskStatus::InProgress, - ProofCompressionJobStatus::Successful => TaskStatus::Successful, - ProofCompressionJobStatus::Failed => TaskStatus::InProgress, + ProofCompressionJobStatus::Queued => Status::Queued, + ProofCompressionJobStatus::InProgress => Status::InProgress, + ProofCompressionJobStatus::Successful => Status::Successful, + ProofCompressionJobStatus::Failed => Status::InProgress, ProofCompressionJobStatus::SentToServer => { - TaskStatus::Custom("Sent to server 📤".to_owned()) + Status::Custom("Sent to server 📤".to_owned()) } - ProofCompressionJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), + ProofCompressionJobStatus::Skipped => Status::Custom("Skipped ⏩".to_owned()), } } } -impl From> for TaskStatus { - fn from(status_vector: Vec) -> Self { - if status_vector.is_empty() { - TaskStatus::JobsNotFound - } else if status_vector - .iter() - .all(|job| matches!(job, WitnessJobStatus::Queued)) - { - TaskStatus::Queued - } else if status_vector +impl From> for Status { + fn from(jobs_vector: Vec) -> Self { + if jobs_vector.is_empty() { + Status::JobsNotFound + } else if jobs_vector .iter() - .all(|job| matches!(job, WitnessJobStatus::WaitingForProofs)) + .all(|job| matches!(job.status, 
ProverJobStatus::Queued)) { - TaskStatus::WaitingForProofs - } else if status_vector + Status::Queued + } else if jobs_vector .iter() - .all(|job| matches!(job, WitnessJobStatus::InProgress)) + .all(|job| matches!(job.status, ProverJobStatus::InProgress(_))) { - TaskStatus::Successful + Status::Successful } else { - TaskStatus::InProgress + Status::InProgress } } } #[derive(EnumString, Clone, Display)] -pub enum Task { - /// Represents the basic witness generator task and its status. +pub enum StageInfo { #[strum(to_string = "Basic Witness Generator")] BasicWitnessGenerator { - status: TaskStatus, - aggregation_round_info: AggregationRoundInfo, + witness_generator_job_info: Option, + prover_jobs_info: Vec, }, - /// Represents the leaf witness generator task, its status and the aggregation round 0 prover jobs data. #[strum(to_string = "Leaf Witness Generator")] LeafWitnessGenerator { - status: TaskStatus, - aggregation_round_info: AggregationRoundInfo, + witness_generator_jobs_info: Vec, + prover_jobs_info: Vec, }, - /// Represents the node witness generator task, its status and the aggregation round 1 prover jobs data. #[strum(to_string = "Node Witness Generator")] NodeWitnessGenerator { - status: TaskStatus, - aggregation_round_info: AggregationRoundInfo, + witness_generator_jobs_info: Vec, + prover_jobs_info: Vec, }, - /// Represents the recursion tip task, its status and the aggregation round 2 prover jobs data. #[strum(to_string = "Recursion Tip")] - RecursionTip { - status: TaskStatus, - aggregation_round_info: AggregationRoundInfo, + RecursionTipWitnessGenerator { + witness_generator_jobs_info: Vec, + prover_jobs_info: Vec, }, - /// Represents the scheduler task and its status. #[strum(to_string = "Scheduler")] SchedulerWitnessGenerator { - status: TaskStatus, - aggregation_round_info: AggregationRoundInfo, + witness_generator_jobs_info: Vec, + prover_jobs_info: Vec, }, - /// Represents the compressor task and its status. #[strum(to_string = "Compressor")] - Compressor(TaskStatus), + Compressor(Option), } -impl Task { - fn status(&self) -> TaskStatus { - match self { - Task::BasicWitnessGenerator { status, .. } - | Task::LeafWitnessGenerator { status, .. } - | Task::NodeWitnessGenerator { status, .. } - | Task::RecursionTip { status, .. } - | Task::SchedulerWitnessGenerator { status, .. } - | Task::Compressor(status) => status.clone(), - } - } - - fn aggregation_round(&self) -> Option { +impl StageInfo { + pub fn aggregation_round(&self) -> Option { match self { - Task::BasicWitnessGenerator { - aggregation_round_info, - .. - } - | Task::LeafWitnessGenerator { - aggregation_round_info, - .. - } - | Task::NodeWitnessGenerator { - aggregation_round_info, - .. - } - | Task::RecursionTip { - aggregation_round_info, - .. - } - | Task::SchedulerWitnessGenerator { - aggregation_round_info, - .. - } => Some(aggregation_round_info.round), - Task::Compressor(_) => None, + StageInfo::BasicWitnessGenerator { .. } => Some(AggregationRound::BasicCircuits), + StageInfo::LeafWitnessGenerator { .. } => Some(AggregationRound::LeafAggregation), + StageInfo::NodeWitnessGenerator { .. } => Some(AggregationRound::NodeAggregation), + StageInfo::RecursionTipWitnessGenerator { .. } => Some(AggregationRound::RecursionTip), + StageInfo::SchedulerWitnessGenerator { .. } => Some(AggregationRound::Scheduler), + StageInfo::Compressor(_) => None, } } - /// Returns the status of the prover jobs. - /// If the task is not in progress or successful, returns None. 
- /// Otherwise, returns the status of the prover jobs if the task - /// has prover jobs. - fn prover_jobs_status(&self) -> Option { - match self { - Task::BasicWitnessGenerator { - status, - aggregation_round_info, + pub fn witness_generator_jobs_status(&self) -> Status { + match self.clone() { + StageInfo::BasicWitnessGenerator { + prover_jobs_info, .. } - | Task::LeafWitnessGenerator { - status, - aggregation_round_info, + | StageInfo::LeafWitnessGenerator { + prover_jobs_info, .. } - | Task::NodeWitnessGenerator { - status, - aggregation_round_info, + | StageInfo::NodeWitnessGenerator { + prover_jobs_info, .. } - | Task::RecursionTip { - status, - aggregation_round_info, + | StageInfo::RecursionTipWitnessGenerator { + prover_jobs_info, .. } - | Task::SchedulerWitnessGenerator { - status, - aggregation_round_info, - } => match status { - TaskStatus::InProgress | TaskStatus::Successful => { - Some(aggregation_round_info.prover_jobs_status.clone()) - } - _ => None, - }, - Task::Compressor(_) => None, - } - } -} - -impl Debug for Task { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if let Some(aggregation_round_number) = self.aggregation_round() { - writeln!( - f, - "-- {} --", - format!("Aggregation Round {}", aggregation_round_number as u8).bold() - )?; - if let TaskStatus::Custom(msg) = self.status() { - writeln!(f, "{}: {}", self.to_string().bold(), msg)?; - } else { - writeln!(f, "{}: {}", self.to_string().bold(), self.status())?; + | StageInfo::SchedulerWitnessGenerator { + prover_jobs_info, .. + } => Status::from(prover_jobs_info), + StageInfo::Compressor(compressoion_job_info) => { + Status::from(compressoion_job_info.unwrap().status) } - if let Some(prover_jobs_status) = self.prover_jobs_status() { - writeln!(f, "> Prover Jobs: {prover_jobs_status}")?; - } - } else { - writeln!(f, "-- {} --", self.to_string().bold())?; - writeln!(f, "{}", self.status())?; } - Ok(()) } -} -impl From for TaskStatus { - fn from(status: WitnessJobStatus) -> Self { - match status { - WitnessJobStatus::Queued => TaskStatus::Queued, - WitnessJobStatus::InProgress => TaskStatus::InProgress, - WitnessJobStatus::Successful(_) => TaskStatus::Successful, - WitnessJobStatus::Failed(_) => TaskStatus::InProgress, - WitnessJobStatus::WaitingForArtifacts => { - TaskStatus::Custom("Waiting for Artifacts ⏱️".to_owned()) - } - WitnessJobStatus::Skipped => TaskStatus::Custom("Skipped ⏩".to_owned()), - WitnessJobStatus::WaitingForProofs => TaskStatus::WaitingForProofs, - } - } -} - -#[derive(Clone)] -pub struct AggregationRoundInfo { - pub round: AggregationRound, - pub prover_jobs_status: TaskStatus, -} - -impl Default for AggregationRoundInfo { - fn default() -> Self { - AggregationRoundInfo { - round: AggregationRound::BasicCircuits, - prover_jobs_status: TaskStatus::default(), + pub fn prover_jobs_status(&self) -> Option { + match self.clone() { + StageInfo::BasicWitnessGenerator { + witness_generator_job_info, + .. + } => witness_generator_job_info.map(|job| Status::from(job.status)), + StageInfo::LeafWitnessGenerator { + witness_generator_jobs_info, + .. + } => Some(Status::from(witness_generator_jobs_info)), + StageInfo::NodeWitnessGenerator { + witness_generator_jobs_info, + .. + } => Some(Status::from(witness_generator_jobs_info)), + StageInfo::RecursionTipWitnessGenerator { + witness_generator_jobs_info, + .. + } => Some(Status::from(witness_generator_jobs_info)), + StageInfo::SchedulerWitnessGenerator { + witness_generator_jobs_info, + .. 
+ } => Some(Status::from(witness_generator_jobs_info)), + StageInfo::Compressor(_) => None, } } } From 57beedbc43b698a72dcac40594e949b06f14bc20 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 7 May 2024 17:38:39 -0300 Subject: [PATCH 66/98] add verbose flag --- .../prover_cli/src/commands/status/batch.rs | 183 ++++++++++++++++-- 1 file changed, 171 insertions(+), 12 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 5e4ad4136927..2829c53a13fc 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use anyhow::{ensure, Context as _}; use clap::Args as ClapArgs; use colored::*; @@ -5,8 +7,9 @@ use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::{ basic_fri_types::AggregationRound, prover_dal::{ - BasicWitnessGeneratorJobInfo, LeafWitnessGeneratorJobInfo, NodeWitnessGeneratorJobInfo, - ProofCompressionJobInfo, ProverJobFriInfo, SchedulerWitnessGeneratorJobInfo, + BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafWitnessGeneratorJobInfo, + NodeWitnessGeneratorJobInfo, ProofCompressionJobInfo, ProverJobFriInfo, ProverJobStatus, + SchedulerWitnessGeneratorJobInfo, }, L1BatchNumber, }; @@ -34,7 +37,7 @@ pub(crate) async fn run(args: Args) -> anyhow::Result<()> { if !args.verbose { display_batch_status(batch_data); } else { - println!("WIP") + display_batch_info(batch_data); } } @@ -189,15 +192,10 @@ fn display_batch_status(batch_data: BatchData) { } fn display_status_for_stage(stage_info: StageInfo) { - println!( - "-- {} --", - format!( - "Aggregation Round {}", - stage_info - .aggregation_round() - .expect("No aggregation round found") as u8 - ) - .bold() + display_aggregation_round( + stage_info + .aggregation_round() + .expect("No aggregation round found."), ); match stage_info.witness_generator_jobs_status() { Status::Custom(msg) => { @@ -225,3 +223,164 @@ fn display_status_for_stage(stage_info: StageInfo) { } } } + +fn display_batch_info(batch_data: BatchData) { + println!( + "== {} == \n", + format!("Batch {} Status", batch_data.batch_number) + ); + display_info_for_stage(batch_data.basic_witness_generator); + display_info_for_stage(batch_data.leaf_witness_generator); + display_info_for_stage(batch_data.node_witness_generator); + display_info_for_stage(batch_data.recursion_tip_witness_generator); + display_info_for_stage(batch_data.scheduler_witness_generator); +} + +fn display_info_for_stage(stage_info: StageInfo) { + display_aggregation_round( + stage_info + .aggregation_round() + .expect("No aggregation round found."), + ); + match stage_info.witness_generator_jobs_status() { + Status::Custom(msg) => { + println!("{}: {}", stage_info.to_string().bold(), msg); + } + Status::Queued | Status::WaitingForProofs | Status::Stuck | Status::JobsNotFound => { + println!( + "{}: {} \n", + stage_info.to_string().bold(), + stage_info.witness_generator_jobs_status() + ) + } + Status::InProgress => { + println!( + "{}: {}", + stage_info.to_string().bold(), + stage_info.witness_generator_jobs_status() + ); + match stage_info { + StageInfo::BasicWitnessGenerator { + prover_jobs_info, .. + } + | StageInfo::RecursionTipWitnessGenerator { + prover_jobs_info, .. + } + | StageInfo::SchedulerWitnessGenerator { + prover_jobs_info, .. 
+ } => { + display_prover_jobs_info(prover_jobs_info); + } + StageInfo::LeafWitnessGenerator { + witness_generator_jobs_info, + prover_jobs_info, + } => { + display_leaf_witness_generator_jobs_info(witness_generator_jobs_info); + display_prover_jobs_info(prover_jobs_info); + } + StageInfo::NodeWitnessGenerator { + witness_generator_jobs_info, + prover_jobs_info, + } => { + display_node_witness_generator_jobs_info(witness_generator_jobs_info); + display_prover_jobs_info(prover_jobs_info); + } + StageInfo::Compressor(_) => todo!(), + } + } + Status::Successful => { + println!( + "{}: {}", + stage_info.to_string().bold(), + stage_info.witness_generator_jobs_status() + ); + match stage_info { + StageInfo::BasicWitnessGenerator { + prover_jobs_info, .. + } + | StageInfo::LeafWitnessGenerator { + prover_jobs_info, .. + } + | StageInfo::NodeWitnessGenerator { + prover_jobs_info, .. + } + | StageInfo::RecursionTipWitnessGenerator { + prover_jobs_info, .. + } + | StageInfo::SchedulerWitnessGenerator { + prover_jobs_info, .. + } => display_prover_jobs_info(prover_jobs_info), + StageInfo::Compressor(_) => todo!(), + } + } + } +} + +fn display_leaf_witness_generator_jobs_info( + mut leaf_witness_generators_jobs_info: Vec, +) { + leaf_witness_generators_jobs_info.sort_by_key(|job| job.circuit_id); + + leaf_witness_generators_jobs_info + .iter() + .for_each(|job| println!("Circuit id: {} - Status: {}", job.circuit_id, job.status)); +} + +fn display_node_witness_generator_jobs_info( + mut node_witness_generators_jobs_info: Vec, +) { + node_witness_generators_jobs_info.sort_by_key(|job| job.circuit_id); + + node_witness_generators_jobs_info + .iter() + .for_each(|job| println!("Circuit id: {} - Status: {}", job.circuit_id, job.status)); +} + +fn display_prover_jobs_info(prover_jobs_info: Vec) { + let mut jobs_by_circuit_id: HashMap> = HashMap::new(); + + prover_jobs_info.iter().for_each(|job| { + jobs_by_circuit_id + .entry(job.circuit_id) + .or_insert(Vec::new()) + .push(job.clone()) + }); + + for (circuit_id, prover_jobs_info) in jobs_by_circuit_id { + let status = Status::from(prover_jobs_info.clone()); + match status { + Status::InProgress => { + println!(" > Circuit ID: {}, status: {}", circuit_id, status); + display_job_status_count(prover_jobs_info); + } + _ => println!("{}", status), + }; + } +} + +fn display_job_status_count(jobs: Vec) { + let mut jobs_counts = JobCountStatistics::default(); + + let total_jobs = jobs.len(); + + jobs.iter().for_each(|job| match job.status { + ProverJobStatus::Queued => jobs_counts.queued += 1, + ProverJobStatus::InProgress(_) => jobs_counts.in_progress += 1, + ProverJobStatus::Successful(_) => jobs_counts.successful += 1, + ProverJobStatus::Failed(_) => jobs_counts.failed += 1, + ProverJobStatus::Skipped | ProverJobStatus::Ignored => (), + }); + + println!(" - Total jobs: {}", total_jobs); + println!(" - Successful: {}", jobs_counts.successful); + println!(" - In Progress: {}", jobs_counts.in_progress); + println!(" - Queued: {}", jobs_counts.queued); + println!(" - Failed: {}", jobs_counts.failed); +} + +fn display_aggregation_round(aggregation_round: AggregationRound) { + println!( + "-- {} --", + format!("Aggregation Round {}", aggregation_round as u8).bold() + ); +} From 5cfbdb0a44d65db5065615ce222e0ba37c3f4f45 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 7 May 2024 17:59:21 -0300 Subject: [PATCH 67/98] fix prints --- prover/prover_cli/src/commands/status/batch.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git 
a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 2829c53a13fc..dcb61796378c 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -353,7 +353,7 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { println!(" > Circuit ID: {}, status: {}", circuit_id, status); display_job_status_count(prover_jobs_info); } - _ => println!("{}", status), + _ => println!(" > Circuit ID: {}, status: {}", circuit_id, status), }; } } @@ -371,11 +371,11 @@ fn display_job_status_count(jobs: Vec) { ProverJobStatus::Skipped | ProverJobStatus::Ignored => (), }); - println!(" - Total jobs: {}", total_jobs); - println!(" - Successful: {}", jobs_counts.successful); - println!(" - In Progress: {}", jobs_counts.in_progress); - println!(" - Queued: {}", jobs_counts.queued); - println!(" - Failed: {}", jobs_counts.failed); + println!(" - Total jobs: {}", total_jobs); + println!(" - Successful: {}", jobs_counts.successful); + println!(" - In Progress: {}", jobs_counts.in_progress); + println!(" - Queued: {}", jobs_counts.queued); + println!(" - Failed: {}", jobs_counts.failed); } fn display_aggregation_round(aggregation_round: AggregationRound) { From a6503832b603b8bc9a8cf5af1c7aa14a04d49f68 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 12:52:19 -0300 Subject: [PATCH 68/98] fix status bug --- prover/prover_cli/src/commands/status/utils.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 525cbf34f514..dba2790f465c 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -93,7 +93,7 @@ impl From> for Status { Status::WaitingForProofs } else if status_vector .iter() - .all(|job| matches!(job, WitnessJobStatus::InProgress)) + .all(|job| matches!(job, WitnessJobStatus::Successful(_))) { Status::Successful } else { @@ -168,7 +168,7 @@ impl From> for Status { Status::Queued } else if jobs_vector .iter() - .all(|job| matches!(job.status, ProverJobStatus::InProgress(_))) + .all(|job| matches!(job.status, ProverJobStatus::Successful(_))) { Status::Successful } else { From e0e484f367ae10e7b3754a72bbc16ed25e5f5549 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 13:20:25 -0300 Subject: [PATCH 69/98] print prover_jobs global status --- prover/prover_cli/src/commands/status/batch.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index dcb61796378c..1fd85ca4a96e 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -337,6 +337,14 @@ fn display_node_witness_generator_jobs_info( } fn display_prover_jobs_info(prover_jobs_info: Vec) { + let prover_jobs_status = Status::from(prover_jobs_info.clone()); + + println!("> Prover Jobs: {prover_jobs_status} \n"); + + if matches!(prover_jobs_status, Status::Successful) { + return; + } + let mut jobs_by_circuit_id: HashMap> = HashMap::new(); prover_jobs_info.iter().for_each(|job| { From 8afa497e73d19bf40958f8025ee97eab8459f0e0 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 16:15:36 -0300 Subject: [PATCH 70/98] change return for wg querys --- prover/prover_dal/src/fri_witness_generator_dal.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git 
a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 947adf535db1..76f3cb96a92c 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1519,7 +1519,7 @@ impl FriWitnessGeneratorDal<'_, '_> { pub async fn get_scheduler_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, - ) -> Vec { + ) -> Option { sqlx::query!( r#" SELECT @@ -1531,10 +1531,9 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, i64::from(l1_batch_number.0) ) - .fetch_all(self.storage.conn()) + .fetch_optional(self.storage.conn()) .await .unwrap() - .iter() .map(|row| SchedulerWitnessGeneratorJobInfo { l1_batch_number, scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url.clone(), @@ -1548,6 +1547,5 @@ impl FriWitnessGeneratorDal<'_, '_> { protocol_version: row.protocol_version, picked_by: row.picked_by.clone(), }) - .collect() } } From 2ff0d002fc516887c53783362d4e7c62b2e0f37e Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 16:16:03 -0300 Subject: [PATCH 71/98] fix verbose flag --- .../prover_cli/src/commands/status/batch.rs | 93 ++++++++----------- .../prover_cli/src/commands/status/utils.rs | 54 +++++------ 2 files changed, 63 insertions(+), 84 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 1fd85ca4a96e..bc8aa8408309 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -9,7 +9,7 @@ use zksync_types::{ prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafWitnessGeneratorJobInfo, NodeWitnessGeneratorJobInfo, ProofCompressionJobInfo, ProverJobFriInfo, ProverJobStatus, - SchedulerWitnessGeneratorJobInfo, + RecursionTipWitnessGeneratorJobInfo, SchedulerWitnessGeneratorJobInfo, WitnessJobStatus, }, L1BatchNumber, }; @@ -98,22 +98,12 @@ async fn get_batches_data(batches: Vec) -> anyhow::Result( .await } +async fn get_proof_recursion_tip_witness_generator_info_for_batch<'a>( + _batch_number: L1BatchNumber, + _conn: &mut Connection<'a, Prover>, +) -> Option { + Some(RecursionTipWitnessGeneratorJobInfo { + status: WitnessJobStatus::Skipped, + }) +} + async fn get_proof_scheduler_witness_generator_info_for_batch<'a>( batch_number: L1BatchNumber, conn: &mut Connection<'a, Prover>, -) -> Vec { +) -> Option { conn.fri_witness_generator_dal() .get_scheduler_witness_generator_jobs_for_batch(batch_number) .await @@ -189,17 +188,14 @@ fn display_batch_status(batch_data: BatchData) { display_status_for_stage(batch_data.node_witness_generator); display_status_for_stage(batch_data.recursion_tip_witness_generator); display_status_for_stage(batch_data.scheduler_witness_generator); + display_status_for_stage(batch_data.compressor); } fn display_status_for_stage(stage_info: StageInfo) { - display_aggregation_round( - stage_info - .aggregation_round() - .expect("No aggregation round found."), - ); + display_aggregation_round(&stage_info); match stage_info.witness_generator_jobs_status() { Status::Custom(msg) => { - println!("{}: {}", stage_info.to_string().bold(), msg); + println!("{}: {} \n", stage_info.to_string().bold(), msg); } Status::Queued | Status::WaitingForProofs | Status::Stuck | Status::JobsNotFound => { println!( @@ -210,7 +206,7 @@ fn display_status_for_stage(stage_info: StageInfo) { } Status::InProgress | Status::Successful => { println!( - "{}: {}", + "{}: {} \n", stage_info.to_string().bold(), 
stage_info.witness_generator_jobs_status() ); @@ -234,17 +230,14 @@ fn display_batch_info(batch_data: BatchData) { display_info_for_stage(batch_data.node_witness_generator); display_info_for_stage(batch_data.recursion_tip_witness_generator); display_info_for_stage(batch_data.scheduler_witness_generator); + display_info_for_stage(batch_data.compressor); } fn display_info_for_stage(stage_info: StageInfo) { - display_aggregation_round( - stage_info - .aggregation_round() - .expect("No aggregation round found."), - ); + display_aggregation_round(&stage_info); match stage_info.witness_generator_jobs_status() { Status::Custom(msg) => { - println!("{}: {}", stage_info.to_string().bold(), msg); + println!("{}: {} \n", stage_info.to_string().bold(), msg); } Status::Queued | Status::WaitingForProofs | Status::Stuck | Status::JobsNotFound => { println!( @@ -255,19 +248,13 @@ fn display_info_for_stage(stage_info: StageInfo) { } Status::InProgress => { println!( - "{}: {}", + "{}: {} \n", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); match stage_info { StageInfo::BasicWitnessGenerator { prover_jobs_info, .. - } - | StageInfo::RecursionTipWitnessGenerator { - prover_jobs_info, .. - } - | StageInfo::SchedulerWitnessGenerator { - prover_jobs_info, .. } => { display_prover_jobs_info(prover_jobs_info); } @@ -285,12 +272,14 @@ fn display_info_for_stage(stage_info: StageInfo) { display_node_witness_generator_jobs_info(witness_generator_jobs_info); display_prover_jobs_info(prover_jobs_info); } - StageInfo::Compressor(_) => todo!(), + StageInfo::RecursionTipWitnessGenerator(_) + | StageInfo::SchedulerWitnessGenerator(_) + | StageInfo::Compressor(_) => (), } } Status::Successful => { println!( - "{}: {}", + "{}: {} \n", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); @@ -303,14 +292,10 @@ fn display_info_for_stage(stage_info: StageInfo) { } | StageInfo::NodeWitnessGenerator { prover_jobs_info, .. - } - | StageInfo::RecursionTipWitnessGenerator { - prover_jobs_info, .. - } - | StageInfo::SchedulerWitnessGenerator { - prover_jobs_info, .. 
} => display_prover_jobs_info(prover_jobs_info), - StageInfo::Compressor(_) => todo!(), + StageInfo::RecursionTipWitnessGenerator(_) + | StageInfo::SchedulerWitnessGenerator(_) + | StageInfo::Compressor(_) => (), } } } @@ -386,9 +371,13 @@ fn display_job_status_count(jobs: Vec) { println!(" - Failed: {}", jobs_counts.failed); } -fn display_aggregation_round(aggregation_round: AggregationRound) { - println!( - "-- {} --", - format!("Aggregation Round {}", aggregation_round as u8).bold() - ); +fn display_aggregation_round(stage_info: &StageInfo) { + if let Some(aggregation_round) = stage_info.aggregation_round() { + println!( + "-- {} --", + format!("Aggregation Round {}", aggregation_round as u8).bold() + ); + } else { + println!("-- {} --", format!("Compresion").bold()); + }; } diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index dba2790f465c..182e32cb64cd 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -195,15 +195,9 @@ pub enum StageInfo { prover_jobs_info: Vec, }, #[strum(to_string = "Recursion Tip")] - RecursionTipWitnessGenerator { - witness_generator_jobs_info: Vec, - prover_jobs_info: Vec, - }, + RecursionTipWitnessGenerator(Option), #[strum(to_string = "Scheduler")] - SchedulerWitnessGenerator { - witness_generator_jobs_info: Vec, - prover_jobs_info: Vec, - }, + SchedulerWitnessGenerator(Option), #[strum(to_string = "Compressor")] Compressor(Option), } @@ -220,7 +214,7 @@ impl StageInfo { } } - pub fn witness_generator_jobs_status(&self) -> Status { + pub fn prover_jobs_status(&self) -> Option { match self.clone() { StageInfo::BasicWitnessGenerator { prover_jobs_info, .. @@ -230,42 +224,38 @@ impl StageInfo { } | StageInfo::NodeWitnessGenerator { prover_jobs_info, .. - } - | StageInfo::RecursionTipWitnessGenerator { - prover_jobs_info, .. - } - | StageInfo::SchedulerWitnessGenerator { - prover_jobs_info, .. - } => Status::from(prover_jobs_info), - StageInfo::Compressor(compressoion_job_info) => { - Status::from(compressoion_job_info.unwrap().status) - } + } => Some(Status::from(prover_jobs_info)), + StageInfo::RecursionTipWitnessGenerator(_) + | StageInfo::SchedulerWitnessGenerator(_) + | StageInfo::Compressor(_) => None, } } - pub fn prover_jobs_status(&self) -> Option { + pub fn witness_generator_jobs_status(&self) -> Status { match self.clone() { StageInfo::BasicWitnessGenerator { witness_generator_job_info, .. - } => witness_generator_job_info.map(|job| Status::from(job.status)), + } => witness_generator_job_info + .map(|witness_generator_job_info| Status::from(witness_generator_job_info.status)) + .unwrap_or_default(), StageInfo::LeafWitnessGenerator { witness_generator_jobs_info, .. - } => Some(Status::from(witness_generator_jobs_info)), + } => Status::from(witness_generator_jobs_info), StageInfo::NodeWitnessGenerator { witness_generator_jobs_info, .. - } => Some(Status::from(witness_generator_jobs_info)), - StageInfo::RecursionTipWitnessGenerator { - witness_generator_jobs_info, - .. - } => Some(Status::from(witness_generator_jobs_info)), - StageInfo::SchedulerWitnessGenerator { - witness_generator_jobs_info, - .. 
- } => Some(Status::from(witness_generator_jobs_info)), - StageInfo::Compressor(_) => None, + } => Status::from(witness_generator_jobs_info), + StageInfo::RecursionTipWitnessGenerator(status) => status + .map(|job| Status::from(job.status)) + .unwrap_or_default(), + StageInfo::SchedulerWitnessGenerator(status) => status + .map(|job| Status::from(job.status)) + .unwrap_or_default(), + StageInfo::Compressor(status) => status + .map(|job| Status::from(job.status)) + .unwrap_or_default(), } } } From c8c3511d974d24c83a471404e0408a1b3343ac75 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 16:44:22 -0300 Subject: [PATCH 72/98] fix lines indentation --- .../prover_cli/src/commands/status/batch.rs | 34 +++++++++++-------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index bc8aa8408309..580d842c5537 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -199,19 +199,19 @@ fn display_status_for_stage(stage_info: StageInfo) { } Status::Queued | Status::WaitingForProofs | Status::Stuck | Status::JobsNotFound => { println!( - "{}: {} \n", + "{}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ) } Status::InProgress | Status::Successful => { println!( - "{}: {} \n", + "{}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); println!( - "> Prover Jobs: {} \n", + "> Prover Jobs: {}", stage_info .prover_jobs_status() .expect("Unable to check status") @@ -237,18 +237,18 @@ fn display_info_for_stage(stage_info: StageInfo) { display_aggregation_round(&stage_info); match stage_info.witness_generator_jobs_status() { Status::Custom(msg) => { - println!("{}: {} \n", stage_info.to_string().bold(), msg); + println!("{}: {}", stage_info.to_string().bold(), msg); } Status::Queued | Status::WaitingForProofs | Status::Stuck | Status::JobsNotFound => { println!( - "{}: {} \n", + "{}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ) } Status::InProgress => { println!( - "{}: {} \n", + "{}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); @@ -279,7 +279,7 @@ fn display_info_for_stage(stage_info: StageInfo) { } Status::Successful => { println!( - "{}: {} \n", + "{}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); @@ -306,9 +306,12 @@ fn display_leaf_witness_generator_jobs_info( ) { leaf_witness_generators_jobs_info.sort_by_key(|job| job.circuit_id); - leaf_witness_generators_jobs_info - .iter() - .for_each(|job| println!("Circuit id: {} - Status: {}", job.circuit_id, job.status)); + leaf_witness_generators_jobs_info.iter().for_each(|job| { + println!( + " > Circuit id: {} - Status: {}", + job.circuit_id, job.status + ) + }); } fn display_node_witness_generator_jobs_info( @@ -316,9 +319,12 @@ fn display_node_witness_generator_jobs_info( ) { node_witness_generators_jobs_info.sort_by_key(|job| job.circuit_id); - node_witness_generators_jobs_info - .iter() - .for_each(|job| println!("Circuit id: {} - Status: {}", job.circuit_id, job.status)); + node_witness_generators_jobs_info.iter().for_each(|job| { + println!( + " > Circuit id: {} - Status: {}", + job.circuit_id, job.status + ) + }); } fn display_prover_jobs_info(prover_jobs_info: Vec) { @@ -374,7 +380,7 @@ fn display_job_status_count(jobs: Vec) { fn display_aggregation_round(stage_info: &StageInfo) { if let 
Some(aggregation_round) = stage_info.aggregation_round() { println!( - "-- {} --", + "\n-- {} --", format!("Aggregation Round {}", aggregation_round as u8).bold() ); } else { From 2ffb7f7e52f75831374a2b196f59be1f30af654b Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 16:46:20 -0300 Subject: [PATCH 73/98] fix spacing --- prover/prover_cli/src/commands/status/batch.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 580d842c5537..0308a0d6d16c 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -180,7 +180,7 @@ async fn get_proof_compression_job_info_for_batch<'a>( fn display_batch_status(batch_data: BatchData) { println!( - "== {} == \n", + "== {} ==", format!("Batch {} Status", batch_data.batch_number) ); display_status_for_stage(batch_data.basic_witness_generator); @@ -222,7 +222,7 @@ fn display_status_for_stage(stage_info: StageInfo) { fn display_batch_info(batch_data: BatchData) { println!( - "== {} == \n", + "== {} ==", format!("Batch {} Status", batch_data.batch_number) ); display_info_for_stage(batch_data.basic_witness_generator); @@ -384,6 +384,6 @@ fn display_aggregation_round(stage_info: &StageInfo) { format!("Aggregation Round {}", aggregation_round as u8).bold() ); } else { - println!("-- {} --", format!("Compresion").bold()); + println!("\n-- {} --", format!("Compresion").bold()); }; } From 4eaeccca2425adc80ad286cae93f2c7f7caea959 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 16:55:21 -0300 Subject: [PATCH 74/98] order prover_jobs by circuit_id --- prover/prover_cli/src/commands/status/batch.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 0308a0d6d16c..0222c074e017 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -357,7 +357,9 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { } } -fn display_job_status_count(jobs: Vec) { +fn display_job_status_count(mut jobs: Vec) { + jobs.sort_by_key(|job| job.circuit_id.clone()); + let mut jobs_counts = JobCountStatistics::default(); let total_jobs = jobs.len(); From 97ff273558a096166bfc940defe0063e3d71a7c6 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 17:01:12 -0300 Subject: [PATCH 75/98] align digits --- prover/prover_cli/src/commands/status/batch.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 0222c074e017..0d2a18f9e13c 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -308,7 +308,7 @@ fn display_leaf_witness_generator_jobs_info( leaf_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit id: {} - Status: {}", + " > Circuit id: {:>2} - Status: {}", job.circuit_id, job.status ) }); @@ -321,7 +321,7 @@ fn display_node_witness_generator_jobs_info( node_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit id: {} - Status: {}", + " > Circuit id: {:>2} - Status: {}", job.circuit_id, job.status ) }); @@ -349,10 +349,10 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { let status = Status::from(prover_jobs_info.clone()); match status { Status::InProgress 
=> { - println!(" > Circuit ID: {}, status: {}", circuit_id, status); + println!(" > Circuit ID: {:>2}, status: {}", circuit_id, status); display_job_status_count(prover_jobs_info); } - _ => println!(" > Circuit ID: {}, status: {}", circuit_id, status), + _ => println!(" > Circuit ID: {:>2}, status: {}", circuit_id, status), }; } } From fd68fd1580935df8c1dc639b3911ccc4952f20af Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 17:04:46 -0300 Subject: [PATCH 76/98] change status emojis --- prover/prover_cli/src/commands/status/batch.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 0d2a18f9e13c..2db38c8a3b2d 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -309,7 +309,8 @@ fn display_leaf_witness_generator_jobs_info( leaf_witness_generators_jobs_info.iter().for_each(|job| { println!( " > Circuit id: {:>2} - Status: {}", - job.circuit_id, job.status + job.circuit_id, + Status::from(job.status.clone()) ) }); } @@ -322,7 +323,8 @@ fn display_node_witness_generator_jobs_info( node_witness_generators_jobs_info.iter().for_each(|job| { println!( " > Circuit id: {:>2} - Status: {}", - job.circuit_id, job.status + job.circuit_id, + Status::from(job.status.clone()) ) }); } @@ -330,7 +332,7 @@ fn display_node_witness_generator_jobs_info( fn display_prover_jobs_info(prover_jobs_info: Vec) { let prover_jobs_status = Status::from(prover_jobs_info.clone()); - println!("> Prover Jobs: {prover_jobs_status} \n"); + println!("v Prover Jobs: {prover_jobs_status}"); if matches!(prover_jobs_status, Status::Successful) { return; From 1ec98777c79873f12b902d922d964740615ef01d Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 17:09:39 -0300 Subject: [PATCH 77/98] improve format --- prover/prover_cli/src/commands/status/batch.rs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 2db38c8a3b2d..0cdfe7909533 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -308,7 +308,7 @@ fn display_leaf_witness_generator_jobs_info( leaf_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit id: {:>2} - Status: {}", + " > Circuit id: {:>3} - Status: {}", job.circuit_id, Status::from(job.status.clone()) ) @@ -322,7 +322,7 @@ fn display_node_witness_generator_jobs_info( node_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit id: {:>2} - Status: {}", + " > Circuit id: {:>3} - Status: {}", job.circuit_id, Status::from(job.status.clone()) ) @@ -351,21 +351,18 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { let status = Status::from(prover_jobs_info.clone()); match status { Status::InProgress => { - println!(" > Circuit ID: {:>2}, status: {}", circuit_id, status); + println!(" > Circuit ID: {:>3}, status: {}", circuit_id, status); display_job_status_count(prover_jobs_info); } - _ => println!(" > Circuit ID: {:>2}, status: {}", circuit_id, status), + _ => println!(" > Circuit ID: {:>3}, status: {}", circuit_id, status), }; } } fn display_job_status_count(mut jobs: Vec) { - jobs.sort_by_key(|job| job.circuit_id.clone()); - let mut jobs_counts = JobCountStatistics::default(); - let total_jobs = jobs.len(); - + jobs.sort_by_key(|job| job.circuit_id.clone()); 
jobs.iter().for_each(|job| match job.status { ProverJobStatus::Queued => jobs_counts.queued += 1, ProverJobStatus::InProgress(_) => jobs_counts.in_progress += 1, From 10afa172fe3cf18fd0ba1dcf8f4df5a47ef7dd3c Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 8 May 2024 17:12:50 -0300 Subject: [PATCH 78/98] capitalize status print --- prover/prover_cli/src/commands/status/batch.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 0cdfe7909533..392e814770b6 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -308,7 +308,7 @@ fn display_leaf_witness_generator_jobs_info( leaf_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit id: {:>3} - Status: {}", + " > Circuit ID: {:>3} - Status: {}", job.circuit_id, Status::from(job.status.clone()) ) @@ -322,7 +322,7 @@ fn display_node_witness_generator_jobs_info( node_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit id: {:>3} - Status: {}", + " > Circuit ID: {:>3} - Status: {}", job.circuit_id, Status::from(job.status.clone()) ) @@ -351,10 +351,10 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { let status = Status::from(prover_jobs_info.clone()); match status { Status::InProgress => { - println!(" > Circuit ID: {:>3}, status: {}", circuit_id, status); + println!(" > Circuit ID: {:>3} - Status: {}", circuit_id, status); display_job_status_count(prover_jobs_info); } - _ => println!(" > Circuit ID: {:>3}, status: {}", circuit_id, status), + _ => println!(" > Circuit ID: {:>3} - Status: {}", circuit_id, status), }; } } From d6ebe02d34f96f916b4fa7242ae16e734e207595 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 9 May 2024 12:10:28 -0300 Subject: [PATCH 79/98] fix merge errors --- prover/Cargo.lock | 2 +- prover/prover_cli/src/commands/status/batch.rs | 7 ++++--- prover/prover_cli/src/commands/status/utils.rs | 2 -- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 64a0ba6096cc..6a458c8e9cba 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -4527,8 +4527,8 @@ dependencies = [ "colored", "hex", "prover_dal", - "strum", "sqlx", + "strum", "tokio", "tracing", "tracing-subscriber", diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 2ab1e786e392..a686943b8395 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -11,11 +11,12 @@ use zksync_types::{ NodeWitnessGeneratorJobInfo, ProofCompressionJobInfo, ProverJobFriInfo, ProverJobStatus, RecursionTipWitnessGeneratorJobInfo, SchedulerWitnessGeneratorJobInfo, WitnessJobStatus, }, + url::SensitiveUrl, L1BatchNumber, }; -use super::utils::{BatchData, StageInfo}; -use crate::commands::status::utils::{postgres_config, Status}; +use super::utils::{BatchData, StageInfo, Status}; +use crate::cli::ProverCLIConfig; #[derive(ClapArgs)] pub struct Args { @@ -25,7 +26,7 @@ pub struct Args { verbose: bool, } -pub(crate) async fn run(args: Args) -> anyhow::Result<()> { +pub(crate) async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<()> { ensure!( !args.batches.is_empty(), "At least one batch number should be provided" diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index 34c2f541a975..d9c448fff522 100644 
--- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -1,8 +1,6 @@ use std::fmt::Debug; use strum::{Display, EnumString}; -use zksync_config::PostgresConfig; -use zksync_env_config::FromEnv; use zksync_types::{ basic_fri_types::AggregationRound, prover_dal::{ From 20fd7200587f174688c6d445d12e3d9eb44f9c52 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 9 May 2024 12:41:22 -0300 Subject: [PATCH 80/98] fix prover jobs display order --- prover/prover_cli/src/commands/status/batch.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index a686943b8395..f2c6705e711a 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -330,7 +330,7 @@ fn display_node_witness_generator_jobs_info( }); } -fn display_prover_jobs_info(prover_jobs_info: Vec) { +fn display_prover_jobs_info(mut prover_jobs_info: Vec) { let prover_jobs_status = Status::from(prover_jobs_info.clone()); println!("v Prover Jobs: {prover_jobs_status}"); @@ -341,6 +341,7 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { let mut jobs_by_circuit_id: HashMap> = HashMap::new(); + prover_jobs_info.sort_by_key(|job| job.circuit_id.clone()); prover_jobs_info.iter().for_each(|job| { jobs_by_circuit_id .entry(job.circuit_id) @@ -363,7 +364,6 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { fn display_job_status_count(mut jobs: Vec) { let mut jobs_counts = JobCountStatistics::default(); let total_jobs = jobs.len(); - jobs.sort_by_key(|job| job.circuit_id.clone()); jobs.iter().for_each(|job| match job.status { ProverJobStatus::Queued => jobs_counts.queued += 1, ProverJobStatus::InProgress(_) => jobs_counts.in_progress += 1, From ea031183c5902b7c5f9fc89a3eae3f4a82ef8259 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 9 May 2024 13:26:53 -0300 Subject: [PATCH 81/98] map hashmap to vector --- prover/prover_cli/src/commands/status/batch.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index f2c6705e711a..7c290a5a6c1e 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -330,7 +330,7 @@ fn display_node_witness_generator_jobs_info( }); } -fn display_prover_jobs_info(mut prover_jobs_info: Vec) { +fn display_prover_jobs_info(prover_jobs_info: Vec) { let prover_jobs_status = Status::from(prover_jobs_info.clone()); println!("v Prover Jobs: {prover_jobs_status}"); @@ -341,7 +341,6 @@ fn display_prover_jobs_info(mut prover_jobs_info: Vec) { let mut jobs_by_circuit_id: HashMap> = HashMap::new(); - prover_jobs_info.sort_by_key(|job| job.circuit_id.clone()); prover_jobs_info.iter().for_each(|job| { jobs_by_circuit_id .entry(job.circuit_id) @@ -349,6 +348,13 @@ fn display_prover_jobs_info(mut prover_jobs_info: Vec) { .push(job.clone()) }); + let mut jobs_by_circuit_id: Vec<(u32, Vec)> = jobs_by_circuit_id + .iter() + .map(|(key, value)| (*key, value.clone())) + .collect(); + + jobs_by_circuit_id.sort_by_key(|job| job.0); + for (circuit_id, prover_jobs_info) in jobs_by_circuit_id { let status = Status::from(prover_jobs_info.clone()); match status { @@ -361,7 +367,7 @@ fn display_prover_jobs_info(mut prover_jobs_info: Vec) { } } -fn display_job_status_count(mut jobs: Vec) { +fn display_job_status_count(jobs: Vec) { let 
mut jobs_counts = JobCountStatistics::default(); let total_jobs = jobs.len(); jobs.iter().for_each(|job| match job.status { From 5b5d7acaf359b8c27236e37f075abfabc6ae8870 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Thu, 9 May 2024 16:10:16 -0300 Subject: [PATCH 82/98] Print refactor --- prover/Cargo.lock | 1 + prover/prover_cli/Cargo.toml | 1 + .../prover_cli/src/commands/status/batch.rs | 61 ++++++++++++++----- 3 files changed, 49 insertions(+), 14 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 6a458c8e9cba..c051b87a2a48 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -4523,6 +4523,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "circuit_definitions", "clap 4.4.6", "colored", "hex", diff --git a/prover/prover_cli/Cargo.toml b/prover/prover_cli/Cargo.toml index f8045f0b0a5d..179cdda67c93 100644 --- a/prover/prover_cli/Cargo.toml +++ b/prover/prover_cli/Cargo.toml @@ -31,3 +31,4 @@ zksync_dal.workspace = true strum.workspace = true colored.workspace = true sqlx.workspace = true +circuit_definitions.workspace = true diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 7c290a5a6c1e..b32ee4d1a175 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,6 +1,7 @@ use std::collections::HashMap; use anyhow::{ensure, Context as _}; +use circuit_definitions::zkevm_circuits::scheduler::aux::BaseLayerCircuitType; use clap::Args as ClapArgs; use colored::*; use prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; @@ -212,7 +213,8 @@ fn display_status_for_stage(stage_info: StageInfo) { stage_info.witness_generator_jobs_status() ); println!( - "> Prover Jobs: {}", + "> {}: {}", + "Prover Jobs".to_owned().bold(), stage_info .prover_jobs_status() .expect("Unable to check status") @@ -224,7 +226,7 @@ fn display_status_for_stage(stage_info: StageInfo) { fn display_batch_info(batch_data: BatchData) { println!( "== {} ==", - format!("Batch {} Status", batch_data.batch_number) + format!("Batch {} Status", batch_data.batch_number).bold() ); display_info_for_stage(batch_data.basic_witness_generator); display_info_for_stage(batch_data.leaf_witness_generator); @@ -242,14 +244,14 @@ fn display_info_for_stage(stage_info: StageInfo) { } Status::Queued | Status::WaitingForProofs | Status::Stuck | Status::JobsNotFound => { println!( - "{}: {}", + " > {}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ) } Status::InProgress => { println!( - "{}: {}", + "v {}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); @@ -280,7 +282,7 @@ fn display_info_for_stage(stage_info: StageInfo) { } Status::Successful => { println!( - "{}: {}", + "> {}: {}", stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); @@ -309,8 +311,12 @@ fn display_leaf_witness_generator_jobs_info( leaf_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit ID: {:>3} - Status: {}", - job.circuit_id, + " > {}: {}", + format!( + "{:?}", + BaseLayerCircuitType::from_numeric_value(job.circuit_id as u8) + ) + .bold(), Status::from(job.status.clone()) ) }); @@ -323,8 +329,12 @@ fn display_node_witness_generator_jobs_info( node_witness_generators_jobs_info.iter().for_each(|job| { println!( - " > Circuit ID: {:>3} - Status: {}", - job.circuit_id, + " > {}: {}", + format!( + "{:?}", + BaseLayerCircuitType::from_numeric_value(job.circuit_id as u8) + ) + .bold(), 
Status::from(job.status.clone()) ) }); @@ -333,12 +343,19 @@ fn display_node_witness_generator_jobs_info( fn display_prover_jobs_info(prover_jobs_info: Vec) { let prover_jobs_status = Status::from(prover_jobs_info.clone()); - println!("v Prover Jobs: {prover_jobs_status}"); - if matches!(prover_jobs_status, Status::Successful) { + println!( + "> {}: {prover_jobs_status}", + "Prover Jobs".to_owned().bold() + ); return; } + println!( + "v {}: {prover_jobs_status}", + "Prover Jobs".to_owned().bold() + ); + let mut jobs_by_circuit_id: HashMap> = HashMap::new(); prover_jobs_info.iter().for_each(|job| { @@ -359,10 +376,26 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { let status = Status::from(prover_jobs_info.clone()); match status { Status::InProgress => { - println!(" > Circuit ID: {:>3} - Status: {}", circuit_id, status); + println!( + " > {}: {}", + format!( + "{:?}", + BaseLayerCircuitType::from_numeric_value(circuit_id as u8) + ) + .bold(), + status + ); display_job_status_count(prover_jobs_info); } - _ => println!(" > Circuit ID: {:>3} - Status: {}", circuit_id, status), + _ => println!( + " > {}: {}", + format!( + "{:?}", + BaseLayerCircuitType::from_numeric_value(circuit_id as u8) + ) + .bold(), + status + ), }; } } @@ -392,6 +425,6 @@ fn display_aggregation_round(stage_info: &StageInfo) { format!("Aggregation Round {}", aggregation_round as u8).bold() ); } else { - println!("\n-- {} --", format!("Compresion").bold()); + println!("\n-- {} --", format!("Compression").bold()); }; } From 5ef5eeac55a9d859dc9fe21edcf51511bac627f8 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 9 May 2024 16:57:34 -0300 Subject: [PATCH 83/98] add recursion tip query --- core/lib/basic_types/src/prover_dal.rs | 10 +++ ...c39ae8a6e053a0e03afd3fb5e02ee17157067.json | 82 +++++++++++++++++++ .../src/fri_witness_generator_dal.rs | 36 +++++++- 3 files changed, 127 insertions(+), 1 deletion(-) create mode 100644 prover/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index f7ccb1989c60..c3e5c9a2be71 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -315,7 +315,17 @@ pub struct NodeWitnessGeneratorJobInfo { #[derive(Debug, Clone)] pub struct RecursionTipWitnessGeneratorJobInfo { + pub l1_batch_number: L1BatchNumber, pub status: WitnessJobStatus, + pub attempts: u32, + pub processing_started_at: Option, + pub time_taken: Option, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub number_of_final_node_jobs: Option, + pub protocol_version: Option, + pub picked_by: Option, } #[derive(Debug, Clone)] diff --git a/prover/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json b/prover/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json new file mode 100644 index 000000000000..75a600d5b46b --- /dev/null +++ b/prover/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json @@ -0,0 +1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n *\n FROM\n recursion_tip_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "attempts", + "type_info": "Int2" + }, + { + 
"ordinal": 3, + "name": "processing_started_at", + "type_info": "Timestamp" + }, + { + "ordinal": 4, + "name": "time_taken", + "type_info": "Time" + }, + { + "ordinal": 5, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "created_at", + "type_info": "Timestamp" + }, + { + "ordinal": 7, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 8, + "name": "number_of_final_node_jobs", + "type_info": "Int4" + }, + { + "ordinal": 9, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 10, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067" +} diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 55579435410f..fc45c44c73ef 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -8,7 +8,8 @@ use zksync_basic_types::{ prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, LeafWitnessGeneratorJobInfo, NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, - SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, + RecursionTipWitnessGeneratorJobInfo, SchedulerWitnessGeneratorJobInfo, StuckJobs, + WitnessJobStatus, }, L1BatchNumber, }; @@ -1543,4 +1544,37 @@ impl FriWitnessGeneratorDal<'_, '_> { picked_by: row.picked_by.clone(), }) } + + pub async fn get_recursion_tip_witness_generator_jobs_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option { + sqlx::query!( + r#" + SELECT + * + FROM + recursion_tip_witness_jobs_fri + WHERE + l1_batch_number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| RecursionTipWitnessGeneratorJobInfo { + l1_batch_number, + status: WitnessJobStatus::from_str(&row.status).unwrap(), + attempts: row.attempts as u32, + processing_started_at: row.processing_started_at, + time_taken: row.time_taken, + error: row.error.clone(), + created_at: row.created_at, + updated_at: row.updated_at, + number_of_final_node_jobs: row.number_of_final_node_jobs, + protocol_version: row.protocol_version, + picked_by: row.picked_by.clone(), + }) + } } From 67406c1e8c38980d455b740c79d763083cd5b889 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 9 May 2024 16:57:55 -0300 Subject: [PATCH 84/98] add recursion tip status --- prover/prover_cli/src/commands/status/batch.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 7c290a5a6c1e..131d46824c3f 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -9,7 +9,7 @@ use zksync_types::{ prover_dal::{ BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafWitnessGeneratorJobInfo, NodeWitnessGeneratorJobInfo, ProofCompressionJobInfo, ProverJobFriInfo, ProverJobStatus, - RecursionTipWitnessGeneratorJobInfo, SchedulerWitnessGeneratorJobInfo, WitnessJobStatus, + RecursionTipWitnessGeneratorJobInfo, SchedulerWitnessGeneratorJobInfo, }, url::SensitiveUrl, L1BatchNumber, @@ -153,12 +153,12 @@ async fn get_proof_node_witness_generator_info_for_batch<'a>( } async fn 
get_proof_recursion_tip_witness_generator_info_for_batch<'a>( - _batch_number: L1BatchNumber, - _conn: &mut Connection<'a, Prover>, + batch_number: L1BatchNumber, + conn: &mut Connection<'a, Prover>, ) -> Option { - Some(RecursionTipWitnessGeneratorJobInfo { - status: WitnessJobStatus::Skipped, - }) + conn.fri_witness_generator_dal() + .get_recursion_tip_witness_generator_jobs_for_batch(batch_number) + .await } async fn get_proof_scheduler_witness_generator_info_for_batch<'a>( From c3d61e073b8c32529e1315e9c846e5c5948e0046 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 10 May 2024 12:10:18 -0300 Subject: [PATCH 85/98] force (circuit_id - 2) for node wg --- prover/prover_dal/src/fri_prover_dal.rs | 7 ++++++- prover/prover_dal/src/fri_witness_generator_dal.rs | 3 ++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index d887bd0d5151..9c59fe8b5b2f 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -668,7 +668,12 @@ impl FriProverDal<'_, '_> { .map(|row| ProverJobFriInfo { id: row.id as u32, l1_batch_number, - circuit_id: row.circuit_id as u32, + // The circuit ID in the node witness generator is 2 higher than it should be. + circuit_id: if matches!(aggregation_round, AggregationRound::NodeAggregation) { + row.circuit_id as u32 - 2 + } else { + row.circuit_id as u32 + }, circuit_blob_url: row.circuit_blob_url.clone(), aggregation_round, sequence_number: row.sequence_number as u32, diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index fc45c44c73ef..118099e9ddb5 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -1495,7 +1495,8 @@ impl FriWitnessGeneratorDal<'_, '_> { .map(|row| NodeWitnessGeneratorJobInfo { id: row.id as u32, l1_batch_number, - circuit_id: row.circuit_id as u32, + // The circuit ID in the node witness generator is 2 higher than it should be. + circuit_id: row.circuit_id as u32 - 2, depth: row.depth as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), attempts: row.attempts as u32, From 4c4584529150ce71bd81dadf873240fec174b33d Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 10 May 2024 12:47:13 -0300 Subject: [PATCH 86/98] add function to correct circuit_id in node wg --- core/lib/basic_types/src/prover_dal.rs | 14 +++++++++++++- prover/prover_dal/src/fri_prover_dal.rs | 11 ++++------- prover/prover_dal/src/fri_witness_generator_dal.rs | 12 ++++++------ 3 files changed, 23 insertions(+), 14 deletions(-) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index c3e5c9a2be71..eea12157fe26 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use strum::{Display, EnumString}; use crate::{ - basic_fri_types::{AggregationRound, Eip4844Blobs}, + basic_fri_types::{AggregationRound, CircuitIdRoundTuple, Eip4844Blobs}, protocol_version::ProtocolVersionId, L1BatchNumber, }; @@ -373,3 +373,15 @@ pub struct ProofCompressionJobInfo { pub time_taken: Option, pub picked_by: Option, } + +// This function corrects circuit IDs for the node witness generator. +// +// - Circuit IDs in the node witness generator are 2 higher than in other rounds. +// - The EIP4844Repack circuit (ID 255) is an exception and is set to 18. 
+pub fn correct_circuit_id(circuit_id: i16, aggregation_round: AggregationRound) -> u32 { + match (circuit_id, aggregation_round) { + (18, AggregationRound::NodeAggregation) => 255, + (circuit_id, AggregationRound::NodeAggregation) => (circuit_id as u32) - 2, + _ => circuit_id as u32, + } +} diff --git a/prover/prover_dal/src/fri_prover_dal.rs b/prover/prover_dal/src/fri_prover_dal.rs index 9c59fe8b5b2f..85e72bd24df0 100644 --- a/prover/prover_dal/src/fri_prover_dal.rs +++ b/prover/prover_dal/src/fri_prover_dal.rs @@ -5,7 +5,8 @@ use zksync_basic_types::{ basic_fri_types::{AggregationRound, CircuitIdRoundTuple}, protocol_version::ProtocolVersionId, prover_dal::{ - FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, + correct_circuit_id, FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, + ProverJobStatus, StuckJobs, }, L1BatchNumber, }; @@ -668,12 +669,8 @@ impl FriProverDal<'_, '_> { .map(|row| ProverJobFriInfo { id: row.id as u32, l1_batch_number, - // The circuit ID in the node witness generator is 2 higher than it should be. - circuit_id: if matches!(aggregation_round, AggregationRound::NodeAggregation) { - row.circuit_id as u32 - 2 - } else { - row.circuit_id as u32 - }, + // It is necessary to correct the circuit IDs due to the discrepancy between different aggregation rounds. + circuit_id: correct_circuit_id(row.circuit_id, aggregation_round), circuit_blob_url: row.circuit_blob_url.clone(), aggregation_round, sequence_number: row.sequence_number as u32, diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index 118099e9ddb5..aa6fc6cb2a96 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -6,10 +6,10 @@ use zksync_basic_types::{ basic_fri_types::{AggregationRound, Eip4844Blobs}, protocol_version::ProtocolVersionId, prover_dal::{ - BasicWitnessGeneratorJobInfo, JobCountStatistics, LeafAggregationJobMetadata, - LeafWitnessGeneratorJobInfo, NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, - RecursionTipWitnessGeneratorJobInfo, SchedulerWitnessGeneratorJobInfo, StuckJobs, - WitnessJobStatus, + correct_circuit_id, BasicWitnessGeneratorJobInfo, JobCountStatistics, + LeafAggregationJobMetadata, LeafWitnessGeneratorJobInfo, NodeAggregationJobMetadata, + NodeWitnessGeneratorJobInfo, RecursionTipWitnessGeneratorJobInfo, + SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, }, L1BatchNumber, }; @@ -1495,8 +1495,8 @@ impl FriWitnessGeneratorDal<'_, '_> { .map(|row| NodeWitnessGeneratorJobInfo { id: row.id as u32, l1_batch_number, - // The circuit ID in the node witness generator is 2 higher than it should be. - circuit_id: row.circuit_id as u32 - 2, + // It is necessary to correct the circuit IDs due to the discrepancy between different aggregation rounds. 
+ circuit_id: correct_circuit_id(row.circuit_id, AggregationRound::NodeAggregation), depth: row.depth as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), attempts: row.attempts as u32, From cb41861d5126167e4f2220bf52ac79a7a806eb44 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 10 May 2024 13:01:27 -0300 Subject: [PATCH 87/98] rm unused imports --- core/lib/basic_types/src/prover_dal.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index eea12157fe26..dc31c148ef80 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use strum::{Display, EnumString}; use crate::{ - basic_fri_types::{AggregationRound, CircuitIdRoundTuple, Eip4844Blobs}, + basic_fri_types::{AggregationRound, Eip4844Blobs}, protocol_version::ProtocolVersionId, L1BatchNumber, }; From 0bb3d8a6892bdc96cef72a14ccf2e7012fe5a383 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 10 May 2024 14:19:02 -0300 Subject: [PATCH 88/98] fix spellcheck --- core/lib/basic_types/src/prover_dal.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index dc31c148ef80..f142472a9dd7 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -377,7 +377,7 @@ pub struct ProofCompressionJobInfo { // This function corrects circuit IDs for the node witness generator. // // - Circuit IDs in the node witness generator are 2 higher than in other rounds. -// - The EIP4844Repack circuit (ID 255) is an exception and is set to 18. +// - The `EIP4844Repack` circuit (ID 255) is an exception and is set to 18. 
pub fn correct_circuit_id(circuit_id: i16, aggregation_round: AggregationRound) -> u32 { match (circuit_id, aggregation_round) { (18, AggregationRound::NodeAggregation) => 255, From b48e2fc9237859128efadebd31d450b6faf5b014 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 10 May 2024 15:02:55 -0300 Subject: [PATCH 89/98] add status for gpu prover --- core/lib/basic_types/src/prover_dal.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index f142472a9dd7..468f18e8a9c9 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -159,6 +159,8 @@ pub enum ProverJobStatus { Skipped, #[strum(serialize = "ignored")] Ignored, + #[strum(serialize = "in_gpu_proof")] + InGPUProof, } #[derive(Debug, Clone, strum::Display, strum::EnumString, strum::AsRefStr)] From e82ec1f63ad37dc034a874fec0f343e69b310666 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 10 May 2024 15:29:03 -0300 Subject: [PATCH 90/98] fix status match --- prover/prover_cli/src/commands/status/batch.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 3b9233544eb7..30edab0978f8 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -408,7 +408,7 @@ fn display_job_status_count(jobs: Vec) { ProverJobStatus::InProgress(_) => jobs_counts.in_progress += 1, ProverJobStatus::Successful(_) => jobs_counts.successful += 1, ProverJobStatus::Failed(_) => jobs_counts.failed += 1, - ProverJobStatus::Skipped | ProverJobStatus::Ignored => (), + ProverJobStatus::Skipped | ProverJobStatus::Ignored | ProverJobStatus::InGPUProof => (), }); println!(" - Total jobs: {}", total_jobs); From 9dcf8e16d05b4b6dd40fa6ff1ed8c5cfc30b1c98 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Fri, 10 May 2024 17:21:08 -0300 Subject: [PATCH 91/98] Remove use of ensure --- prover/prover_cli/src/commands/status/batch.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 30edab0978f8..61096ba7abfe 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -21,18 +21,13 @@ use crate::cli::ProverCLIConfig; #[derive(ClapArgs)] pub struct Args { - #[clap(short = 'n', num_args = 1..)] + #[clap(short = 'n', num_args = 1.., required = true)] batches: Vec, #[clap(short, long, default_value("false"))] verbose: bool, } pub(crate) async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<()> { - ensure!( - !args.batches.is_empty(), - "At least one batch number should be provided" - ); - let batches_data = get_batches_data(args.batches, config.db_url).await?; for batch_data in batches_data { From 4eb178a38a9c0be3b30eca6ab6abd47ceaa9c960 Mon Sep 17 00:00:00 2001 From: ilitteri Date: Fri, 10 May 2024 17:42:39 -0300 Subject: [PATCH 92/98] Minor refactors --- .../prover_cli/src/commands/status/batch.rs | 26 +++++++------------ .../prover_cli/src/commands/status/utils.rs | 6 +++++ 2 files changed, 15 insertions(+), 17 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 61096ba7abfe..73ef0fef7d9d 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,6 +1,6 
@@ use std::collections::HashMap; -use anyhow::{ensure, Context as _}; +use anyhow::Context as _; use circuit_definitions::zkevm_circuits::scheduler::aux::BaseLayerCircuitType; use clap::Args as ClapArgs; use colored::*; @@ -31,6 +31,10 @@ pub(crate) async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<( let batches_data = get_batches_data(args.batches, config.db_url).await?; for batch_data in batches_data { + println!( + "== {} ==", + format!("Batch {} Status", batch_data.batch_number).bold() + ); if !args.verbose { display_batch_status(batch_data); } else { @@ -176,10 +180,6 @@ async fn get_proof_compression_job_info_for_batch<'a>( } fn display_batch_status(batch_data: BatchData) { - println!( - "== {} ==", - format!("Batch {} Status", batch_data.batch_number) - ); display_status_for_stage(batch_data.basic_witness_generator); display_status_for_stage(batch_data.leaf_witness_generator); display_status_for_stage(batch_data.node_witness_generator); @@ -219,10 +219,6 @@ fn display_status_for_stage(stage_info: StageInfo) { } fn display_batch_info(batch_data: BatchData) { - println!( - "== {} ==", - format!("Batch {} Status", batch_data.batch_number).bold() - ); display_info_for_stage(batch_data.basic_witness_generator); display_info_for_stage(batch_data.leaf_witness_generator); display_info_for_stage(batch_data.node_witness_generator); @@ -270,9 +266,7 @@ fn display_info_for_stage(stage_info: StageInfo) { display_node_witness_generator_jobs_info(witness_generator_jobs_info); display_prover_jobs_info(prover_jobs_info); } - StageInfo::RecursionTipWitnessGenerator(_) - | StageInfo::SchedulerWitnessGenerator(_) - | StageInfo::Compressor(_) => (), + _ => (), } } Status::Successful => { @@ -291,9 +285,7 @@ fn display_info_for_stage(stage_info: StageInfo) { | StageInfo::NodeWitnessGenerator { prover_jobs_info, .. 
} => display_prover_jobs_info(prover_jobs_info), - StageInfo::RecursionTipWitnessGenerator(_) - | StageInfo::SchedulerWitnessGenerator(_) - | StageInfo::Compressor(_) => (), + _ => (), } } } @@ -356,7 +348,7 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { prover_jobs_info.iter().for_each(|job| { jobs_by_circuit_id .entry(job.circuit_id) - .or_insert(Vec::new()) + .or_default() .push(job.clone()) }); @@ -420,6 +412,6 @@ fn display_aggregation_round(stage_info: &StageInfo) { format!("Aggregation Round {}", aggregation_round as u8).bold() ); } else { - println!("\n-- {} --", format!("Compression").bold()); + println!("\n-- {} --", "Proof Compression".to_owned().bold()); }; } diff --git a/prover/prover_cli/src/commands/status/utils.rs b/prover/prover_cli/src/commands/status/utils.rs index d9c448fff522..59c5553b530b 100644 --- a/prover/prover_cli/src/commands/status/utils.rs +++ b/prover/prover_cli/src/commands/status/utils.rs @@ -155,6 +155,11 @@ impl From> for Status { fn from(jobs_vector: Vec) -> Self { if jobs_vector.is_empty() { Status::JobsNotFound + } else if jobs_vector + .iter() + .all(|job| matches!(job.status, ProverJobStatus::InGPUProof)) + { + Status::Custom("In GPU Proof ⚡️".to_owned()) } else if jobs_vector .iter() .all(|job| matches!(job.status, ProverJobStatus::Queued)) @@ -171,6 +176,7 @@ impl From> for Status { } } +#[allow(clippy::large_enum_variant)] #[derive(EnumString, Clone, Display)] pub enum StageInfo { #[strum(to_string = "Basic Witness Generator")] From 2257f6ad1c7dc08a653dad6c615554fff1c15779 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 10 May 2024 18:11:22 -0300 Subject: [PATCH 93/98] fix no verbose status --- prover/prover_cli/src/commands/status/batch.rs | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 30edab0978f8..efb19b81d369 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -212,13 +212,9 @@ fn display_status_for_stage(stage_info: StageInfo) { stage_info.to_string().bold(), stage_info.witness_generator_jobs_status() ); - println!( - "> {}: {}", - "Prover Jobs".to_owned().bold(), - stage_info - .prover_jobs_status() - .expect("Unable to check status") - ); + if let Some(job_status) = stage_info.prover_jobs_status() { + println!("> {}: {}", "Prover Jobs".to_owned().bold(), job_status); + } } } } From bdba81472c7d4987f77a61047190aa909fdc85e1 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 14 May 2024 10:31:18 -0300 Subject: [PATCH 94/98] refactor --- .../prover_cli/src/commands/status/batch.rs | 47 ++++++------------- 1 file changed, 14 insertions(+), 33 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 17956d4cb49b..fe181f57b5b9 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,4 +1,4 @@ -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use anyhow::Context as _; use circuit_definitions::zkevm_circuits::scheduler::aux::BaseLayerCircuitType; @@ -339,8 +339,7 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { "Prover Jobs".to_owned().bold() ); - let mut jobs_by_circuit_id: HashMap> = HashMap::new(); - + let mut jobs_by_circuit_id: BTreeMap> = BTreeMap::new(); prover_jobs_info.iter().for_each(|job| { jobs_by_circuit_id .entry(job.circuit_id) @@ -348,38 +347,20 @@ fn 
display_prover_jobs_info(prover_jobs_info: Vec) { .push(job.clone()) }); - let mut jobs_by_circuit_id: Vec<(u32, Vec)> = jobs_by_circuit_id - .iter() - .map(|(key, value)| (*key, value.clone())) - .collect(); - - jobs_by_circuit_id.sort_by_key(|job| job.0); - for (circuit_id, prover_jobs_info) in jobs_by_circuit_id { let status = Status::from(prover_jobs_info.clone()); - match status { - Status::InProgress => { - println!( - " > {}: {}", - format!( - "{:?}", - BaseLayerCircuitType::from_numeric_value(circuit_id as u8) - ) - .bold(), - status - ); - display_job_status_count(prover_jobs_info); - } - _ => println!( - " > {}: {}", - format!( - "{:?}", - BaseLayerCircuitType::from_numeric_value(circuit_id as u8) - ) - .bold(), - status - ), - }; + println!( + " > {}: {}", + format!( + "{:?}", + BaseLayerCircuitType::from_numeric_value(circuit_id as u8) + ) + .bold(), + status + ); + if matches!(status, Status::InProgress) { + display_job_status_count(prover_jobs_info); + } } } From 80c4e743ea9e57d5fff760d346837847fe245b70 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 14 May 2024 10:42:02 -0300 Subject: [PATCH 95/98] fix small print error --- prover/prover_cli/src/commands/status/batch.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index fe181f57b5b9..edc4abe7e96c 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeMap, HashMap}; +use std::collections::gBTreeMap; use anyhow::Context as _; use circuit_definitions::zkevm_circuits::scheduler::aux::BaseLayerCircuitType; @@ -326,7 +326,7 @@ fn display_node_witness_generator_jobs_info( fn display_prover_jobs_info(prover_jobs_info: Vec) { let prover_jobs_status = Status::from(prover_jobs_info.clone()); - if matches!(prover_jobs_status, Status::Successful) { + if matches!(prover_jobs_status, Status::Successful) || matches!(prover_jobs_status, Status::WaitingForProofs) { println!( "> {}: {prover_jobs_status}", "Prover Jobs".to_owned().bold() From 67beeb97f1e892dfc801addb5765792dd60a8688 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 14 May 2024 10:42:16 -0300 Subject: [PATCH 96/98] zk fmt --- prover/prover_cli/src/commands/status/batch.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index edc4abe7e96c..5e49dd79d915 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -326,7 +326,9 @@ fn display_node_witness_generator_jobs_info( fn display_prover_jobs_info(prover_jobs_info: Vec) { let prover_jobs_status = Status::from(prover_jobs_info.clone()); - if matches!(prover_jobs_status, Status::Successful) || matches!(prover_jobs_status, Status::WaitingForProofs) { + if matches!(prover_jobs_status, Status::Successful) + || matches!(prover_jobs_status, Status::WaitingForProofs) + { println!( "> {}: {prover_jobs_status}", "Prover Jobs".to_owned().bold() From a1bd89e53e98c5a370cb134b28c8a4c16e95c981 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 14 May 2024 10:46:08 -0300 Subject: [PATCH 97/98] fix imports --- prover/prover_cli/src/commands/status/batch.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index 
5e49dd79d915..f5cf67a32339 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -1,4 +1,4 @@ -use std::collections::gBTreeMap; +use std::collections::BTreeMap; use anyhow::Context as _; use circuit_definitions::zkevm_circuits::scheduler::aux::BaseLayerCircuitType; @@ -327,11 +327,11 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { let prover_jobs_status = Status::from(prover_jobs_info.clone()); if matches!(prover_jobs_status, Status::Successful) - || matches!(prover_jobs_status, Status::WaitingForProofs) + || matches!(prover_jobs_status, Status::JobsNotFound) { println!( "> {}: {prover_jobs_status}", - "Prover Jobs".to_owned().bold() + "Prover Jobs".to_owned().bold()g ); return; } From 4194e29f72d16abdf3a5f0fe5e160aed9b394e02 Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Tue, 14 May 2024 10:47:01 -0300 Subject: [PATCH 98/98] fix typo --- prover/prover_cli/src/commands/status/batch.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prover/prover_cli/src/commands/status/batch.rs b/prover/prover_cli/src/commands/status/batch.rs index f5cf67a32339..389437f17ac7 100644 --- a/prover/prover_cli/src/commands/status/batch.rs +++ b/prover/prover_cli/src/commands/status/batch.rs @@ -331,7 +331,7 @@ fn display_prover_jobs_info(prover_jobs_info: Vec) { { println!( "> {}: {prover_jobs_status}", - "Prover Jobs".to_owned().bold()g + "Prover Jobs".to_owned().bold() ); return; }
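
For reference, a minimal illustrative sketch (not part of the patch series) of the circuit-ID remapping introduced in patch 86 and relied on by the status display; the `Scheduler` variant of `AggregationRound` is assumed from the surrounding codebase, while the mapping itself follows the `correct_circuit_id` match arms shown in the diff.

    // Sketch only: exercises the mapping added in core/lib/basic_types/src/prover_dal.rs.
    use zksync_basic_types::{basic_fri_types::AggregationRound, prover_dal::correct_circuit_id};

    fn main() {
        // Node witness generator rows store circuit IDs shifted up by 2, so 3 maps back to 1.
        assert_eq!(correct_circuit_id(3, AggregationRound::NodeAggregation), 1);
        // Circuit ID 18 in the node round is the exception: it maps to the EIP4844Repack circuit (255).
        assert_eq!(correct_circuit_id(18, AggregationRound::NodeAggregation), 255);
        // Any other aggregation round keeps its circuit ID unchanged.
        assert_eq!(correct_circuit_id(3, AggregationRound::Scheduler), 3);
    }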